hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
87df4668217a77fd249d0ccb1840561c8017fb2d | 6,792 | py | Python | migration/management/commands/migratedata.py | dixonary/uwcs-zarya | e982f324c0a6aca156a15c6f597012f47bc070cc | [
"MIT"
] | 7 | 2016-09-30T20:33:46.000Z | 2020-03-16T15:04:20.000Z | migration/management/commands/migratedata.py | dixonary/uwcs-zarya | e982f324c0a6aca156a15c6f597012f47bc070cc | [
"MIT"
] | 46 | 2016-10-01T17:52:59.000Z | 2022-01-13T00:44:09.000Z | migration/management/commands/migratedata.py | dixonary/uwcs-zarya | e982f324c0a6aca156a15c6f597012f47bc070cc | [
"MIT"
] | 6 | 2016-10-15T02:51:18.000Z | 2020-10-02T12:47:54.000Z | from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from django.template.defaultfilters import slugify, linebreaks, date, truncatechars
from wagtail.core.models import Page
from wagtail.core.rich_text import RichText
from migration.models import *
from accounts.models import CompsocUser, ShellAccount, DatabaseAccount
from blog.models import BlogPage
from events.models import EventSignup, EventPage, EventType
import time
# Maps legacy Communication.type codes (from the old_data DB) to the
# human-readable tag names applied to migrated blog posts.
COMMS_DICT = {
    'NL': 'Newsletter',
    'M': 'Meeting Minutes',
    'N': 'News Item'
}
def migrate_compsoc_memberinfo():
    """
    Amalgamates the old user detail objects into the new CompsocUser and other models
    """
    websites = WebsiteDetails.objects.using('old_data').all()
    nicks = NicknameDetails.objects.using('old_data').all()
    shell_accounts = OldShellAccount.objects.using('old_data').all()
    db_accounts = OldDatabaseAccount.objects.using('old_data').all()
    # Copy shell accounts across, re-linking each to the new user record
    for old in shell_accounts:
        owner = get_user_model().objects.filter(id=old.user_id).first()
        ShellAccount(name=old.name, user=owner, status=old.status).save()
    # Copy database accounts across in the same way
    for old in db_accounts:
        owner = get_user_model().objects.filter(id=old.user_id).first()
        DatabaseAccount(name=old.name, user=owner, status=old.status).save()
    # Collect nickname details keyed by the old user id
    userinfo = {}
    for nick in nicks:
        userinfo[nick.user_id] = {
            'nickname': nick.nickname,
            'website_title': '',
            'website_url': ''
        }
    # Merge website details in, creating entries for users with no nickname
    for site in websites:
        entry = userinfo.setdefault(site.user_id, {'nickname': ''})
        entry['website_title'] = site.websiteTitle
        entry['website_url'] = site.websiteUrl
    # Persist the merged per-user details as CompsocUser rows
    for uid, details in userinfo.items():
        owner = get_user_model().objects.filter(id=uid).first()
        merged = CompsocUser(nickname=details['nickname'], website_title=details['website_title'],
                             website_url=details['website_url'], user=owner)
        print('Restoring {user}'.format(user=merged))
        merged.save()
def migrate_old_posts():
    """
    Converts all old posts from a simple page format to one Wagtail accepts
    """
    # id=4 is the specific page ID for the news index page
    index = Page.objects.get(id=4).specific
    old_posts = Communication.objects.using('old_data').all().order_by('date')
    # All migrated revisions are attributed to user id=1
    user = get_user_model().objects.get(id=1)
    for post in old_posts:
        # Untitled archive items get a synthetic title built from their date
        title = post.title if post.title else 'Archived item from {date}'.format(date=date(post.date, 'D jS F Y'))
        # Millisecond timestamp suffix keeps slugs unique between posts
        slug = slugify('{title} - {rand}'.format(title=title, rand=int(round(time.time() * 1000))))
        # Truncate long bodies for the intro/teaser text
        intro = post.text[:512] + '...' if len(post.text) > 512 else post.text
        page = BlogPage(
            search_description='',
            seo_title=title,
            show_in_menus=False,
            slug=slug,
            title=title,
            date=post.date,
            first_published_at=post.date,
            intro=linebreaks(intro),
        )
        page.body.stream_data = [
            ('paragraph', RichText('<p>{body}</p>'.format(body=linebreaks(post.text))))
        ]
        page.tags.add(COMMS_DICT[post.type])
        print('Restoring article from {date}'.format(date=post.date))
        # Attach under the index, then publish a revision (Wagtail workflow)
        index.add_child(instance=page)
        revision = page.save_revision(
            user=user,
            submitted_for_moderation=False
        )
        revision.publish()
        page.save()
def migrate_events():
    """
    Migrates legacy events (and their signups) from the `old_data` database
    into Wagtail `EventPage` children of the event index page.
    """
    # id=6 is the specific page ID for the event index page
    event_index = Page.objects.get(id=6).specific
    # All migrated revisions are attributed to user id=1
    user = get_user_model().objects.get(id=1)
    old_events = OldEvent.objects.using('old_data').all()
    # Migrate events
    for old_event in old_events:
        old_event_type = old_event.type
        try:
            # We don't actually care about this - its a test to migrate the event across
            event_type = EventType.objects.get(name=old_event_type.name, target=old_event_type.target)
        except EventType.DoesNotExist:
            # First time we've seen this type: create and persist it
            event_type = EventType(name=old_event_type.name, target=old_event_type.target)
            event_type.save()
        title = '{type} on {date}'.format(type=old_event_type.name, date=date(old_event.start, 'D jS F Y'))
        # Millisecond timestamp suffix keeps slugs unique between events
        slug = slugify('{title} - {rand}'.format(title=title, rand=int(round(time.time() * 1000))))
        # Prefer the event's own description, then type info, then type name
        if old_event.shortDescription:
            description = old_event.shortDescription
        else:
            if old_event_type.info:
                description = old_event_type.info
            else:
                description = old_event_type.name
        new_event = EventPage(
            title=title.strip(),
            slug=slug,
            description=description.strip(),
            start=old_event.start,
            finish=old_event.finish,
            cancelled=old_event.cancelled,
            category=event_type,
            location=old_event.location.name
        )
        new_event.body.stream_data = [
            ('paragraph', RichText('<p>{body}</p>'.format(body=linebreaks(old_event.longDescription))))
        ]
        print('Restoring event {type} from {date}'.format(type=old_event.type.name, date=old_event.start))
        # Attach under the index, then publish a revision (Wagtail workflow)
        event_index.add_child(instance=new_event)
        revision = new_event.save_revision(
            user=user,
            submitted_for_moderation=False
        )
        revision.publish()
        new_event.save()
        # Deal with signups
        old_signups = Signup.objects.using('old_data').filter(event_id=old_event.id)
        for old_signup in old_signups:
            print('Restoring signup for {type} from {date}'.format(type=old_event.type.name, date=old_event.start))
            # Comments are capped at 1024 chars to fit the new model field
            new_signup = EventSignup(comment=truncatechars(old_signup.comment, 1024),
                                     member=get_user_model().objects.get(id=old_signup.user_id),
                                     event_id=new_event.id, signup_created=old_signup.time)
            new_signup.save()
class Command(BaseCommand):
    """Django management command that migrates all legacy data (users,
    blog posts, events) from the `old_data` database into the new models."""
    def handle(self, *args, **options):
        # Order matters: users must exist before posts/events reference them.
        migrate_compsoc_memberinfo()
        migrate_old_posts()
        migrate_events()
| 34.477157 | 115 | 0.626767 | 157 | 0.023115 | 0 | 0 | 0 | 0 | 0 | 0 | 1,088 | 0.160188 |
87e20d41ac964f6f9b382bbcfc612a4214737a5d | 114 | py | Python | env/Lib/site-packages/countdowntimer_model/apps.py | gtkacz/fantasytrashtalk | 24ed8ba6c4fae2eca5b15f66b62338a8c87debd2 | [
"MIT"
] | 4 | 2021-03-29T07:35:41.000Z | 2022-01-12T09:54:55.000Z | env/Lib/site-packages/countdowntimer_model/apps.py | gtkacz/fantasytrashtalk | 24ed8ba6c4fae2eca5b15f66b62338a8c87debd2 | [
"MIT"
] | 4 | 2020-08-06T14:51:06.000Z | 2021-09-22T18:53:50.000Z | env/Lib/site-packages/countdowntimer_model/apps.py | gtkacz/fantasytrashtalk | 24ed8ba6c4fae2eca5b15f66b62338a8c87debd2 | [
"MIT"
] | 3 | 2020-04-20T18:54:10.000Z | 2021-03-29T07:35:13.000Z | from django.apps import AppConfig
class CountdowntimerModelConfig(AppConfig):
    """Django AppConfig for the countdowntimer_model package."""
    name = 'countdowntimer_model'
| 19 | 43 | 0.807018 | 77 | 0.675439 | 0 | 0 | 0 | 0 | 0 | 0 | 22 | 0.192982 |
87e30755fa816cc3a2853bbeb8767db8ffb2216d | 3,206 | py | Python | restea/adapters/flaskwrap.py | kkszysiu/restea | 34ef2ee683a259a05e530514e743a9f69fa5e663 | [
"MIT"
] | null | null | null | restea/adapters/flaskwrap.py | kkszysiu/restea | 34ef2ee683a259a05e530514e743a9f69fa5e663 | [
"MIT"
] | null | null | null | restea/adapters/flaskwrap.py | kkszysiu/restea | 34ef2ee683a259a05e530514e743a9f69fa5e663 | [
"MIT"
] | null | null | null | import flask
import restea.formats as formats
from restea.adapters.base import (
BaseResourceWrapper,
BaseRequestWrapper,
)
class FlaskRequestWrapper(BaseRequestWrapper):
    '''
    Object wrapping the Flask request context, exposing the request API
    expected by `restea` resources.
    '''
    @property
    def data(self):
        '''
        Returns a payload sent to server
        :returns: string -- raw request body, decoded to text
        '''
        return self._original_request.data.decode()
    @property
    def method(self):
        '''
        Returns HTTP method for the current request
        :returns: string -- HTTP method name
        '''
        return self._original_request.method
    @property
    def headers(self):
        '''
        Returns a headers dict
        :returns: dict -- received request headers
        '''
        return self._original_request.headers
    def get(self, value):
        '''
        Returns a value from the HTTP GET "map"
        (read via Flask's `request.values`)
        :param value: string -- key from GET
        :returns: string -- value from GET, or None if the key is not found
        '''
        return self._original_request.values.get(value)
class FlaskResourceWrapper(BaseResourceWrapper):
    '''
    Exposes a `restea.Resource` through Flask: registers its URL routes and
    translates dispatch results into `flask.Response` objects.
    '''
    @property
    def app(self):
        '''
        Returns current Flask application
        :returns: :class: `app.Flask` -- current Flask app
        '''
        return flask.current_app
    def wrap_request(self, *args, **kwargs):
        '''
        Prepares the request data and hands control to the `restea.Resource`
        :returns: :class: `flask.Response`
        '''
        data_format, kwargs = self._get_format_name(kwargs)
        resource = self._resource_class(
            FlaskRequestWrapper(flask.request),
            formats.get_formatter(data_format),
        )
        body, status, mime = resource.dispatch(*args, **kwargs)
        return flask.Response(
            body,
            mimetype=mime,
            status=status
        )
    def __adapt_path(self, path):
        '''
        Normalizes a route path so it always begins with a slash
        :param path: string -- route path
        :returns: string -- normalized route path
        '''
        return path if path.startswith('/') else '/' + path
    def get_routes(self, path='', iden='<iden>'):
        '''
        Registers URL rules for the REST resource: with and without an
        identifier, each with an optional data-format suffix
        :param path: string -- base path for the REST resource
        :param iden: string -- format for identifier, for instance might be
        used to make composite identifier
        '''
        base = self.__adapt_path(path)
        methods = [m.upper() for m in self._resource_class.method_map]
        routes = (
            '{}'.format(base),
            '{}/{}'.format(base, iden),
            '{}.<data_format>'.format(base),
            '{}/{}.<data_format>'.format(base, iden),
        )
        for route in routes:
            self.app.add_url_rule(
                route,
                view_func=self.wrap_request,
                methods=methods
            )
| 27.401709 | 76 | 0.585465 | 3,067 | 0.956644 | 0 | 0 | 780 | 0.243294 | 0 | 0 | 1,449 | 0.451965 |
87e4656207a6810c62e813656c8a6d18731bd5ed | 3,265 | py | Python | Python/ldap/neo2open.py | ebouaziz/miscripts | 9520d31adfd8cf63a06d519b0c308f07dd107b90 | [
"MIT"
] | null | null | null | Python/ldap/neo2open.py | ebouaziz/miscripts | 9520d31adfd8cf63a06d519b0c308f07dd107b90 | [
"MIT"
] | null | null | null | Python/ldap/neo2open.py | ebouaziz/miscripts | 9520d31adfd8cf63a06d519b0c308f07dd107b90 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Create/update LDAP entries from custom directory to opendirectory schema
import binascii
import os
import re
import sys
# Strips trailing "#" comments from LDIF lines before parsing.
cmtcre = re.compile(r'#.*$')
# Input LDIF path: first CLI argument, or ~/Desktop/openldap.ldif by default.
try:
    filename = sys.argv[1]
except IndexError:
    filename = os.path.join(os.path.expanduser('~'), 'Desktop', 'openldap.ldif')
def get_users(filename):
    """Yield one attribute dict per 'ou=People' entry found in an LDIF file.

    Entries are separated by blank lines.  Each yielded dict maps an
    attribute name to a list of (is_base64, value) tuples.
    NOTE: Python 2 source (uses `print >>` statements).
    """
    # (key, is_base64, value) triples accumulated for the current entry
    attributes = []
    with open(filename, 'rt') as in_:
        for (n,l) in enumerate(in_):
            l = l.strip('\r\n')
            # drop trailing comments and whitespace
            l = cmtcre.sub('', l).rstrip('\t ')
            if not l:
                # a blank line terminates the current LDIF entry
                if attributes:
                    dattr = {}
                    for k,t,v in attributes:
                        dattr.setdefault(k, []).append((t,v))
                    try:
                        dn = dattr['dn'][0][1]
                    except KeyError:
                        print >> sys.stderr, "No DN: ", attributes
                        raise StopIteration
                    # only entries under ou=People are of interest
                    if 'ou=people' in [x.lower() for x in dn.split(',')]:
                        yield dattr
                        #raise StopIteration
                    else:
                        print >> sys.stderr, "Not a people DN"
                    attributes = []
                continue
            #print n,l
            if l[0] in ' \t':
                # continuation: LDIF folds long lines with a leading space/tab
                attributes[-1] = (attributes[-1][0],
                                  attributes[-1][1],
                                  attributes[-1][2]+l[1:])
                continue
            # "key:: value" (two colons) marks a base64-encoded value
            items = l.split(':')
            k,v = items[0], items[-1].lstrip(' \t')
            b64 = len(items) > 2
            attributes.append((k, b64, v))
# objectClass values assigned to every migrated user entry
# ('apple-user' intentionally left commented out).
OBJECTCLASSES = ['inetOrgPerson','posixAccount','shadowAccount',
                 #'apple-user',
                 'extensibleObject','organizationalPerson','top','person']
def update_user(attributes, uid, gid):
    """Rewrite an entry's attribute dict in place for the OpenDirectory schema.

    Drops legacy trac* attributes and any existing objectclass values, then
    installs the standard objectclasses plus POSIX account attributes for
    the given uid/gid.
    """
    # collect stale keys first so the dict is not mutated while iterating
    stale = set()
    for attr in attributes:
        if attr.lower().startswith('trac') or attr.lower() == 'objectclass':
            stale.add(attr)
    for attr in stale:
        del attributes[attr]
    attributes['objectclass'] = zip([False]*len(OBJECTCLASSES), OBJECTCLASSES)
    attributes['structuralObjectClass'] = [(False, 'inetOrgPerson')]
    attributes['uidNumber'] = [(False, str(uid))]
    attributes['gidNumber'] = [(False, str(gid))]
    attributes['homeDirectory'] = [(False, '/dev/null')]
    attributes['loginShell'] = [(False, '/bin/bash')]
def export_user(dn, attrs):
    """Print one LDIF entry to stdout, remapping 'ou=...' RDNs to 'cn=users'.

    Long attribute lines are wrapped at *lmax* characters using LDIF
    continuation (leading-space) lines.
    NOTE: Python 2 source (print statements, xrange, integer division).
    """
    lmax = 77
    ndn = []
    for it in dn.split(','):
        k,v = it.split('=')
        if k == 'ou':
            # target directory keeps users under cn=users
            k = 'cn'
            v = 'users'
        ndn.append('='.join([k,v]))
    dn = ','.join(ndn)
    print 'dn:', dn
    for k in attrs:
        for t,v in attrs[k]:
            # t is the base64 flag: "key:: value" when set, "key: value" otherwise
            l = '%s:%s %s' % (k, t and ':' or '', v)
            print '\n '.join([l[lmax*x:lmax*(x+1)] \
                for x in xrange((len(v)+lmax-1)/lmax)])
    print ''
# Driver: emit every people entry, assigning sequential POSIX uids
# starting at 1101 and a fixed gid of 20.
uid = 1100
gid = 20
for attributes in get_users(filename):
    uid += 1
    # each entry is expected to have exactly one dn value
    (dn, ) = attributes['dn']
    del attributes['dn']
    update_user(attributes, uid, gid)
    export_user(dn[1], attributes)
#import pprint
#pprint.pprint(attributes)
87e4b92785c35afe3fe916f48cd07afc7baed2fa | 33 | py | Python | pyble/const/characteristic/time_with_dst.py | bgromov/PyBLEWrapper | 8a5d016e65b3c259391ddc97c371ab4b1b5c61b5 | [
"MIT"
] | 14 | 2015-03-30T23:11:36.000Z | 2020-04-07T00:57:12.000Z | pyble/const/characteristic/time_with_dst.py | bgromov/PyBLEWrapper | 8a5d016e65b3c259391ddc97c371ab4b1b5c61b5 | [
"MIT"
] | 3 | 2016-05-17T06:11:07.000Z | 2017-05-15T16:43:09.000Z | pyble/const/characteristic/time_with_dst.py | bgromov/PyBLEWrapper | 8a5d016e65b3c259391ddc97c371ab4b1b5c61b5 | [
"MIT"
] | 11 | 2016-03-11T08:53:03.000Z | 2019-03-11T21:32:13.000Z | NAME="Time with DST"
# Bluetooth GATT characteristic UUID for "Time with DST" (matches NAME above)
UUID=0x2A11
| 11 | 20 | 0.757576 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 0.454545 |
87e58bcf6dcfe22c279e06b2787f924dca81ae9f | 429 | py | Python | Practice Problem Solutions/5 - Lists/program.py | argosopentech/practical-programming-in-python | ae5aebcda6968ff327b6db3350840813d1c563ba | [
"CC0-1.0"
] | 1 | 2021-01-17T17:29:36.000Z | 2021-01-17T17:29:36.000Z | Practice Problem Solutions/5 - Lists/program.py | argosopentech/practical-programming-in-python | ae5aebcda6968ff327b6db3350840813d1c563ba | [
"CC0-1.0"
] | null | null | null | Practice Problem Solutions/5 - Lists/program.py | argosopentech/practical-programming-in-python | ae5aebcda6968ff327b6db3350840813d1c563ba | [
"CC0-1.0"
] | null | null | null | print('Grocery list:')
# Simple interactive grocery-list loop; runs until interrupted (Ctrl+C).
print('"add" to add items and "view" to view list')
grocery_list = []
while True:
    command = input('Enter command: ')
    if command == 'add':
        to_add = input('Enter new item: ')
        grocery_list.append(to_add)
    # elif stands for "else if"
    elif command == 'view':
        # print each saved item on its own line
        for i in range(len(grocery_list)):
            print(grocery_list[i])
    else:
        print('Invalid command')
| 28.6 | 51 | 0.596737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 149 | 0.347319 |
87e59bee00a7c9de9e64290a98d0644a47ad38c0 | 777 | py | Python | notes.py | ahmed-mo2nis/Aida-VA | a84a2cfb9193ee821bc529eccc459c0fe942ddfc | [
"MIT"
] | null | null | null | notes.py | ahmed-mo2nis/Aida-VA | a84a2cfb9193ee821bc529eccc459c0fe942ddfc | [
"MIT"
] | null | null | null | notes.py | ahmed-mo2nis/Aida-VA | a84a2cfb9193ee821bc529eccc459c0fe942ddfc | [
"MIT"
] | null | null | null | from datetime import datetime
from tts import tts
def take_notes(speech_text):
    """Append a timestamped note parsed from *speech_text* to notes.txt.

    The trigger word "note" is stripped from the spoken text before the
    remainder is saved; the user is then notified via text-to-speech.
    """
    words_of_message = speech_text.split()
    # Guard the removal: .remove() raises ValueError when the spoken phrase
    # does not actually contain the trigger word "note".
    if "note" in words_of_message:
        words_of_message.remove("note")
    cleaned_message = ' '.join(words_of_message)
    timestamp = datetime.strftime(datetime.now(), "%d-%m-%y")
    # Context manager guarantees the file handle is closed even on error.
    with open("notes.txt", "a+") as f:
        f.write("'" + cleaned_message + "'" + " - note taken at: " + timestamp + "\n")
    tts("Your note has been saved")
def show_all_notes():
    """Read all saved notes from notes.txt and speak them via text-to-speech."""
    tts("Your notes are as follows: ")
    # `with` closes the handle even if reading fails; the old
    # `f.mode == "r"` check was always true for a file opened in "r" mode.
    with open("notes.txt", "r") as f:
        tts(f.read())
def delete_all_notes():
    """Erase all saved notes by truncating notes.txt (created if absent)."""
    # Opening in "w" mode truncates (or creates) the file; no explicit
    # write("") is needed, and `with` guarantees the handle is closed.
    with open("notes.txt", "w"):
        pass
    tts("All notes have been deleted")
| 29.884615 | 118 | 0.610039 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 242 | 0.311454 |
87ed12d5f9bc71d00ca1d48683542308a09965a9 | 1,813 | py | Python | entity_embed/evaluation.py | TheAngryGoldfish/entity-embed | 9493a9d194208a19aed5a40a3140245dbeea6b44 | [
"MIT"
] | 89 | 2021-04-06T23:07:06.000Z | 2022-03-27T18:27:13.000Z | entity_embed/evaluation.py | TheAngryGoldfish/entity-embed | 9493a9d194208a19aed5a40a3140245dbeea6b44 | [
"MIT"
] | 7 | 2021-04-13T18:51:43.000Z | 2022-01-25T12:39:22.000Z | entity_embed/evaluation.py | TheAngryGoldfish/entity-embed | 9493a9d194208a19aed5a40a3140245dbeea6b44 | [
"MIT"
] | 7 | 2021-04-21T15:08:23.000Z | 2021-12-11T22:41:17.000Z | import csv
import json
def pair_entity_ratio(found_pair_set_len, entity_count):
    """Ratio of candidate pairs found per entity (blocking-efficiency metric)."""
    return found_pair_set_len / entity_count
def precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None):
    """Compute pairwise precision and recall for *found_pair_set*.

    When *neg_pair_set* is given, a previous blocking step is assumed, so
    the universe of judged pairs is restricted to
    ``pos_pair_set | neg_pair_set`` before scoring.
    """
    if neg_pair_set is not None:
        found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set)
    tp = len(found_pair_set & pos_pair_set)
    fp = len(found_pair_set - pos_pair_set)
    # guard against 0/0 when nothing correct was found
    precision = tp / (tp + fp) if tp else 0.0
    recall = tp / len(pos_pair_set)
    return precision, recall
def f1_score(precision, recall):
    """Harmonic mean of precision and recall; 0.0 when both are zero."""
    if not precision and not recall:
        return 0.0
    return (2 * precision * recall) / (precision + recall)
def evaluate_output_json(
    unlabeled_csv_filepath, output_json_filepath, pos_pair_json_filepath, csv_encoding="utf-8"
):
    """Score a matching run against ground truth.

    Reads the record CSV (to count entities), the found-pairs JSON and the
    positive-pairs JSON, and returns a tuple of
    (precision, recall, f1, pair/entity ratio).
    """
    # count records without loading the whole CSV into memory
    with open(
        unlabeled_csv_filepath, "r", newline="", encoding=csv_encoding
    ) as csv_file:
        record_count = sum(1 for __ in csv.DictReader(csv_file))
    # pairs are serialized as JSON lists; normalize them back to tuples
    with open(output_json_filepath, "r") as json_file:
        found_pair_set = set(tuple(pair) for pair in json.load(json_file))
    with open(pos_pair_json_filepath, "r") as json_file:
        pos_pair_set = set(tuple(pair) for pair in json.load(json_file))
    precision, recall = precision_and_recall(found_pair_set, pos_pair_set)
    return (
        precision,
        recall,
        f1_score(precision, recall),
        pair_entity_ratio(len(found_pair_set), record_count),
    )
| 32.375 | 94 | 0.710976 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 187 | 0.103144 |
87ed2b0b13d4a75ac2bea0089d5ba5a31824d2a7 | 5,084 | py | Python | src/replication.py | RipcordSoftware/avancedb-replication-monitor | bfa59cc36f5d8aa383a43ffeb6c82e938625ce34 | [
"MIT"
] | 8 | 2016-03-01T15:29:34.000Z | 2019-06-16T21:15:45.000Z | src/replication.py | RipcordSoftware/avancedb-replication-monitor | bfa59cc36f5d8aa383a43ffeb6c82e938625ce34 | [
"MIT"
] | 38 | 2015-12-27T20:34:25.000Z | 2016-02-19T15:48:12.000Z | src/replication.py | RipcordSoftware/replication-monitor | bfa59cc36f5d8aa383a43ffeb6c82e938625ce34 | [
"MIT"
] | null | null | null | from enum import Enum
from urllib.parse import urlparse
from src.couchdb import CouchDB
class Replication:
    """Describes and executes a CouchDB replication from a source database
    to a target database (local-to-local, or involving remote URLs)."""
    # Total attempts made before giving up; see the note in replicate().
    _RETRY_LIMIT = 3
    class ReplType(Enum):
        # What subset of documents to replicate (docs vs design docs).
        All = 1
        Docs = 2
        Designs = 3
    def __init__(self, model, source, target, continuous=False, create=False, drop_first=False, repl_type=ReplType.All):
        """
        :param model: supplies the authenticated connection via model.couchdb
        :param source: database name (local) or full URL (remote)
        :param target: database name (local) or full URL (remote)
        :param continuous: run as a continuous replication
        :param create: let the replicator create the target (create_target)
        :param drop_first: delete the target database before replicating
        :param repl_type: ReplType selection (stored; not applied here)
        """
        self._model = model
        self._source = source
        self._target = target
        self._continuous = continuous
        self._create = create
        self._drop_first = drop_first
        self._repl_type = repl_type
        self._retry = self._RETRY_LIMIT
    @property
    def source(self):
        return self._source
    @property
    def target(self):
        return self._target
    @property
    def continuous(self):
        return self._continuous
    @property
    def create(self):
        return self._create
    @property
    def drop_first(self):
        return self._drop_first
    @property
    def repl_type(self):
        return self._repl_type
    def replicate(self, couchdb=None):
        """Run the replication, retrying with a fresh connection on failure."""
        couchdb = self._model.couchdb if not couchdb else couchdb
        try:
            # asking for the replicator database will force the user to give the right auth credentials
            couchdb.get_database('_replicator')
            if Replication._is_local(self._source) and Replication._is_local(self._target):
                return self._replicate_local(couchdb)
            else:
                return self._replicate_remote(couchdb)
        except:
            self._retry -= 1
            # NOTE(review): with _RETRY_LIMIT == 3 the `> 1` guard allows only
            # one retry before re-raising -- confirm the intended retry count.
            if self._retry > 1:
                self.replicate(couchdb.clone())
            else:
                raise
    def _replicate_local(self, couchdb):
        """Replicate between two databases on the connected server."""
        source_name = self._source
        target_name = self._target
        # asking for the replicator database will force the user to give the right auth credentials
        couchdb.get_docs('_replicator', limit=0)
        # Cloudant and CouchDB 2.x need fully-qualified, authenticated URLs
        if couchdb.auth and (couchdb.db_type is CouchDB.DatabaseType.Cloudant or couchdb.db_version.major >= 2):
            url = couchdb.get_url()
            url = self._get_auth_url(url, couchdb.auth.url_auth)
            source = url + source_name
            target = url + target_name
        else:
            source = source_name
            target = target_name
        if self._drop_first:
            try:
                couchdb.delete_database(target_name)
            except:
                # best-effort: the target may simply not exist yet
                pass
        if not self._create:
            # the replicator won't create the target, so create it up front
            couchdb.create_database(target_name)
        return couchdb.create_replication(source, target, create_target=self._create, continuous=self._continuous)
    def _replicate_remote(self, couchdb):
        """Replicate where at least one side is a remote URL."""
        source = self._source
        target = self._target
        source_is_remote = not self._is_local(source)
        target_is_remote = not self._is_local(target)
        if source_is_remote:
            source_couchdb = self._get_couchdb_from_url(source, couchdb.get_credentials_callback)
        else:
            source_couchdb = couchdb
        if target_is_remote:
            target_couchdb = self._get_couchdb_from_url(target, couchdb.get_credentials_callback)
            target_name = self._get_database_from_url(target)
        else:
            target_couchdb = couchdb
            # bug fix: target_name was unbound when the target is local,
            # raising NameError in the drop_first/create branches below
            target_name = target
        # asking for the replicator database will force the user to give the right auth credentials
        source_couchdb.get_docs('_replicator', limit=0)
        target_couchdb.get_docs('_replicator', limit=0)
        source_version = source_couchdb.db_version
        target_version = target_couchdb.db_version
        # qualify each side with credentials when remote or CouchDB 2.x
        if (source_is_remote or source_version.major >= 2) and source_couchdb.auth:
            if not source_is_remote:
                source = source_couchdb.get_url() + source
            source = self._get_auth_url(source, source_couchdb.auth.url_auth)
        if (target_is_remote or target_version.major >= 2) and target_couchdb.auth:
            if not target_is_remote:
                target = target_couchdb.get_url() + target
            target = self._get_auth_url(target, target_couchdb.auth.url_auth)
        if self._drop_first:
            try:
                target_couchdb.delete_database(target_name)
            except:
                # best-effort: the target may simply not exist yet
                pass
        if not self._create:
            # the replicator won't create the target, so create it up front
            target_couchdb.create_database(target_name)
        return couchdb.create_replication(source, target, create_target=self._create, continuous=self._continuous)
    @staticmethod
    def _is_local(db):
        """A plain database name (not an http(s) URL) counts as local."""
        return type(db) == str and not db.startswith('http')
    @staticmethod
    def _get_couchdb_from_url(url, get_credentials=None):
        """Build a CouchDB connection from a database URL."""
        u = urlparse(url)
        secure = u.scheme == 'https'
        port = u.port if u.port is not None else 443 if secure else 80
        return CouchDB(u.hostname, port, secure, get_credentials=get_credentials)
    @staticmethod
    def _get_database_from_url(url):
        """Extract the database name (URL path without the leading slash)."""
        u = urlparse(url)
        return u.path[1::]
    @staticmethod
    def _get_auth_url(url, auth):
        """Embed 'user:pass@' credentials into a database URL."""
        return url.replace('://', '://' + auth + '@')
| 32.589744 | 120 | 0.633753 | 4,992 | 0.981904 | 0 | 0 | 964 | 0.189614 | 0 | 0 | 351 | 0.06904 |
87eef1718dbdf89a820e95d72c449d6bdaa14af5 | 11,001 | py | Python | networks/isomera.py | andrewcpotter/holopy | e7eed9e2bdb0de61610c4d8c3455fcd00b76d896 | [
"MIT"
] | 1 | 2021-04-09T14:05:46.000Z | 2021-04-09T14:05:46.000Z | networks/isomera.py | andrewcpotter/holopy | e7eed9e2bdb0de61610c4d8c3455fcd00b76d896 | [
"MIT"
] | null | null | null | networks/isomera.py | andrewcpotter/holopy | e7eed9e2bdb0de61610c4d8c3455fcd00b76d896 | [
"MIT"
] | 2 | 2021-02-11T18:11:11.000Z | 2021-08-29T23:40:24.000Z | #!/usr/bin/env python
# coding: utf-8
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Jan 5 2021
@author: Yuxuan Zhang
based on the Iso-MPS codes
"""
#%% -- IMPORTS --
import sys
sys.path.append("..") # import one subdirectory up in files
# external packages
import numpy as np
import qiskit as qk
import networkx as nx
import tenpy
# custom things
from networks.isonetwork import IsoTensor, IsoNetwork, QKParamCircuit
import mps.mps as mps
#%%
class IsoMERA(IsoNetwork):
"""
MPS defined by
- number of physical and bond qubits (sets up associated quantum registers accordingly)
- l_uc - length of unit cell
- L number of times to repeat unit cell
- circuits for each site in the unit cell, and initial state of bond-qubits
"""
def __init__(self,
preg,
breg,
pcircs,
smax, #
**kwargs):
"""
inputs:
preg, list of lists of physical qubit registers on each site;
notice that in MERA setting we require len(preg) = 2^(smax-1)
breg, list of lists of physical qubit registers on each site;
notice that in MERA setting we require len(preg) = smax
(for qiskit: register= quantum register)
smax, # of layers; count from 0 to smax-1; total smax layers
pcircs, list, of parameterized circuit objects:
pcircs[0] - boundary circuit (acting only on bond-qubits)
pcircs[1...l_uc] for each site in unit-cell
param_names,list of sympy symbols, parameterized gate parameters (shared by all tensors)
L, int (default=1), Length of System (number of times to repeat unit cell)
bdry_circ, boundary vector circuit for prepping initial state of bond-qubits
circuit_format, str, (default='cirq'), type of circuit editor/simulator used
"""
# here, pcircs is a list of lists with length 1,2,4...2^(smax-1), respectively
# self.n_params = len(param_names)
# parse kwargs that don't depend on circuit_format
if 'circuit_format' in kwargs.keys():
self.circuit_format = kwargs['circuit_format']
else:
self.circuit_format = 'qiskit'
if 'L' in kwargs.keys():
self.L = kwargs['L']
else:
self.L=1
if self.circuit_format == 'qiskit':
# setup classical registers for measurement outcomes
self.cregs = [[qk.ClassicalRegister(len(preg[z]))for z in range(2**(smax-1))]#label the thing on each layer
for x in range(self.L)]
self.nphys = 0
self.nbond = 0
for i in range(len(preg)):
self.nphys += len(preg[i]) # number of physical qubits
for i in range(len(breg)):
self.nbond += len(breg[i]) # number of bond qubits
if 'boundary_circuit' in kwargs.keys():
bdry_circ = kwargs['boundary_circuit'] #this, as well, has to be a list
else:
bdry_circ = [QKParamCircuit(qk.QuantumCircuit(), []) for i in range(smax)]
# make the MPS/tensor-train -- same qubits used by each tensor
self.bdry_tensor = [IsoTensor('v_L'+str(i),
[breg[i]],
bdry_circ[i]) for i in range(smax)]
def mlist(preg,x,y,z):
if y == smax-1:
meas_list=[(preg,self.cregs[x][z],qk.QuantumCircuit())]
else:
meas_list=[]
return meas_list
self.sites= [[[IsoTensor('A'+str(x)+str(y)+str(z),
[preg[z],breg[y]],
pcircs[y][z],
meas_list=mlist(preg[z],x,y,z) )
for z in range(2**(y))]#label the nodes on each layer
for y in range(smax)]#label the layers
for x in range(self.L)]
# setup IsoNetwork
# make a flat list of nodes
self.nodes = self.bdry_tensor
for x in range(self.L):
for y in range(smax):
self.nodes += self.sites[x][y]
self.edges = [(self.bdry_tensor[i],self.sites[0][i][0],{'qreg':breg[i]}) for i in range(smax)]
self.edges+=[(self.sites[x][y][z],self.sites[x][y][z+1],{'qreg':breg[y]}) for x in range(self.L) for y in range(smax) for z in range (int(2**(y)-1))]
self.edges+=[(self.sites[x][y][z],self.sites[x][y+1][int(2*z)],{'qreg':preg[z]}) for x in range(self.L) for y in range(int(smax-1)) for z in range(int(2**(y)))]
self.edges+=[(self.sites[x][y][int(2**(y-1)-1)],self.sites[x+1][y][0],{'qreg':breg[y]})for x in range(self.L-1) for y in range(int(smax-1))]
self.qregs = breg+preg
# construct graph and check that is a DAG
# check for repeated node names
self.graph = nx.DiGraph()
self.graph.add_nodes_from(self.nodes)
self.graph.add_edges_from(self.edges)
# check that graph is directed & acyclic (DAG)
if nx.algorithms.dag.is_directed_acyclic_graph(self.graph) != True:
raise RuntimeError('Graph must be directed and acyclic')
# store node information
# self.creg_dict = creg_dict
self.node_names = [node.name for node in self.nodes]
if len(self.node_names) != len(set(self.node_names)):
raise ValueError('Tensor nodes must have unique names')
# store variational parameter info
self.param_assignments = {}
for node in self.nodes:
self.param_assignments[node]=node.param_names
# topologically sort nodes in order of execution
self.sorted_nodes = [node for node in nx.topological_sort(self.graph)]
else:
raise NotImplementedError('only qiskit implemented')
## cpu simulation ##
def left_bdry_vector(self,params):
"""
computes full unitaries for each state (any initial state for physicalqubit)
inputs:
params, dictionary of parameters {'name':numerical-value}
returns:
bdry_vec, unitary correspond to boundary
ulist, list of unitaries for tensors in unit cell
"""
bvec_l = self.bdry_tensor.unitary(params)[:,0] # boundary circuit tensor
return bvec_l
def unitaries(self,params):
"""
computes full unitaries for each state (any initial state for physicalqubit)
inputs:
params, dictionary of parameters {'name':numerical-value}
returns:
ulist, list of rank-4 tensors for each site in unit cell
"""
ulist = [self.sites[j].unitary(params) for j in range(self.l_uc)]
return ulist
def tensors(self,params):
"""
computes tensors for fixed initial state of physical qubit = |0>
inputs:
params, dictionary of parameters {'name':numerical-value}
returns:
tensors, list of rank-3 tensors for each site in unit cell
"""
tensors = [self.sites[j].unitary(params)[:,:,0,:] for j in range(self.l_uc)]
return tensors
## Convert to other format(s) ##
def to_tenpy(self,params,L=1):
"""
inputs:
params, dictionary of parameters {'name':numerical-value}
L, int, number of repetitions of unit cell,
set to np.inf for iMPS
TODO: add any other args needed to specify, symmetries, site-type etc...
outputs:
tenpy MPS object created from cirq description
"""
site = tenpy.networks.site.SpinHalfSite(conserve=None)
if (L==np.inf) and (self.l_uc==1) and (self.nphys==1):
B = np.swapaxes(self.tensors(params)[0],1,2)
psi = tenpy.networks.mps.MPS.from_Bflat([site],
[B],
bc='infinite',
dtype=complex,
form=None)
else:
B_arrs = [np.swapaxes(tensor,1,2) for tensor in self.tensors(params)]
B_arrs[0] = B_arrs[0][:,0:1,:]
B_arrs[-1] = B_arrs[-1][:,:,0:1]
psi = tenpy.networks.mps.MPS.from_Bflat([site]*L,
B_arrs,
bc = 'finite',
dtype=complex,
form=None)
psi.canonical_form()
psi.convert_form(psi.form)
return psi
def as_mps(self,params,L=1):
"""
converts to custom MPS class object
inputs:
params, dictionary of parameters {'name':numerical-value}
L, int, number of repetitions of unit cell,
set to np.inf for iMPS
outputs:
custom MPS object created from cirq description
"""
tensors = self.tensors(params)
bvecl = self.left_bdry_vector(params)
state = mps.MPS(tensors,L=L,bdry_vecs=[bvecl,None], rcf = True)
return state
def as_mpo(self,params):
"""
converts to custom MPO class object
inputs:
params, dictionary of parameters {'name':numerical-value}
outputs:
custom MPS object created from cirq description
"""
tensors = self.compute_unitaries(params)
bvecl = self.compute_left_bdry_vector(params)
op = mps.MPO(tensors,L=self.L,bdry_vecs=[bvecl,None], rcf = True)
return op
## correlation function sampling ##
    def sample_correlations(self,L,bases,N_samples):
        """
        Sample measurement outcomes / correlation functions from the circuit.

        (Not implemented yet.)

        inputs:
            L, number of unit-cell repetitions to sample over
            bases, measurement basis for each site; possible formats:
                - cirq circuit for physical qubits that maps physical qubits
                  to the desired measurement basis
                - string specification of the basis
            N_samples, number of samples to take
                (could be infinite for cpu-simulations)

        NOTE(review): the original docstring also sketched a burn-in length
        option and a backend choice ('tenpy' simulation vs 'qasm' script
        output); neither is reflected in the signature yet — confirm the
        intended interface before implementing.
        """
        raise NotImplementedError
#%%
| 42.311538 | 172 | 0.533588 | 10,521 | 0.956368 | 0 | 0 | 0 | 0 | 0 | 0 | 5,099 | 0.463503 |
87ef94f2c86df3d553a5caae350054034bea8251 | 2,553 | py | Python | custom_components/docker_monitor/switch.py | aneisch/home-assistant | d8a866d75f512d23ffa7b9e5d82d7c575ece3c76 | [
"Apache-2.0"
] | 18 | 2016-08-10T01:02:27.000Z | 2017-10-26T04:19:49.000Z | custom_components/docker_monitor/switch.py | aneisch/home-assistant | d8a866d75f512d23ffa7b9e5d82d7c575ece3c76 | [
"Apache-2.0"
] | null | null | null | custom_components/docker_monitor/switch.py | aneisch/home-assistant | d8a866d75f512d23ffa7b9e5d82d7c575ece3c76 | [
"Apache-2.0"
] | 4 | 2017-04-20T19:41:21.000Z | 2017-05-16T17:10:05.000Z | '''
Docker Monitor component
For more details about this component, please refer to the documentation at
https://github.com/aneisch/docker_monitor
'''
import logging
from homeassistant.components.switch import (
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
SwitchDevice
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_NAME
)
from homeassistant.core import ServiceCall
from custom_components.docker_monitor import (
CONF_ATTRIBUTION,
CONF_CONTAINERS,
DATA_CONFIG,
DATA_DOCKER_API,
DOCKER_HANDLE
)
VERSION = '0.0.3'
DEPENDENCIES = ['docker_monitor']
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Set up the Docker Monitor Switch."""
    handle = hass.data[DOCKER_HANDLE]
    api = handle[DATA_DOCKER_API]
    # NB: deliberately shadows the `config` argument, as the original did
    config = handle[DATA_CONFIG]
    clientname = config[CONF_NAME]
    # names of containers actually present on the Docker host
    available = {container.get_name() for container in api.get_containers()}
    switches = []
    for name in config[CONF_CONTAINERS]:
        if name in available:
            switches.append(ContainerSwitch(api, clientname, name))
    if not switches:
        _LOGGER.info("No containers setup")
        return False
    add_devices_callback(switches, True)
class ContainerSwitch(SwitchDevice):
    """Home Assistant switch that starts/stops a single Docker container."""

    def __init__(self, api, clientname, container_name):
        # api: Docker API wrapper; clientname: display prefix for the entity name
        self._api = api
        self._clientname = clientname
        self._container_name = container_name
        self._state = False
        self._container = api.get_container(container_name)
        def update_callback(stats):
            # Invoked by the container's stats stream: mirror the 'running'
            # status into the switch state and notify HA only on changes.
            _LOGGER.debug("Received callback with message: {}".format(stats))
            if stats['info']['status'] == 'running':
                state = True
            else:
                state = False
            if self._state is not state:
                self._state = state
                self.schedule_update_ha_state()
        self._container.stats(update_callback)
    @property
    def name(self):
        """Return the name of the sensor."""
        return "{} {}".format(self._clientname, self._container_name)
    @property
    def should_poll(self):
        # Polling stays enabled even though the stats callback pushes updates.
        return True
    @property
    def icon(self):
        return 'mdi:docker'
    @property
    def device_state_attributes(self):
        return {
            ATTR_ATTRIBUTION: CONF_ATTRIBUTION
        }
    @property
    def is_on(self):
        """True while the underlying container is running."""
        return self._state
    def turn_on(self, **kwargs):
        """Start the container."""
        self._container.start()
    def turn_off(self, **kwargs):
        """Stop the container."""
        self._container.stop()
| 24.548077 | 77 | 0.657658 | 1,309 | 0.51273 | 0 | 0 | 436 | 0.170779 | 0 | 0 | 348 | 0.13631 |
87f03243de75a73d47a72950b4975d8c48a99186 | 23,240 | py | Python | packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py | Surfict/osparc-simcore | 1e0b89574ec17ecb089674f9e5daa83d624430c8 | [
"MIT"
] | null | null | null | packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py | Surfict/osparc-simcore | 1e0b89574ec17ecb089674f9e5daa83d624430c8 | [
"MIT"
] | 16 | 2021-10-04T20:31:52.000Z | 2022-03-14T04:31:25.000Z | packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py | Surfict/osparc-simcore | 1e0b89574ec17ecb089674f9e5daa83d624430c8 | [
"MIT"
] | null | null | null | # pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
# pylint:disable=no-member
# pylint:disable=protected-access
# pylint:disable=too-many-arguments
import re
import shutil
import tempfile
import threading
from collections import namedtuple
from pathlib import Path
from typing import Any, Dict, Optional, Type, Union
import pytest
from aiohttp.client import ClientSession
from attr import dataclass
from pydantic.error_wrappers import ValidationError
from simcore_sdk.node_ports_v2 import exceptions, node_config
from simcore_sdk.node_ports_v2.links import DownloadLink, FileLink, PortLink
from simcore_sdk.node_ports_v2.port import Port
from utils_port_v2 import create_valid_port_config
from yarl import URL
##################### HELPERS
def camel_to_snake(name):
    """Convert a camelCase identifier (e.g. 'displayOrder') to snake_case."""
    with_breaks = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
    with_breaks = re.sub("([a-z0-9])([A-Z])", r"\1_\2", with_breaks)
    return with_breaks.lower()
# Bundles one parametrized case for test_valid_port: the port configuration,
# the expected python type/converter, the expected stored value and get()
# result, plus the value to set and its expected stored/retrieved forms.
PortParams = namedtuple(
    "PortParams",
    "port_cfg, exp_value_type, exp_value_converter, exp_value, exp_get_value, new_value, exp_new_value, exp_new_get_value",
)
def this_node_file_name() -> Path:
    """Path of the dummy file representing this node, under the system temp dir."""
    return Path(tempfile.gettempdir()) / "this_node_file.txt"
def another_node_file_name() -> Path:
    """Path of the dummy file representing a linked (other) node."""
    return Path(tempfile.gettempdir()) / "another_node_file.txt"
def download_file_folder_name() -> Path:
    """Per-thread download folder inside the system temp dir."""
    return Path(tempfile.gettempdir()) / "simcorefiles" / str(threading.get_ident())
def project_id() -> str:
    """Fixed project UUID shared by the parametrized test cases."""
    uuid_str = "cd0d8dbb-3263-44dc-921c-49c075ac0dd9"
    return uuid_str
def node_uuid() -> str:
    """Fixed node UUID shared by the parametrized test cases."""
    uuid_str = "609b7af4-6861-4aa7-a16e-730ea8125190"
    return uuid_str
def user_id() -> int:
    """Fixed fake user id used across the tests."""
    fake_id = 666
    return fake_id
def simcore_store_id() -> str:
    """Storage-backend id of the simcore store."""
    store = "0"
    return store
def datcore_store_id() -> str:
    """Storage-backend id of the datcore store."""
    store = "1"
    return store
def e_tag() -> str:
    """Fake entity tag returned by the mocked upload."""
    tag = "1212132546546321-1"
    return tag
##################### FIXTURES
@pytest.fixture
def this_node_file(tmp_path: Path) -> Path:
    """Create the dummy per-node file, yield it, and remove it on teardown."""
    created = this_node_file_name()
    created.write_text("some dummy data")
    assert created.exists()
    yield created
    if created.exists():
        created.unlink()
@pytest.fixture
def another_node_file() -> Path:
    """Create the dummy file for the linked node; remove it on teardown."""
    created = another_node_file_name()
    created.write_text("some dummy data")
    assert created.exists()
    yield created
    if created.exists():
        created.unlink()
@pytest.fixture
def download_file_folder() -> Path:
    """Create the per-thread download folder; remove the whole tree afterwards."""
    folder = download_file_folder_name()
    folder.mkdir(parents=True, exist_ok=True)
    yield folder
    if folder.exists():
        shutil.rmtree(folder)
@pytest.fixture(scope="module", name="project_id")
def project_id_fixture() -> str:
    """Expose project_id() as a fixture: pytest parametrizations cannot use
    fixtures, so the same value exists both as a plain function and here."""
    return project_id()
@pytest.fixture(scope="module", name="node_uuid")
def node_uuid_fixture() -> str:
    """Expose node_uuid() as a fixture: pytest parametrizations cannot use
    fixtures, so the same value exists both as a plain function and here."""
    return node_uuid()
@pytest.fixture(scope="module", name="user_id")
def user_id_fixture() -> int:
    """Expose user_id() as a fixture: pytest parametrizations cannot use
    fixtures, so the same value exists both as a plain function and here."""
    return user_id()
@pytest.fixture
async def mock_download_file(
    monkeypatch,
    this_node_file: Path,
    project_id: str,
    node_uuid: str,
    download_file_folder: Path,
):
    # Replace filemanager.download_file_from_link with a local copy: instead of
    # hitting storage, copy the dummy file into the requested folder.
    async def mock_download_file_from_link(
        download_link: URL,
        local_folder: Path,
        session: Optional[ClientSession] = None,
        file_name: Optional[str] = None,
    ) -> Path:
        # every download must land inside the per-thread download folder
        assert str(local_folder).startswith(str(download_file_folder))
        destination_path = local_folder / this_node_file.name
        destination_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(this_node_file, destination_path)
        return destination_path
    from simcore_sdk.node_ports_common import filemanager
    monkeypatch.setattr(
        filemanager, "download_file_from_link", mock_download_file_from_link
    )
@pytest.fixture(scope="session", name="e_tag")
def e_tag_fixture() -> str:
    """Session-wide fake entity tag (same value as the e_tag() helper)."""
    tag = "1212132546546321-1"
    return tag
@pytest.fixture
async def mock_upload_file(mocker, e_tag):
    """Patch filemanager.upload_file to pretend every upload succeeds."""
    patched = mocker.patch(
        "simcore_sdk.node_ports_common.filemanager.upload_file",
        return_value=(simcore_store_id(), e_tag),
    )
    yield patched
@pytest.fixture
def common_fixtures(
    loop,
    storage_v0_service_mock,
    mock_download_file,
    mock_upload_file,
    this_node_file: Path,
    another_node_file: Path,
    download_file_folder: Path,
):
    """Main fixture of this module: pulls in the event loop, the storage-service
    mock, the up/download mocks and the dummy files, then points node_config at
    the mocked storage endpoint."""
    node_config.STORAGE_ENDPOINT = "storage:8080"
##################### TESTS
@pytest.mark.parametrize(
"port_cfg, exp_value_type, exp_value_converter, exp_value, exp_get_value, new_value, exp_new_value, exp_new_get_value",
[
pytest.param(
*PortParams(
port_cfg=create_valid_port_config("integer", defaultValue=3),
exp_value_type=(int),
exp_value_converter=int,
exp_value=3,
exp_get_value=3,
new_value=7,
exp_new_value=7,
exp_new_get_value=7,
),
id="integer value with default",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config("number", defaultValue=-23.45),
exp_value_type=(float),
exp_value_converter=float,
exp_value=-23.45,
exp_get_value=-23.45,
new_value=7,
exp_new_value=7.0,
exp_new_get_value=7.0,
),
id="number value with default",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config("boolean", defaultValue=True),
exp_value_type=(bool),
exp_value_converter=bool,
exp_value=True,
exp_get_value=True,
new_value=False,
exp_new_value=False,
exp_new_get_value=False,
),
id="boolean value with default",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"boolean", defaultValue=True, value=False
),
exp_value_type=(bool),
exp_value_converter=bool,
exp_value=False,
exp_get_value=False,
new_value=True,
exp_new_value=True,
exp_new_get_value=True,
),
id="boolean value with default and value",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config("data:*/*", key="no_file"),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=None,
exp_get_value=None,
new_value=str(this_node_file_name()),
exp_new_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
e_tag=e_tag(),
),
exp_new_get_value=download_file_folder_name()
/ "no_file"
/ this_node_file_name().name,
),
id="file type with no payload",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="no_file_with_default",
defaultValue=str(this_node_file_name()),
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=None,
exp_get_value=None,
new_value=this_node_file_name(),
exp_new_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
e_tag=e_tag(),
),
exp_new_get_value=download_file_folder_name()
/ "no_file_with_default"
/ this_node_file_name().name,
),
id="file link with no payload and default value",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="some_file",
value={
"store": simcore_store_id(),
"path": f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
},
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
),
exp_get_value=download_file_folder_name()
/ "some_file"
/ this_node_file_name().name,
new_value=None,
exp_new_value=None,
exp_new_get_value=None,
),
id="file link with payload that gets reset",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="some_file_with_file_to_key_map",
fileToKeyMap={
"a_new_fancy_name.csv": "some_file_with_file_to_key_map"
},
value={
"store": simcore_store_id(),
"path": f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
},
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
),
exp_get_value=download_file_folder_name()
/ "some_file_with_file_to_key_map"
/ "a_new_fancy_name.csv",
new_value=None,
exp_new_value=None,
exp_new_get_value=None,
),
id="file link with fileToKeyMap with payload that gets reset",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="some_file_on_datcore",
value={
"store": datcore_store_id(),
"path": f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
"dataset": "some blahblah",
"label": "some blahblah",
},
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=FileLink(
store=datcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
dataset="some blahblah",
label="some blahblah",
),
exp_get_value=download_file_folder_name()
/ "some_file_on_datcore"
/ this_node_file_name().name,
new_value=this_node_file_name(),
exp_new_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
e_tag=e_tag(),
),
exp_new_get_value=download_file_folder_name()
/ "some_file_on_datcore"
/ this_node_file_name().name,
),
id="file link with payload on store 1",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="download_link",
value={
"downloadLink": "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/master/README.md"
},
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=DownloadLink(
downloadLink="https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/master/README.md"
),
exp_get_value=download_file_folder_name()
/ "download_link"
/ this_node_file_name().name,
new_value=this_node_file_name(),
exp_new_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
e_tag=e_tag(),
),
exp_new_get_value=download_file_folder_name()
/ "download_link"
/ this_node_file_name().name,
),
id="download link file type gets set back on store",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="download_link_with_file_to_key",
fileToKeyMap={
"a_cool_file_type.zip": "download_link_with_file_to_key"
},
value={
"downloadLink": "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/master/README.md"
},
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=DownloadLink(
downloadLink="https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/master/README.md"
),
exp_get_value=download_file_folder_name()
/ "download_link_with_file_to_key"
/ "a_cool_file_type.zip",
new_value=this_node_file_name(),
exp_new_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
e_tag=e_tag(),
),
exp_new_get_value=download_file_folder_name()
/ "download_link_with_file_to_key"
/ "a_cool_file_type.zip",
),
id="download link file type with filetokeymap gets set back on store",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="file_port_link",
value={
"nodeUuid": "238e5b86-ed65-44b0-9aa4-f0e23ca8a083",
"output": "the_output_of_that_node",
},
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=PortLink(
nodeUuid="238e5b86-ed65-44b0-9aa4-f0e23ca8a083",
output="the_output_of_that_node",
),
exp_get_value=download_file_folder_name()
/ "file_port_link"
/ another_node_file_name().name,
new_value=this_node_file_name(),
exp_new_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
e_tag=e_tag(),
),
exp_new_get_value=download_file_folder_name()
/ "file_port_link"
/ this_node_file_name().name,
),
id="file node link type gets set back on store",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"data:*/*",
key="file_port_link_with_file_to_key_map",
fileToKeyMap={
"a_cool_file_type.zip": "file_port_link_with_file_to_key_map"
},
value={
"nodeUuid": "238e5b86-ed65-44b0-9aa4-f0e23ca8a083",
"output": "the_output_of_that_node",
},
),
exp_value_type=(Path, str),
exp_value_converter=Path,
exp_value=PortLink(
nodeUuid="238e5b86-ed65-44b0-9aa4-f0e23ca8a083",
output="the_output_of_that_node",
),
exp_get_value=download_file_folder_name()
/ "file_port_link_with_file_to_key_map"
/ "a_cool_file_type.zip",
new_value=this_node_file_name(),
exp_new_value=FileLink(
store=simcore_store_id(),
path=f"{project_id()}/{node_uuid()}/{this_node_file_name().name}",
e_tag=e_tag(),
),
exp_new_get_value=download_file_folder_name()
/ "file_port_link_with_file_to_key_map"
/ "a_cool_file_type.zip",
),
id="file node link type with file to key map gets set back on store",
),
pytest.param(
*PortParams(
port_cfg=create_valid_port_config(
"number",
key="number_port_link",
value={
"nodeUuid": "238e5b86-ed65-44b0-9aa4-f0e23ca8a083",
"output": "the_output_of_that_node",
},
),
exp_value_type=(float),
exp_value_converter=float,
exp_value=PortLink(
nodeUuid="238e5b86-ed65-44b0-9aa4-f0e23ca8a083",
output="the_output_of_that_node",
),
exp_get_value=562.45,
new_value=None,
exp_new_value=None,
exp_new_get_value=None,
),
id="number node link type gets reset",
),
],
)
async def test_valid_port(
    common_fixtures: None,
    user_id: int,
    project_id: str,
    node_uuid: str,
    port_cfg: Dict[str, Any],
    exp_value_type: Type[Union[int, float, bool, str, Path]],
    exp_value_converter: Type[Union[int, float, bool, str, Path]],
    exp_value: Union[int, float, bool, str, Path, FileLink, DownloadLink, PortLink],
    exp_get_value: Union[int, float, bool, str, Path],
    new_value: Union[int, float, bool, str, Path],
    exp_new_value: Union[int, float, bool, str, Path, FileLink],
    exp_new_get_value: Union[int, float, bool, str, Path],
    another_node_file: Path,
):
    """For each parametrized case: build a Port, check its schema and initial
    value/get(), then set a new value and re-check value/get()."""
    # Minimal stand-in for the NodePorts owner of the port.
    @dataclass
    class FakeNodePorts:
        user_id: int
        project_id: str
        node_uuid: str
        @staticmethod
        async def get(key):
            # this gets called when a node links to another node we return the get value but for files it needs to be a real one
            return (
                another_node_file
                if port_cfg["type"].startswith("data:")
                else exp_get_value
            )
        @classmethod
        async def _node_ports_creator_cb(cls, node_uuid: str) -> "FakeNodePorts":
            return cls(user_id=user_id, project_id=project_id, node_uuid=node_uuid)
        @staticmethod
        async def save_to_db_cb(node_ports):
            return
    fake_node_ports = FakeNodePorts(
        user_id=user_id, project_id=project_id, node_uuid=node_uuid
    )
    port = Port(**port_cfg)
    port._node_ports = fake_node_ports
    # check schema: every config entry (except the value) must round-trip onto
    # the Port's snake_case attributes ("type" maps to "property_type")
    for k, v in port_cfg.items():
        camel_key = camel_to_snake(k)
        if k == "type":
            camel_key = "property_type"
        if k != "value":
            assert v == getattr(port, camel_key)
    # check payload
    assert port._py_value_type == exp_value_type
    assert port._py_value_converter == exp_value_converter
    assert port.value == exp_value
    if isinstance(exp_get_value, Path):
        # if it's a file let's create one there already
        exp_get_value.parent.mkdir(parents=True, exist_ok=True)
        exp_get_value.touch()
    if exp_get_value is None:
        assert await port.get() == None
    else:
        assert await port.get() == exp_get_value
    if isinstance(exp_value, PortLink) and isinstance(exp_get_value, Path):
        # as the file is moved internally we need to re-create it or it fails
        another_node_file_name().touch(exist_ok=True)
    # it should work several times
    assert await port.get() == exp_get_value
    # set a new value
    await port.set(new_value)
    assert port.value == exp_new_value
    if isinstance(exp_new_get_value, Path):
        # if it's a file let's create one there already
        exp_new_get_value.parent.mkdir(parents=True, exist_ok=True)
        exp_new_get_value.touch()
    if exp_new_get_value is None:
        assert await port.get() == None
    else:
        assert await port.get() == exp_new_get_value
    # get() must be repeatable with the new value as well
    assert await port.get() == exp_new_get_value
@pytest.mark.parametrize(
    "port_cfg",
    [
        # presumably invalid because of the "." in the key — confirm against
        # the Port key validation pattern
        {
            "key": "some.key",
            "label": "some label",
            "description": "some description",
            "type": "integer",
            "displayOrder": 2.3,
        },
        # ":" in the key
        {
            "key": "some:key",
            "label": "",
            "description": "",
            "type": "integer",
            "displayOrder": 2.3,
        },
        # unknown property type
        {
            "key": "some_key",
            "label": "",
            "description": "",
            "type": "blahblah",
            "displayOrder": 2.3,
        },
        # a raw file path is not a valid value for a file-type port
        {
            "key": "some_file_with_file_in_value",
            "label": "",
            "description": "",
            "type": "data:*/*",
            "displayOrder": 2.3,
            "value": __file__,
        },
    ],
)
def test_invalid_port(common_fixtures: None, port_cfg: Dict[str, Any]):
    # every invalid config above must be rejected at construction time
    with pytest.raises(ValidationError):
        Port(**port_cfg)
@pytest.mark.parametrize(
    "port_cfg", [(create_valid_port_config("data:*/*", key="set_some_inexisting_file"))]
)
async def test_invalid_file_type_setter(
    common_fixtures: None, project_id: str, node_uuid: str, port_cfg: Dict[str, Any]
):
    """Setting a non-existing file or a directory on a file port must fail."""
    port = Port(**port_cfg)
    # set a file that does not exist
    with pytest.raises(exceptions.InvalidItemTypeError):
        await port.set("some/dummy/file/name")
    # set a folder fails too
    with pytest.raises(exceptions.InvalidItemTypeError):
        await port.set(Path(__file__).parent)
| 34.947368 | 128 | 0.549871 | 703 | 0.03025 | 896 | 0.038554 | 21,358 | 0.919019 | 4,469 | 0.192298 | 5,607 | 0.241265 |
87f0863e9a634ae8445629a68e19dd2fc1288602 | 1,282 | py | Python | utilities_common/platform_sfputil_helper.py | deran1980/sonic-utilities | a6ae218238e7e552f49191f81451bd55ff56ba51 | [
"Apache-2.0"
] | null | null | null | utilities_common/platform_sfputil_helper.py | deran1980/sonic-utilities | a6ae218238e7e552f49191f81451bd55ff56ba51 | [
"Apache-2.0"
] | 4 | 2020-04-17T06:53:05.000Z | 2020-12-01T02:37:34.000Z | utilities_common/platform_sfputil_helper.py | deran1980/sonic-utilities | a6ae218238e7e552f49191f81451bd55ff56ba51 | [
"Apache-2.0"
] | null | null | null | import sys
import click
from sonic_py_common import multi_asic, device_info
platform_sfputil = None
def load_platform_sfputil():
    """Instantiate the platform SfpUtilHelper into the module-global
    `platform_sfputil`; exits the process on failure. Returns 0 on success."""
    global platform_sfputil
    try:
        # imported lazily so the module can be loaded on hosts without the
        # sonic_platform_base package
        import sonic_platform_base.sonic_sfp.sfputilhelper
        platform_sfputil = sonic_platform_base.sonic_sfp.sfputilhelper.SfpUtilHelper()
    except Exception as e:
        click.echo("Failed to instantiate platform_sfputil due to {}".format(repr(e)))
        sys.exit(1)
    return 0
def platform_sfputil_read_porttab_mappings():
    """Load the port<->transceiver mappings into the module-global
    `platform_sfputil`; exits the process on failure. Returns 0 on success.

    NOTE(review): assumes load_platform_sfputil() was called first so that
    platform_sfputil is not None — confirm at the call sites."""
    try:
        if multi_asic.is_multi_asic():
            # For multi ASIC platforms we pass DIR of port_config_file_path and the number of asics
            (platform_path, hwsku_path) = device_info.get_paths_to_platform_and_hwsku_dirs()
            # Load platform module from source
            platform_sfputil.read_all_porttab_mappings(hwsku_path, multi_asic.get_num_asics())
        else:
            # For single ASIC platforms we pass port_config_file_path and the asic_inst as 0
            port_config_file_path = device_info.get_path_to_port_config_file()
            platform_sfputil.read_porttab_mappings(port_config_file_path, 0)
    except Exception as e:
        click.echo("Error reading port info (%s)" % str(e))
        sys.exit(1)
    return 0
| 32.05 | 99 | 0.713729 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 281 | 0.219189 |
87f14b098265544dbf022e3f54455436deb0ad4b | 24 | py | Python | structures/tree/__init__.py | spencerpomme/pyalgolib | d055287caa4a779ea833c7efc305cd4f966bd841 | [
"MIT"
] | null | null | null | structures/tree/__init__.py | spencerpomme/pyalgolib | d055287caa4a779ea833c7efc305cd4f966bd841 | [
"MIT"
] | null | null | null | structures/tree/__init__.py | spencerpomme/pyalgolib | d055287caa4a779ea833c7efc305cd4f966bd841 | [
"MIT"
] | null | null | null | # data structure module
| 12 | 23 | 0.791667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.958333 |
87f27b1f15ae50f75fc47c17a7b0976a09fb4c5a | 5,227 | py | Python | geco/mips/loading/miplib.py | FreestyleBuild/GeCO | 6db1a549b3145b3bc5d3025a9bccc03be6575564 | [
"MIT"
] | 8 | 2020-12-16T09:59:05.000Z | 2022-03-18T09:48:43.000Z | geco/mips/loading/miplib.py | FreestyleBuild/GeCO | 6db1a549b3145b3bc5d3025a9bccc03be6575564 | [
"MIT"
] | 101 | 2020-11-09T10:20:03.000Z | 2022-03-24T13:50:06.000Z | geco/mips/loading/miplib.py | FreestyleBuild/GeCO | 6db1a549b3145b3bc5d3025a9bccc03be6575564 | [
"MIT"
] | 3 | 2021-04-06T13:26:03.000Z | 2022-03-22T13:22:16.000Z | import tempfile
from urllib.request import urlretrieve, urlopen
from urllib.error import URLError
import pyscipopt as scip
import os
import pandas as pd
class Loader:
def __init__(self, persistent_directory=None):
"""
Initializes the MIPLIB loader object
Parameters
----------
persistent_directory: str or None
Path for directory to use for persistent files,
If set to None, resorts to default case of using temporary files
that get deleted after program execution
"""
self.instances_cache = {}
self.dir = persistent_directory
if persistent_directory:
self._load_instances_cache()
def load_instance(self, instance_name, with_solution=False):
if not self._instance_cached(instance_name):
self._download_instance(instance_name)
problem_path = self._instance_path(instance_name)
model = scip.Model()
model.readProblem(problem_path)
if with_solution:
self._add_solution(model, instance_name)
return model
def _instance_path(self, instance_name):
return self.instances_cache[instance_name]
def _generate_path_for_instance(self, instance_name):
if self.dir:
return self.dir + instance_name
else:
extension = instance_name[instance_name.index(".") :]
return tempfile.NamedTemporaryFile(suffix=extension, delete=False).name
def _download_instance(self, instance_name):
path = self._generate_path_for_instance(instance_name)
url = self._look_for_working_url(self._instance_urls(instance_name))
if url:
urlretrieve(url, path)
self.instances_cache[instance_name] = path
else:
raise ValueError(
"Was not able to find the instance in any of the MIPLIB sources"
)
def _look_for_working_url(self, urls):
for url in urls:
try:
response = urlopen(url)
except URLError:
continue
if self._successful_response(response):
return url
return None
@staticmethod
def _successful_response(response):
return response.status == 200 and "not_found" not in response.url
def _instance_cached(self, instance_name):
return instance_name in self.instances_cache
def _load_instances_cache(self):
for path in os.listdir(self.dir):
if path.endswith(".mps.gz"):
instance_name = path.split("/")[-1]
self.instances_cache[instance_name] = self.dir + path
def _add_solution(self, model, instance_name):
url = self._look_for_working_url(self._solution_urls(instance_name))
if url:
with tempfile.NamedTemporaryFile(suffix=".sol.gz") as sol_file:
urlretrieve(url, sol_file.name)
model.readSol(sol_file.name)
else:
raise ValueError(
"Was not able to find the solution in any of the MIPLIB sources"
)
@staticmethod
def _instance_urls(instance_name):
return [
f"https://miplib.zib.de/WebData/instances/{instance_name}", # 2017 instances
f"http://miplib2010.zib.de/download/{instance_name}", # 2010 instances
f"http://miplib2010.zib.de/miplib2003/download/{instance_name}", # 2003 instance
]
@staticmethod
def _solution_urls(instance_name):
name = instance_name[: instance_name.index(".")]
return [
f"https://miplib.zib.de/downloads/solutions/{name}/1/{name}.sol.gz", # 2017 solutions
f"http://miplib2010.zib.de/download/{name}.sol.gz", # 2010 solutions
f"http://miplib2010.zib.de/miplib2003/download/{name}.sol.gz", # 2003 solutions
]
def __del__(self):
if self.dir is None:
for path in self.instances_cache.values():
os.unlink(path)
def benchmark_instances():
    """Yield pyscipopt models for every MIPLIB benchmark-set instance."""
    yield from custom_list("https://miplib.zib.de/downloads/benchmark-v2.test")
def easy_instances():
    """Yield pyscipopt models for every MIPLIB 'easy' instance."""
    yield from custom_list("https://miplib.zib.de/downloads/easy-v9.test")
def hard_instances():
    """Yield pyscipopt models for every MIPLIB 'hard' instance."""
    yield from custom_list("https://miplib.zib.de/downloads/hard-v15.test")
def open_instances():
    """Yield pyscipopt models for every MIPLIB 'open' instance."""
    yield from custom_list("https://miplib.zib.de/downloads/open-v14.test")
def custom_list(source, with_solution=False, loader=None):
    """
    Returns a generator of instances from the given list

    Parameters
    ----------
    source: str
        Path or URL for the instance list source
    with_solution: bool
        Whether to return the instance with the known solutions or not
    loader: Loader
        Loader object to download instances with

    Returns
    -------
    A generator for the instances
    """
    # The MIPLIB .test files contain one instance file name per line, so they
    # parse as a single-column CSV.
    df = pd.read_csv(source, names=["instance"])
    if loader is None:
        loader = Loader()
    for instance in df["instance"]:
        yield loader.load_instance(instance, with_solution=with_solution)
| 33.50641 | 98 | 0.639755 | 3,887 | 0.743639 | 1,169 | 0.223646 | 884 | 0.169122 | 0 | 0 | 1,540 | 0.294624 |
87f3786df52a1d8399072312a80407b63e5e8a0e | 40,723 | py | Python | plugin.py | uwsbel/blenderPlugin | beeab9850c4cc2ea6a3f514ce958a5a153c38f95 | [
"BSD-3-Clause"
] | 3 | 2015-08-24T20:34:33.000Z | 2021-01-03T10:49:33.000Z | plugin.py | uwsbel/blenderPlugin | beeab9850c4cc2ea6a3f514ce958a5a153c38f95 | [
"BSD-3-Clause"
] | null | null | null | plugin.py | uwsbel/blenderPlugin | beeab9850c4cc2ea6a3f514ce958a5a153c38f95 | [
"BSD-3-Clause"
] | null | null | null | /*******************************************************
* Copyright (C) 2013-2014 Daniel Kaczmarek <dankaczma@gmail.com>, Simulation Based Engineering Lab <sbel.wisc.edu>
* Some rights reserved. See LICENSE
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file at the top level of the distribution as well
* as well as at https://github.com/uwsbel/blenderPlugin/blob/master/LICENSE
*******************************************************/
import bpy
import math
import mathutils
import os
import yaml
import tarfile
import shutil
import stat
# Blender add-on registration metadata, read by Blender's add-on manager.
bl_info = {
    "name": "Chrono::Render plugin",
    "description": "Allows for easy graphical manipulation of simulated data before rendering with a powerful renderman renderer",
    "author": "Daniel <Daphron> Kaczmarek",
    "version": (0, 9),
    "blender": (2, 67, 1),
    "location": "File > Import > Import Chrono::Engine",
    "warning": "",
    "wiki_url": "TODO",
    "tracker_url":"TODO",
    "category": "Import-Export"}
# Fallback diffuse colour applied to imported objects.
DEFAULT_COLOR = (0.4, 0.4, 0.6)
# Maps mesh file extensions to the Blender operator that imports them.
MESH_IMPORT_FUNCTIONS = {"obj": bpy.ops.import_scene.obj,
                         "stl": bpy.ops.import_mesh.stl,
                         "ply": bpy.ops.import_mesh.ply}
# Module-level state shared across the plugin's operators/handlers.
fin = ""
objects = ""
proxyObjects = ""
changing_params = False
max_dim = 1
min_dim = 1
class AmbientLightProxy:
    """Blender proxy object whose material colour stands in for the scene's
    ambient light settings."""

    def __init__(self):
        self.material = self.create_material()
        self.obj = None
    def update(self):
        """Grabs stuff like color, texture and stores them"""
        #Color can be diffuse, specular, mirror, and subsurface scattering
        if self.obj.active_material is None:
            # NOTE(review): re-fetches the proxy by name when its material was
            # lost; assumes the scene still contains an object named
            # 'Ambient Light Proxy' (created by addToBlender) — confirm.
            self.obj = bpy.context.scene.objects['Ambient Light Proxy']
        self.color = (self.obj.active_material.diffuse_color[0], self.obj.active_material.diffuse_color[1], self.obj.active_material.diffuse_color[2])
    def create_material(self):
        # Default black Lambert material for the proxy object.
        mat = bpy.data.materials.new("Ambient light proxy material")
        mat.diffuse_color = (0,0,0)
        mat.diffuse_shader = 'LAMBERT'
        mat.diffuse_intensity = 1.0
        mat.specular_color = (1.0, 1.0, 1.0)
        mat.specular_shader = 'COOKTORR'
        mat.specular_intensity = 0.5
        mat.alpha = 1.0
        mat.ambient = 1
        return mat
    def addToBlender(self):
        # The proxy is represented in the scene by a monkey mesh; its "index"
        # custom property marks it so other code can recognize it.
        bpy.ops.mesh.primitive_monkey_add(location=(6, 6, 6))
        bpy.context.active_object.name = "Ambient Light Proxy"
        bpy.context.active_object.active_material = self.material
        bpy.context.active_object["index"] = "AMBIENT_PROXY"
        self.obj = bpy.context.active_object
class Object:
    """A single simulated body parsed from one comma-separated line of the
    input data file.

    Field layout: group, id, x, y, z, quaternion (w x y z), object type,
    then type-specific extra parameters.
    """
    def __init__(self, data, currdir):
        self.group = data[0]
        self.index = int(data[1])  # the object's unique ID/index number
        # XYZ location
        self.x = float(data[2])
        self.y = float(data[3])
        self.z = float(data[4])
        self.quat = mathutils.Quaternion((float(data[5]), float(data[6]), float(data[7]), float(data[8])))
        self.euler = tuple(a for a in self.quat.to_euler())
        self.obj_type = data[9].lower()
        # Extra parameters (specific to each object type): numeric where
        # possible, otherwise the raw string (e.g. a mesh filename).
        self.ep = []
        for x in range(10, len(data)):
            if data[x] != '\n':  # fixed: was "is not '\n'", an identity test on a str literal
                try:
                    self.ep.append(float(data[x]))
                except ValueError:
                    self.ep.append(data[x].strip("\n"))
        self.color = DEFAULT_COLOR
        self.currdir = currdir
        self.material = self.create_material()
    def create_material(self):
        """Create and return this object's default material."""
        mat = bpy.data.materials.new("Object {}'s material".format(self.index))
        mat.diffuse_color = self.color
        mat.diffuse_shader = 'LAMBERT'
        mat.diffuse_intensity = 1.0
        mat.specular_color = (1.0, 1.0, 1.0)
        mat.specular_shader = 'COOKTORR'
        mat.specular_intensity = 0.5
        mat.alpha = 1.0
        mat.ambient = 1
        return mat
    def addToBlender(self):
        """Create the corresponding Blender primitive (or import an external
        mesh) at this object's pose, and tag it with the object's index."""
        if self.obj_type == "cube":
            #ep[0] = length of one side
            bpy.ops.mesh.primitive_cube_add(radius=self.ep[0], location=(self.x, self.y, self.z), rotation=self.euler)
        elif self.obj_type == "box":
            # Unit cube scaled to ep[0..2] half-lengths, then rotated
            bpy.ops.mesh.primitive_cube_add(radius=1.0, location=(self.x, self.y, self.z))
            bpy.ops.transform.resize(value=(self.ep[0], self.ep[1], self.ep[2]))
            bpy.context.object.rotation_euler = mathutils.Euler(self.euler)
        elif self.obj_type == "cylinder":
            # ep[0] = radius of top, 2*ep[1] = depth
            bpy.ops.mesh.primitive_cylinder_add(radius=self.ep[0], depth=2*self.ep[1], location=(self.x, self.y, self.z), rotation=self.euler)
        elif self.obj_type == "sphere":
            # ep[0] = radius; a uv sphere looks nicer but an icosphere might be the better route
            bpy.ops.mesh.primitive_uv_sphere_add(size=self.ep[0], location=(self.x, self.y, self.z), rotation=self.euler)
        elif self.obj_type == "ellipsoid":
            # ep[0..2] are the semi-axes; scale a unit sphere, then rotate
            bpy.ops.mesh.primitive_uv_sphere_add(size=1.0, location=(self.x, self.y, self.z))
            bpy.ops.transform.resize(value=(self.ep[0], self.ep[1], self.ep[2]))
            bpy.context.object.rotation_euler = mathutils.Euler(self.euler)
        elif self.obj_type == "cone":
            # ep[0] = radius of cone bottom, ep[1] = half-height of cone
            bpy.ops.mesh.primitive_cone_add(radius1=self.ep[0], depth=2*self.ep[1], location=(self.x, self.y, self.z), rotation=self.euler)
        elif self.obj_type == "torus":
            bpy.ops.mesh.primitive_torus_add(rotation=self.euler, location=(self.x, self.y, self.z), major_radius=self.ep[0], minor_radius=self.ep[1])
        elif self.obj_type in MESH_IMPORT_FUNCTIONS:
            # External mesh: ep[0] is the filename under <currdir>/meshes
            filename = os.path.join(self.currdir, "meshes", self.ep[0])
            MESH_IMPORT_FUNCTIONS[self.obj_type](filepath=filename, use_split_groups=False, use_split_objects=False)
            for o in bpy.context.selected_objects:
                o.location = [self.x, self.y, self.z]
                # Now rotate and move to match what renderman render looks like
                o.rotation_euler = mathutils.Euler(self.euler)
                bpy.context.scene.objects.active = o
        else:
            # Fixed: the original never called .format(), so it printed a literal "{}"
            print("Object type {} is not currently supported as a primitive in the blender plugin".format(self.obj_type))
        bpy.context.active_object.rotation_mode = 'ZYX'
        bpy.context.active_object["index"] = self.index
        bpy.context.active_object.name = "Obj # {}".format(self.index)
        bpy.context.active_object.active_material = self.material
        self.obj = bpy.context.active_object
        #object.get("index") to get the value
    def update(self):
        """Grabs stuff like color, texture and stores them"""
        try:
            self.obj = bpy.context.scene.objects['Obj # {}'.format(self.index)]
            self.color = (self.obj.active_material.diffuse_color[0], self.obj.active_material.diffuse_color[1], self.obj.active_material.diffuse_color[2])
            self.mat = self.obj.active_material
        except Exception as e:
            # Fixed: generic exceptions have no .strerror, which raised a second
            # AttributeError inside the handler; print the exception itself.
            print(e)
            print("EXCEPTION! Dropping to pdb shell")
            import pdb; pdb.set_trace()
class ProxyObject(Object):
    """A representative object standing in for a whole group of identical
    objects in the data file."""
    def __init__(self, data, currdir, indicies):
        """ data is a line of the input file, indicies is a list of lines
        from the file that this obj represents whichAttribute is a num which
        specifies the column of data on the line that decides proxyObjs and
        group tells the specifica group which this proxyObj is for
        (sphere, cube...) """
        Object.__init__(self, data, currdir)
        self.indicies = indicies
        self.color = DEFAULT_COLOR
        self.material.name = "Group {}'s material".format(self.group)
    def same_params(self, data):
        """Return True if row `data` has the same extra parameters as this proxy."""
        other_ep = []
        for x in range(10, len(data)):
            if data[x] != '\n':  # fixed: was "is not '\n'", an identity test on a str literal
                try:
                    other_ep.append(float(data[x]))
                except ValueError:
                    other_ep.append(data[x].strip("\n"))
        return other_ep == self.ep
    def addToBlender(self):
        """Insert the proxy placeholder (a monkey mesh) into the scene."""
        bpy.ops.mesh.primitive_monkey_add(radius=self.ep[0], location=(self.x, self.y, self.z))
        bpy.context.active_object["group"] = self.group
        bpy.context.active_object["index"] = "PROXY"
        bpy.context.active_object.name = "Proxy " + self.group
        bpy.context.active_object.active_material = self.material
        self.obj = bpy.context.active_object
    def update(self):
        """Re-fetch the scene object and cache its current material/color."""
        try:
            self.obj = bpy.context.scene.objects['Proxy {}'.format(self.group)]
            self.color = (self.obj.active_material.diffuse_color[0], self.obj.active_material.diffuse_color[1], self.obj.active_material.diffuse_color[2])
            self.mat = self.obj.active_material
        except Exception as e:
            # Narrowed from a bare except (which also swallowed KeyboardInterrupt)
            print(e)
            print("EXCEPTION! Dropping to pdb shell")
            import pdb; pdb.set_trace()
def configInitialScene(fin_frame):
    """Set the scene's frame range to [0, fin_frame] and rewind to the start."""
    scene = bpy.data.scenes["Scene"]
    scene.frame_end = fin_frame
    scene.frame_start = 0
    scene.frame_current = scene.frame_start
class ImportChronoRender(bpy.types.Operator):
    """Import ChronoRender"""
    bl_idname = "import.import_chrono_render"
    bl_label = "Import ChronoRender"
    filename = bpy.props.StringProperty(subtype='FILE_PATH')
    directory = bpy.props.StringProperty(subtype='DIR_PATH')
    def invoke(self, context, event):
        # Pop up the file selector; execute() runs once the user confirms.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}
    def process_max_dimensions(self, data):
        """Grow the global scene bounding extent (max_dim/min_dim) from one row."""
        global max_dim
        global min_dim
        max_length = 0
        if data[9] in MESH_IMPORT_FUNCTIONS:
            pass
            #TODO: this could screw up some shadows. Fix. (because now sun shadows out of box)
        else:
            # Largest numeric extra parameter approximates the object's size
            max_length = max(float(data[x]) for x in range(10, len(data)) if data[x] != '\n')  # fixed: was "is not '\n'"
        for coord in data[2:5]:
            if float(coord) + max_length > max_dim:
                max_dim = float(coord) + max_length
            if float(coord) - max_length < min_dim:
                min_dim = float(coord) - max_length
    def import_mesh(self, data):
        """Import an external mesh row directly and register it as extra geometry."""
        global extra_geometry_indicies
        mesh_filename = os.path.join(self.directory, "meshes", data[10].strip("\n"))
        MESH_IMPORT_FUNCTIONS["obj"](filepath=mesh_filename)
        extra_geometry_indicies.append(int(data[1]))
        for o in bpy.context.selected_objects:
            o.location = [float(data[2]), float(data[3]), float(data[4])]
        quat = mathutils.Quaternion((float(data[5]), float(data[6]), float(data[7]), float(data[8])))
        euler = tuple(a for a in quat.to_euler())
        for o in bpy.context.selected_objects:
            o.rotation_euler = mathutils.Euler(euler)
    def execute(self, context):
        """Parse the selected .dat file, build Object/ProxyObject wrappers,
        and populate the scene (plus the ambient-light proxy)."""
        global fin_name
        global objects
        global proxyObjects
        global changing_params
        global ambient_proxy
        global extra_geometry_indicies
        global fin_dir
        objects = []
        proxyObjects = []
        extra_geometry_indicies = []
        fin_name = self.filename
        fin_frame = 10
        try:
            # Filenames are expected to look like "data_<frame>.dat"
            fin_frame = self.filename.replace(".dat", "")
            fin_frame = fin_frame.replace("data_", "")
            fin_frame = int(fin_frame)
        except ValueError:  # narrowed from a bare except: only int() can fail here
            print("Failed to automatically get the framerange from the file. You will likely need to set it manually.")
        filepath = os.path.join(self.directory, self.filename)
        fin_dir = self.directory
        # 'with' guarantees the file is closed (the original leaked the handle)
        with open(filepath, "r") as fin:
            for line in fin:
                fields = line.split(",")  # hoisted: was re-split several times per line
                index = fields[1]
                self.process_max_dimensions(fields)
                if fields[0].lower() == "individual":
                    objects.append(Object(fields, self.directory))
                    print("Object {}".format(index))
                else:
                    # Group row: attach to an existing proxy or create a new one
                    proxyExists = False
                    for obj in proxyObjects:
                        if obj.group == fields[0]:
                            obj.indicies.append(index)
                            if not changing_params and not obj.same_params(fields):
                                changing_params = True
                            proxyExists = True
                    if not proxyExists:
                        print("New Proxy obj num {}".format(index))
                        proxyObjects.append(ProxyObject(fields, self.directory, [index]))
        configInitialScene(fin_frame)
        for obj in objects:
            obj.addToBlender()
        for obj in proxyObjects:
            obj.addToBlender()
        ambient_proxy = AmbientLightProxy()
        ambient_proxy.addToBlender()
        print("objects added")
        return {'FINISHED'}
def add_importChronoRenderButton(self, context):
    """Menu hook: add the ChronoRender importer to Blender's File > Import menu."""
    self.layout.operator(ImportChronoRender.bl_idname,
                         text=ImportChronoRender.__doc__,
                         icon='PLUGIN')
class ExportChronoRender(bpy.types.Operator):
    """Exports to Chrono::Render"""
    bl_idname = "export.export_chrono_render"
    bl_label = "Export Chrono::Render"
    filename = bpy.props.StringProperty(subtype='FILE_PATH')
    directory = bpy.props.StringProperty(subtype='DIR_PATH')
    def invoke(self, context, event):
        # Pop up the file selector; execute() runs once the user confirms.
        context.window_manager.fileselect_add(self)
        self.context = context
        return {'RUNNING_MODAL'}
    def construct_condition(self, indicies):
        """Build the shortest condition string matching exactly `indicies`.

        Two candidate forms are built -- a flat "id == a or id == b ..." list
        and a range-grouped "(a <= id <= b) or ..." form -- and the shorter
        one is returned. Raises if `indicies` is empty.
        """
        # Flat form: one "id == n" clause per index
        rtnd = "id == "
        if len(indicies) <= 0:
            raise Exception("No indicies in this proxy object")
        for i in indicies:
            rtnd += str(i) + " or id == "
        rtnd = rtnd[:-10] # -10 to remove the trailing "or id =="
        # Range-grouped form: collapse runs of consecutive indices
        rtn = ""
        max_elem = None
        min_elem = None
        for i in indicies:
            i = int(i)
            if min_elem is None:
                min_elem = i
            if max_elem is None:
                max_elem = i
            if i == max_elem + 1:
                max_elem = i
            elif i > max_elem + 1:
                rtn += " or ({} <= id <= {})".format(min_elem, max_elem)
                min_elem = i
                max_elem = i
        rtn += " or ({} <= id <= {})".format(min_elem, max_elem)
        rtn = rtn[4:]  # strip the leading " or "
        # Fixed: min(rtnd, rtn) compared the strings lexicographically; we
        # want whichever condition string is SHORTER.
        return min((rtnd, rtn), key=len)
    def export_mesh(self, context, fout, obj):
        """Write the mesh's faces to `fout` as RenderMan Polygon statements."""
        #TODO: don't use just one file for the whole animation. One per frame. (per obj also?)
        # `obj` may be one of our Object wrappers (which hold the Blender
        # object in .obj) or a raw Blender object, as passed from execute()
        # for extra geometry; unwrap as needed. (The original crashed with
        # AttributeError on raw Blender objects.)
        bobj = getattr(obj, 'obj', obj)
        for face in bobj.data.polygons:
            pgonstr = "Polygon "
            vertices = '"P" ['
            for v in face.vertices:
                vert = bobj.data.vertices[v].co
                vertices += " {} {} {}".format(vert.x, vert.y, vert.z)
            vertices += ']\n'
            pgonstr += vertices
            #TODO: get rotations to work with any blender rotation scheme
            fout.write(pgonstr)
    def write_object(self, objects, is_proxy=False):
        """Translate Object/ProxyObject wrappers into Chrono::Render
        'renderobject' dicts; returns the list (render-hidden objects skipped)."""
        global changing_params
        renderobject = []
        for obj in objects:
            obj.update()
            name = obj.group
            color = "{} {} {}".format(obj.color[0], obj.color[1], obj.color[2])
            data = dict()
            data["name"] = str(name)
            if is_proxy:
                data["condition"] = self.construct_condition(obj.indicies)
            else:
                data["condition"] = "id == {}".format(obj.index)
            data["color"] = color
            if obj.obj_type in MESH_IMPORT_FUNCTIONS:
                data["geometry"] = [{"type" : "archive"}]
            else:
                data["geometry"] = [{"type" : obj.obj_type}]
            data["shader"] = [{"name" : "matte.sl"}] #TODO: not hardcoded
            # NOTE: key spelling "changingprams" is what the consumer expects (sic)
            data["geometry"][0]["changingprams"] = changing_params
            if obj.obj_type.lower() == "sphere":
                data["geometry"][0]["radius"] = obj.ep[0]
            elif obj.obj_type.lower() == "cube":
                data["geometry"][0]["side"] = obj.ep[0]
            elif obj.obj_type.lower() == "cone":
                data["geometry"][0]["radius"] = obj.ep[0]
                data["geometry"][0]["height"] = obj.ep[1]
            elif obj.obj_type.lower() == "cylinder":
                data["geometry"][0]["radius"] = obj.ep[0]
                data["geometry"][0]["height"] = obj.ep[1]
            elif obj.obj_type.lower() == "ellipsoid":
                data["geometry"][0]["a"] = obj.ep[0]
                data["geometry"][0]["b"] = obj.ep[1]
                data["geometry"][0]["c"] = obj.ep[2]
            elif obj.obj_type.lower() == "torus":
                data["geometry"][0]["rmajor"] = obj.ep[0]
                data["geometry"][0]["rminor"] = obj.ep[1]
            elif obj.obj_type.lower() == "box":
                data["geometry"][0]["xlength"] = obj.ep[0]
                data["geometry"][0]["ylength"] = obj.ep[1]
                data["geometry"][0]["zlength"] = obj.ep[2]
            elif obj.obj_type.lower() in MESH_IMPORT_FUNCTIONS:
                # External mesh: dump its polygons into a per-object RIB archive
                extra_rib_filename = "extra_geo_{}".format(obj.index) + ".rib"
                data["geometry"][0]["filename"] = extra_rib_filename
                renderman_dir = os.path.join(self.directory, "RENDERMAN")
                if not os.path.exists(renderman_dir):
                    os.makedirs(renderman_dir)
                ribarchives_dir = os.path.join(renderman_dir, "ribarchives")
                if not os.path.exists(ribarchives_dir):
                    os.makedirs(ribarchives_dir)
                fout_fullpath = os.path.join(ribarchives_dir, extra_rib_filename)
                fout = open(fout_fullpath, "w")
                self.export_mesh(self.context, fout, obj)
                fout.close()
            else:
                print("Geometry type {} not supported by blender export at this time".format(obj.obj_type))
            # Skip objects excluded from rendering in Blender
            if not obj.obj.hide_render:
                renderobject.append(data)
        return renderobject
    def write_extra_geometry(self, context, obj):
        """Return a renderobject entry covering all 'extra geometry' indices,
        pointing at the shared extrageometry.rib archive."""
        global extra_geometry_indicies
        renderobject = []
        data = dict()
        data["geometry"] = [{"type" : "archive"}]
        data["geometry"][0]["filename"] = "extrageometry.rib"
        data["name"] = "extrageometry"
        id_str = ""
        for i in extra_geometry_indicies:
            id_str += "id == {} or ".format(i)
        id_str = id_str[:-4]  # strip the trailing " or "
        data["condition"] = id_str
        renderobject.append(data)
        return renderobject
    def camera_to_renderman(self, context, obj):
        """Return a RenderMan projection + world-transform string placing a
        camera (or a light acting as a shadow camera) like `obj`."""
        camera_matrix = obj.matrix_world
        camera = obj
        camera_euler = obj.rotation_euler
        fov = None
        try:
            # Accessing obj.data.angle raises AttributeError for lamps,
            # steering us into the spot_size branch below.
            cam_fov = math.degrees(obj.data.angle)
            fov = 360.0*math.atan(16.0/camera.data.lens)/math.pi
        except AttributeError:
            if hasattr(obj.data, "spot_size"):
                fov = math.degrees(obj.data.spot_size)
            else:
                pass
        out = ''
        if hasattr(obj.data, "type"):
            if obj.data.type == 'SUN':
                # Sun = directional light -> orthographic shadow camera
                out += ('Projection "orthographic"\n')
            else:
                out += ('Projection "perspective" "fov" [{}]\n'.format(fov))
        else:
            out += ('Projection "perspective" "fov" [{}]\n'.format(fov))
        out += ("Scale 1 1 -1\n")
        out += ("Rotate {} 1 0 0\n".format(-math.degrees(camera_euler[0])))
        out += ("Rotate {} 0 1 0\n".format(-math.degrees(camera_euler[1])))
        out += ("Rotate {} 0 0 1\n".format(-math.degrees(camera_euler[2])))
        out += ("Translate {} {} {}\n".format(-camera_matrix[0][3],
            -camera_matrix[1][3],
            -camera_matrix[2][3]))
        return out
    def write_shadowspot(self, context, renderpasses, light_file, obj, end_x, end_y, end_z, delta_angle, index):
        """Write a shadow-mapped spotlight plus the shadow pass it needs."""
        name = "shadow_" + obj.data.name
        name = name.replace(".", "_")
        correct_name = obj.data.name.replace(".", "_")
        shadowmap_name = name + ".rib"
        shadowmap_file_path = os.path.join(self.fout_dir, shadowmap_name)
        shadowmap_file = open(shadowmap_file_path, 'w')
        shadowmap_file.write(self.camera_to_renderman(context, obj))
        shadowmap_file.close()  # fixed: handle was leaked
        light_string = 'LightSource "shadowspot" {} "intensity" {} "coneangle" {} "conedeltaangle" {} "lightcolor" [{} {} {}] "from" [{} {} {}] "to" [{} {} {}] "shadowname" ["{}"]\n'.format(index, obj.data.energy*30, obj.data.spot_size/2.0, delta_angle, obj.data.color[0], obj.data.color[1], obj.data.color[2], obj.location.x, obj.location.y, obj.location.z, end_x+obj.location.x, end_y+obj.location.y, end_z+obj.location.z, name+".shd")
        light_file.write(light_string)
        #TODO: heuristic for resolution of pass
        shadowpass = {
            "name": "shadowpass" + str(index),
            "type": "shadow",
            "settings" : {
                "resolution" : "512 512 1",
                "shadingrate" : 1.0,
                "pixelsamples" : "1 1",
                "shadowfilepath" : "shadow_" + correct_name + ".rib",
                "display" : {"output" : "shadow_" + correct_name + ".z",
                             "outtype" : "zfile",
                             "mode" : "z"}}}
        renderpasses.append(shadowpass)
    def write_sun(self, context, renderpasses, light_file, obj, end_x, end_y, end_z, index):
        """Write a shadow-mapped distant (sun) light plus its shadow pass.
        The shadow camera's ScreenWindow spans the scene's bounding extent."""
        global max_dim
        global min_dim
        name = "shadow_" + obj.data.name
        name = name.replace(".", "_")
        correct_name = obj.data.name.replace(".", "_")
        shadowmap_name = name + ".rib"
        shadowmap_file_path = os.path.join(self.fout_dir, shadowmap_name)
        shadowmap_file = open(shadowmap_file_path, 'w')
        shadowmap_file.write(self.camera_to_renderman(context, obj))
        shadowmap_file.write('ScreenWindow {} {} {} {}'.format(min_dim, max_dim, min_dim, max_dim))
        shadowmap_file.close()  # fixed: handle was leaked
        light_string = 'LightSource "shadowdistant" {} "intensity" {} "lightcolor" [{} {} {}] "from" [{} {} {}] "to" [{} {} {}] "shadowname" ["{}"]\n'.format(index, obj.data.energy, obj.data.color[0], obj.data.color[1], obj.data.color[2], 0, 0, 0, end_x, end_y, end_z, name+".shd")
        light_file.write(light_string)
        shadowpass = {
            "name": "shadowpass" + str(index),
            "type": "shadow",
            "settings" : {
                "resolution" : "512 512 1",
                "shadingrate" : 1.0,
                "pixelsamples" : "1 1",
                "shadowfilepath" : "shadow_" + correct_name + ".rib",
                "display" : {"output" : "shadow_" + correct_name + ".z",
                             "outtype" : "zfile",
                             "mode" : "z"}}}
        renderpasses.append(shadowpass)
    def write_shadowpoint(self, context, renderpasses, light_file, obj, index):
        """Write a shadow-mapped point light: six cube-face shadow cameras
        (px/py/pz/nx/ny/nz) plus one shadow pass per face."""
        light_string = 'LightSource "shadowpoint" {} "intensity" {} "lightcolor" [{} {} {}] "from" [{} {} {}]'.format(index, obj.data.energy*20.0, obj.data.color[0], obj.data.color[1], obj.data.color[2], obj.location.x, obj.location.y, obj.location.z)
        name = "shadow_" + obj.data.name
        name = name.replace(".", "_")
        correct_name = obj.data.name.replace(".", "_")
        shadowmap_name_base = name + ".rib"
        # Orientation of the shadow camera for each cube face
        rotations = {'px': 'Rotate -90.0 0.0 1.0 0.0',
                     'py': 'Rotate 90.0 1.0 0.0 0.0',
                     'pz': 'Rotate 0.0 0.0 1.0 0.0',
                     'nx': 'Rotate 90.0 0.0 1.0 0.0',
                     'ny': 'Rotate -90.0 1.0 0.0 0.0',
                     'nz': 'Rotate 180 0.0 1.0 0.0'}
        for end in ('px', 'py', 'pz', 'nx', 'ny', 'nz'):
            shadowmap_name = end + shadowmap_name_base
            shadowmap_file_path = os.path.join(self.fout_dir, shadowmap_name)
            shadowmap_file = open(shadowmap_file_path, 'w')
            light_string += ' "sf{}" ["{}"]'.format(end, end + "shadow_" + correct_name + ".shd")
            # 95-degree fov gives a little overlap between the six faces
            shadowmap_file.write('Projection "perspective" "fov" [95.0]\n')
            shadowmap_file.write(rotations[end] + "\n")
            shadowmap_file.write('Translate {} {} {}\n'.format(-obj.location.x, -obj.location.y, -obj.location.z))
            shadowmap_file.close()  # fixed: handles were leaked (six per light)
            shadowpass = {
                "name": "shadowpass" + str(index) + "_" + end,
                "type": "shadow",
                "settings" : {
                    "resolution" : "512 512 1",
                    "shadingrate" : 1.0,
                    "pixelsamples" : "1 1",
                    "shadowfilepath" : shadowmap_name,
                    "display" : {"output" : end + "shadow_" + correct_name + ".z",
                                 "outtype" : "zfile",
                                 "mode" : "z"}}}
            renderpasses.append(shadowpass)
        light_string += '\n'
        light_file.write(light_string)
    def write_ambient_occlusion(self, context, renderpasses, shader):
        """Append an ambient-occlusion/color-bleeding render pass using `shader`."""
        resolution = "{} {}".format(bpy.data.scenes["Scene"].render.resolution_x,
                                    bpy.data.scenes["Scene"].render.resolution_y)
        shadowpass = {
            "name": "ambientpass",
            "type": "ao",
            "settings": {
                "resolution": resolution,
                "bounces": bpy.context.scene.world.light_settings.indirect_bounces,
                "display": {"output" : "out.tif"}},
            "shader": {
                "name": shader,
                "samples": 256}} #TODO: some nice way of setting samples
        renderpasses.append(shadowpass)
    def execute(self, context):
        """Export the scene: per-frame cameras, lights (with shadow passes),
        all objects, and the out.yaml config; then package everything into
        tarballs and clean up the staging directory."""
        global fin_name
        global objects
        global proxyObjects
        global ambient_proxy
        global fin_dir
        # We ignore the user-given output filename: Chrono::Render is designed
        # to accept out.yaml as the yaml file
        self.filename = "out.yaml"
        renderpasses = []
        self.fout_dir = os.path.join(self.directory, "RENDERMAN")
        if not os.path.exists(self.fout_dir):
            os.makedirs(self.fout_dir)
        filepath = os.path.join(self.fout_dir, self.filename)
        fout = open(filepath, "w")
        print("Export beginning")
        ##############
        #Camera stuff#
        ##############
        current_frame = bpy.context.scene.frame_current
        fmax = bpy.data.scenes["Scene"].frame_end
        fmin = 0
        camera_moved = False
        last_camera_output = None
        for frame in range(fmin, fmax+1):
            bpy.context.scene.frame_set(frame)
            cam_file_name = "custom_camera_{}.rib".format(frame)
            cam_file_path = os.path.join(self.fout_dir, cam_file_name)
            cam_file = open(cam_file_path, 'w')
            camera_output = self.camera_to_renderman(context, bpy.data.objects['Camera'])
            if last_camera_output is None:
                last_camera_output = camera_output
            if camera_output != last_camera_output:
                # Fixed typo: was "camrea_moved", so the flag was never set and
                # moving cameras were exported as static.
                camera_moved = True
            cam_file.write(camera_output)
            cam_file.close()
            # Static camera: also emit the single shared camera file
            if not camera_moved and frame == fmax:
                cam_file_name = "custom_camera.rib"
                cam_file_path = os.path.join(self.fout_dir, cam_file_name)
                cam_file = open(cam_file_path, 'w')
                cam_file.write(camera_output)
                cam_file.close()
        moving_camera = {"moving_camera" : camera_moved}
        cam_file_name = "custom_camera.rib"
        bpy.context.scene.frame_current = current_frame
        #############
        #Light stuff#
        #############
        light_file_name = "custom_lighting.rib"
        light_file_path = os.path.join(self.fout_dir, light_file_name)
        light_file = open(light_file_path, 'w')
        for i, obj in enumerate(bpy.context.scene.objects):
            if obj.type == 'LAMP' and not obj.hide_render:
                light_string = None
                e = obj.rotation_euler
                M = e.to_matrix()
                v = mathutils.Vector((0,0,-1)) #default direction of light
                end_x, end_y, end_z = M*v
                # x20/x30 for point and spot intensity as a rough heuristic to get them
                # looking the same in blender and renderman (matte shader)
                if obj.data.type == 'SUN':
                    if obj.data.shadow_method == 'NOSHADOW':
                        light_string = 'LightSource "distantlight" {} "intensity" {} "lightcolor" [{} {} {}] "from" [{} {} {}] "to" [{} {} {}]\n'.format(i, obj.data.energy, obj.data.color[0], obj.data.color[1], obj.data.color[2], 0, 0, 0, end_x, end_y, end_z)
                    else:
                        self.write_sun(context, renderpasses, light_file, obj, end_x, end_y, end_z, i)
                elif obj.data.type == 'POINT':
                    if obj.data.shadow_method == 'NOSHADOW':
                        light_string = 'LightSource "pointlight" {} "intensity" {} "lightcolor" [{} {} {}] "from" [{} {} {}]\n'.format(i, obj.data.energy*20, obj.data.color[0], obj.data.color[1], obj.data.color[2], obj.location.x, obj.location.y, obj.location.z)
                    else:
                        self.write_shadowpoint(context, renderpasses, light_file, obj, i)
                elif obj.data.type == 'SPOT':
                    delta_angle = obj.data.spot_size/2 * obj.data.spot_blend
                    if obj.data.shadow_method == 'NOSHADOW':
                        light_string = 'LightSource "spotlight" {} "intensity" {} "coneangle" {} "conedeltaangle" {} "lightcolor" [{} {} {}] "from" [{} {} {}] "to" [{} {} {}]\n'.format(i, obj.data.energy*20, obj.data.spot_size/2.0, delta_angle, obj.data.color[0], obj.data.color[1], obj.data.color[2], obj.location.x, obj.location.y, obj.location.z, end_x+obj.location.x, end_y+obj.location.y, end_z+obj.location.z)
                    else:
                        self.write_shadowspot(context, renderpasses, light_file, obj, end_x, end_y, end_z, delta_angle, i)
                if light_string is not None:
                    light_file.write(light_string)
        # NOTE(review): 'i' here is the index of the LAST scene object from the
        # loop above, reused as the ambient light's id -- confirm this is intended.
        ambient_proxy.update()
        light_string = 'LightSource "ambientlight" {} "intensity" {} "lightcolor" [{} {} {}]\n'.format(i, ambient_proxy.obj.active_material.ambient, bpy.data.worlds["World"].ambient_color[0], bpy.data.worlds["World"].ambient_color[1], bpy.data.worlds["World"].ambient_color[2])
        light_file.write(light_string)
        light_file.close()
        #Ambient Occlusion/Color Bleeding
        if bpy.context.scene.world.light_settings.use_indirect_light:
            self.write_ambient_occlusion(context, renderpasses, "colorbleedinglight.sl")
        elif bpy.context.scene.world.light_settings.use_ambient_occlusion:
            self.write_ambient_occlusion(context, renderpasses, "occlusionlight.sl")
        ##########
        #The Rest#
        ##########
        renderobject = self.write_object(objects, is_proxy = False)
        renderobject += self.write_object(proxyObjects, is_proxy = True)
        # Meshes modeled directly in Blender (no 'index' tag) become extra geometry
        fout_extrageo = open(os.path.join(self.fout_dir, "extrageometry.rib"), "w")
        for obj in bpy.data.objects:
            if obj.type == 'MESH' and obj.name != "Ambient Light Proxy":
                if not 'index' in obj:
                    self.export_mesh(context, fout_extrageo, obj)
                    renderobject += self.write_extra_geometry(context, obj)
        fout_extrageo.close()
        # Wildcard over every frame's data file
        data_name = "./data/" + "_".join(fin_name.split("_")[:-1]) + "_*.dat"
        resolution = "{} {}".format(bpy.data.scenes["Scene"].render.resolution_x,
                                    bpy.data.scenes["Scene"].render.resolution_y)
        defaultpass = {
            "name": "defaultpass",
            "settings" : {
                "resolution" : resolution,
                "display" : {"output" : "out.tif"}}}
        if not bpy.context.scene.world.light_settings.use_ambient_occlusion and not bpy.context.scene.world.light_settings.use_indirect_light:
            renderpasses.append(defaultpass)
        data = {"chronorender" : {
            "rendersettings" : {"searchpaths" : "./"},
            "camera" : [{"filename" : cam_file_name}, moving_camera],
            "lighting" : [{"filename" : "custom_lighting.rib"}],
            "renderpass" : renderpasses ,
            "simulation" : {
                "data" : {
                    "datasource" : [{
                        "type" : "csv",
                        "name" : "defaultdata",
                        "resource" : data_name,
                        "fields" : [
                            ["group", "string"],
                            ["id", "integer"],
                            ["pos_x", "float"],
                            ["pos_y", "float"],
                            ["pos_z", "float"],
                            ["quat_w", "float"],
                            ["quat_x", "float"],
                            ["quat_y", "float"],
                            ["quat_z", "float"],
                            ["ignore", "string"], #object type
                            ["ep1", "string"], #extra params
                            ["ep2", "string"], #need to modify if more than 4 extra params
                            ["ep3", "string"],
                            ["ep4", "string"],
                            ]}]},
                "renderobject" : renderobject}}}
        yaml.safe_dump(data, fout)
        fout.close()  # fixed: the yaml file was still open when cleanup() deleted its directory
        self.move_ribs(self.fout_dir)
        print("Export complete! (yes really)")
        print("Compression beginning")
        self.compress(fin_name, fin_dir, self.filename, self.fout_dir)
        print("Compression finished")
        print("Cleanup Beginning")
        self.cleanup(self.fout_dir)
        print("Cleanup Ended")
        return {'FINISHED'}
    def cleanup(self, fout_dir):
        """Delete the staging directory (its contents are now in the tarballs)."""
        shutil.rmtree(fout_dir, onerror=self.iferror)
    def iferror(self, func, path, except_info):
        """rmtree error handler: clear the read-only bit and retry."""
        os.chmod(path, stat.S_IWRITE)
        func(path)
    def move_ribs(self, fout_dir):
        """Copies all rib files to the ribarchive directory"""
        ribarchives = os.path.join(fout_dir, "ribarchives")
        if not os.path.isdir(ribarchives):
            os.mkdir(ribarchives)
        # Copy without chdir (the original changed the process-wide cwd)
        for f in os.listdir(fout_dir):
            if f.endswith(".rib"):
                shutil.copy2(os.path.join(fout_dir, f), os.path.join(ribarchives, f))
    def compress(self, fin_name, fin_dir, fout_name, fout_dir, force_data=False):
        """Tar/gzip the simulation .dat files (once, unless force_data) and
        the exported RENDERMAN directory into the output directory."""
        #TODO: allow user to select force_data
        #requires a SEPARATE data directory to work
        data_zipped_path = os.path.join(self.directory, "data.tar.gz")
        metadata_zipped_path = os.path.join(self.directory, fout_name.split(".")[0] + ".tar.gz")
        if not os.path.exists(data_zipped_path) or force_data:
            with tarfile.open(data_zipped_path, "w:gz") as tar:
                for filename in os.listdir(fin_dir):
                    if filename.endswith(".dat"):
                        filepath = os.path.join(fin_dir, filename)
                        aname = os.path.join("job", "data", filename)
                        tar.add(filepath, arcname=aname)
        with tarfile.open(metadata_zipped_path, "w:gz") as tar2:
            tar2.add(fout_dir, arcname="")
def add_exportChronoRenderButton(self, context):
    """Menu hook: add the Chrono::Render exporter to Blender's File > Export menu."""
    self.layout.operator(ExportChronoRender.bl_idname,
                         text=ExportChronoRender.__doc__,
                         icon='PLUGIN')
def register():
    """Register both operators and hook their buttons into the File menu."""
    print("Registering")
    hooks = ((ImportChronoRender, bpy.types.INFO_MT_file_import, add_importChronoRenderButton),
             (ExportChronoRender, bpy.types.INFO_MT_file_export, add_exportChronoRenderButton))
    for operator_cls, menu, draw_fn in hooks:
        bpy.utils.register_class(operator_cls)
        menu.append(draw_fn)
def unregister():
    """Undo register(): remove the menu entries and unregister both operators."""
    print("Unregistering")
    bpy.types.INFO_MT_file_import.remove(add_importChronoRenderButton)
    bpy.types.INFO_MT_file_export.remove(add_exportChronoRenderButton)
    bpy.utils.unregister_class(ImportChronoRender)
    # Fixed: was bpy.types.unregister_class, which doesn't exist (AttributeError)
    bpy.utils.unregister_class(ExportChronoRender)
# Allow running the plugin directly (e.g. from Blender's text editor)
if __name__ == "__main__":
    register()
| 44.12026 | 438 | 0.550107 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10,077 | 0.247452 |
87f3890301da3f9632807a211a2f02d6a412d63c | 22,139 | py | Python | datasets.py | perwin/s4g_barsizes | 08da334090141c68b008d63b2fa52ee5ef7e7937 | [
"BSD-3-Clause"
] | 2 | 2020-04-04T19:46:59.000Z | 2020-04-05T03:00:09.000Z | datasets.py | perwin/s4g_barsizes | 08da334090141c68b008d63b2fa52ee5ef7e7937 | [
"BSD-3-Clause"
] | null | null | null | datasets.py | perwin/s4g_barsizes | 08da334090141c68b008d63b2fa52ee5ef7e7937 | [
"BSD-3-Clause"
] | 1 | 2019-11-04T11:39:23.000Z | 2019-11-04T11:39:23.000Z | # Python code for assembling S$G-based local bar-size and fraction dataset
#
# ListDataFrame with
# name, M_star, B-V_tc, g-r_tc, a_max_obs[arcsec, kpc], amax_dp[arcsec, kpc], distance,
# distance_source, inclination
# distance_source = direct (Cepheids, SBF, TRGB, etc), T-F, redshift
#
# Two separate datasets:
# 1. All S4G from DG16 (unbarred and barred)
# This should be the 1334 galaxies with valid M_star and distances in DG16,
# out of the 1445 galaxies in their low-inc disk sample
#
# 2. Our local galaxies meeting S0--Sd, D < 25 Mpc, i = 45--70
# ED16b
# bardata_i40-70_smaller.txt
import math
import scipy.interpolate
#import scipy.stats
import numpy as np
# Project-local helper modules
import astro_utils, angles
import datautils as du
# Root directory for this project's data products
baseDir = "/Users/erwin/Documents/Working/Projects/Project_BarSizes/"
# Data tables (retrieved via Vizier) for Salo+2015, Herrera-Endoqui+2015, and
# Diaz-Garcia+2016
tableDir = "/Beleriand/Astronomy Papers/"
tableSalo15_1 = tableDir + "salo+15_tables/table1.dat"
tableSalo15_6 = tableDir + "salo+15_tables/table6.dat"
tableSalo15_7 = tableDir + "salo+15_tables/table7.dat"
tableHE15_2 = tableDir + "herrera-endoqui+15_tables/table2.dat"
tableDG16_A1 = tableDir + "diaz-garcia+16_tables/tablea1.dat"
tableDG16_A3 = tableDir + "diaz-garcia+16_tables/tablea3.dat"
# General S4G table, downloaded from IPAC on 11 Feb 2017
tableS4G = baseDir + "spitzer.s4gcat22704.tbl.txt"
# Consolandi+2016 SDSS data for Virgo Supercluster, etc.
tableC16_1 = tableDir + "consolandi+16_table1/table1.dat"
# Galaxy Zoo 2 bar sizes and extra data
tableGZ2_barsizes = baseDir+'GalaxyZoo2_barlengths_alldata.txt'
# Virgo and Fornax Cluster members
virgoNameFile = baseDir+"virgo-cluster-members.txt"
fornaxNameFile = baseDir+"fornax-cluster-members.txt"
# Cleaned-up data for computing spline interpolation of f(B-v) vs B_tc
# (fraction of galaxies with HyperLeda B-V_tc values as function of B_tc
#x_Btc = [7, 8.25, 8.75, 9.25, 9.75, 10.25, 10.75, 11.25, 11.75, 12.25, 12.75, 13.25, 13.75, 14.25, 14.75, 15.25, 15.75, 16.25, 17]
#y_fBmV = [1.0, 1.0, 1.0, 1.0, 1.0, 0.95, 0.9565217391304348, 0.84, 0.7692307692307693, 0.6145833333333334, 0.45535714285714285, 0.3434343434343434, 0.15841584158415842, 0.23076923076923078, 0.23404255319148937, 0.125, 0.05, 0.01, 0.0]
# Spline knots: B_tc bin centers (x) and the fraction of galaxies with
# B-V_tc values in each bin (y)
x_Btc = [7.0, 8.25, 8.75, 9.25, 9.75, 10.25, 10.75, 11.25, 11.75, 12.25, 12.75, 13.25, 13.75, 14.25, 14.75, 15.25, 15.75, 16.25]
y_fBmV = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.9722222222222222, 0.8840579710144928, 0.8125, 0.6222222222222222, 0.5632183908045977, 0.4074074074074074, 0.2727272727272727, 0.3442622950819672, 0.2978723404255319, 0.10714285714285714, 0.01, 0.0]
# Akima spline through the knots: call fBmV_akimaspline(B_tc) to get the fraction
fBmV_akimaspline = scipy.interpolate.Akima1DInterpolator(x_Btc, y_fBmV)
hdrText = """# This table summarizes useful information about galaxies in the Parent Disc Sample of
# Erwin (2017, in prep).
#
# Except for the following columns, values are from Herrera-Endoqui et al. (2015: A&A, 582, A86)
# dist, logmstar -- distance in Mpc and log of galaxy stellar mass (in solar masses), from
# Munoz-Mateos et al. (2015: ApJS, 219, 3)
# bar_strength -- Buta et al. (2015, ApJS, 217, 32)
# A2, A4 -- Diaz-Garcia et al. (2016a, 587, A160)
# Re, Re_kpc, h_kpc -- Salo et al. (2015, ApJS, 219, 4)
# R25, etc.; t_s4g -- main S4G table
# B_tc, BmV_tc, m21c, W_gas, t_leda -- HyperLeda (as of 29 March 2017)
# gmr_tc -- total g-r color, based on BmV_tc as described in paper
# weight_BmVtc -- B_t-based weights for galaxy colors (see paper)
# w25, w30, w40 -- V/V_max weights assuming D_max = 25, 30, or 40 Mpc
# M_HI, logfgas -- based on dist + m21c, as described in paper
# inclination -- Munoz-Mateos et al. (2015)
# sma_dp_kpc2 -- deprojected bar semi-major axis (in kpc) using bar size and
# PA from Herrera-Endoqui et al. and galaxy inclination and PA from Munoz-Mateos et al.
#
# "No data" values:
# For the following columns "no data" is indicated by the value -99:
# BmV_tc, gmr_tc, m21c, M_HI, logfgas
# For the following columns "no data" is indicated by the value 0:
# sma, sma_kpc, sma_ell_kpc, sma_dp_kpc, sma_dp_kpc2, sma_ell_dp_kpc2
#
"""
# -99 -- BmV_tc, gmr_tc, m21c, M_HI, logfgas
# 0 -- sma, sma_kpc, sma_ell_kpc, sma_dp_kpc, sma_dp_kpc2, sma_ell_dp_kpc2
colNames_to_save = ["name", "logmstar", "dist", "B_tc", "BmV_tc", "weight_BmVtc", "gmr_tc",
"m21c", "M_HI", "logfgas", "w25", "w30", "w40", "sma", "sma_kpc", "sma_ell_kpc",
"sma_dp_kpc", "sma_dp_kpc2", "sma_ell_dp_kpc2", "bar_strength", "A2", "A4",
"inclination", "R25", "R25_5", "R25_kpc", "R25_5_kpc", "R25c_kpc", "Re", "Re_kpc",
"h_kpc", "W_gas", "t_s4g", "t_leda"]
def WriteTableToFile( dataFrame, outFilename ):
    """Write the columns listed in the module-level colNames_to_save from
    dataFrame to outFilename as a space-separated text table.

    The module-level hdrText block is written first, followed by a
    "#"-prefixed column-name line and then one row per galaxy.

    Parameters
    ----------
    dataFrame : datautils.ListDataFrame
        Data frame with (at least) all the columns in colNames_to_save;
        its .name column sets the number of rows.
    outFilename : str
        Path of the output text file (overwritten if it exists).
    """
    nEntries = len(dataFrame.name)
    # Build the commented column-header line once, instead of concatenating
    # in a loop.
    hdrLine = "# " + " ".join(colNames_to_save)
    # Context manager guarantees the file is closed even if a row fails
    # to convert (the original left the handle open on exceptions).
    with open(outFilename, 'w') as outf:
        outf.write(hdrText)
        outf.write(hdrLine + "\n")
        for i in range(nEntries):
            rowVals = [str(dataFrame[cname][i]) for cname in colNames_to_save]
            # Trailing space before the newline preserved from the original
            # output format.
            outf.write(" ".join(rowVals) + " \n")
def VmaxWeight( distance, R_25, R_25_limit=30.0, maxSurveyDist=30.0 ):
    """Return the V/V_max completeness weight W = V_tot / V_max.

    V_tot is the total survey volume out to maxSurveyDist (Mpc); V_max is
    the volume out to the largest distance at which this galaxy (with
    observed radius R_25, in arcsec) would still satisfy the survey's
    angular-size limit R_25_limit (arcsec).  Galaxies that would be
    detectable throughout the whole survey volume (V_max > V_tot) get
    W = 1, so weights are always >= 1.
    For S4G, R_25_limit = 30 arcsec.
    """
    surveyVolume = maxSurveyDist**3
    # Largest distance at which the galaxy's angular size still exceeds
    # the survey limit.
    maxObservableDist = distance * (R_25 / R_25_limit)
    observableVolume = maxObservableDist**3
    return 1.0 if observableVolume > surveyVolume else surveyVolume / observableVolume
def BmV_to_gmr( BmV ):
    """Transform galaxy B-V color(s) to g-r using the linear relation from
    Cook+2014 (2014MNRAS.445..890C): g-r = 1.12*(B-V) - 0.18.

    Parameters
    ----------
    BmV : array-like of float
        B-V colors; the sentinel value -99.0 marks missing data and is
        propagated unchanged to the output.

    Returns
    -------
    numpy.ndarray
        g-r colors (float), with -99.0 wherever the input was -99.0.

    Notes
    -----
    Generalized from the original: also accepts plain Python lists and
    scalars (the original required a numpy array for the fancy-index
    sentinel masking and float multiplication).
    """
    BmV = np.asarray(BmV, dtype=float)
    # Vectorized sentinel handling replaces the index-list loop.
    return np.where(BmV == -99.0, -99.0, 1.12*BmV - 0.18)
def IsDisk( htypeText ):
    """Return True if a Buta+2015 morphological-type string describes a
    proper disk galaxy.

    Magellanic and irregular types (Sm, Im, etc.) are excluded; they are
    identified simply by the presence of an "I" or an "m" anywhere in the
    type string.
    """
    return ("I" not in htypeText) and ("m" not in htypeText)
def GetConsolandiDataFromLine( dline ):
    """Parse one fixed-width row of the Consolandi+2016 table (Table 1).

    Returns the tuple
    (ngcName, vccName, sdssName, ra, dec, gmag, imag, gmi, logMstar),
    where the three name fields are stripped of padding (empty string if
    absent) and the remaining columns are converted to float.
    """
    # (start, stop) character spans of the fixed-width name columns
    sdssName = dline[0:20].strip()
    vccName = dline[21:25].strip()
    ngcName = dline[34:38].strip()
    # Numeric columns: RA, Dec, g, i, g-i, log(M_star)
    numericSpans = [(39, 48), (49, 58), (59, 64), (65, 70), (71, 76), (77, 81)]
    ra, dec, gmag, imag, gmi, logMstar = [float(dline[a:b]) for (a, b) in numericSpans]
    return (ngcName, vccName, sdssName, ra, dec, gmag, imag, gmi, logMstar)
def GetConsolandiData( fileName=tableC16_1 ):
    """Read the Consolandi+2016 table into a dict.

    Keys are galaxy names -- "NGC"+number when an NGC name exists, else
    "VCC"+number, else the SDSS designation.  Values are the tuples
    (ra, dec, gmag, imag, gmi, logMstar).
    """
    consolandiDict = {}
    with open(fileName) as tbl:
        for dline in tbl:
            # skip blank lines and comment rows
            if len(dline) <= 1 or dline[0] == "#":
                continue
            parsed = GetConsolandiDataFromLine(dline)
            ngcName, vccName, sdssName = parsed[0], parsed[1], parsed[2]
            # Preferred-name resolution: NGC, then VCC, then SDSS.
            if ngcName != "":
                gname = "NGC" + ngcName
            elif vccName != "":
                gname = "VCC" + vccName
            else:
                gname = sdssName
            consolandiDict[gname] = parsed[3:]
    return consolandiDict
# *** Read in main tables
# Main S4G table (from IPAC)
ds4g = du.ReadCompositeTable(tableS4G, columnRow=71, dataFrame=True)
nS4GMain = len(ds4g.name)
# Map each S4G galaxy name to [T-type, R_25.5, R_25, C31, C42, v_rad, dmean].
# NOTE(review): 3 * 10**logd25 is presumably the conversion from
# log(diameter in 0.1 arcmin) to a radius in arcsec -- confirm against the
# IPAC table definition.
s4gdict = { ds4g.name[i]: [ds4g.t[i], 0.5 * ds4g.sma1_25p5[i], 3 * 10**ds4g.logd25[i],
            ds4g.c31_1[i], ds4g.c42_1[i], ds4g.vrad[i], ds4g.dmean[i]] for i in range(nS4GMain) }
# Read in Herrera-Endoqui+2015 and Diaz-Garcia+2016 tables: 1344 galaxies
# This is where we get M_star, distance -- also Hubble type?
# Diaz-Garcia+2016 tables
d16 = du.ReadCompositeTable(tableDG16_A1, columnRow=74, dataFrame=True)
d16a3 = du.ReadCompositeTable(tableDG16_A3, columnRow=26, dataFrame=True)
# Herrera-Endoqui+2015 Table 2: 2387 lines (one for each feature in each galaxy)
# 1146 galaxies with bars
# This is where we get bar measurements (e.g., sma, sma_ell, sma_ell_dp)
# Other potentially useful stuff: sma, sma_dp [latter only exists if ell-based
# measurements also exist]
# HType
d15 = du.ReadCompositeTable(tableHE15_2, columnRow=50, dataFrame=True)
# dict mapping galaxy names to bar sizes d15.sma_ell_dp[i]
# (only rows whose feature is "bar" are kept)
d15dict = { d15.Name[i]: [d15.sma[i], d15.sma_dp[i], d15.sma_ell[i], d15.sma_ell_dp[i],
            d15.PA[i], d15.quality[i], d15.HType[i]]
            for i in range(len(d15.Name)) if d15.feature[i] == "bar" }
d15barnames = list(d15dict.keys())
# For reference: barred galaxies in Herrera-Endoqui+2015 that are *not* in
# Diaz-Garcia+2016
gnames_HE15_not_in_DG16 = [gname for gname in d15barnames if gname not in d16.Galaxy]
# *** DEFINITION OF "GOOD D16" SUBSAMPLE
# Lists of galaxies to exclude for various reasons: 22 galaxies total in parent S4G
# get names of galaxies with dubious distances (IMPORTANT because some of these can
# produce very large 1/Vmax weights)
dubiousDistNames = [ds4g.name[i] for i in range(nS4GMain) if ds4g.dmean[i] == 0 and ds4g.vrad[i] <= 500.0]
# get names of galaxies with D_25 < 1 arcmin
badR25Names = [ds4g.name[i] for i in range(nS4GMain) if ds4g.logd25[i] < 1.0]
# set gives O(1) membership tests in the filters below
badGalaxyNames = set(dubiousDistNames + badR25Names)
# construct list of indices for "good" galaxies (those with valid M_star and distance): 1334 galaxies
# index into the *parent* (1344-galaxy) D16 sample, above
# 1. Exclude galaxies with bad M_star and/or bad distances ==> 1334 galaxies left in sample
ii_d16parent_goodnames1 = [i for i in range(len(d16.Galaxy)) if d16['M*'][i] > 0 and d16['Dist'][i] > 0]
# 2. Exclude dubiousDistNames [defined above] ==> 1322 galaxies left in sample
ii_d16parent_goodnames = [i for i in ii_d16parent_goodnames1 if d16.Galaxy[i] not in badGalaxyNames]
d16_goodnames = [d16.Galaxy[i] for i in ii_d16parent_goodnames]
nDisksTot = nD16good = len(ii_d16parent_goodnames)
# *** Start constructing data vectors:
# Get names, Mstar, distances, etc., from Diaz-Garcia+16
d16_good_logmstar = [math.log10(d16['M*'][i]) for i in ii_d16parent_goodnames]
d16_good_dist = [d16['Dist'][i] for i in ii_d16parent_goodnames]
d16_good_VHI = [d16['VHI'][i] for i in ii_d16parent_goodnames]
d16_good_DM_ratio = [d16['Mh/M*'][i] for i in ii_d16parent_goodnames]
# Get B_tc and B-V values for S4G galaxies (from HyperLeda)
# 1334 galaxies (d16_goodnames) in file, 616 with B-V_tc values
dlines = [line for line in open(baseDir + "s4g_goodnames_ledadata.txt") if len(line) > 1 and line[0] != "#"]
# Per-galaxy HyperLeda quantities, accumulated in d16_goodnames order.
BmV_e = []
BmV_tc =[]
B_tc = []
m21c = []
r_25c = []
W_gas = []
t_leda = []
# Column layout of the HyperLeda dump (pipe-separated):
# name | type | t | logd25 | bve | vmaxg | logdc | btc | bvtc | m21c |
i_logd25 = 3
i_bve = 4
i_vmaxg = 5
i_logdc = 6
i_btc = 7
i_bvtc = 8
i_m21c = 9
for dline in dlines:
    pp = dline.split("|")
    gname = pp[0].strip()
    if gname in d16_goodnames:
        btc = float(pp[i_btc])
        B_tc.append(btc)
        # Missing HyperLeda values come through as non-numeric text;
        # substitute the -99 sentinel.
        try:
            bmv_e = float(pp[i_bve])
        except ValueError:
            bmv_e = -99.0
        try:
            bmv_tc = float(pp[i_bvtc])
        except ValueError:
            bmv_tc = -99.0
        BmV_e.append(bmv_e)
        BmV_tc.append(bmv_tc)
        try:
            W = float(pp[i_vmaxg])
        except:
            W = -99.0
        W_gas.append(W)
        try:
            m21 = float(pp[i_m21c])
        except ValueError:
            m21 = -99.0
        m21c.append(m21)
        t_leda.append(float(pp[2]))
        # radii in arc sec
        logdc = float(pp[i_logdc])
        r_25c.append(3 * 10**logdc)
d16_good_Btc = np.array(B_tc)
d16_good_BmV_e = np.array(BmV_e)
d16_good_BmV_tc = np.array(BmV_tc)
# Color-completeness weight = 1 / (fraction of galaxies with B-V at this B_tc)
d16_weights_BmVtc = 1.0 / fBmV_akimaspline(d16_good_Btc)
good_gmr_tc = BmV_to_gmr(d16_good_BmV_tc)
d16_good_m21c = np.array(m21c)
# compute M_HI and log(M_HI/M_star)
d16_good_MHI = np.array([astro_utils.HIMass(d16_good_m21c[i],d16_good_dist[i],1) for i in range(nD16good)])
# M_baryon = 1.4*M_HI + Mstar
d16_good_logMbaryon = np.array([np.log10(1.4*d16_good_MHI[i] + 10**d16_good_logmstar[i]) for i in range(nD16good)])
d16_good_logfgas = np.array([np.log10(d16_good_MHI[i]) - d16_good_logmstar[i] for i in range(nD16good)])
# Redefine bad values to be = -99
# (m21c = -99 sentinel propagates into absurdly large gas fractions)
ii_bad_HI = [i for i in range(nD16good) if d16_good_logfgas[i] > 10]
d16_good_logMbaryon[ii_bad_HI] = -99.0
d16_good_logfgas[ii_bad_HI] = -99.0
# Get NED extinction values
dlines = [line for line in open(baseDir + "s4g_goodnames_neddata.txt") if len(line) > 1 and line[0] != "#"]
A_B = []
A_V =[]
for dline in dlines:
    pp = dline.split("|")
    gname = pp[0].strip()
    if gname in d16_goodnames:
        A_B.append(float(pp[1]))
        A_V.append(float(pp[2]))
A_B = np.array(A_B)
A_V = np.array(A_V)
# Extinction correction for B-V_e value
d16_good_BmV_ec = d16_good_BmV_e - (A_B - A_V)
# Fourier bar-strength amplitudes (A2, A4) from Diaz-Garcia+16 Table A3
d16a3dict = { d16a3.Galaxy[i]: [d16a3.A2[i], d16a3.A4[i]] for i in range(len(d16a3.Galaxy)) }
# collect or determine bar values (including deprojected values)
d16_bar_sma = []
d16_bar_sma_kpc = []
d16_bar_sma_ell_kpc = []
d16_bar_sma_dp_kpc = []
d16_bar_pa = []
d16_bar_quality = []
d16_bar_strength = []
d16_bar_a2 = []
d16_bar_a4 = []
for i in range(nD16good):
    gname = d16_goodnames[i]
    if gname in d15barnames:
        sma, sma_dp, sma_ell, smal_ell_dp, bar_pa, quality, HType = d15dict[gname]
        # A2/A4 are not available for every barred galaxy
        try:
            a2, a4 = d16a3dict[gname]
        except KeyError:
            a2 = a4 = -999.999
        d16_bar_a2.append(a2)
        d16_bar_a4.append(a4)
        d16_bar_pa.append(bar_pa)
        distMpc = d16_good_dist[i]
        kpcScale = astro_utils.pcperarcsec(distMpc)/1e3
        sma_kpc = kpcScale * sma
        # apply pixel-scale correction for sma_ell
        sma_ell_kpc = kpcScale * sma_ell * 0.75
        sma_dp_kpc = kpcScale * sma_dp
        d16_bar_quality.append(int(quality))
        # bar_strength: 1 = strong ("SB" in type), 2 = weak, 3 = unbarred
        if HType.find("SB") > -1:
            d16_bar_strength.append(1)
        else:
            d16_bar_strength.append(2)
    else:
        # unbarred galaxy: zero sizes and sentinel PA/quality/A2/A4
        sma = sma_dp = sma_kpc = sma_ell_kpc = sma_dp_kpc = 0.0
        d16_bar_pa.append(-1000)
        d16_bar_quality.append(0)
        d16_bar_strength.append(3)
        d16_bar_a2.append(-999.999)
        d16_bar_a4.append(-999.999)
    d16_bar_sma.append(sma)
    d16_bar_sma_kpc.append(sma_kpc)
    d16_bar_sma_ell_kpc.append(sma_ell_kpc)
    d16_bar_sma_dp_kpc.append(sma_dp_kpc)
# Salo+2015 Table 1 (for diskPA, ell)
# Note that "PGC052336" has an empty row
salo15t1 = du.ReadCompositeTable(tableSalo15_1, columnRow=31, dataFrame=True)
# Table 6 -- single-Sersic fits -- has numerous blank lines where no fit succeeded
def GetRe( line ):
    """Return the single-Sersic effective radius R_e (sixth whitespace
    field) from a Salo+2015 Table 6 row, or 0.0 for the short blank rows
    left where no fit succeeded.
    """
    if len(line) >= 40:
        return float(line.split()[5])
    return 0.0
# make sure to get the *largest* disk-scale-length component value (for the
# 20 or 30 cases where a galaxy was fit by two or more "D" components)
def GetSalo15ScaleLengths( filename=tableSalo15_7 ):
    """Return {galaxy name: h} from Salo+2015 Table 7, where h is the
    exponential-disk scale length (column 22) of the galaxy's
    "D"/"expdisk" component.

    When a galaxy was fit with two or more "D" components (20--30 cases),
    the *largest* scale length is kept.  Galaxies with no expdisk
    component are omitted from the dict.
    """
    # Accumulate all expdisk scale lengths per galaxy, preserving
    # first-appearance order of galaxy names.
    hvalsByGalaxy = {}
    nameOrder = []
    with open(filename) as tbl:
        for line in tbl:
            if len(line) <= 1 or line[0] == "#":
                continue
            fields = line.split("|")
            gname = fields[1].strip()
            if gname not in hvalsByGalaxy:
                nameOrder.append(gname)
                hvalsByGalaxy[gname] = []
            component = fields[5].strip()
            function = fields[6].strip()
            if component == "D" and function == "expdisk":
                hvalsByGalaxy[gname].append(float(fields[22]))
    return { g: max(hvalsByGalaxy[g]) for g in nameOrder if hvalsByGalaxy[g] }
def GetSalo15BtoT( filename=tableSalo15_7 ):
    """Return {galaxy name: B/T} from Salo+2015 Table 7.

    For each galaxy, records the flux-ratio value (column 7) of the
    *first* Sersic bulge component (component "B", function "sersic")
    listed for that galaxy; galaxies without such a component are
    omitted.

    Bug fixed relative to the original: the old implementation tracked a
    single ``bulgeFound`` flag that was reset only when a *new* galaxy
    name appeared, so if a galaxy's rows were non-contiguous the flag
    state leaked between galaxies (a bulge found for one galaxy could
    suppress recording for a later revisited galaxy, or vice versa).
    Tracking "already recorded" per galaxy via dict membership is correct
    regardless of row ordering.
    """
    BotTDict = {}
    with open(filename) as tbl:
        for line in tbl:
            if len(line) <= 1 or line[0] == "#":
                continue
            pp = line.split("|")
            gname = pp[1].strip()
            component = pp[5].strip()
            function = pp[6].strip()
            # Record only the first Sersic bulge per galaxy.
            if (component == "B") and (function == "sersic") and gname not in BotTDict:
                BotTDict[gname] = float(pp[7])
    return BotTDict
# Get global, single-Sersic R_e from Table 6
salo15_Re = [GetRe(line) for line in open(tableSalo15_6) if line[0] != "#"]
# Get exp-disk scale length (if it exists) from Table 7
dd_s15t7 = GetSalo15ScaleLengths()
s15t7_gnames = list(dd_s15t7.keys())
# 0.0 marks galaxies with no expdisk fit
salo15_h = [dd_s15t7[gname] if gname in s15t7_gnames else 0.0 for gname in salo15t1.Name ]
# Get Sersic-based B/T (if it exists) from Table 7
dd_s15t7 = GetSalo15BtoT()
s15t7_gnames = list(dd_s15t7.keys())
salo15_BtoT = [dd_s15t7[gname] if gname in s15t7_gnames else 0.0 for gname in salo15t1.Name ]
# dict mapping galaxy names to disk orientation params
# [disk PA, ellipticity, R_e, h, B/T]
salo15dict = { salo15t1.Name[i]: [salo15t1.PA[i], salo15t1.Ell[i], salo15_Re[i], salo15_h[i],
               salo15_BtoT[i]] for i in range(len(salo15t1.Name)) }
salo15names = list(salo15dict.keys())
# Add R_25.5, R_25, inclinations and deprojected bar sma [using visual length]
R25_5 = []
R25 = []
R25_5kpc = []
R25kpc = []
r_25c_kpc = []
Re = []
Re_kpc = []
h_kpc = []
inclinations = []
d16_bar_sma_ell_dp_kpc2 = []
d16_bar_sma_dp_kpc2 = []
d16_bar_deltaPA_dp = []
T_s4g = []
c31 = []
c42 = []
BtoT = []
d16_good_Vrad = []
d16_good_dmean = []
d16_weights_vvmax_dmax25 = []
d16_weights_vvmax_dmax30 = []
d16_weights_vvmax_dmax40 = []
for i in range(nD16good):
    gname = d16_goodnames[i]
    diskPA, ellipticity, r_e, h, BtoT_s15 = salo15dict[gname]
    # inclination derived from the outer-disk ellipticity
    inclination = angles.ifrome(ellipticity)
    inclinations.append(inclination)
    distMpc = d16_good_dist[i]
    kpcScale = astro_utils.pcperarcsec(distMpc)/1e3
    t_s4g, r25_5, r25, c3_1, c4_2, vrad, dmean = s4gdict[gname]
    # V/V_max weights for three alternative survey depths
    d16_weights_vvmax_dmax25.append(VmaxWeight(distMpc, r25, maxSurveyDist=25.0))
    d16_weights_vvmax_dmax30.append(VmaxWeight(distMpc, r25, maxSurveyDist=30.0))
    d16_weights_vvmax_dmax40.append(VmaxWeight(distMpc, r25, maxSurveyDist=40.0))
    d16_good_Vrad.append(vrad)
    d16_good_dmean.append(dmean)
    T_s4g.append(t_s4g)
    R25.append(r25)
    R25_5.append(r25_5)
    R25kpc.append(r25 * kpcScale)
    R25_5kpc.append(r25_5 * kpcScale)
    r_25c_kpc.append(r_25c[i] * kpcScale)
    Re.append(r_e)
    Re_kpc.append(r_e * kpcScale)
    h_kpc.append(h * kpcScale)
    c31.append(c3_1)
    c42.append(c4_2)
    BtoT.append(BtoT_s15)
    if gname in d15barnames:
        # deproject the visual bar size using the Salo+15 disk orientation
        barPA = d16_bar_pa[i]
        deprojFactor = angles.deprojectr(barPA - diskPA, inclination, 1)
        deltaPA_dp = angles.deprojectpa(barPA - diskPA, inclination)
        sma_kpc_dp = d16_bar_sma_kpc[i] * deprojFactor
        sma_ell_kpc_dp = d16_bar_sma_ell_kpc[i] * deprojFactor
        d16_bar_sma_dp_kpc2.append(sma_kpc_dp)
        d16_bar_deltaPA_dp.append(deltaPA_dp)
        # ell-based size only exists for a subset of bars
        if d16_bar_sma_ell_kpc[i] > 0:
            d16_bar_sma_ell_dp_kpc2.append(sma_ell_kpc_dp)
        else:
            d16_bar_sma_ell_dp_kpc2.append(0.0)
    else:
        d16_bar_sma_dp_kpc2.append(0.0)
        d16_bar_sma_ell_dp_kpc2.append(0.0)
        d16_bar_deltaPA_dp.append(-99.0)
# 1/V_max weights
d16_weights_vvmax_dmax25 = np.array(d16_weights_vvmax_dmax25)
d16_weights_vvmax_dmax30 = np.array(d16_weights_vvmax_dmax30)
d16_weights_vvmax_dmax40 = np.array(d16_weights_vvmax_dmax40)
# Add Consolandi+2016 SDSS mags, colors, logMstar
consolandiDict = GetConsolandiData()
consolandiNames = list(consolandiDict.keys())
gmag = []
imag = []
gmi = []
logMstar_c16 = []
for j in range(nD16good):
    gname = d16_goodnames[j]
    if gname in consolandiNames:
        # tuple slice = (gmag, imag, g-i)
        g, i, gmi_color = consolandiDict[gname][2:5]
    else:
        g = i = gmi_color = -99.0
    gmag.append(g)
    imag.append(i)
    gmi.append(gmi_color)
# Add environment coding
virgoNames = [line.strip() for line in open(virgoNameFile) if len(line) > 1 and line[0] != "#"]
fornaxNames = [line.strip() for line in open(fornaxNameFile) if len(line) > 1 and line[0] != "#"]
environment = []
for gname in d16_goodnames:
    if gname in virgoNames:
        environment.append("Virgo")
    elif gname in fornaxNames:
        environment.append("Fornax")
    else:
        environment.append("field")
# construct final ListDataFrame object
# Notes: R25 = mu_B = 25 radius from main S4G table at IPAC
# R25_5 = mu_3.6 = 25.5 radius
# R25c_kpc = extinction-corrected mu_B = 25 radius from HyperLeda
# (dataList and colNames must stay in the same order)
dataList = [ np.array(d16_goodnames), np.array(d16_good_logmstar), np.array(d16_good_dist),
             np.array(d16_good_Vrad), np.array(d16_good_Btc), np.array(d16_good_BmV_tc),
             np.array(d16_weights_BmVtc), np.array(d16_good_BmV_ec),
             np.array(good_gmr_tc), np.array(gmi), np.array(d16_good_m21c),
             np.array(d16_good_MHI), np.array(d16_good_logMbaryon), np.array(d16_good_logfgas),
             np.array(d16_weights_vvmax_dmax25), np.array(d16_weights_vvmax_dmax30),
             np.array(d16_weights_vvmax_dmax40), np.array(d16_bar_sma), np.array(d16_bar_sma_kpc),
             np.array(d16_bar_sma_ell_kpc), np.array(d16_bar_sma_dp_kpc),
             np.array(d16_bar_sma_dp_kpc2), np.array(d16_bar_sma_ell_dp_kpc2),
             np.array(d16_bar_deltaPA_dp),
             np.array(d16_bar_strength), np.array(d16_bar_quality),
             np.array(d16_bar_a2), np.array(d16_bar_a4), np.array(W_gas), np.array(inclinations),
             np.array(R25), np.array(R25_5), np.array(R25kpc), np.array(R25_5kpc),
             np.array(r_25c_kpc), np.array(Re), np.array(Re_kpc), np.array(h_kpc), np.array(d16_good_VHI),
             np.array(d16_good_DM_ratio), np.array(d16_good_dmean),
             np.array(c31), np.array(c42), np.array(BtoT), np.array(T_s4g), np.array(t_leda),
             np.array(environment) ]
colNames = ["name", "logmstar", "dist", "Vrad", "B_tc", "BmV_tc", "weight_BmVtc", "BmV_ec", "gmr_tc", "gmi",
            "m21c", "M_HI", "logMbaryon", "logfgas", "w25", "w30", "w40", "sma", "sma_kpc", "sma_ell_kpc",
            "sma_dp_kpc", "sma_dp_kpc2", "sma_ell_dp_kpc2", "deltaPA_bar_dp", "bar_strength", "quality",
            "A2", "A4", "W_gas", "inclination", "R25", "R25_5", "R25_kpc", "R25_5_kpc", "R25c_kpc",
            "Re", "Re_kpc", "h_kpc", "VHI", "DM_ratio", "dmean",
            "c31", "c42", "BtoT", "t_s4g", "t_leda", "environment"]
s4gdata = du.ListDataFrame(dataList, colNames)
# index vector for all galaxies with bar feature (including those without sma_ell and
# deprojected sizes) = 749 galaxies
# bars with quality = 1 -- 305 galaxies
# bars with quality = 1 or 2 = 702 galaxies
ii_allbars = [i for i in range(nD16good) if s4gdata.sma[i] > 0]
ii_q1bars = [i for i in ii_allbars if s4gdata.quality[i] == 1]
ii_q12bars = [i for i in ii_allbars if s4gdata.quality[i] in [1,2]]
# GZoo bar sizes and related info (logMstar, z, etc.)
h11barsizes = du.ReadCompositeTable(tableGZ2_barsizes, columnRow=0, dataFrame=True)
87f41b6fabb95ec9e8a6026c4b7ef4ad3d3ed5cb | 2,166 | py | Python | Oled.py | Ths2-9Y-LqJt6/cattmate | 42e59b35f37f21546bed60d0e1886795ac1e7c0c | [
"MIT"
] | 1 | 2022-01-28T17:18:26.000Z | 2022-01-28T17:18:26.000Z | Oled.py | mrjones-plip/cattmate | 42e59b35f37f21546bed60d0e1886795ac1e7c0c | [
"MIT"
] | 5 | 2019-12-10T21:04:33.000Z | 2019-12-16T21:41:52.000Z | Oled.py | mrjones-plip/cattmate | 42e59b35f37f21546bed60d0e1886795ac1e7c0c | [
"MIT"
] | null | null | null | #!/usr/bin/python
import Adafruit_SSD1306
import os
from retrying import retry
from PIL import Image, ImageDraw, ImageFont
class Oled:
    """Minimal driver wrapper for a 128x64 SSD1306 OLED on an I2C bus.

    Holds a PIL drawing surface and a TrueType font, and renders a single
    line of text at the top-left of the display via display().
    """
    def __init__(self, display_bus, font_size):
        """Store bus/font configuration and initialize the hardware.

        display_bus: I2C bus number the display is wired to (see below).
        font_size: point size for the Lato-Heavy TrueType font.
        """
        # declare member variables
        self.draw = None
        self.font = None
        self.disp = None
        self.width = None
        self.height = None
        self.image = None
        self.font_size = font_size
        # display bus
        # Rev 2 Pi, Pi 2 & Pi 3 uses bus 1
        # Rev 1 Pi uses bus 0
        # Orange Pi Zero uses bus 0 for pins 1-5 (other pins for bus 1 & 2)
        self.display_bus = display_bus
        # init
        self.initialize()

    def initialize(self):
        """Set up the SSD1306 device, the 1-bit drawing surface, and the font."""
        # 128x64 display with hardware I2C:
        self.disp = Adafruit_SSD1306.SSD1306_128_64(rst=None, i2c_bus=self.display_bus)
        # Initialize library.
        self.disp.begin()
        # Clear display.
        self.disp.clear()
        self.disp.display()
        # Create blank image for drawing.
        # Make sure to create image with mode '1' for 1-bit color.
        self.width = self.disp.width
        self.height = self.disp.height
        self.image = Image.new('1', (self.width, self.height))
        # Get drawing object to draw on image.
        self.draw = ImageDraw.Draw(self.image)
        # Font file is loaded from this module's own directory.
        full_path = os.path.dirname(os.path.abspath(__file__)) + "/"
        # Draw a black filled box to clear the image.
        self.draw.rectangle((-20, -20, self.width, self.height), outline=0, fill=0)
        self.font = ImageFont.truetype(full_path + "Lato-Heavy.ttf", self.font_size)

    @retry()
    def display(self, text):
        """Clear the screen and render str(text) at the top-left corner.

        Decorated with @retry() so transient I2C write errors are retried
        indefinitely.
        """
        # First define some constants to allow easy resizing of shapes.
        padding = -2
        top = padding
        # bottom = self.height - padding
        # Draw a black filled box to clear the image.
        self.draw.rectangle((0, 0, self.width, self.height), outline=0, fill=0)
        self.draw.text((0, top), str(text), font=self.font, fill=255)
        # Display image.
        self.disp.image(self.image)
        self.disp.display()
87f4710e0d278ffa4b65cd1fdbf57b6e8ed23f91 | 7,180 | py | Python | ArtGAN/data/ingest_stl10.py | rh01/caffe-model-for-category-artgan | 911b8fb44c62e8a2c71396099194d8925ed7c826 | [
"BSD-3-Clause"
] | 304 | 2018-07-17T00:18:54.000Z | 2022-03-31T22:26:42.000Z | ArtGAN/data/ingest_stl10.py | cs-chan/Artwork-Synthesis-Classification | ad9cd090c669ca636f6c048d97608092d52dd3e0 | [
"BSD-3-Clause"
] | 9 | 2018-10-16T14:42:51.000Z | 2022-01-13T11:22:02.000Z | ArtGAN/data/ingest_stl10.py | cs-chan/Artwork-Synthesis-Classification | ad9cd090c669ca636f6c048d97608092d52dd3e0 | [
"BSD-3-Clause"
] | 57 | 2018-07-19T02:38:29.000Z | 2022-03-17T11:12:17.000Z | from configargparse import ArgParser
from PIL import Image
import logging
import numpy as np
import os
def transform_and_save(img_arr, output_filename):
    """
    Takes an image and optionally transforms it and then writes it out to output_filename
    """
    # Convert the HxWx3 uint8 array to a PIL image and save it (format is
    # inferred from the filename extension).
    Image.fromarray(img_arr).save(output_filename)
class Ingest(object):
    """Convert the labeled STL-10 train/test binaries into per-class JPEG
    directories plus train/val manifest CSV files (image path, label path).
    Note: this module is written for Python 2 (print statements, xrange).
    """
    def __init__(self, input_dir, out_dir, target_size=96, skipimg=False):
        """Set up output directory layout and manifest paths.

        input_dir: directory containing train_X.bin / train_y.bin /
            test_X.bin / test_y.bin
        out_dir: destination root; images/ and labels/ are created here
        target_size: nominal image size (STL-10 images are 96x96)
        skipimg: if True, only manifests/labels are written, no JPEGs
        """
        np.random.seed(0)
        self.skipimg = skipimg
        self.out_dir = out_dir
        self.input_dir = input_dir
        self.manifests = dict()
        for setn in ('train', 'val'):
            self.manifests[setn] = os.path.join(self.out_dir, '{}-index.csv'.format(setn))
        self.target_size = target_size
        self.trainpairlist = {}
        self.valpairlist = {}
        # class ids 0..9
        self.labels = range(10)
        if not os.path.exists(self.out_dir):
            os.mkdir(self.out_dir)
        self.outimgdir = os.path.join(self.out_dir, 'images')
        if not os.path.exists(self.outimgdir):
            os.mkdir(self.outimgdir)
            os.mkdir(os.path.join(self.outimgdir, 'train'))
            os.mkdir(os.path.join(self.outimgdir, 'val'))
        self.outlabeldir = os.path.join(self.out_dir, 'labels')
        if not os.path.exists(self.outlabeldir):
            os.mkdir(self.outlabeldir)

    def collectdata(self,):
        """Read the STL-10 binaries, write one JPEG per image into
        images/{train,val}/<class>/, and record (image, label) pairs.
        """
        print 'Start Collect Data...'
        train_x_path = os.path.join(self.input_dir, 'train_X.bin')
        train_y_path = os.path.join(self.input_dir, 'train_y.bin')
        test_x_path = os.path.join(self.input_dir, 'test_X.bin')
        test_y_path = os.path.join(self.input_dir, 'test_y.bin')
        # STL-10 stores column-major 3x96x96 uint8 images; transpose to
        # HxWxC for PIL.
        train_xf = open(train_x_path, 'rb')
        train_x = np.fromfile(train_xf, dtype=np.uint8)
        train_x = np.reshape(train_x, (-1, 3, 96, 96))
        train_x = np.transpose(train_x, (0, 3, 2, 1))
        train_yf = open(train_y_path, 'rb')
        train_y = np.fromfile(train_yf, dtype=np.uint8)
        test_xf = open(test_x_path, 'rb')
        test_x = np.fromfile(test_xf, dtype=np.uint8)
        test_x = np.reshape(test_x, (-1, 3, 96, 96))
        test_x = np.transpose(test_x, (0, 3, 2, 1))
        test_yf = open(test_y_path, 'rb')
        test_y = np.fromfile(test_yf, dtype=np.uint8)
        # idx[c] = running per-class image counter (labels in file are 1-based)
        idx = np.zeros(10, dtype=np.int)
        for i in xrange(train_x.shape[0]):
            outdir = os.path.join(self.outimgdir, 'train', str(train_y[i]-1))
            if not os.path.exists(outdir):
                os.mkdir(outdir)
            if not self.skipimg:
                transform_and_save(img_arr=train_x[i], output_filename=os.path.join(outdir, str(idx[train_y[i]-1]) + '.jpg'))
            self.trainpairlist[os.path.join('images', 'train', str(train_y[i]-1), str(idx[train_y[i]-1]) + '.jpg')] = \
                os.path.join('labels', str(train_y[i] - 1) + '.txt')
            idx[train_y[i]-1] += 1
        # STL-10 "test" split becomes the validation set here
        idx = np.zeros(10, dtype=np.int)
        for i in xrange(test_x.shape[0]):
            outdir = os.path.join(self.outimgdir, 'val', str(test_y[i]-1))
            if not os.path.exists(outdir):
                os.mkdir(outdir)
            if not self.skipimg:
                transform_and_save(img_arr=test_x[i],
                                   output_filename=os.path.join(outdir, str(idx[test_y[i]-1]) + '.jpg'))
            self.valpairlist[os.path.join('images', 'val', str(test_y[i]-1), str(idx[test_y[i]-1]) + '.jpg')] = \
                os.path.join('labels', str(test_y[i] - 1) + '.txt')
            idx[test_y[i]-1] += 1
        print 'Finished Collect Data...'

    def write_label(self, ):
        """Write one labels/<i>.txt file per class containing the class id."""
        for i, l in enumerate(self.labels):
            sdir = os.path.join(self.outlabeldir, str(i) + '.txt')
            np.savetxt(sdir, [l], '%d')

    def run(self):
        """
        resize images then write manifest files to disk.
        """
        self.write_label()
        self.collectdata()
        # manifest rows are "image_path,label_path"
        records = [(fname, tgt)
                   for fname, tgt in self.trainpairlist.items()]
        np.savetxt(self.manifests['train'], records, fmt='%s,%s')
        records = [(fname, tgt)
                   for fname, tgt in self.valpairlist.items()]
        np.savetxt(self.manifests['val'], records, fmt='%s,%s')
class IngestUnlabeled(object):
    """Convert the unlabeled STL-10 binary (unlabeled_X.bin) into JPEGs in
    images/unlabeled/ plus a single manifest that points every image at a
    dummy label file (labels/11.txt).
    """
    def __init__(self, input_dir, out_dir, target_size=96, skipimg=False):
        """Set up the output directory layout and manifest path.

        Same parameters as Ingest, but only one (unlabeled) manifest is
        produced.
        """
        np.random.seed(0)
        self.skipimg = skipimg
        self.out_dir = out_dir
        self.input_dir = input_dir
        self.manifests = dict()
        self.manifests = os.path.join(self.out_dir, 'unlabeled-index.csv')
        self.target_size = target_size
        self.trainpairlist = {}
        if not os.path.exists(self.out_dir):
            os.mkdir(self.out_dir)
        self.outimgdir = os.path.join(self.out_dir, 'images')
        if not os.path.exists(self.outimgdir):
            os.mkdir(self.outimgdir)
        self.unlabeldir = os.path.join(self.outimgdir, 'unlabeled')
        if not os.path.exists(self.unlabeldir):
            os.mkdir(self.unlabeldir)

    def collectdata(self,):
        """Read unlabeled_X.bin, write sequentially numbered JPEGs, and
        record manifest pairs pointing at the dummy label.
        """
        print 'Start Collect Data...'
        train_x_path = os.path.join(self.input_dir, 'unlabeled_X.bin')
        train_xf = open(train_x_path, 'rb')
        # Same column-major 3x96x96 layout as the labeled binaries.
        train_x = np.fromfile(train_xf, dtype=np.uint8)
        train_x = np.reshape(train_x, (-1, 3, 96, 96))
        train_x = np.transpose(train_x, (0, 3, 2, 1))
        idx = 0
        for i in xrange(train_x.shape[0]):
            if not self.skipimg:
                transform_and_save(img_arr=train_x[i], output_filename=os.path.join(self.unlabeldir, str(idx) + '.jpg'))
            self.trainpairlist[os.path.join('images', 'unlabeled', str(idx) + '.jpg')] = 'labels/11.txt'
            idx += 1
        print 'Finished Collect Data...'

    def write_label(self, ):
        """Write the single dummy label file (labels/11.txt) used for all
        unlabeled images.
        """
        sdir = os.path.join(self.out_dir, 'labels', '11.txt')
        np.savetxt(sdir, [11], '%d')

    def run(self):
        """
        resize images then write manifest files to disk.
        """
        self.write_label()
        self.collectdata()
        records = [(fname, tgt)
                   for fname, tgt in self.trainpairlist.items()]
        np.savetxt(self.manifests, records, fmt='%s,%s')
if __name__ == "__main__":
    # Command-line entry point: parse paths/options and run the labeled
    # ingest (swap the commented line to process the unlabeled split).
    parser = ArgParser()
    parser.add_argument('--input_dir', help='Directory to find input',
                        default='/hdd/Dataset/STL10')
    parser.add_argument('--out_dir', help='Directory to write ingested files',
                        default='/home/william/PyProjects/TFcodes/dataset/stl10')
    parser.add_argument('--target_size', type=int, default=96,
                        help='Size in pixels to scale shortest side DOWN to (0 means no scaling)')
    # NOTE(review): argparse's type=bool treats any non-empty string as True
    # ("--skipImg False" still skips) -- consider a store_true flag instead.
    parser.add_argument('--skipImg', type=bool, default=False,
                        help='True to skip processing and copying images')
    args = parser.parse_args()
    logger = logging.getLogger(__name__)
    bw = Ingest(input_dir=args.input_dir, out_dir=args.out_dir, target_size=args.target_size, skipimg=args.skipImg)
    # bw = IngestUnlabeled(input_dir=args.input_dir, out_dir=args.out_dir, target_size=args.target_size, skipimg=args.skipImg)
    bw.run()
87f4e0add218b91c8358380aec15e53a0b7ec2cc | 615 | py | Python | working_example/python/hello_serverless/lambda/create.py | darko-mesaros/workshop-serverless-with-cdk | bbfd30de43d01251565c019a8ac259706bd6f1d0 | [
"MIT"
] | 33 | 2020-08-12T08:08:08.000Z | 2022-03-20T20:32:18.000Z | working_example/python/hello_serverless/lambda/create.py | darko-mesaros/workshop-serverless-with-cdk | bbfd30de43d01251565c019a8ac259706bd6f1d0 | [
"MIT"
] | 2 | 2020-08-12T09:54:53.000Z | 2020-08-12T13:37:22.000Z | working_example/python/hello_serverless/lambda/create.py | darko-mesaros/workshop-serverless-with-cdk | bbfd30de43d01251565c019a8ac259706bd6f1d0 | [
"MIT"
] | 17 | 2020-08-12T08:09:46.000Z | 2021-07-18T19:52:50.000Z | import os
import json
import boto3
def handler(event, context):
table = os.environ.get('table')
dynamodb = boto3.client('dynamodb')
item = {
"name":{'S':event["queryStringParameters"]["name"]},
"location":{'S':event["queryStringParameters"]["location"]},
"age":{'S':event["queryStringParameters"]["age"]}
}
response = dynamodb.put_item(TableName=table,
Item=item
)
message = 'Status of the write to DynamoDB {}!'.format(response)
return {
"statusCode": 200,
"body": json.dumps(message)
}
| 24.6 | 72 | 0.564228 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 192 | 0.312195 |
87f74a05f4408addae7f347f4c814a7bd1356155 | 13,528 | py | Python | pypeit/spectrographs/gemini_flamingos.py | ykwang1/PypeIt | a96cff699f1284905ce7ef19d06a9027cd333c63 | [
"BSD-3-Clause"
] | null | null | null | pypeit/spectrographs/gemini_flamingos.py | ykwang1/PypeIt | a96cff699f1284905ce7ef19d06a9027cd333c63 | [
"BSD-3-Clause"
] | null | null | null | pypeit/spectrographs/gemini_flamingos.py | ykwang1/PypeIt | a96cff699f1284905ce7ef19d06a9027cd333c63 | [
"BSD-3-Clause"
] | null | null | null | """
Module for Gemini FLAMINGOS.
.. include:: ../include/links.rst
"""
import os
from pkg_resources import resource_filename
from IPython import embed
import numpy as np
from pypeit import msgs
from pypeit import telescopes
from pypeit.core import framematch
from pypeit.images import detector_container
from pypeit.spectrographs import spectrograph
class GeminiFLAMINGOSSpectrograph(spectrograph.Spectrograph):
"""
Base class for the Gemini FLAMINGOS spectrograph.
"""
ndet = 1
telescope = telescopes.GeminiSTelescopePar()
def init_meta(self):
"""
Define how metadata are derived from the spectrograph files.
That is, this associates the ``PypeIt``-specific metadata keywords
with the instrument-specific header cards using :attr:`meta`.
"""
self.meta = {}
# Required (core)
self.meta['ra'] = dict(ext=0, card='RA')
self.meta['dec'] = dict(ext=0, card='DEC')
self.meta['target'] = dict(ext=0, card='OBJECT')
self.meta['decker'] = dict(ext=0, card='MASKNAME')
self.meta['dichroic'] = dict(ext=0, card='FILTER')
self.meta['binning'] = dict(ext=0, card=None, default='1,1')
self.meta['mjd'] = dict(ext=0, card='MJD-OBS')
self.meta['exptime'] = dict(ext=0, card='EXPTIME')
self.meta['airmass'] = dict(ext=0, card='AIRMASS')
# Extras for config and frametyping
self.meta['dispname'] = dict(ext=0, card='GRISM')
self.meta['idname'] = dict(ext=0, card='OBSTYPE')
class GeminiFLAMINGOS2Spectrograph(GeminiFLAMINGOSSpectrograph):
"""
Gemini/Flamingos2 Echelle spectrograph methods.
"""
name = 'gemini_flamingos2'
camera = 'FLAMINGOS'
supported = True
comment = 'Flamingos-2 NIR spectrograph'
def get_detector_par(self, hdu, det):
"""
Return metadata for the selected detector.
Args:
hdu (`astropy.io.fits.HDUList`_):
The open fits file with the raw image of interest.
det (:obj:`int`):
1-indexed detector number.
Returns:
:class:`~pypeit.images.detector_container.DetectorContainer`:
Object with the detector metadata.
"""
# Detector 1
detector_dict = dict(
binning = '1,1',
det = 1,
dataext = 1,
specaxis = 0,
specflip = True,
spatflip = False,
platescale = 0.1787,
darkcurr = 0.5,
saturation = 700000., #155400.,
nonlinear = 1.0,
mincounts = -1e10,
numamplifiers = 1,
gain = np.atleast_1d(4.44),
ronoise = np.atleast_1d(5.0), #8 CDS read
datasec = np.atleast_1d('[:,:]'),
oscansec = np.atleast_1d('[:,:]'),
)
return detector_container.DetectorContainer(**detector_dict)
@classmethod
def default_pypeit_par(cls):
"""
Return the default parameters to use for this instrument.
Returns:
:class:`~pypeit.par.pypeitpar.PypeItPar`: Parameters required by
all of ``PypeIt`` methods.
"""
par = super().default_pypeit_par()
# Image processing steps
turn_off = dict(use_illumflat=False, use_biasimage=False, use_overscan=False,
use_darkimage=False)
par.reset_all_processimages_par(**turn_off)
# Wavelengths
# 1D wavelength solution with arc lines
par['calibrations']['wavelengths']['rms_threshold'] = 0.5
par['calibrations']['wavelengths']['sigdetect']=5
par['calibrations']['wavelengths']['fwhm'] = 5
par['calibrations']['wavelengths']['n_first']=2
par['calibrations']['wavelengths']['n_final']=4
par['calibrations']['wavelengths']['lamps'] = ['OH_NIRES']
par['calibrations']['wavelengths']['match_toler']=5.0
# Set slits and tilts parameters
par['calibrations']['tilts']['tracethresh'] = 5
par['calibrations']['tilts']['spat_order'] = 4
par['calibrations']['slitedges']['trace_thresh'] = 10.
par['calibrations']['slitedges']['edge_thresh'] = 200.
par['calibrations']['slitedges']['fit_min_spec_length'] = 0.4
par['calibrations']['slitedges']['sync_predict'] = 'nearest'
# Set the default exposure time ranges for the frame typing
par['calibrations']['standardframe']['exprng'] = [None, 30]
par['calibrations']['tiltframe']['exprng'] = [50, None]
par['calibrations']['arcframe']['exprng'] = [50, None]
par['calibrations']['darkframe']['exprng'] = [20, None]
par['scienceframe']['exprng'] = [20, None]
# Scienceimage parameters
par['reduce']['findobj']['sig_thresh'] = 5.0
par['reduce']['skysub']['sky_sigrej'] = 5.0
par['reduce']['findobj']['find_trim_edge'] = [10,10]
# Do not correct for flexure
par['flexure']['spec_method'] = 'skip'
# Sensitivity function parameters
par['sensfunc']['algorithm'] = 'IR'
par['sensfunc']['polyorder'] = 8
# TODO: replace the telluric grid file for Gemini-S site.
par['sensfunc']['IR']['telgridfile'] \
= os.path.join(par['sensfunc']['IR'].default_root,
'TelFit_LasCampanas_3100_26100_R20000.fits')
return par
def config_specific_par(self, scifile, inp_par=None):
"""
Modify the ``PypeIt`` parameters to hard-wired values used for
specific instrument configurations.
Args:
scifile (:obj:`str`):
File to use when determining the configuration and how
to adjust the input parameters.
inp_par (:class:`~pypeit.par.parset.ParSet`, optional):
Parameter set used for the full run of PypeIt. If None,
use :func:`default_pypeit_par`.
Returns:
:class:`~pypeit.par.parset.ParSet`: The PypeIt parameter set
adjusted for configuration specific parameter values.
"""
par = super().config_specific_par(scifile, inp_par=inp_par)
# TODO: Should we allow the user to override these?
if self.get_meta_value(scifile, 'dispname') == 'JH_G5801':
par['calibrations']['wavelengths']['method'] = 'full_template'
par['calibrations']['wavelengths']['reid_arxiv'] = 'Flamingos2_JH_JH.fits'
elif self.get_meta_value(scifile, 'dispname') == 'HK_G5802':
par['calibrations']['wavelengths']['method'] = 'full_template'
par['calibrations']['wavelengths']['reid_arxiv'] = 'Flamingos2_HK_HK.fits'
return par
def check_frame_type(self, ftype, fitstbl, exprng=None):
"""
Check for frames of the provided type.
Args:
ftype (:obj:`str`):
Type of frame to check. Must be a valid frame type; see
frame-type :ref:`frame_type_defs`.
fitstbl (`astropy.table.Table`_):
The table with the metadata for one or more frames to check.
exprng (:obj:`list`, optional):
Range in the allowed exposure time for a frame of type
``ftype``. See
:func:`pypeit.core.framematch.check_frame_exptime`.
Returns:
`numpy.ndarray`_: Boolean array with the flags selecting the
exposures in ``fitstbl`` that are ``ftype`` type frames.
"""
good_exp = framematch.check_frame_exptime(fitstbl['exptime'], exprng)
if ftype in ['pinhole', 'bias']:
# No pinhole or bias frames
return np.zeros(len(fitstbl), dtype=bool)
if ftype in ['pixelflat', 'trace']:
return good_exp & (fitstbl['idname'] == 'FLAT')
if ftype == 'standard':
return good_exp & (fitstbl['idname'] == 'OBJECT')
if ftype == 'science':
return good_exp & (fitstbl['idname'] == 'OBJECT')
if ftype in ['arc', 'tilt']:
return good_exp & (fitstbl['idname'] == 'OBJECT')
msgs.warn('Cannot determine if frames are of type {0}.'.format(ftype))
return np.zeros(len(fitstbl), dtype=bool)
class GeminiFLAMINGOS1Spectrograph(GeminiFLAMINGOSSpectrograph):
    """
    Gemini/FLAMINGOS-1 specific methods.

    .. todo::
        This is a placeholder class that is not yet supported.
    """
    name = 'gemini_flamingos1'
    camera = 'FLAMINGOS'

    def get_detector_par(self, hdu, det):
        """
        Return metadata for the selected detector.

        Args:
            hdu (`astropy.io.fits.HDUList`_):
                The open fits file with the raw image of interest.
            det (:obj:`int`):
                1-indexed detector number.

        Returns:
            :class:`~pypeit.images.detector_container.DetectorContainer`:
            Object with the detector metadata.
        """
        # Single detector; all values are fixed for this instrument.
        return detector_container.DetectorContainer(
            binning='1,1',
            det=1,
            dataext=1,
            specaxis=0,
            specflip=False,
            spatflip=False,
            platescale=0.15,
            darkcurr=0.01,
            saturation=320000.,             # 155400.
            nonlinear=0.875,
            mincounts=-1e10,
            numamplifiers=1,
            gain=np.atleast_1d(3.8),
            ronoise=np.atleast_1d(6.0),     # SUTR readout
            datasec=np.atleast_1d('[5:2044, 900:1250]'),
            oscansec=np.atleast_1d('[:5, 900:1250]'),
        )

    @classmethod
    def default_pypeit_par(cls):
        """
        Return the default parameters to use for this instrument.

        Returns:
            :class:`~pypeit.par.pypeitpar.PypeItPar`: Parameters required by
            all of ``PypeIt`` methods.
        """
        par = super().default_pypeit_par()

        # Image processing: skip the steps that do not apply to this detector.
        par.reset_all_processimages_par(use_illumflat=False, use_biasimage=False,
                                        use_overscan=False, use_darkimage=False)

        # 1D wavelength solution with arc lines
        wave = par['calibrations']['wavelengths']
        wave['rms_threshold'] = 1.0
        wave['sigdetect'] = 3
        wave['fwhm'] = 20
        wave['n_first'] = 2
        wave['n_final'] = 4
        wave['lamps'] = ['ArI', 'ArII', 'ThAr', 'NeI']
        wave['method'] = 'full_template'
        wave['reid_arxiv'] = 'magellan_fire_long.fits'
        wave['match_toler'] = 5.0

        # Slit and tilt tracing
        par['calibrations']['tilts']['tracethresh'] = 5
        par['calibrations']['slitedges']['trace_thresh'] = 5.
        par['calibrations']['slitedges']['sync_predict'] = 'nearest'

        # Science-image reduction
        par['reduce']['findobj']['sig_thresh'] = 5.0
        # TODO: I think this parameter was removed
        par['reduce']['findobj']['find_trim_edge'] = [50, 50]

        # Do not correct for flexure
        par['flexure']['spec_method'] = 'skip'

        # Default exposure-time ranges used for frame typing
        par['calibrations']['standardframe']['exprng'] = [None, 60]
        par['calibrations']['arcframe']['exprng'] = [1, 50]
        par['calibrations']['darkframe']['exprng'] = [20, None]
        par['scienceframe']['exprng'] = [20, None]
        return par

    def check_frame_type(self, ftype, fitstbl, exprng=None):
        """
        Check for frames of the provided type.

        Args:
            ftype (:obj:`str`):
                Type of frame to check.  Must be a valid frame type; see
                frame-type :ref:`frame_type_defs`.
            fitstbl (`astropy.table.Table`_):
                The table with the metadata for one or more frames to check.
            exprng (:obj:`list`, optional):
                Range in the allowed exposure time for a frame of type
                ``ftype``.  See
                :func:`pypeit.core.framematch.check_frame_exptime`.

        Returns:
            `numpy.ndarray`_: Boolean array with the flags selecting the
            exposures in ``fitstbl`` that are ``ftype`` type frames.
        """
        good_exp = framematch.check_frame_exptime(fitstbl['exptime'], exprng)
        # Header OBSTYPE value expected for each frame type.
        idname_by_ftype = {'pixelflat': 'PixFlat', 'trace': 'PixFlat',
                           'standard': 'Telluric', 'science': 'Science',
                           'arc': 'Arc', 'tilt': 'Arc'}
        if ftype in ('pinhole', 'bias'):
            # No pinhole or bias frames
            return np.zeros(len(fitstbl), dtype=bool)
        if ftype in idname_by_ftype:
            return good_exp & (fitstbl['idname'] == idname_by_ftype[ftype])
        msgs.warn('Cannot determine if frames are of type {0}.'.format(ftype))
        return np.zeros(len(fitstbl), dtype=bool)
| 39.325581 | 86 | 0.572664 | 13,164 | 0.973093 | 0 | 0 | 4,562 | 0.337226 | 0 | 0 | 7,311 | 0.540435 |
87f770b1768d8e4d055b07b5c891b13729c6f2ab | 2,266 | py | Python | tests/test_scene.py | Lxinyuelxy/multi-label-learn | ab347e9c9ccac1503f22c7b76e0b3e9a4e8214da | [
"MIT"
] | 4 | 2018-11-19T13:34:53.000Z | 2020-01-11T11:58:13.000Z | tests/test_scene.py | Lxinyuelxy/multi-label-learn | ab347e9c9ccac1503f22c7b76e0b3e9a4e8214da | [
"MIT"
] | null | null | null | tests/test_scene.py | Lxinyuelxy/multi-label-learn | ab347e9c9ccac1503f22c7b76e0b3e9a4e8214da | [
"MIT"
] | 3 | 2019-04-14T18:13:33.000Z | 2021-04-05T14:45:56.000Z | import numpy as np
from mllearn.problem_transform import BinaryRelevance
from mllearn.problem_transform import CalibratedLabelRanking
from mllearn.problem_transform import ClassifierChain
from mllearn.problem_transform import RandomKLabelsets
from mllearn.alg_adapt import MLKNN
from mllearn.alg_adapt import MLDecisionTree
from mllearn.metrics import hamming_loss
from mllearn.metrics import subset_acc
import arff
def _load_scene_split(path):
    """Load a scene ARFF file and split it into (features, 6 label columns).

    The last six columns of the scene dataset are the binary labels.
    """
    # Use a context manager so the file handle is closed (the original
    # passed a bare open() to arff.load and leaked the handle).
    with open(path) as fh:
        raw = np.array(arff.load(fh)['data'], dtype=float)
    return raw[:, :-6], raw[:, -6:]


X_train, y_train = _load_scene_split('datasets/scene-train.arff')
X_test, y_test = _load_scene_split('datasets/scene-test.arff')

# Evaluate each multi-label method with the same fit/predict/report cycle.
# Labels are kept byte-identical to the original per-classifier messages.
for _make_classifier, _label in [
    (BinaryRelevance, 'BinaryRelevance'),
    (ClassifierChain, 'Classifier Chain'),
    (CalibratedLabelRanking, 'CalibratedLabelRanking'),
    (RandomKLabelsets, 'Random K-Labelsets'),
    (MLKNN, 'MLKNN'),
]:
    classif = _make_classifier()
    classif.fit(X_train, y_train)
    predictions = classif.predict(X_test)
    print('The hamming_loss of %s is %f' % (_label, hamming_loss(y_test, predictions)))
    print('The subset_acc of %s is %f' % (_label, subset_acc(y_test, predictions)))

# MLDecisionTree is imported but intentionally left disabled:
# classif = MLDecisionTree()
# classif.fit(X_train, y_train)
# predictions = classif.predict(X_test)
# print('The MLDecisionTree Result is %f' % hamming_loss(y_test, predictions)) | 41.962963 | 93 | 0.789497 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 663 | 0.292586 |
87f779c5a2c7a99fdb8f61360480f8c96b502a7e | 8,713 | py | Python | scripts/Steamwatcher.py | nicovanbentum/Utility-Scripts | bf2934058388138ea21ec368b48f9e1ad6864704 | [
"MIT"
] | null | null | null | scripts/Steamwatcher.py | nicovanbentum/Utility-Scripts | bf2934058388138ea21ec368b48f9e1ad6864704 | [
"MIT"
] | 1 | 2021-10-12T23:07:00.000Z | 2021-10-12T23:07:00.000Z | scripts/Steamwatcher.py | nicovanbentum/Utility-Scripts | bf2934058388138ea21ec368b48f9e1ad6864704 | [
"MIT"
] | null | null | null | """ This Python modules describes an application that checks for active steam downloads
and shuts down the computer when they are all finished. """
import os
import signal
import threading
import subprocess
import winreg as reg
import tkinter as tk
import tkinter.filedialog
import tkinter.messagebox
import tkinter.scrolledtext
import pyparsing as pp
class Watcher:
    """Tracks a set of download directories and measures their total sizes,
    so callers can tell whether any downloads are still in progress."""

    def __init__(self):
        # Absolute paths of the watched directories; a set avoids duplicates.
        self.directories = set()
        # Guard so Steam's library file is only parsed once per session.
        self.loaded_steam = False

    def ask(self):
        """Prompt the user for a directory and add it to the watch set."""
        folder = tk.filedialog.askdirectory()
        # askdirectory() returns '' when the dialog is cancelled; don't add it.
        if folder:
            self.directories.add(folder)

    def load_steam_folders(self):
        """Discover every Steam library 'downloading' folder and watch it.

        Reads Steam's install path from the Windows registry, then parses
        ``steamapps/LibraryFolders.vdf`` (Valve's KeyValues text format) to
        find all library folders with a ``steamapps/downloading`` directory.
        """
        if self.loaded_steam:
            return
        # Steam records its install path in the per-user registry hive.
        # The key is opened via a context manager so the handle is released.
        with reg.OpenKey(reg.HKEY_CURRENT_USER, "Software\\Valve\\Steam") as hkey:
            steam_path = reg.QueryValueEx(hkey, "SteamPath")[0]
        vdf_path = steam_path + "/steamapps/LibraryFolders.vdf"
        # Read the steam vdf file that contains path strings to all
        # game install directories.
        try:
            # BUG FIX: previously the handle was leaked, and on failure the
            # code fell through and crashed with `file` undefined.
            with open(vdf_path) as vdf:
                file = vdf.read()
        except OSError:
            print("Unable to open {}.".format(vdf_path))
            return
        # Parse Valve's KeyValues format (a JSON-like nested dict syntax).
        # Forward-declare the value of a key...
        value = pp.Forward()
        # ...so a key/value pair parses as: ["key1", value]
        key_value = pp.Group(pp.QuotedString('"') + value)
        # A braced block is a dict of zero or more key/value pairs.
        expression = pp.Suppress(
            '{') + pp.Dict(pp.ZeroOrMore(key_value)) + pp.Suppress('}')
        # A value is either a quoted string or a nested braced block, e.g.:
        # ["outer_key", { ["inner_key1", value], ["inner_key2", value] } ]
        # The result is accessible as either a dict or an array.
        value <<= pp.QuotedString('"') | expression
        parser = pp.Dict(key_value)
        content = parser.parseString(file)
        for entry in content["libraryfolders"]:
            for key, val in entry.items():
                if key == "path":
                    dlpath = val + "\\steamapps\\downloading"
                    if os.path.isdir(dlpath):
                        self.directories.add(dlpath)
        self.loaded_steam = True

    def folder_size(self, start_path='.'):
        """Return the total size in bytes of all files under *start_path*."""
        total_size = 0
        for dirpath, _, filenames in os.walk(start_path):
            for filename in filenames:
                filepath = os.path.join(dirpath, filename)
                # Skip symlinks so linked content is not counted twice.
                if not os.path.islink(filepath):
                    total_size += os.path.getsize(filepath)
        return total_size

    def folder_sizes(self):
        """Return a list with the current size of every watched directory."""
        return [self.folder_size(dirpath) for dirpath in self.directories]

    def is_updating(self, last_sizes: list, new_sizes: list):
        """Return True if any directory's size changed between two samples."""
        return any(new_size != last_size
                   for last_size, new_size in zip(last_sizes, new_sizes))

    def clear(self):
        """Forget all watched directories and allow Steam to be re-scanned."""
        self.directories.clear()
        self.loaded_steam = False

    def get_folders(self):
        """Return the set of watched directories."""
        return self.directories
class Application:
    """Tk GUI wrapping a :class:`Watcher`.

    Lets the user register download directories, then polls them on a
    worker thread and shuts the machine down once all downloads finish.
    """

    def __init__(self):
        self.watcher = Watcher()
        self.window = tk.Tk()
        self.window.title("Download Watcher")
        self.window.geometry('500x360')
        button_frame = tk.Frame(self.window)
        button_frame.pack(side=tk.BOTTOM)
        # Read-only scrolling log; the color tags below mark status messages.
        self.logger = tk.scrolledtext.ScrolledText(
            self.window, height=100, width=100, font="Courier 10")
        self.logger.pack(side=tk.LEFT)
        self.logger.tag_config(
            "green", foreground="green", font="courier 10 bold")
        self.logger.tag_config(
            "orange", foreground="orange", font="courier 10 bold")
        self.logger.tag_config("red", foreground="red", font="courier 10 bold")
        self.steam_btn = tk.Button(button_frame, text="Add Steam",
                                   command=self.load_steam_folders, padx=2, pady=2)
        self.steam_btn.pack(side=tk.LEFT)
        self.folder_btn = tk.Button(button_frame, text="Add Folder",
                                    command=self.ask, padx=2, pady=2)
        self.folder_btn.pack(side=tk.LEFT)
        self.clear_btn = tk.Button(button_frame, text="Clear",
                                   command=self.clear, padx=2, pady=2)
        self.clear_btn.pack(side=tk.LEFT)
        self.start_btn = tk.Button(button_frame, text="Start",
                                   command=self.watch_thread, padx=2, pady=2)
        self.start_btn.pack(side=tk.LEFT)
        self.stop_btn = tk.Button(button_frame, text="Stop",
                                  command=self.stop, padx=2, pady=2)
        self.stop_btn.pack(side=tk.LEFT)
        self.thread = None
        self.running = False
        # The event doubles as an interruptible sleep and a stop signal.
        self.timer = threading.Event()
        self.window.protocol("WM_DELETE_WINDOW", self.on_exit)
        signal.signal(signal.SIGINT, self.stop)

    def log(self, text: str, color="black"):
        """Append *text* to the log widget, optionally using a color tag."""
        # Toggle the widget editable just long enough to insert the text.
        self.logger.config(state=tk.NORMAL)
        self.logger.insert(tk.END, text, (color))
        self.logger.see("end")
        self.logger.config(state=tk.DISABLED)

    def ask(self):
        """Let the user add a directory; log any newly added path."""
        if self.running:
            tkinter.messagebox.showwarning(
                "Warning", "Please stop the Watcher.")
            return
        folders_copy = self.watcher.get_folders().copy()
        self.watcher.ask()
        # Log only the directories that were not present before.
        for folder in self.watcher.get_folders():
            if folder not in folders_copy:
                self.log("Added: ", color="green")
                self.log(folder + '\n')

    def clear(self):
        """Forget all watched directories and wipe the log."""
        if self.running:
            tkinter.messagebox.showwarning(
                "Warning", "Please stop the Watcher.")
            return
        self.watcher.clear()
        self.logger.config(state=tk.NORMAL)
        self.logger.delete('1.0', tk.END)
        self.logger.config(state=tk.DISABLED)

    def load_steam_folders(self):
        """Add Steam's download directories; log any newly added path."""
        if self.running:
            tkinter.messagebox.showwarning(
                "Warning", "Please stop the Watcher.")
            return
        folders_copy = self.watcher.get_folders().copy()
        self.watcher.load_steam_folders()
        for folder in self.watcher.get_folders():
            if folder not in folders_copy:
                self.log("Added: ", color="green")
                self.log(folder + '\n')

    def watch(self):
        """Worker-thread loop: poll directory sizes until downloads finish,
        then shut the computer down.  ``self.timer`` aborts any wait early
        when :meth:`stop` is called."""
        self.running = True
        self.timer.clear()
        self.log("Checking for active downloads.. \n")
        # Take two samples 10s apart to detect whether anything is growing.
        last_size = self.watcher.folder_sizes()
        self.timer.wait(timeout=10)
        new_size = self.watcher.folder_sizes()
        if self.timer.is_set():
            self.log("Watcher stopped. \n", color="red")
            self.running = False
            return
        if last_size == new_size:
            self.log("No active downloads found. \n", color="red")
            self.running = False
            return
        else:
            self.log("Updates ")
            self.log("found. \n", color="green")
        self.log("Updating.. \n")
        # Re-sample once a minute until the sizes stop changing.
        while self.watcher.is_updating(last_size, new_size):
            last_size = self.watcher.folder_sizes()
            if not self.timer.is_set():
                self.timer.wait(timeout=60)
            if self.timer.is_set():
                self.log("Watcher stopped. \n", color="red")
                self.running = False
                return
            new_size = self.watcher.folder_sizes()
        self.log("Finishing up.. \n")
        # Grace period in case a download merely stalled.
        self.timer.wait(timeout=300)
        self.log("Updating finished. \n")
        self.log("Shutting down computer. \n")
        self.running = False
        subprocess.call(["shutdown", "/s"])

    def watch_thread(self):
        """Start :meth:`watch` on a background thread."""
        if self.running:
            tkinter.messagebox.showwarning(
                "Warning", "Please stop the Watcher.")
            return
        if not self.watcher.get_folders():
            self.log("Warning: ", "orange")
            self.log("No directories added. \n")
            return
        self.thread = threading.Thread(target=self.watch)
        self.thread.start()

    def stop(self, *_args):
        """Request the watcher thread to stop.

        BUG FIX: this method is also registered as a ``signal.SIGINT``
        handler, and signal handlers are invoked as ``handler(signum,
        frame)``; the extra positional arguments are accepted and ignored
        so that invocation no longer raises ``TypeError``.
        """
        if not self.running:
            return
        self.running = False
        self.timer.set()

    def run(self):
        """Enter the Tk event loop (blocks until the window closes)."""
        self.window.mainloop()

    def on_exit(self):
        """Window-close handler: refuse to close while the watcher runs."""
        if self.running:
            tkinter.messagebox.showwarning(
                "Warning", "Please stop the Watcher.")
        else:
            self.window.destroy()
def main():
    """Build the GUI application and enter its event loop."""
    Application().run()
if __name__ == "__main__":
main()
| 33.771318 | 87 | 0.580627 | 8,262 | 0.948238 | 0 | 0 | 0 | 0 | 0 | 0 | 1,635 | 0.187651 |
87f916d84fbb3ebf66d1daf924621a5103749784 | 1,200 | py | Python | mywebsite.py | jzorrof/my_website | 3e0d31e5c4d981dd2116c9f7048aa3f111815ff7 | [
"Apache-2.0"
] | null | null | null | mywebsite.py | jzorrof/my_website | 3e0d31e5c4d981dd2116c9f7048aa3f111815ff7 | [
"Apache-2.0"
] | null | null | null | mywebsite.py | jzorrof/my_website | 3e0d31e5c4d981dd2116c9f7048aa3f111815ff7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
__author__ = 'Fanzhong'
from flask import Flask, render_template
from boto.s3.connection import S3Connection
from boto.s3.key import Key
import json
# Flask application instance used by the route decorators below.
app = Flask(__name__)
'''
This is my website index
I'll create my website from now
data: 2015.04.10
'''
def get_from_s3():
    # Download the scraped JSON blob from S3 into ./getjson.json.
    # NOTE: this file is Python 2 (statement-form `print` below).
    # SECURITY: AWS credentials are hard-coded and committed to the
    # repository -- they should be revoked and loaded from the environment
    # or an AWS credentials file instead.
    conn = S3Connection('AKIAJZ5NU5RXHVW3QXPA', 'dHE5tDMMk/WwAoyvrd44TaKsJfnNqLSjEUGOmXt5')
    bucketname = conn.get_bucket('scrapy_data_2')
    print bucketname
    k = Key(bucketname)
    k.key = 'my_scrapy'
    # Fetch the 'my_scrapy' object into a local file for later parsing.
    k.get_contents_to_filename('getjson.json')
@app.route("/")
def index():
    """Render the site's landing page."""
    return render_template('index.html')
@app.route("/qiche")
def qiche():
    """Render the scraped-data page.

    Reads ``getjson.json`` (one JSON document per line, as downloaded by
    ``get_from_s3``), collects the parsed records, and passes them to the
    ``qiche.html`` template as ``testdata``.  If the file is missing or
    unreadable the template receives an empty list.
    """
    testdata = []
    try:
        with open("getjson.json") as jsf:
            for each_line in jsf:
                line = each_line.strip()
                if not line:
                    continue  # skip blank lines rather than failing to parse
                # BUG FIX: collect the parsed record; previously the JSON was
                # decoded, re-encoded and discarded, so the template always
                # received an empty list.  (The removed `encoding` kwarg was
                # the py2 default and is rejected by json.loads on py3.9+.)
                testdata.append(json.loads(line))
    except IOError as err:
        # A missing file is expected before the first S3 download; log it
        # and fall through with an empty dataset.
        print('err was' + str(err))
    return render_template('qiche.html', testdata=testdata)
if __name__ == '__main__':
#get_from_s3()
app.run(debug = True) | 27.906977 | 91 | 0.660833 | 0 | 0 | 0 | 0 | 568 | 0.473333 | 0 | 0 | 381 | 0.3175 |
87fa240a88f3abeac45e080706b9c6859a916e21 | 14,483 | py | Python | tests/neptune/new/internal/backends/test_neptune_backend_mock.py | neptune-ml/neptune-client | 7aea63160b5149c3fec40f62d3b0da7381a35748 | [
"Apache-2.0"
] | 13 | 2019-02-11T13:18:38.000Z | 2019-12-26T06:26:07.000Z | tests/neptune/new/internal/backends/test_neptune_backend_mock.py | neptune-ml/neptune-client | 7aea63160b5149c3fec40f62d3b0da7381a35748 | [
"Apache-2.0"
] | 39 | 2019-03-07T13:40:10.000Z | 2020-01-07T17:19:24.000Z | tests/neptune/new/internal/backends/test_neptune_backend_mock.py | neptune-ml/neptune-client | 7aea63160b5149c3fec40f62d3b0da7381a35748 | [
"Apache-2.0"
] | 4 | 2019-02-11T13:07:23.000Z | 2019-11-26T08:20:24.000Z | #
# Copyright (c) 2020, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import unittest
import uuid
from random import randint
from time import time
from neptune.new.exceptions import ContainerUUIDNotFound, MetadataInconsistency
from neptune.new.internal.backends.api_model import (
DatetimeAttribute,
FloatAttribute,
FloatPointValue,
FloatSeriesAttribute,
FloatSeriesValues,
StringAttribute,
StringPointValue,
StringSeriesAttribute,
StringSeriesValues,
StringSetAttribute,
)
from neptune.new.internal.backends.neptune_backend_mock import NeptuneBackendMock
from neptune.new.internal.container_type import ContainerType
from neptune.new.internal.operation import (
AddStrings,
AssignDatetime,
AssignFloat,
AssignString,
LogFloats,
LogStrings,
)
from tests.neptune.random_utils import a_string
class TestNeptuneBackendMock(unittest.TestCase):
    """Unit tests for the in-memory ``NeptuneBackendMock`` backend.

    Each test runs its assertions against every supported container type
    (project, run, model, model version) via ``subTest``, using the
    ``(container_id, container_type)`` pairs prepared in :meth:`setUp`.
    """
    # pylint:disable=protected-access
    def setUp(self) -> None:
        """Create a mock backend and one container of every supported type."""
        self.backend = NeptuneBackendMock()
        project_id = self.backend._project_id
        exp = self.backend.create_run(project_id=project_id)
        model = self.backend.create_model(
            project_id=project_id,
            key="MOD",
        )
        model_version = self.backend.create_model_version(project_id=project_id, model_id=model.id)
        # (container id, container type) pairs reused by every test below.
        self.ids_with_types = [
            (self.backend._project_id, ContainerType.PROJECT),
            (exp.id, ContainerType.RUN),
            (model.id, ContainerType.MODEL),
            (model_version.id, ContainerType.MODEL_VERSION),
        ]
    def test_get_float_attribute(self):
        """An assigned float is read back wrapped in ``FloatAttribute``."""
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                # given
                digit = randint(1, 10**4)
                self.backend.execute_operations(
                    container_id, container_type, operations=[AssignFloat(["x"], digit)]
                )
                # when
                ret = self.backend.get_float_attribute(container_id, container_type, path=["x"])
                # then
                self.assertEqual(FloatAttribute(digit), ret)
    def test_get_string_attribute(self):
        """An assigned string is read back wrapped in ``StringAttribute``."""
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                # given
                text = a_string()
                self.backend.execute_operations(
                    container_id, container_type, operations=[AssignString(["x"], text)]
                )
                # when
                ret = self.backend.get_string_attribute(container_id, container_type, path=["x"])
                # then
                self.assertEqual(StringAttribute(text), ret)
    def test_get_datetime_attribute(self):
        """An assigned datetime round-trips through the backend."""
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                # given
                now = datetime.datetime.now()
                # Truncate to millisecond precision to match backend storage.
                now = now.replace(microsecond=1000 * int(now.microsecond / 1000))
                self.backend.execute_operations(
                    container_id, container_type, [AssignDatetime(["x"], now)]
                )
                # when
                ret = self.backend.get_datetime_attribute(container_id, container_type, ["x"])
                # then
                self.assertEqual(DatetimeAttribute(now), ret)
    def test_get_float_series_attribute(self):
        """The attribute reflects the last value logged across two batches."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogFloats(
                            ["x"],
                            [
                                LogFloats.ValueType(5, None, time()),
                                LogFloats.ValueType(3, None, time()),
                            ],
                        )
                    ],
                )
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogFloats(
                            ["x"],
                            [
                                LogFloats.ValueType(2, None, time()),
                                LogFloats.ValueType(9, None, time()),
                            ],
                        )
                    ],
                )
                # when
                ret = self.backend.get_float_series_attribute(container_id, container_type, ["x"])
                # then
                self.assertEqual(FloatSeriesAttribute(9), ret)
    def test_get_string_series_attribute(self):
        """The attribute reflects the last string logged across two batches."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogStrings(
                            ["x"],
                            [
                                LogStrings.ValueType("adf", None, time()),
                                LogStrings.ValueType("sdg", None, time()),
                            ],
                        )
                    ],
                )
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogStrings(
                            ["x"],
                            [
                                LogStrings.ValueType("dfh", None, time()),
                                LogStrings.ValueType("qwe", None, time()),
                            ],
                        )
                    ],
                )
                # when
                ret = self.backend.get_string_series_attribute(container_id, container_type, ["x"])
                # then
                self.assertEqual(StringSeriesAttribute("qwe"), ret)
    def test_get_string_set_attribute(self):
        """Strings added to a set attribute are read back as a set."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id, container_type, [AddStrings(["x"], {"abcx", "qwe"})]
                )
                # when
                ret = self.backend.get_string_set_attribute(container_id, container_type, ["x"])
                # then
                self.assertEqual(StringSetAttribute({"abcx", "qwe"}), ret)
    def test_get_string_series_values(self):
        """All logged string points are returned in logging order."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogStrings(
                            ["x"],
                            [
                                LogStrings.ValueType("adf", None, time()),
                                LogStrings.ValueType("sdg", None, time()),
                            ],
                        )
                    ],
                )
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogStrings(
                            ["x"],
                            [
                                LogStrings.ValueType("dfh", None, time()),
                                LogStrings.ValueType("qwe", None, time()),
                            ],
                        )
                    ],
                )
                # when
                ret = self.backend.get_string_series_values(
                    container_id, container_type, path=["x"], limit=100, offset=0
                )
                # then
                # NOTE(review): 42342 appears to be the fixed timestamp the
                # mock backend assigns to points -- confirm against
                # NeptuneBackendMock before relying on it.
                self.assertEqual(
                    StringSeriesValues(
                        4,
                        [
                            StringPointValue(timestampMillis=42342, step=0, value="adf"),
                            StringPointValue(timestampMillis=42342, step=1, value="sdg"),
                            StringPointValue(timestampMillis=42342, step=2, value="dfh"),
                            StringPointValue(timestampMillis=42342, step=3, value="qwe"),
                        ],
                    ),
                    ret,
                )
    def test_get_float_series_values(self):
        """All logged float points are returned in logging order."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogFloats(
                            ["x"],
                            [
                                LogFloats.ValueType(5, None, time()),
                                LogFloats.ValueType(3, None, time()),
                            ],
                        )
                    ],
                )
                self.backend.execute_operations(
                    container_id,
                    container_type,
                    [
                        LogFloats(
                            ["x"],
                            [
                                LogFloats.ValueType(2, None, time()),
                                LogFloats.ValueType(9, None, time()),
                            ],
                        )
                    ],
                )
                # when
                ret = self.backend.get_float_series_values(
                    container_id, container_type, path=["x"], limit=100, offset=0
                )
                # then
                self.assertEqual(
                    FloatSeriesValues(
                        4,
                        [
                            FloatPointValue(timestampMillis=42342, step=0, value=5),
                            FloatPointValue(timestampMillis=42342, step=1, value=3),
                            FloatPointValue(timestampMillis=42342, step=2, value=2),
                            FloatPointValue(timestampMillis=42342, step=3, value=9),
                        ],
                    ),
                    ret,
                )
    def test_get_float_attribute_wrong_type(self):
        """Reading a string attribute as a float series raises an error."""
        # NOTE(review): despite the name, this calls
        # get_float_series_attribute, not get_float_attribute -- confirm
        # whether that is intentional.
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id, container_type, [AssignString(["x"], "abc")]
                )
                # then
                with self.assertRaises(MetadataInconsistency):
                    self.backend.get_float_series_attribute(container_id, container_type, ["x"])
    def test_get_string_attribute_wrong_type(self):
        """Reading a float attribute as a string raises an error."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id, container_type, [AssignFloat(["x"], 5)]
                )
                # then
                with self.assertRaises(MetadataInconsistency):
                    self.backend.get_string_attribute(container_id, container_type, ["x"])
    def test_get_datetime_attribute_wrong_type(self):
        """Reading a string attribute as a datetime raises an error."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id, container_type, [AssignString(["x"], "abc")]
                )
                # then
                with self.assertRaises(MetadataInconsistency):
                    self.backend.get_datetime_attribute(container_id, container_type, ["x"])
    def test_get_string_series_attribute_wrong_type(self):
        """Reading a plain string attribute as a string series raises."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id, container_type, [AssignString(["x"], "abc")]
                )
                # then
                with self.assertRaises(MetadataInconsistency):
                    self.backend.get_string_series_attribute(container_id, container_type, ["x"])
    def test_get_string_set_attribute_wrong_type(self):
        """Reading a plain string attribute as a string set raises."""
        # given
        for container_id, container_type in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id, container_type, [AssignString(["x"], "abc")]
                )
                # then
                with self.assertRaises(MetadataInconsistency):
                    self.backend.get_string_set_attribute(container_id, container_type, ["x"])
    def test_container_not_found(self):
        """Queries against an unknown container id raise ContainerUUIDNotFound."""
        # given
        for (container_id, container_type) in self.ids_with_types:
            with self.subTest(f"For containerType: {container_type}"):
                self.backend.execute_operations(
                    container_id, container_type, [AssignString(["x"], "abc")]
                )
                # then
                with self.assertRaises(ContainerUUIDNotFound):
                    self.backend.get_float_series_attribute(
                        str(uuid.uuid4()), container_type, ["x"]
                    )
| 38.724599 | 99 | 0.501623 | 13,082 | 0.903266 | 0 | 0 | 0 | 0 | 0 | 0 | 1,589 | 0.109715 |
87fb11d641ea2166f5e03d1848db017c1904d410 | 1,725 | py | Python | rasp/device/regressor_device.py | CreeperLin/RASP | c45f9ad57b18d92b663419538562d511149c329f | [
"MIT"
] | 1 | 2021-07-20T14:56:58.000Z | 2021-07-20T14:56:58.000Z | rasp/device/regressor_device.py | CreeperLin/RASP | c45f9ad57b18d92b663419538562d511149c329f | [
"MIT"
] | null | null | null | rasp/device/regressor_device.py | CreeperLin/RASP | c45f9ad57b18d92b663419538562d511149c329f | [
"MIT"
] | 2 | 2020-04-21T06:49:04.000Z | 2021-07-20T14:57:06.000Z | from ..utils.reporter import report
class RegressorDevice():
    """Pseudo-device that records profiled nodes on a class-level tape and
    fits/evaluates a regressor on the reported measurements."""

    # Class-level state shared by all callers.
    tape = []
    regressor = None

    @staticmethod
    def init(regressor):
        """Install *regressor* and start from an empty tape."""
        RegressorDevice.regressor = regressor
        RegressorDevice.tape = []

    @staticmethod
    def reset():
        """Drop all recorded nodes."""
        RegressorDevice.tape = []

    @staticmethod
    def add_node(node):
        """Record *node* on the tape for a later regression run."""
        RegressorDevice.tape.append(node)

    @staticmethod
    def run_node(node):
        """No per-node work is required for this device."""
        pass

    @staticmethod
    def run(df=None, X_col=None, y_col=None, X_filter=None, y_filter=None):
        """Fit/evaluate the regressor on the taped nodes.

        Column arguments left as ``None`` default to the regressor's own
        configuration; *df* defaults to a fresh report over the tape.
        Returns the regressor's predictions on the observed columns.
        """
        regressor = RegressorDevice.regressor
        if X_col is None:
            X_col = regressor.var_observe
        if y_col is None:
            y_col = regressor.var_target
        fields = list(set(X_col + y_col))
        if df is None:
            df = report(RegressorDevice.tape,
                        include_root=False,
                        report_fields=fields)
        X = regressor.filter(df, X_col, row_filter=X_filter)
        y = regressor.filter(df, y_col, row_filter=y_filter)
        # Flatten the observed values into one feature row per target row.
        X = X.reshape(y.shape[0], -1)
        regressor.test(X, y)
        return regressor.predict(X)
# Module-level shorthands so callers can use the device functionally
# (init()/reset()/add_node()/run()) without referencing the class.
init = RegressorDevice.init
reset = RegressorDevice.reset
add_node = RegressorDevice.add_node
run = RegressorDevice.run
def get_tape():
    """Return the device tape (the list of recorded nodes)."""
    current_tape = RegressorDevice.tape
    return current_tape
def get_data(X_col, y_col, fields):
    """Build a report frame over the tape for the requested columns.

    None arguments fall back to the regressor's observed/target variables,
    and the field list defaults to their union.
    """
    reg = RegressorDevice.regressor
    if X_col is None:
        X_col = reg.var_observe
    if y_col is None:
        y_col = reg.var_target
    if fields is None:
        fields = list(set(X_col + y_col))
    return report(RegressorDevice.tape,
                  include_root=False,
                  report_fields=fields)
| 27.380952 | 81 | 0.672464 | 1,132 | 0.656232 | 0 | 0 | 1,043 | 0.604638 | 0 | 0 | 0 | 0 |
87fbec4be50353beb91a6993ffce8b10edc30d98 | 679 | py | Python | cctbx_website/run_tests.py | dwpaley/cctbx_project | 476428363d260126319e82aa24d1e79b6474d877 | [
"BSD-3-Clause-LBNL"
] | null | null | null | cctbx_website/run_tests.py | dwpaley/cctbx_project | 476428363d260126319e82aa24d1e79b6474d877 | [
"BSD-3-Clause-LBNL"
] | null | null | null | cctbx_website/run_tests.py | dwpaley/cctbx_project | 476428363d260126319e82aa24d1e79b6474d877 | [
"BSD-3-Clause-LBNL"
] | null | null | null | from __future__ import absolute_import, division, print_function
from libtbx import test_utils
import libtbx.load_env
#tst_list = [
# "$D/regression/tst_py_from_html.py"
# ]
# Regression scripts executed by run() below; "$D" expands to the
# cctbx_website dist directory inside libtbx's test harness.
tst_list = [
  "$D/regression/tst_1_template.py",
  "$D/regression/tst_2_doc_high_level_objects.py",
  "$D/regression/tst_3_doc_model_manager.py",
  "$D/regression/tst_4_doc_data_manager.py",
  "$D/regression/tst_5_doc_map_manager.py",
  "$D/regression/tst_6_doc_model_map_manager.py",
  ]
def run():
  """Run the cctbx_website regression scripts through libtbx's test harness."""
  build_path = libtbx.env.under_build("cctbx_website")
  dist_path = libtbx.env.dist_path("cctbx_website")
  test_utils.run_tests(build_path, dist_path, tst_list)

if (__name__ == "__main__"):
  run()
| 25.148148 | 64 | 0.755523 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 344 | 0.506627 |
87fd884af907e9e970ccc13cfcca8085d841d1bd | 1,520 | py | Python | python/tlbm/wavy_channel/wavy_channel_generator.py | stu314159/HPC_Introduction_with_LBM | cbba81460513166b4814f3028807020be9b5c234 | [
"MIT"
] | null | null | null | python/tlbm/wavy_channel/wavy_channel_generator.py | stu314159/HPC_Introduction_with_LBM | cbba81460513166b4814f3028807020be9b5c234 | [
"MIT"
] | null | null | null | python/tlbm/wavy_channel/wavy_channel_generator.py | stu314159/HPC_Introduction_with_LBM | cbba81460513166b4814f3028807020be9b5c234 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 7 08:53:18 2021
@author: sblair
"""
import numpy as np
import scipy.integrate as integrate
from scipy.optimize import fsolve
import matplotlib.pyplot as plt
L_hx = 30; # cm, length of the heat exchanger
nX = 100; # number of points in the x-direction
n_period = 4; # number of full sine periods along the channel
A_lam_ratio = 0.3; # ratio between amplitude and wavelength
def get_B(A):
    """Angular frequency B that keeps amplitude/wavelength at A_lam_ratio."""
    B = A_lam_ratio*(2*np.pi)/A
    return B
def wave_form_p(x, A):
    """Sinusoidal wall profile with peak-to-peak amplitude A, evaluated at x."""
    half_amplitude = A/2
    return half_amplitude*np.sin(get_B(A)*x)
def d_wave_form_p(x, A):
    """Slope dy/dx of wave_form_p with amplitude A, evaluated at x."""
    B = get_B(A)  # get_B is pure, so hoisting the duplicate call is safe
    return B*(A/2)*np.cos(B*x)
def get_X_max(A):
    """Domain length in x covering n_period full sine periods."""
    B = get_B(A)
    return n_period*2.*np.pi/B
def chord_length_error(A):
    """Difference between the wall's developed (arc) length and the target L_hx.

    The root of this function is the amplitude A whose wavy wall has arc
    length L_hx over [0, x_max].  Fixes the original integrand, which read
    sqrt(1 + y')**2 (identically 1 + y') instead of the arc-length form
    sqrt(1 + y'**2).
    """
    result = integrate.quad(lambda x: np.sqrt(1. + d_wave_form_p(x, A)**2),
                            0, get_X_max(A));
    arc_length = result[0];
    return arc_length - L_hx;
# Solve for the amplitude whose wall length matches the heat-exchanger length.
A = fsolve(chord_length_error,0.1);
print(f'{"Amplitude = %g cm"}'%A);
def wave_form(x):
    """Wall profile evaluated at the solved amplitude A."""
    return wave_form_p(x, A)
# def d_wave_form(x):
# return d_wave_form_p(x,A);
# def phi(x):
# return np.arctan(d_wave_form(x));
# offset = 0.5;
# def offset_x(x):
# return offset*(-np.sin(phi(x)));
# def offset_y(x):
# return offset*(np.cos(phi(x)));
print(f'{"A: %12.8f"}'%A);
print(f'{"B: %12.8f"}'%get_B(A));
print(f'{"x_max: %12.8f "}'%get_X_max(A));

# Sample the wall profile over one full domain and plot it to scale.
x_lo = 0
x_hi = get_X_max(A)
x_samples = np.linspace(x_lo, x_hi, nX)

figure = plt.figure()
axes = figure.add_subplot(111)
plt.plot(x_samples, wave_form(x_samples))
plt.grid()
axes.set_aspect('equal', adjustable='box')
plt.show()
87fed2a33ced4e90675329ce32e82137ec7dfcce | 3,355 | py | Python | tests/client/test_decoders.py | timgates42/apistar | 2edeb6946af3562c6310fcd6db393d3960152f32 | [
"BSD-3-Clause"
] | 4,284 | 2017-08-16T15:10:03.000Z | 2022-03-23T23:16:35.000Z | tests/client/test_decoders.py | timgates42/apistar | 2edeb6946af3562c6310fcd6db393d3960152f32 | [
"BSD-3-Clause"
] | 405 | 2017-08-16T20:45:55.000Z | 2022-02-24T05:03:01.000Z | tests/client/test_decoders.py | timgates42/apistar | 2edeb6946af3562c6310fcd6db393d3960152f32 | [
"BSD-3-Clause"
] | 452 | 2017-08-17T04:44:30.000Z | 2022-03-15T16:50:22.000Z | import os
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse, Response
from starlette.testclient import TestClient
from apistar.client import Client, decoders
# Shared ASGI app that every route below registers against.
app = Starlette()
@app.route("/text-response/")
def text_response(request):
    """Endpoint returning a plain-text body."""
    body = "hello, world"
    return PlainTextResponse(body)
@app.route("/file-response/")
def file_response(request):
    """PNG attachment with an explicit filename in Content-Disposition."""
    response_headers = {
        "Content-Type": "image/png",
        "Content-Disposition": 'attachment; filename="filename.png"',
    }
    return Response(b"<somedata>", headers=response_headers)
@app.route("/file-response-url-filename/name.png")
def file_response_url_filename(request):
    """Attachment with no filename; the client must derive it from the URL."""
    response_headers = {"Content-Type": "image/png", "Content-Disposition": "attachment"}
    return Response(b"<somedata>", headers=response_headers)
@app.route("/file-response-no-extension/name")
def file_response_no_extension(request):
    """URL name lacks an extension; the client appends one from the MIME type."""
    response_headers = {"Content-Type": "image/png", "Content-Disposition": "attachment"}
    return Response(b"<somedata>", headers=response_headers)
@app.route("/")
def file_response_no_name(request):
    """No name in URL or headers; the client falls back to a default filename."""
    response_headers = {"Content-Type": "image/png", "Content-Disposition": "attachment"}
    return Response(b"<somedata>", headers=response_headers)
# Minimal OpenAPI 3 document mapping operationIds onto the routes above;
# the apistar Client drives every request in the tests through this schema.
schema = {
    "openapi": "3.0.0",
    "info": {"title": "Test API", "version": "1.0"},
    "servers": [{"url": "http://testserver"}],
    "paths": {
        "/text-response/": {"get": {"operationId": "text-response"}},
        "/file-response/": {"get": {"operationId": "file-response"}},
        "/file-response-url-filename/name.png": {
            "get": {"operationId": "file-response-url-filename"}
        },
        "/file-response-no-extension/name": {
            "get": {"operationId": "file-response-no-extension"}
        },
        "/": {"get": {"operationId": "file-response-no-name"}},
    },
}
def test_text_response():
    """text/plain bodies decode to a plain str."""
    client = Client(schema, session=TestClient(app))
    result = client.request("text-response")
    assert result == "hello, world"
def test_file_response():
    """The download name comes from the Content-Disposition filename."""
    client = Client(schema, session=TestClient(app))
    downloaded = client.request("file-response")
    assert os.path.basename(downloaded.name) == "filename.png"
    assert downloaded.read() == b"<somedata>"
def test_file_response_url_filename():
    """Without a header filename, the name is taken from the URL path."""
    client = Client(schema, session=TestClient(app))
    downloaded = client.request("file-response-url-filename")
    assert os.path.basename(downloaded.name) == "name.png"
    assert downloaded.read() == b"<somedata>"
def test_file_response_no_extension():
    """A missing extension is filled in from the response MIME type."""
    client = Client(schema, session=TestClient(app))
    downloaded = client.request("file-response-no-extension")
    assert os.path.basename(downloaded.name) == "name.png"
    assert downloaded.read() == b"<somedata>"
def test_file_response_no_name():
    """With no name available anywhere, the client uses 'download.<ext>'."""
    client = Client(schema, session=TestClient(app))
    downloaded = client.request("file-response-no-name")
    assert os.path.basename(downloaded.name) == "download.png"
    assert downloaded.read() == b"<somedata>"
def test_unique_filename(tmpdir):
    """A second download with the same name gets a ' (1)' suffix."""
    download_decoder = decoders.DownloadDecoder(tmpdir)
    client = Client(
        schema, session=TestClient(app), decoders=[download_decoder]
    )

    first = client.request("file-response")
    assert os.path.basename(first.name) == "filename.png"
    assert first.read() == b"<somedata>"

    second = client.request("file-response")
    assert os.path.basename(second.name) == "filename (1).png"
    assert second.read() == b"<somedata>"
| 31.35514 | 84 | 0.665574 | 0 | 0 | 0 | 0 | 968 | 0.288525 | 0 | 0 | 1,144 | 0.340984 |
e20165267f4b4a5c094fd275f94d49afce2cec55 | 2,834 | py | Python | Src/check_linguistic_info.py | rstodden/ATILF-LLF.v3 | 768c453fb0d28c3e237c1b22522a319c1b157c1f | [
"MIT"
] | null | null | null | Src/check_linguistic_info.py | rstodden/ATILF-LLF.v3 | 768c453fb0d28c3e237c1b22522a319c1b157c1f | [
"MIT"
] | null | null | null | Src/check_linguistic_info.py | rstodden/ATILF-LLF.v3 | 768c453fb0d28c3e237c1b22522a319c1b157c1f | [
"MIT"
] | null | null | null | import os, json
from collections import Counter
# Count value frequencies for selected CUPT columns in every language of the
# PARSEME shared-task corpus and dump per-language plus aggregate counts to
# JSON.  FEATS values are reduced to feature names (text before '='); MWE tags
# are reduced to their category (text after ':'), skipping bare indices.
# PARSEME:MWE counts go to ../Results/labels, all other columns to
# ../Results/features/corpus_info.

# Create the output directories up front (was a TODO in the original).
os.makedirs("../Results/labels", exist_ok=True)
os.makedirs("../Results/features/corpus_info", exist_ok=True)

for col in ["FEATS", "PARSEME:MWE", "UPOS", "XPOS", "DEPREL", "DEPS", "LEMMA"]:
    for file_type in ["train.cupt", "dev.cupt", "test.blind.cupt"]:
        counter_dict = {}       # language -> Counter of values
        count_all = Counter()   # aggregate over all languages
        for lang_dir in os.listdir("../sharedtask_11/"):
            for filename in os.listdir("../sharedtask_11/" + lang_dir):
                if not filename.endswith(file_type):
                    continue
                lang_counter = Counter()
                with open("../sharedtask_11/" + lang_dir + '/' + filename) as f:
                    content = f.readlines()
                for line in content:
                    if line.startswith('# global.columns = '):
                        # The header row announces this file's column layout.
                        header = line.split('# global.columns = ')[1].strip()
                        nr_col = header.split(' ').index(col)
                    if not line.startswith("#") and line != '\n':
                        # Token row: pick the value of the selected column.
                        col_value = line.strip().split('\t')[nr_col]
                        if col_value == "_" or col_value == "*":
                            # Placeholder values are counted verbatim.
                            lang_counter[col_value] += 1
                            count_all[col_value] += 1
                        elif col == "FEATS":
                            # Count each morphological feature name once.
                            for part in col_value.split("|"):
                                name = part.split("=")[0] if "=" in part else part
                                lang_counter[name] += 1
                                count_all[name] += 1
                        elif col == "PARSEME:MWE":
                            # Count MWE categories; skip bare continuation ids.
                            for tag in col_value.split(";"):
                                if ":" in tag:
                                    tag = tag.split(":")[1]
                                if tag.isdigit():
                                    continue
                                lang_counter[tag] += 1
                                count_all[tag] += 1
                        else:
                            lang_counter[col_value] += 1
                            count_all[col_value] += 1
                counter_dict[lang_dir] = lang_counter
        # Dump per-language and aggregate counts for this column/file type.
        base = col + '_' + file_type.split(".")[0]
        if col == "PARSEME:MWE":
            out_dir = "../Results/labels/"
        else:
            out_dir = "../Results/features/corpus_info/"
        with open(out_dir + "number_" + base + ".json", "w") as f:
            json.dump(counter_dict, f)
        with open(out_dir + "number_" + base + "_all.json", "w") as f:
            json.dump(count_all, f)
| 36.805195 | 110 | 0.58151 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 563 | 0.198659 |
e20171e4e7c66b04f9ec33c3a6bff43dd7856257 | 1,403 | py | Python | rex/eutil.py | dnanto/rex | 88ba883765f3cb35fe17a3675dbbca65d0de832f | [
"MIT"
] | null | null | null | rex/eutil.py | dnanto/rex | 88ba883765f3cb35fe17a3675dbbca65d0de832f | [
"MIT"
] | null | null | null | rex/eutil.py | dnanto/rex | 88ba883765f3cb35fe17a3675dbbca65d0de832f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, FileType
from Bio import Entrez
from rex.util import batchify
def parse_args(argv):
    """Build the CLI parser and parse *argv* (argument list without argv[0])."""
    parser = ArgumentParser(description="eutil", formatter_class=ArgumentDefaultsHelpFormatter)
    # Positional arguments.
    parser.add_argument("eutil", default="efetch", help="the E-utility to use")
    parser.add_argument("id", type=FileType(), help="the identifiers")
    # Optional arguments.
    parser.add_argument(
        "-db", "--db", "-database", "--database", default="nuccore",
        help="the NCBI database"
    )
    parser.add_argument("-params", help="the space separated key=value pairs")
    parser.add_argument(
        "-post-size", "--post-size", type=int, default=200,
        help="the number of records to post at a time"
    )
    parser.add_argument(
        "-email", "--email", default="",
        help="the e-mail to identify yourself to NCBI (for politeness reasons)"
    )
    return parser.parse_args(argv)
def main(argv):
    """Post identifier batches to the chosen E-utility, streaming each
    response to stdout. Returns 0 on success (for sys.exit)."""
    args = parse_args(argv[1:])
    Entrez.email = args.email
    eutil = getattr(Entrez, args.eutil)
    # Extra key=value query parameters forwarded verbatim to the E-utility.
    if args.params:
        params = dict(item.split("=") for item in args.params.split())
    else:
        params = {}
    with args.id as id_file:
        stripped_ids = map(str.strip, id_file)
        for batch in batchify(stripped_ids, size=args.post_size):
            joined_ids = ",".join(batch)
            with eutil(db=args.db, id=joined_ids, **params) as handle:
                sys.stdout.write(handle.read())
    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv))
| 25.053571 | 92 | 0.707056 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 362 | 0.258019 |
e2030732a198280850be22b59cadf31448549da7 | 3,594 | py | Python | src/relevancy_measures/calc_NCDG.py | dannycho7/RTP_Latest | 1cb1dbd72419f9c2c61f20879b56872a9d28a3dc | [
"Apache-2.0"
] | null | null | null | src/relevancy_measures/calc_NCDG.py | dannycho7/RTP_Latest | 1cb1dbd72419f9c2c61f20879b56872a9d28a3dc | [
"Apache-2.0"
] | null | null | null | src/relevancy_measures/calc_NCDG.py | dannycho7/RTP_Latest | 1cb1dbd72419f9c2c61f20879b56872a9d28a3dc | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
import sys
import argparse
import math
# Compute and print NDCG@n for each retrieval result file against human ratings.
parser = argparse.ArgumentParser(description='Post-processing after labeling, please put your rating in a file with '
                                             'the same order as in docs.txt, one rating per line.')
parser.add_argument('--doc', required=True, help='path for doc_id_union.txt')
parser.add_argument('--rating', required=True, help='path for your rating file')
# NOTE(review): the help text says "separated by colon" but the code below
# splits on a comma -- confirm which separator is intended.
parser.add_argument('--result', required=True, help='list of doc_id.txt, separated by colon')
parser.add_argument('--dict', default='phase1/gen_index/vdrelation.txt', help='path for vdrelation.txt')
parser.add_argument('-n', type=int, default=10, help='number of results')
args = parser.parse_args()
v_id_path = args.doc
rating_path = args.rating
result_paths = args.result.split(',')
v_ids = []    # video ids, in docs.txt order
ratings = []  # human ratings, aligned with v_ids
data = []     # (result path, [(v_id, rating), ...]) per result file
try:
  print('Reading vid-did mapping ...')
  # read vid-did map from vdrelation.txt
  vd_map = {}
  with open(args.dict, 'r') as f:
    for line in f:
      v_id, doc_id = line.split()
      vd_map[v_id] = doc_id
  # read document ids
  with open(v_id_path, 'r') as f:
    for line in f:
      v_id = line.split()[0]
      v_ids.append(v_id)
  # read ratings
  with open(rating_path, 'r') as f:
    for line in f:
      rating = int(line.split()[0])
      ratings.append(rating)
  rating_map = dict(zip(v_ids, ratings))
  # read data from each result file
  for path in result_paths:
    with open(path, 'r') as f:
      local_v_ids = []
      for line in f:
        v_id = line.split()[0]
        local_v_ids.append((v_id, rating_map[v_id]))
      data.append((path, local_v_ids))
  # generate ideal rating order: best achievable rating per document
  # (several v_ids may map to one doc id; keep the maximum per document)
  unique_ratings = {}
  for v_id, rating in rating_map.items():
    if vd_map[v_id] in unique_ratings:
      current_rating = unique_ratings[vd_map[v_id]]
      if rating > current_rating:
        unique_ratings[vd_map[v_id]] = rating
    else:
      unique_ratings[vd_map[v_id]] = rating
  rating_order = list(unique_ratings.values())
  rating_order.sort(reverse=True)
  ideal_rating_order = rating_order[:args.n] + [0] * (args.n - len(rating_order[:args.n])) # fix: fill the empty entries with 0s
  # calculate and generate result
  for path, rating_list in data:
    title = ' @ '.join(path.split('/')[-3:-1])
    print('\nResult for', title, '\n')
    print('{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}\t{:<8}'.format('Rank','VID','Rating','Relevance','DCG','Ideal Rating','Ideal Relevance','IDCG','NDCG'))
    # print('Rank\tRating\tRelevance\tDCG\tIdeal Rating\tIdeal Relevance\tIDCG\tNDCG')
    rank = 1
    dcg = 0
    idcg = 0
    # pad short result lists with zero-rated placeholders up to n entries
    filled_rating_list = rating_list + [('Empty', 0)] * (args.n - len(rating_list))
    for v_id, rating in filled_rating_list:
      # exponential gain with the standard log2 position discount
      relevance = (2 ** rating - 1) / math.log2(1 + rank)
      dcg += relevance
      ideal_rating = ideal_rating_order[rank - 1]
      ideal_relevance = (2 ** ideal_rating - 1) / math.log2(1 + rank)
      idcg += ideal_relevance
      # NOTE(review): divides by zero if every ideal rating so far is 0
      ndcg = dcg / idcg
      print('{:<8}\t{:<8}\t{:<8}\t{:<8.4f}\t{:<8.4f}\t{:<8}\t{:<8.4f}\t{:<8.4f}\t{:<8.4f}'.format(rank, v_id, rating, relevance, dcg, ideal_rating, ideal_relevance,
                                                                                                  idcg, ndcg))
      rank += 1
except IOError as e:
  # NOTE(review): only I/O errors land here; e.g. a KeyError from rating_map
  # (an unrated v_id in a result file) would propagate uncaught.
  print('Cannot open necessary files', file=sys.stderr)
e2033f8cbe6a73bf5cc8da3c75dc093abcd8cd80 | 4,273 | py | Python | opytimark/core/benchmark.py | gugarosa/opytimark | cad25623f23ce4b509d59381cf7bd79e41a966b6 | [
"Apache-2.0"
] | 3 | 2020-06-11T22:58:26.000Z | 2021-03-15T20:12:29.000Z | opytimark/core/benchmark.py | gugarosa/opytimark | cad25623f23ce4b509d59381cf7bd79e41a966b6 | [
"Apache-2.0"
] | 1 | 2020-08-13T12:10:35.000Z | 2020-08-17T14:30:45.000Z | opytimark/core/benchmark.py | gugarosa/opytimark | cad25623f23ce4b509d59381cf7bd79e41a966b6 | [
"Apache-2.0"
] | null | null | null | """Benchmark-based class.
"""
import opytimark.utils.exception as e
class Benchmark:
    """A Benchmark class is the root of any benchmarking function.

    It is composed by several properties that defines the traits of a function,
    as well as a non-implemented __call__ method.
    """

    def __init__(self, name='Benchmark', dims=1, continuous=False, convex=False,
                 differentiable=False, multimodal=False, separable=False):
        """Initialization method.

        Args:
            name (str): Name of the function.
            dims (int): Number of allowed dimensions (-1 means any; 0 is invalid).
            continuous (bool): Whether the function is continuous.
            convex (bool): Whether the function is convex.
            differentiable (bool): Whether the function is differentiable.
            multimodal (bool): Whether the function is multimodal.
            separable (bool): Whether the function is separable.
        """

        # Every assignment goes through the validating property setters below.
        self.name = name
        self.dims = dims
        self.continuous = continuous
        self.convex = convex
        self.differentiable = differentiable
        self.multimodal = multimodal
        self.separable = separable

    @staticmethod
    def _checked_bool(value, label):
        """Validate that `value` is a boolean and return it.

        Consolidates the identical check previously duplicated across all
        five boolean trait setters.

        Args:
            value: Candidate value for the trait.
            label (str): Property name used in the error message.

        Returns:
            The validated boolean value.

        Raises:
            e.TypeError: If `value` is not a boolean.
        """

        if not isinstance(value, bool):
            raise e.TypeError('`%s` should be a boolean' % label)
        return value

    @property
    def name(self):
        """str: Name of the function.
        """
        return self._name

    @name.setter
    def name(self, name):
        if not isinstance(name, str):
            raise e.TypeError('`name` should be a string')
        self._name = name

    @property
    def dims(self):
        """int: Number of allowed dimensions.
        """
        return self._dims

    @dims.setter
    def dims(self, dims):
        if not isinstance(dims, int):
            raise e.TypeError('`dims` should be a integer')
        # -1 is the "any dimensionality" sentinel; 0 dimensions is meaningless.
        if (dims < -1 or dims == 0):
            raise e.ValueError('`dims` should be >= -1 and different than 0')
        self._dims = dims

    @property
    def continuous(self):
        """bool: Whether function is continuous or not.
        """
        return self._continuous

    @continuous.setter
    def continuous(self, continuous):
        self._continuous = self._checked_bool(continuous, 'continuous')

    @property
    def convex(self):
        """bool: Whether function is convex or not.
        """
        return self._convex

    @convex.setter
    def convex(self, convex):
        self._convex = self._checked_bool(convex, 'convex')

    @property
    def differentiable(self):
        """bool: Whether function is differentiable or not.
        """
        return self._differentiable

    @differentiable.setter
    def differentiable(self, differentiable):
        self._differentiable = self._checked_bool(differentiable, 'differentiable')

    @property
    def multimodal(self):
        """bool: Whether function is multimodal or not.
        """
        return self._multimodal

    @multimodal.setter
    def multimodal(self, multimodal):
        self._multimodal = self._checked_bool(multimodal, 'multimodal')

    @property
    def separable(self):
        """bool: Whether function is separable or not.
        """
        return self._separable

    @separable.setter
    def separable(self, separable):
        self._separable = self._checked_bool(separable, 'separable')

    def __call__(self, x):
        """This method returns the function's output when the class is called.

        Note that it needs to be implemented in every child class as it is the
        one to hold the benchmarking function logic.

        Args:
            x (np.array): An input array for calculating the function's output.

        Returns:
            The benchmarking function output `f(x)`.

        Raises:
            NotImplementedError: Always; subclasses must override.
        """

        raise NotImplementedError
| 24.699422 | 80 | 0.607302 | 4,201 | 0.98315 | 0 | 0 | 2,367 | 0.553943 | 0 | 0 | 1,888 | 0.441844 |
e20384b81ca4f4f0e6bcef7012f54531808f1314 | 468 | py | Python | tronx/helpers/decorators.py | beastzx18/Tron | 92207b841c80311e484e8f350b96f7df8a76d3b9 | [
"MIT"
] | 8 | 2021-08-22T06:43:34.000Z | 2022-02-24T17:09:49.000Z | tronx/helpers/decorators.py | beastzx18/Tron | 92207b841c80311e484e8f350b96f7df8a76d3b9 | [
"MIT"
] | 61 | 2021-09-12T11:05:33.000Z | 2021-12-07T15:26:18.000Z | tronx/helpers/decorators.py | beastzx18/Tron | 92207b841c80311e484e8f350b96f7df8a76d3b9 | [
"MIT"
] | 6 | 2021-09-08T08:43:04.000Z | 2022-02-24T17:09:50.000Z | from pyrogram.types import CallbackQuery
from .variables import USER_ID
from pyrogram.errors import MessageNotModified
def alert_user(func):
	"""Restrict a pyrogram callback handler to the userbot owner.

	Non-owners get a popup alert; for owners the wrapped handler runs and
	Telegram's MessageNotModified error is swallowed (editing a message with
	identical content is a harmless no-op).
	"""
	async def wrapper(_, cb: CallbackQuery):
		# `from_user` may be absent on some updates, so check it first.
		if cb.from_user and cb.from_user.id not in USER_ID:
			await cb.answer(
				f"Sorry, but you can't use this userbot ! make your own userbot at @tronuserbot",
				show_alert=True
			)
		else:
			try:
				await func(_, cb)
			except MessageNotModified:
				pass
	return wrapper
| 20.347826 | 86 | 0.728632 | 0 | 0 | 0 | 0 | 0 | 0 | 303 | 0.647436 | 80 | 0.17094 |
e20433bc889dc0f32de713dc2c45f59d8175f0f2 | 481 | py | Python | cachetclient/v1/__init__.py | amdemas/cachet-client | 6a34ada87f99f8a3af593eefadc37a83f59827dd | [
"MIT"
] | null | null | null | cachetclient/v1/__init__.py | amdemas/cachet-client | 6a34ada87f99f8a3af593eefadc37a83f59827dd | [
"MIT"
] | null | null | null | cachetclient/v1/__init__.py | amdemas/cachet-client | 6a34ada87f99f8a3af593eefadc37a83f59827dd | [
"MIT"
] | null | null | null | from cachetclient.v1.client import Client # noqa
from cachetclient.v1.subscribers import Subscriber # noqa
from cachetclient.v1.components import Component # noqa
from cachetclient.v1.component_groups import ComponentGroup # noqa
from cachetclient.v1.incident_updates import IndicentUpdate # noqa
from cachetclient.v1.metrics import Metric # noqa
from cachetclient.v1.metric_points import MetricPoint # noqa
from cachetclient.v1 import enums # noqa
# Package version, exposed for client introspection.
__version__ = '1.1.0'
| 40.083333 | 67 | 0.814969 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 55 | 0.114345 |
e20447ecbe22286bf4163b908e5eba34ac71394f | 141 | py | Python | documentation/admin.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | 1 | 2017-04-27T19:35:42.000Z | 2017-04-27T19:35:42.000Z | documentation/admin.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | null | null | null | documentation/admin.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | null | null | null | from django.contrib import admin
from establishment.documentation.models import DocumentationEntry
# Expose DocumentationEntry in the Django admin with the default ModelAdmin.
admin.site.register(DocumentationEntry)
| 23.5 | 65 | 0.87234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
e204c971bbb2c9c7ca0b1436805590f663181057 | 96 | py | Python | libs/parsers/__init__.py | pullself/Compilers | 590226d02e5291857cb3875bd1ed6315c37fc74e | [
"MIT"
] | null | null | null | libs/parsers/__init__.py | pullself/Compilers | 590226d02e5291857cb3875bd1ed6315c37fc74e | [
"MIT"
] | null | null | null | libs/parsers/__init__.py | pullself/Compilers | 590226d02e5291857cb3875bd1ed6315c37fc74e | [
"MIT"
] | null | null | null | import libs.parsers.parser
import libs.parsers.constructor
# Public API of the parsers package: the re-exported submodules.
__all__ = ['parser', 'constructor']
| 19.2 | 35 | 0.78125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 21 | 0.21875 |
e206029a10a5d9ac7e885778cd9c1b1b8647077a | 3,732 | py | Python | src/demo.py | FanShuixing/CenterNet | 8d81281fb20ec1a98c3fd034bff957dc6ba531d8 | [
"MIT"
] | null | null | null | src/demo.py | FanShuixing/CenterNet | 8d81281fb20ec1a98c3fd034bff957dc6ba531d8 | [
"MIT"
] | null | null | null | src/demo.py | FanShuixing/CenterNet | 8d81281fb20ec1a98c3fd034bff957dc6ba531d8 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
import os
import cv2
from opts import opts
from detectors.detector_factory import detector_factory
import pandas as pd
import json
# Input-type dispatch tables: opt.demo extensions recognized as images/videos.
image_ext = ['jpg', 'jpeg', 'png', 'webp']
video_ext = ['mp4', 'mov', 'avi', 'mkv']
# Timing stages reported in the dict returned by Detector.run().
time_stats = ['tot', 'load', 'pre', 'net', 'dec', 'post', 'merge']
def demo(opt):
    """Run detection interactively on a webcam/video, or batch over a CSV list.

    Video mode shows detections and per-stage timings until ESC is pressed.
    Otherwise opt.demo names a CSV under /input1 whose second column lists
    image paths; annotated images are written to predict/ and all normalized
    boxes are dumped to predict.json.
    """
    os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpus_str
    opt.debug = max(opt.debug, 1)  # force at least debug level 1
    Detector = detector_factory[opt.task]
    detector = Detector(opt)
    if opt.demo == 'webcam' or \
            opt.demo[opt.demo.rfind('.') + 1:].lower() in video_ext:
        cam = cv2.VideoCapture(0 if opt.demo == 'webcam' else opt.demo)
        detector.pause = False
        while True:
            _, img = cam.read()
            cv2.imshow('input', img)
            ret = detector.run(img)
            time_str = ''
            for stat in time_stats:
                time_str = time_str + '{} {:.3f}s |'.format(stat, ret[stat])
            print(time_str)
            if cv2.waitKey(1) == 27:
                return  # esc to quit
    else:
        # if os.path.isdir(opt.demo):
        # Batch mode: read image paths from the CSV's second column.
        with open('/input1/%s' % opt.demo) as fr:
            file_list = pd.read_csv(fr).values[:, 1]
        image_names = []
        for each in file_list:
            image_names.append(os.path.join('/input1', each))
        # image_names = []
        # ls = os.listdir(opt.demo)
        # for file_name in sorted(ls):
        #     ext = file_name[file_name.rfind('.') + 1:].lower()
        #     if ext in image_ext:
        #         image_names.append(os.path.join(opt.demo, file_name))
        total_dict = {}
        for (image_name) in image_names:
            name_no_suffix = image_name.split('/')[-1].replace('.jpg', '')
            # Ground-truth box count, used only in the debug image filename.
            with open('/input1/mask_labels/%s.json' % name_no_suffix) as fr:
                info = json.load(fr)
                gt_box = info['num_box']
            img = cv2.imread(image_name)
            h, w, _ = img.shape  # original size, used to normalize boxes
            img = cv2.resize(img, (768, 576))  # cv2.resize takes (width, height)
            info_dict = {}
            bboxes_json = []
            ret = detector.run(image_name)
            # Write the detection results to JSON
            results = ret['results']
            for j in range(1, 2):
                for bbox in results[j]:
                    tmp = {}
                    if bbox[4] > opt.vis_thresh:
                        # Box coordinates normalized by the original image size.
                        tmp['x_min'] = bbox[0] / w
                        tmp['y_min'] = bbox[1] / h
                        tmp['x_max'] = bbox[2] / w
                        tmp['y_max'] = bbox[3] / h
                        tmp['label'] = 'mucai'
                        tmp['confidence'] = 1
                        bboxes_json.append(tmp)
                        cv2.rectangle(img, (int(bbox[0] / w * 768), int(bbox[1] / h * 576)),
                                      (int(bbox[2] / w * 768), int(bbox[3] / h * 576)), (255, 0, 0), 2)
            cv2.imwrite('predict/%s_pred%s_gt%s.jpg' % (name_no_suffix, len(bboxes_json), gt_box), img)
            # NOTE(review): the resize above produced a 768-wide, 576-high
            # image, so height/width below look swapped -- confirm with the
            # consumers of predict.json before changing.
            info_dict['image_height'] = 768
            info_dict['image_width'] = 576
            info_dict['num_box'] = len(bboxes_json)
            info_dict['bboxes'] = bboxes_json
            total_dict[name_no_suffix] = info_dict
        with open('predict.json', 'w+') as fr:
            json.dump(total_dict, fr)
            # Timing summary for the last processed image only.
            time_str = ''
            for stat in time_stats:
                time_str = time_str + '{} {:.3f}s |'.format(stat, ret[stat])
            print(time_str)
# CLI entry point: parse options once and run the demo.
if __name__ == '__main__':
    opt = opts().init()
    demo(opt)
| 37.32 | 103 | 0.497053 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 769 | 0.205067 |
e2062f6e1dd31afd24e042366439bb3f075d80bc | 1,849 | py | Python | example/app.py | jfwm2/gourde | ac338a76d009470159c365ff64c214a578b0f3e9 | [
"Apache-2.0"
] | 6 | 2018-03-30T13:43:14.000Z | 2021-03-09T12:12:54.000Z | example/app.py | jfwm2/gourde | ac338a76d009470159c365ff64c214a578b0f3e9 | [
"Apache-2.0"
] | 22 | 2018-04-18T10:54:28.000Z | 2019-11-18T08:24:47.000Z | example/app.py | jfwm2/gourde | ac338a76d009470159c365ff64c214a578b0f3e9 | [
"Apache-2.0"
] | 8 | 2018-04-18T10:50:23.000Z | 2022-01-21T03:32:03.000Z | #!/usr/bin/env python
"""Gourde example."""
import argparse
import flask
from gourde import Gourde
# Optional API.
try:
import flask_restplus
except ImportError:
flask_restplus = None
class Error(Exception):
    """Base class for all local errors raised by this example app."""
# This could be as simple as :
# gourde = Gourde(app)
# app = gourde.app
# More complicated example:
# Build the Gourde wrapper and grab the underlying Flask app it manages.
gourde = Gourde(__name__)
app = gourde.app
# Override the default index
@app.route("/")
def index():
    """Serve the overridden landing page."""
    template_name = "index.html"
    return flask.render_template(template_name)
# Add a new page.
@app.route("/example")
def example():
    """Serve the additional example page."""
    template_name = "example.html"
    return flask.render_template(template_name)
# Create a custom health check callbback.
def is_healthy():
    """Custom "health" check that fails roughly half of the time."""
    import random

    draw = random.random()
    if draw > 0.5:
        raise Error()
    return True
# Only defined when flask_restplus is importable; registered by initialize_api.
if flask_restplus:
    class HelloWorld(flask_restplus.Resource):
        """Trivial resource demonstrating the optional REST API."""
        def get(self):
            return {"hello": "world"}
def initialize_api(flask_app):
    """Mount the optional flask_restplus API under /api (no-op if missing)."""
    if not flask_restplus:
        return

    api = flask_restplus.Api(version="1.0", title="My Example API")
    api.add_resource(HelloWorld, "/hello")

    api_blueprint = flask.Blueprint("api", __name__, url_prefix="/api")
    api.init_app(api_blueprint)
    flask_app.register_blueprint(api_blueprint)
def initialize_app(flask_app, args):
    """Configure gourde, install the health check and the optional API."""
    gourde.setup(args)               # apply the parsed CLI arguments
    gourde.is_healthy = is_healthy   # register the custom health probe
    initialize_api(flask_app)
def main():
    """Parse the CLI arguments, initialize the app and serve it."""
    arg_parser = argparse.ArgumentParser(description="Example")
    arg_parser = Gourde.get_argparser(arg_parser)
    args = arg_parser.parse_args()

    initialize_app(app, args)
    gourde.run()


if __name__ == "__main__":
    main()
| 17.12037 | 67 | 0.666306 | 165 | 0.089237 | 0 | 0 | 161 | 0.087074 | 0 | 0 | 583 | 0.315306 |
e20721a05bdc5f184d3fae8e26cffd96f9a854af | 4,963 | py | Python | precise/skatervaluation/battlecode/arrangingbattles.py | OVVO-Financial/precise | ce744cadfca18f4ab77c68cc27bf8d712561127f | [
"MIT"
] | null | null | null | precise/skatervaluation/battlecode/arrangingbattles.py | OVVO-Financial/precise | ce744cadfca18f4ab77c68cc27bf8d712561127f | [
"MIT"
] | null | null | null | precise/skatervaluation/battlecode/arrangingbattles.py | OVVO-Financial/precise | ce744cadfca18f4ab77c68cc27bf8d712561127f | [
"MIT"
] | null | null | null | from precise.skaters.covariance.allcovskaters import ALL_D0_SKATERS
from precise.skaters.covarianceutil.likelihood import cov_skater_loglikelihood
from uuid import uuid4
import os
import json
import pathlib
from pprint import pprint
import traceback
from collections import Counter
from momentum.functions import rvar
from precise.skatertools.data.equity import random_m6_returns
from precise.whereami import SKATER_WIN_DATA
import numpy as np
import time
# Default data-retrieval parameters for the 'm6' topic; battle configs
# override individual keys. Treated as read-only by the code below.
DEFAULT_M6_PARAMS = {'n_dim': 25,
                     'n_obs': 356,
                     'n_burn':300,
                     'atol': 1,
                     'lb':-1000,
                     'ub':1000,
                     'interval':'d'}
def params_category_and_data(params:dict):
    """Supplement params (usually inferred from battle script file names)
    with topic defaults and retrieve matching return data.

    Args:
        params: Battle parameters; must contain 'topic' (only 'm6' supported).

    Returns:
        tuple: (combined_params, category, xs) -- merged params, a category
        slug like '<description>_p<n_dim>_n<n_burn>', and the return data.

    Raises:
        ValueError: If params['topic'] is not 'm6'.
    """
    if params['topic']== 'm6':
        # Copy the defaults so repeated calls never mutate the shared
        # module-level DEFAULT_M6_PARAMS (the original aliased and updated
        # it in place, leaking per-call keys into every later call).
        combined_params = dict(DEFAULT_M6_PARAMS)
        combined_params.update(params)
        descriptions = {'m': 'm6_stocks_monthly',
                        'd': 'm6_stocks_daily'}
        combined_params['description'] = descriptions[combined_params['interval']]
        category = combined_params['description'] + '_p' + str(combined_params['n_dim']) + '_n' + str(combined_params['n_burn'])
        xs = random_m6_returns(verbose=False, **combined_params)
        return combined_params, category, xs
    else:
        raise ValueError('m6 is only topic, for now')
def skater_battle( params:dict ):
    """
    Write results to a new queue

    Runs an endless tournament: each pass draws fresh return data, scores a
    random trio of covariance skaters by out-of-sample log-likelihood, tallies
    pairwise wins, and occasionally flushes the tally to a JSON file under
    SKATER_WIN_DATA/<category>/.  Never returns.
    """
    n_per_battle = 3    # number of skaters drawn per round
    atol = 1.0          # log-likelihood margin required to count as a win
    # Fail fast if data cannot be retrieved for these params.
    try:
        params, category, xs_test = params_category_and_data(params=params)
    except Exception as e:
        print(e)
        pprint(params)
        raise ValueError('Something is probably wrong with params for getting data, so this config will not fly')
    print('Data retrieval test passed for category '+category)
    pprint(params)
    time.sleep(1)
    print('Will test the following skaters')
    pprint(ALL_D0_SKATERS)
    # Each run writes to its own uniquely-named JSON queue file.
    qn = str(uuid4())+'.json'
    queue_dir = os.path.join(SKATER_WIN_DATA, category)
    queue = os.path.join(queue_dir,qn)
    pathlib.Path(queue_dir).mkdir(parents=True, exist_ok=True)
    print(queue)
    battles = Counter()       # pairwise win counts keyed "winner>loser"
    timing = dict()           # running cpu-time stats per skater (via rvar)
    reliability = dict()      # running pass/fail rate per skater (via rvar)
    failures = dict()         # last traceback per failing skater
    worst_ll_seen = 10000000  # failed skaters are assigned the worst ll so far
    lb = params['lb']
    ub = params['ub']
    while True:
        n_obs = params['n_obs']
        params, category, xs = params_category_and_data(params=params)
        assert len(xs)==n_obs
        xs = np.array(xs)
        # Shuffle in place and take the first few as this round's contestants.
        np.random.shuffle(ALL_D0_SKATERS)
        fs = ALL_D0_SKATERS[:n_per_battle]
        stuff = list()  # list of (log-likelihood, metrics dict) per contestant
        for f in fs:
            try:
                ll, metrics = cov_skater_loglikelihood(f=f, xs=xs, n_burn=params['n_burn'], with_metrics=True, lb=lb, ub=ub)
                metrics['name']=f.__name__
                metrics['traceback']=''
                metrics['passing']=1
                stuff.append( (ll,metrics) )
                if ll<worst_ll_seen:
                    worst_ll_seen = ll
                    print({'worst_ll_seen':ll})
                # Update exponentially-weighted running stats for this skater.
                name = metrics['name']
                if name not in timing:
                    timing[name] = {}
                timing[name] = rvar(timing[name], x=metrics['time'], rho=0.05)
                if name not in reliability:
                    reliability[name] = {}
                reliability[name] = rvar(reliability[name], x=1.0, rho=0.05)
            except Exception as e:
                # A crashing skater still participates, carrying the worst
                # log-likelihood seen so far, and its failure is recorded.
                metrics = {'name':f.__name__,'passing':0,'traceback':traceback.format_exc(),'ll':-100000000}
                if f.__name__ not in reliability:
                    reliability[f.__name__] = {}
                reliability[f.__name__] = rvar(reliability[f.__name__], x=0.0, rho=0.05)
                failures[f.__name__] = traceback.format_exc()
                ll = worst_ll_seen
                stuff.append( (ll,metrics))
        valid = [ s for s in stuff if s[1]['passing']>0.5 ]
        if len(valid)<=2:
            print('urhg')
        # Tally a win for i over j whenever i beats j by more than atol.
        for i, mi in enumerate(valid):
            for j, mj in enumerate(valid):
                if j != i:
                    if mi[0] > mj[0]+atol:
                        i_name = mi[1]['name']
                        j_name = mj[1]['name']
                        cmp_name = i_name+'>'+j_name
                        battles.update({cmp_name:1.0})
        reliabilties = dict([(nm, reliab['mean']) for nm,reliab in reliability.items() ] )
        cpu_times = dict([(nm, tm['mean']) for nm, tm in timing.items()])
        # Flush results to disk on roughly 1% of rounds.
        if np.random.rand()<0.01:
            with open(queue,'wt') as fh:
                json.dump(battles,fh)
            print('---')
            pprint(reliabilties)
            print('---')
            pprint(cpu_times)
            print('---')
            pprint(battles)
            print(' ')
            pprint(failures)
| 32.651316 | 128 | 0.557929 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 678 | 0.136611 |
e20a03cd63ed875b87f6b377a2d00238a2c57de9 | 7,995 | py | Python | src/PCMF/Positive_CMF.py | N-YS-KK/PCMF | b85959aa4dcfc490ea06066d77c9a79dce016859 | [
"MIT"
] | 3 | 2021-08-11T03:02:38.000Z | 2022-02-17T01:35:04.000Z | src/PCMF/Positive_CMF.py | N-YS-KK/PCMF | b85959aa4dcfc490ea06066d77c9a79dce016859 | [
"MIT"
] | null | null | null | src/PCMF/Positive_CMF.py | N-YS-KK/PCMF | b85959aa4dcfc490ea06066d77c9a79dce016859 | [
"MIT"
] | 1 | 2021-08-07T11:28:57.000Z | 2021-08-07T11:28:57.000Z | import numpy as np
import tensorflow as tf
class Positive_Collective_Matrix_Factorization:
    """
    Our proposed model PCMF.

    Jointly factorizes X ~ link_X(U V) and Y ~ link_Y(U Z) with non-negative
    factors; non-negativity is enforced by passing the raw variables through
    softplus before use.

    Attributes
    ----------
    X : numpy.ndarray
    Y : numpy.ndarray
    alpha : int
        Y weight of loss function.
    d_hidden : int
        Number of potential topics.
        This is the number of columns in the matrix U.
        This is the number of rows in the matrix V.
        This is the number of rows in the matrix Z.
    lamda : int
        Regularization weight.
    """

    # Accepted link function names for both X and Y.
    VALID_LINKS = ("sigmoid", "linear", "log")

    def __init__(self, X, Y, alpha=1, d_hidden=12, lamda=0.1):
        """
        Parameters
        ----------
        X : numpy.ndarray
        Y : numpy.ndarray
        alpha : int
            Y weight of loss function.
        d_hidden : int
            Number of potential topics.
        lamda : int
            Regularization weight.
        """
        self.X = X
        self.Y = Y
        self.alpha = alpha
        self.d = d_hidden
        self.lamda = lamda

    def train(
        self,
        link_X="sigmoid",
        link_Y="sigmoid",
        weight_X=None,
        weight_Y=None,
        optim_steps=401,
        verbose=100,
        lr=0.005,
    ):
        """
        Train matrices U, V, and Z with Adam.

        Parameters
        ----------
        link_X : str
            Link function of matrix X: 'sigmoid', 'linear' or 'log'.
        link_Y : str
            Link function of matrix Y: 'sigmoid', 'linear' or 'log'.
        weight_X : numpy.ndarray
            Weight of each element of X of loss function (default: all ones).
        weight_Y : numpy.ndarray
            Weight of each element of Y of loss function (default: all ones).
        optim_steps : int
            Number of repetitions of Adam.
        verbose : int
            How many steps to output the progress.
        lr : int
            Learning rate.

        Returns
        -------
        U_ : numpy.ndarray
            Trained matrix U.
        V_ : numpy.ndarray
            Trained matrix V.
        Z_ : numpy.ndarray
            Trained matrix Z.

        Raises
        ------
        ValueError
            If link_X or link_Y is not one of VALID_LINKS.  (Previously an
            unknown link name surfaced later as an UnboundLocalError inside
            the loss closure.)
        """
        # Validate link names up front so a typo fails immediately and clearly.
        for arg_name, link in (("link_X", link_X), ("link_Y", link_Y)):
            if link not in self.VALID_LINKS:
                raise ValueError(
                    "{} must be one of {}, got {!r}".format(arg_name, self.VALID_LINKS, link)
                )

        # Initialization by normal distribution.
        U = tf.keras.backend.variable(
            tf.random.truncated_normal([self.X.shape[0], self.d], 0, 1.0),
            dtype=tf.float32,
        )
        V = tf.keras.backend.variable(
            tf.random.truncated_normal([self.d, self.X.shape[1]], 0, 1.0),
            dtype=tf.float32,
        )
        Z = tf.keras.backend.variable(
            tf.random.truncated_normal([self.d, self.Y.shape[1]], 0, 1.0),
            dtype=tf.float32,
        )

        # Definition of correct answer data.
        X = tf.keras.backend.constant(self.X, dtype=tf.float32)
        Y = tf.keras.backend.constant(self.Y, dtype=tf.float32)

        # Positive number subtracted from the argument when the link function = sigmoid.
        if link_X == "sigmoid":
            sig_biasX = tf.keras.backend.variable(12, dtype=tf.float32)
        if link_Y == "sigmoid":
            sig_biasY = tf.keras.backend.variable(12, dtype=tf.float32)

        # Weight of each element of X and Y of loss function. If not specified, all will be 1.0.
        if weight_X is None:  # Range is {0, 1} or [0, 1]
            weight_X = np.ones_like(X)
        if weight_Y is None:  # Range is {0, 1} or [0, 1]
            weight_Y = np.ones_like(Y)

        def loss(alpha=self.alpha, lamda=self.lamda):
            """
            Calculate the loss function.

            Parameters
            ----------
            alpha : int
                Y weight of loss function.
            lamda : int
                Regularization weight.

            Returns
            -------
            loss_all : tensorflow.python.framework.ops.EagerTensor
            """
            # Softplus keeps the effective factors strictly positive.
            U_ = tf.nn.softplus(U)
            V_ = tf.nn.softplus(V)
            Z_ = tf.nn.softplus(Z)

            # For X, calculate loss according to the set link function.
            if link_X == "sigmoid":  # Range is {0, 1} or [0, 1]
                X_ = (
                    tf.matmul(U_, V_) - sig_biasX
                )  # Inner product > 0, subtract only sig_biasX.
                loss_X = tf.math.reduce_mean(
                    weight_X
                    * (X * tf.math.softplus(-X_) + (1 - X) * tf.math.softplus(X_))
                )  # sigmoid + cross_entropy
            elif link_X == "linear":  # Range is (0, inf)
                X_ = tf.matmul(U_, V_)
                loss_X = tf.math.reduce_mean(weight_X * tf.square(X - X_))
            elif link_X == "log":  # Range is (-inf, inf)
                X_ = tf.math.log(tf.matmul(U_, V_))
                loss_X = tf.math.reduce_mean(weight_X * tf.square(X - X_))

            # For Y, calculate loss according to the set link function.
            if link_Y == "sigmoid":  # Range is {0, 1} or [0, 1]
                Y_ = (
                    tf.matmul(U_, Z_) - sig_biasY
                )  # Inner product > 0, subtract only sig_biasY.
                loss_Y = tf.math.reduce_mean(
                    weight_Y
                    * (Y * tf.math.softplus(-Y_) + (1 - Y) * tf.math.softplus(Y_))
                )  # sigmoid + cross_entropy
            elif link_Y == "linear":  # Range is (0, inf)
                Y_ = tf.matmul(U_, Z_)
                loss_Y = tf.math.reduce_mean(weight_Y * tf.square(Y - Y_))
            elif link_Y == "log":  # Range is (-inf, inf)
                Y_ = tf.math.log(tf.matmul(U_, Z_))
                loss_Y = tf.math.reduce_mean(weight_Y * tf.square(Y - Y_))

            # L2-style regularization over the positive factors.
            norm = (
                tf.math.reduce_euclidean_norm(U_)
                + tf.math.reduce_euclidean_norm(V_)
                + tf.math.reduce_euclidean_norm(Z_)
            )

            # Loss function
            loss_all = loss_X + alpha * loss_Y + lamda * norm
            return loss_all

        # Actual calculation from here.
        opt = tf.optimizers.Adam(learning_rate=lr)
        loss_record = []
        for times in range(optim_steps):
            loss_ = lambda: loss()
            loss_record.append(loss_().numpy())
            # Change the combination of variables to update depending on whether each link function is sigmoid.
            if link_X == "sigmoid":
                if link_Y == "sigmoid":
                    opt.minimize(loss_, var_list=[U, V, Z, sig_biasX, sig_biasY])
                else:
                    opt.minimize(loss_, var_list=[U, V, Z, sig_biasX])
            else:
                if link_Y == "sigmoid":
                    opt.minimize(loss_, var_list=[U, V, Z, sig_biasY])
                else:
                    opt.minimize(loss_, var_list=[U, V, Z])
            if verbose > 0:
                if times % verbose == 0:
                    print(
                        "[Info] At time-step {}, loss is {}".format(
                            times, loss_record[-1]
                        )
                    )

        # Apply softplus when outputting.
        U_ = tf.nn.softplus(U).numpy()
        V_ = tf.nn.softplus(V).numpy()
        Z_ = tf.nn.softplus(Z).numpy()
        return U_, V_, Z_
e20a8d95b63861ccc768d4ea908159a54c8a95bd | 447 | py | Python | UniquePaths.py | pauloadaoag/leetcode | 6e6078ab237b2c912b9e0806a0bdf731fd2204e9 | [
"MIT"
] | null | null | null | UniquePaths.py | pauloadaoag/leetcode | 6e6078ab237b2c912b9e0806a0bdf731fd2204e9 | [
"MIT"
] | null | null | null | UniquePaths.py | pauloadaoag/leetcode | 6e6078ab237b2c912b9e0806a0bdf731fd2204e9 | [
"MIT"
] | null | null | null | class Solution:
# @return an integer
def uniquePaths(self, m, n):
if ((m == 0) or (n == 0)):
return 0
if (n > m):
return self.uniquePaths(n, m)
row = [1] * m
for i in range(1, n):
# print row
r2 = [1]
last = 1
for j in range(1, m):
last = last + row[j]
r2.append(last)
row = r2
return row.pop()
| 22.35 | 39 | 0.39821 | 446 | 0.997763 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 0.069351 |
e20aae6a3c7bc628ede1c78033fd0f2eaf838cb5 | 5,080 | py | Python | Spider/ScanningSpider/spiders/CVEDetails.py | halftion/discern | f304b0557aa0c7293566ced9075d9ff9365ea424 | [
"BSD-3-Clause"
] | 7 | 2021-04-20T14:06:28.000Z | 2022-01-25T16:25:29.000Z | Spider/ScanningSpider/spiders/CVEDetails.py | halftion/discern | f304b0557aa0c7293566ced9075d9ff9365ea424 | [
"BSD-3-Clause"
] | 1 | 2021-11-08T02:44:17.000Z | 2021-11-08T02:44:17.000Z | Spider/ScanningSpider/spiders/CVEDetails.py | halftion/discern | f304b0557aa0c7293566ced9075d9ff9365ea424 | [
"BSD-3-Clause"
] | 8 | 2021-02-22T01:42:46.000Z | 2022-01-25T12:53:26.000Z | import re
import scrapy
import util
from ScanningSpider.items import CVEItem
from ScanningSpider.items import CVEDetailItem
class CVEDetails(scrapy.Spider):
    """Spider that crawls cvedetails.com yearly vulnerability listings.

    Flow: start_requests (one listing per year) -> parseList (paginate)
    -> parseInfo (one CVEItem per table row, plus a detail request)
    -> parseDetail (one CVEDetailItem per affected product row).
    """
    name = "cve_detail"
    allowed_domains = ['cvedetails.com']
    base_url = 'https://www.cvedetails.com/vulnerability-list/year-'
    baer_detail_url = "https://www.cvedetails.com/"
    # start_urls = ['https://www.cvedetails.com/vulnerability-list/year-2019/vulnerabilities.html']

    # Initial page entry: one listing page per year, 2019 back to 1999.
    def start_requests(self):
        for year in range(2019, 1998, -1):
            url = self.base_url + str(year) + '/vulnerabilities.html'
            yield scrapy.Request(url, self.parseList)

    # Pagination entry: fan out to every page of the year's listing.
    def parseList(self, response):
        num = int(response.xpath('//div[@id="pagingb"]/b/text()').get())
        # Total page count: 50 entries per page, round up.
        pages = num // 50
        if num % 50 > 0:
            pages += 1
        # Take the first pagination link and substitute each page number.
        url = "https://www.cvedetails.com" + response.xpath('//div[@id="pagingb"]/a')[0].xpath('./@href').get()
        for page in range(1, pages + 1):
            sub_url = re.sub('page=1', 'page=' + str(page), url)
            yield scrapy.Request(sub_url, self.parseInfo)

    # Per-page handler: one CVEItem per listing row.
    def parseInfo(self, response):
        # Select the row and summary lists once instead of re-running the
        # full-document XPath for every single field (was quadratic).
        rows = response.xpath('//table[@id="vulnslisttable"]/tr[@class="srrowns"]')
        summaries = response.xpath('//table[@id="vulnslisttable"]/tr/td[@class="cvesummarylong"]')
        for num, row in enumerate(rows):
            cells = row.xpath('./td')
            cve = CVEItem()
            cve['cve_id'] = cells[1].xpath('./a/text()').get()
            cve['cve_url'] = cells[1].xpath('./a/@href').get()
            cve['cwe_id'] = cells[2].xpath('./a/text()').get()
            cve['exp'] = cells[3].xpath('./b/text()').get().strip()
            cve['vulnerability_type'] = cells[4].xpath('./text()').get().strip()
            cve['score'] = cells[7].xpath('./div/text()').get()
            # Gained access level
            cve['gainedaccess_level'] = cells[8].xpath('./text()').get()
            # Access vector (remote / local)
            cve['access'] = cells[9].xpath('./text()').get()
            # Complexity
            cve['complexity'] = cells[10].xpath('./text()').get()
            # Authentication requirement
            cve['authentication'] = cells[11].xpath('./text()').get()
            # Confidentiality impact
            cve['confidentiality'] = cells[12].xpath('./text()').get()
            # Integrity impact
            cve['integrity'] = cells[13].xpath('./text()').get()
            # Availability impact
            cve['availability'] = cells[14].xpath('./text()').get()
            cve['description'] = summaries[num].xpath('./text()').get().strip()
            yield cve
            # dont_filter avoids scrapy's duplicate-request filtering here.
            url = self.baer_detail_url + cve['cve_url']
            yield scrapy.Request(url, self.parseDetail, meta={'cve_id': cve['cve_id']}, dont_filter=True)

    # Detail-page handler: one CVEDetailItem per affected-product row.
    def parseDetail(self, response):
        cve_id = response.meta['cve_id']
        rows = response.xpath('//table[@id="vulnprodstable"]/tr')
        # Skip the header row (index 0), as the original range(1, len) did.
        for row in rows[1:]:
            cells = row.xpath('./td')
            detail = CVEDetailItem()
            detail['cve_id'] = cve_id
            detail['product_type'] = cells[1].xpath('./text()').get().strip()
            detail['vendor'] = cells[2].xpath('./a/text()').get().strip()
            detail['product'] = cells[3].xpath('./a/text()').get().strip()
            detail['version'] = cells[4].xpath('./text()').get().strip()
            detail['update'] = cells[5].xpath('./text()').get().strip()
            detail['edition'] = cells[6].xpath('./text()').get().strip()
            detail['language'] = cells[7].xpath('./text()').get().strip()
            yield detail
e20d9cffb20687d16b9b32885b1c7eab97057a0f | 21,383 | py | Python | dep/scm.py | harveyt/dep | 5a52fda5ce75033c240c52fd98d3ffde99ed6617 | [
"MIT"
] | null | null | null | dep/scm.py | harveyt/dep | 5a52fda5ce75033c240c52fd98d3ffde99ed6617 | [
"MIT"
] | null | null | null | dep/scm.py | harveyt/dep | 5a52fda5ce75033c240c52fd98d3ffde99ed6617 | [
"MIT"
] | null | null | null | #
# Source Code Management
# ======================
#
# %%LICENSE%%
#
import os
import re
from dep import opts
from dep.helpers import *
class Repository:
    """Abstract working copy of a project: a working directory, an origin URL,
    a VCS kind ("git" or "file") and a short name.  VCS-specific behaviour
    lives in the subclasses; this class holds shared state and the factory."""

    def __init__(self, work_dir, url, vcs, name):
        self.work_dir = work_dir
        self.url = url
        self.vcs = vcs
        self.name = name
        # Last recorded branch/commit; unknown until loaded from config or disk.
        self.branch = None
        self.commit = None

    def write_state_to_config_section(self, section):
        """Persist url/vcs always; branch/commit only when they are set."""
        section["url"] = self.url
        section["vcs"] = self.vcs
        for key in ("branch", "commit"):
            value = getattr(self, key)
            if value:
                section[key] = value

    def read_state_from_config_section(self, section):
        """Load branch/commit from a config section, defaulting to None."""
        for key in ("branch", "commit"):
            setattr(self, key, section[key] if section.has_key(key) else None)

    def read_state_from_disk(self):
        """Default: nothing on disk to inspect; subclasses override."""

    @staticmethod
    def determine_vcs_from_url(url):
        # TODO: Hard coded for now
        return "git"

    @staticmethod
    def determine_vcs_from_work_dir(work_dir):
        # TODO: Hard coded for now
        return "git" if GitRepository.is_present(work_dir) else "file"

    @staticmethod
    def determine_name_from_url(url):
        # TODO: Hard coded for now
        base = os.path.basename(url)
        if base.endswith(".git"):
            base = base[:-len(".git")]
        return base

    @staticmethod
    def create(work_dir, url=None, name=None, parent=None):
        """Factory: build the appropriate Repository subclass, inferring any
        of url/name/work_dir that were not supplied."""
        if url is None:
            if work_dir is None:
                error("Cannot create repository with no URL and no working directory")
            url = "file://{}".format(work_dir)
            vcs = Repository.determine_vcs_from_work_dir(work_dir)
        else:
            vcs = Repository.determine_vcs_from_url(url)
        if name is None:
            name = Repository.determine_name_from_url(url)
        if work_dir is None:
            work_dir = os.path.join(os.getcwd(), name)
        # TODO: Support more VCS
        if vcs == "git":
            return GitRepository(work_dir, url, name, parent)
        if vcs == "file":
            return FileRepository(work_dir, url)
        error("Cannot determine VCS from repository URL '{}'", url)

    def debug_dump(self, prefix=""):
        """Dump all state through debug(); only active with --debug and not --quiet."""
        if not opts.args.debug or opts.args.quiet:
            return
        debug("{}--- {} ---", prefix, self)
        for attr in ("work_dir", "url", "vcs", "name", "branch", "commit"):
            debug("{}{} = {}", prefix, attr, getattr(self, attr))
        self._debug_dump_contents(prefix)

    def _debug_dump_contents(self, prefix):
        """Hook for subclasses to dump additional state."""
class FileRepository(Repository):
    """Repository for a plain directory with no version control.

    Every VCS operation degrades to a harmless no-op; the tree is always
    reported as having local modifications, and status() always reports True.
    """

    def __init__(self, work_dir, url):
        repo_name = Repository.determine_name_from_url(url)
        Repository.__init__(self, work_dir, url, "file", repo_name)

    def __str__(self):
        return "{} '{}'".format(self.__class__.__name__, self.work_dir)

    def register(self, path):
        """No-op: nothing to register outside version control."""

    def unregister(self, path):
        """No-op."""

    def pre_edit(self, path):
        """No-op."""

    def post_edit(self, path):
        """No-op."""

    def download(self):
        """No-op: the directory already exists on disk."""

    def checkout(self, branch=None, commit=None):
        """No-op: there are no branches or commits to check out."""

    def has_ignore(self, path):
        """Nothing is ever ignored in an unversioned directory."""
        return False

    def add_ignore(self, path):
        """No-op."""

    def remove_ignore(self, path):
        """No-op."""

    def has_local_modifications(self):
        """Always True: an untracked tree can never be proven unmodified."""
        return True

    def refresh(self):
        """No-op."""

    def record(self):
        """No-op: there is no commit state to record."""

    def merge_branch(self, name):
        """No-op."""

    def status(self, path, kw):
        """Report status as clean (returns True), printing nothing."""
        return True

    def create_branch(self, name, startpoint):
        """No-op."""

    def create_worktree(self, branch_name):
        """No-op; returns None."""
class GitRepository(Repository):
def __init__(self, work_dir, url, name, parent):
if parent is not None and not isinstance(parent, GitRepository):
error("GitRepository must have Git parent repository or no parent")
Repository.__init__(self, work_dir, url, "git", name)
self.parent = parent
self.dot_git_path = os.path.join(work_dir, ".git")
self.git_dir = self._compute_git_dir()
self.git_common_dir = self._compute_git_common_dir()
self.worktree_path = self._compute_worktree_path()
self.ignore_file = os.path.join(work_dir, ".gitignore")
self.quiet_flag = "--quiet" if opts.args.quiet else None
def __str__(self):
return "{} '{}'".format(self.__class__.__name__, self.git_dir)
def _debug_dump_contents(self, prefix):
debug("{}parent = {}", prefix, self.parent)
debug("{}dot_git_path = {}", prefix, self.dot_git_path)
debug("{}git_dir = {}", prefix, self.git_dir)
debug("{}git_common_dir = {}", prefix, self.git_common_dir)
debug("{}worktree_path = {}", prefix, self.worktree_path)
debug("{}ignore_file = {}", prefix, self.ignore_file)
debug("{}quiet_flag = {}", prefix, self.quiet_flag)
def read_state_from_disk(self):
if os.path.exists(self.dot_git_path):
self.branch = self._get_branch()
self.commit = self._get_commit()
def _read_git_dir(self):
try:
git_dir = None
with open(self.dot_git_path, 'r') as f:
for line in f:
m = re.match(r"^gitdir:\s+(.*)$", line)
if m:
git_dir = m.group(1)
break
if git_dir is None:
error("Cannot find gitdir in '{}'", self.dot_git_path)
if not os.path.isabs(git_dir):
git_dir = os.path.join(self.work_dir, git_dir)
return git_dir
except IOError, e:
error("Cannot open '{}' for reading: {}", self.dot_git_path, e)
def _compute_git_dir(self):
# If .git exists as directory, either root or old style so use that always.
# If .git exists as file, contents determines actual git directory location always.
if os.path.isdir(self.dot_git_path):
return self.dot_git_path
elif os.path.isfile(self.dot_git_path):
return self._read_git_dir()
# If root project, simply use the .git directory.
if self.parent is None:
return self.dot_git_path
deps_path = os.path.join("deps", self.name)
git_dir = os.path.join(self.parent.git_common_dir, deps_path)
if self.parent.worktree_path is not None:
git_dir = os.path.join(git_dir, "worktrees/.UNKNOWN.")
return git_dir
def _is_separate_git_dir(self):
return self.git_dir != self.dot_git_path
def _get_separate_git_dir_flag(self):
return "--separate-git-dir" if self._is_separate_git_dir() else None
def _get_separate_git_dir_arg(self):
return self.git_dir if self._is_separate_git_dir() else None
def _compute_git_common_dir(self):
# The repository git_dir is one of:
# WORK_DIR/.git/worktrees/WORKTREE_ID
# WORK_DIR/.git/deps/NAME/worktrees/WORKTREE_ID
m = re.match(r"(.*/\.git(/deps/[^/]*)?)/worktrees/[^/]*$", self.git_dir)
if m:
return m.group(1)
return self.git_dir
def _compute_worktree_path(self):
if self.parent is None:
# Root is a worktree if git_dir and git_common_dir are different
if self.git_dir == self.git_common_dir:
return None
common_root = os.path.dirname(self.git_common_dir)
return os.path.relpath(self.work_dir, common_root)
# Other repositories inherit from parent
return self.parent.worktree_path
@staticmethod
def is_present(work_dir):
dot_git_path = os.path.join(work_dir, ".git")
return os.path.exists(dot_git_path)
def register(self, path):
run("git", "add", path, cwd=self.work_dir)
def unregister(self, path):
run("git", "rm", "--cached", path, cwd=self.work_dir)
def pre_edit(self, path):
pass
def post_edit(self, path):
run("git", "add", path, cwd=self.work_dir)
def _worktree_add(self):
self.parent.debug_dump("parent: ")
self.debug_dump("local: ")
dep_to_root_path = os.path.relpath(self.parent.work_dir, self.work_dir)
dep_path = os.path.relpath(self.work_dir, self.parent.work_dir)
worktree_path = os.path.join(dep_to_root_path, self.worktree_path, dep_path)
parent_common_root = os.path.dirname(self.parent.git_common_dir)
worktree_common_dir = os.path.join(parent_common_root, dep_path)
branch_name = self._branch_name_from_ref(self.branch)
debug("dep_to_root_path={}", dep_to_root_path)
debug("dep_path={}", dep_path)
debug("worktree_path={}", worktree_path)
debug("parent_common_root={}", parent_common_root)
debug("worktree_common_dir={}", worktree_common_dir)
debug("branch_name={}", branch_name)
status("Adding worktree {}\n on branch '{}'",
self.work_dir, branch_name)
run("git", "worktree", "add", worktree_path, branch_name, cwd=worktree_common_dir)
# NOTE: The git_dir will be incorrect (unknown) until after it is created, must update.
self.git_dir = self._compute_git_dir()
self.debug_dump("worktree: ")
def _clone(self):
status("Downloading {}\n from '{}'", self, self.url)
if self._is_separate_git_dir():
make_dirs(os.path.dirname(self.git_dir))
run("git", "clone",
self.quiet_flag, self._get_separate_git_dir_flag(), self._get_separate_git_dir_arg(),
"--no-checkout", self.url, self.work_dir)
def download(self):
validate_dir_notexists_or_empty(self.work_dir)
validate_dir_notexists(self.git_dir)
if self.worktree_path is not None:
self._worktree_add()
else:
self._clone()
def _is_working_dir_empty(self):
work_dir_contents = filter(lambda entry: not entry in [".", "..", ".git"], os.listdir(self.work_dir))
return len(work_dir_contents) == 0
def _need_checkout(self, branch=None, commit=None, force=False):
debug("_need_checkout: force={}", force)
if force or self._is_working_dir_empty():
return True
if branch is not None:
cur_branch = self._get_branch()
debug("_need_checkout: cur_branch={} required={}", cur_branch, branch)
if cur_branch != branch:
return True
if commit is not None:
cur_commit = self._get_commit()
debug("_need_checkout: cur_commit={} required={}", cur_commit, commit)
if cur_commit != commit:
return True
return False
def checkout(self, branch=None, commit=None):
if not self._need_checkout(branch=branch, commit=commit):
return
branch_flag = None if branch is None or commit is None else "-B"
branch_name = None if branch is None else self._branch_name_from_ref(branch)
commit_flag = None if commit is None else commit
branch_mesg = "" if branch is None else "\n on branch '{}'".format(branch)
commit_mesg = "" if commit is None else "\n at commit '{}'".format(commit)
status("Checkout {}{}{}\n in '{}'", self, branch_mesg, commit_mesg, self.work_dir)
run("git", "checkout", self.quiet_flag, branch_flag, branch_name, commit_flag, cwd=self.work_dir)
def _read_ignore(self):
if not os.path.exists(self.ignore_file):
return []
try:
ignores = []
with open(self.ignore_file, 'r') as f:
for line in f:
line = line.strip()
ignores.append(line)
return ignores
except IOError, e:
error("Cannot open '{}' for reading: {}", self.ignore_file, e)
def has_ignore(self, path):
path = "/" + path
ignores = self._read_ignore()
return path in ignores
def add_ignore(self, path):
verbose("Adding '{}' to ignore file '{}'", path, self.ignore_file)
if opts.args.dry_run:
return
# TODO: With git we know we can just post_edit the file to do the right thing.
# TODO: With out vcs we might need register/pre_edit.
try:
with open(self.ignore_file, 'a') as f:
f.write('/{}\n'.format(path))
except IOError, e:
error("Cannot open '{}' for writing: {}'", self.ignore_file, e)
self.post_edit(self.ignore_file)
def remove_ignore(self, path):
verbose("Removing '{}' from ignore file '{}'", path, self.ignore_file)
if opts.args.dry_run:
return
if not os.path.exists(self.ignore_file):
# TODO: There is no ignore file, so cannot remove?
return
# TODO: With git we know we can just post_edit the file to do the right thing.
# TODO: With out vcs we might need pre_edit.
ignores = self._read_ignore()
try:
with open(self.ignore_file, 'w') as f:
for ignore in ignores:
if ignore != "/" + path:
f.write('{}\n'.format(ignore))
except IOError, e:
error("Cannot open '{}' for writing: {}'", self.ignore_file, e)
self.post_edit(self.ignore_file)
# TODO: Remove if ignore file is now empty?
def _is_status_conflict(self, line):
style = line[0:2]
if style == "DD" or style == "AU" or style == "UD" or style == "UA":
return True
if style == "DU" or style == "AA" or style == "UU":
return True
return False
def _get_status(self):
ahead = 0
behind = 0
changes = 0
conflicts = 0
with Pipe("git", "status", "--porcelain", "--branch", cwd=self.work_dir) as p:
for line in p:
m = re.match(r"##\s+[^[]*(\[(\s*ahead\s+(\d+)\s*)?,?(\s*behind\s+(\d+)\s*)?\])?", line)
if m:
ahead = m.group(3) if m.group(3) else 0
behind = m.group(5) if m.group(5) else 0
else:
if self._is_status_conflict(line):
conflicts = conflicts + 1
else:
changes = changes + 1
return (changes, ahead, behind, conflicts)
def _is_merge_in_progress(self):
# Local modifications if merge is in progress so merge will be committed.
merge_head_file = os.path.join(self.git_dir, "MERGE_HEAD")
return os.path.exists(merge_head_file)
def has_local_modifications(self):
return self._is_merge_in_progress() or self._get_status()[0] > 0
def is_ahead(self):
return self._get_status()[1] > 0
def refresh(self):
check_local = True
if not os.path.exists(self.work_dir):
check_local = False
if not os.path.exists(self.git_dir):
self.download()
if check_local and self.has_local_modifications():
error("{} has local modifications, not refreshed", self)
self.checkout(self.branch, self.commit)
def _get_branch(self):
branch = run_query("git", "rev-parse", "--symbolic-full-name", "HEAD", cwd=self.work_dir).rstrip("\n")
# TODO: Check it is valid!
if branch == "HEAD":
# Detached head is not supported (yet), need to checkout a branch.
# TODO: Support checkout of tag and arbitary commit - pick the first sensible branch containing that commit.
error("{} is checked out with a detached head, not yet supported; checkout a branch (not a tag)", self)
return branch
def _get_commit(self):
commit = run_query("git", "rev-parse", "HEAD", cwd=self.work_dir).rstrip("\n")
# TODO: Check it is valid!
return commit
def _get_describe(self):
actual_branch = self._get_branch()
describe = run_query("git", "describe", "--tags", "--always", cwd=self.work_dir).rstrip("\n")
# TODO: Check it is valid!
return describe
def record(self):
new_branch = self._get_branch()
new_commit = self._get_commit()
if new_branch != self.branch or new_commit != self.commit:
self.branch = new_branch
self.commit = new_commit
status("""Recording {}
at commit '{}'
on branch '{}'""", self, self.commit, self.branch)
def _branch_name_from_ref(self, ref):
return re.sub(r"refs/heads/", "", ref)
def merge_branch(self, name):
run("git", "merge", self.quiet_flag, "--no-commit", "--no-ff", name, cwd=self.work_dir, allow_failure=True)
def status(self, path, kw):
if kw.get('status_long'):
return self.status_long(path, kw)
else:
return self.status_short(path, kw)
def status_short(self, path, kw):
branch = self.branch
commit = self.commit
actual_branch = self._get_branch()
actual_commit = self._get_commit()
changes, ahead, behind, conflicts = self._get_status()
merging = self._is_merge_in_progress()
# Determine modification state
if changes is None:
mod = "?"
elif conflicts:
mod = "C"
elif changes:
mod = "*"
elif merging:
mod = ">"
else:
mod = " "
# Deteremine branch and commit differences
if branch is None:
branch_diff = " "
else:
branch_diff = (" " if branch == actual_branch else "*")
if commit is None:
commit_diff = " "
else:
commit_diff = (" " if commit == actual_commit else "*")
# Determine ahead/behind
ahead = "?" if ahead is None else ahead
behind = "?" if behind is None else behind
# Determine values to show
actual_branch = self._branch_name_from_ref(actual_branch)
show_commit = kw.get('status_commit')
show_describe = kw.get('status_describe')
if not show_commit and not show_describe:
show_commit = (actual_branch != "master")
show_describe = (actual_branch == "master")
if not show_commit or show_describe:
actual_commit = self._get_describe()
commit_value = commit_diff + actual_commit
branch_value = branch_diff + actual_branch
lead = ("## " if kw.get('status_long') else "")
if kw.get('status_first'):
status("{}M Branch Commit Push Pull Path", lead)
status("{}- --------------- ---------------------------------------- ---- ---- --------------------------", lead)
status("{}{:1} {:16} {:41} {:>4} {:>4} {}", lead, mod, branch_value, commit_value, ahead, behind, path)
return self._status_is_clean(mod, branch_diff, commit_diff, ahead, behind, kw)
def _status_is_clean(self, mod, branch_diff, commit_diff, ahead, behind, kw):
if mod != " ":
return False
if branch_diff != " ":
return False
if commit_diff != " ":
return False
if kw.get('status_push_clean') and ahead != 0:
return False
if kw.get('status_pull_clean') and behind != 0:
return False
return True
def status_long(self, path, kw):
status_seperator()
kw['status_first'] = True
is_clean = self.status_short(path, kw)
status("")
run("git", "status", "--long", cwd=self.work_dir)
status("")
return is_clean
def create_branch(self, name, startpoint):
starting = ("\n with start point '{}'".format(startpoint) if startpoint is not None else "")
status("Branch {}\n to branch '{}'{}", self, name, starting)
run("git", "checkout", "-b", name, startpoint, cwd=self.work_dir)
def create_worktree(self, branch_name):
    """Add a git worktree for *branch_name* under ``<work_dir>/branch/``
    and return a Repository rooted at the new worktree."""
    worktree_root = "branch"
    worktree_path = os.path.join(worktree_root, branch_name)
    work_dir = os.path.join(self.work_dir, worktree_path)
    status("Adding worktree {}\n on branch '{}'", work_dir, branch_name)
    # NOTE(review): unlike the other git invocations in this class this
    # call passes no cwd=self.work_dir — confirm it is intentional.
    run("git", "worktree", "add", worktree_path, branch_name)
    # Ensure worktree_root is ignored.
    if not self.has_ignore(worktree_root):
        self.add_ignore(worktree_root)
    # Create a .deproot so root finding does not go through "branch" to parent directories.
    deproot_path = os.path.join(self.work_dir, worktree_root, ".deproot")
    if not os.path.exists(deproot_path):
        # Touch an empty marker file.
        open(deproot_path, 'a').close()
    return Repository.create(work_dir)
| 38.389587 | 127 | 0.582191 | 21,240 | 0.993312 | 0 | 0 | 1,674 | 0.078286 | 0 | 0 | 4,338 | 0.202871 |
e20dfc2a207d87cb746506a4b2801706084ee98d | 966 | py | Python | env/Lib/site-packages/promise/compat.py | nerdyator/graphene-django-cookbook | e19aec7a6e910898558b16e910c7392b0b219390 | [
"MIT"
] | 2 | 2019-01-07T07:11:05.000Z | 2019-11-10T18:08:40.000Z | env/Lib/site-packages/promise/compat.py | nerdyator/graphene-django-cookbook | e19aec7a6e910898558b16e910c7392b0b219390 | [
"MIT"
] | null | null | null | env/Lib/site-packages/promise/compat.py | nerdyator/graphene-django-cookbook | e19aec7a6e910898558b16e910c7392b0b219390 | [
"MIT"
] | 2 | 2017-10-18T14:12:35.000Z | 2020-03-24T06:01:23.000Z | try:
from asyncio import Future, iscoroutine, ensure_future # type: ignore
except ImportError:
class Future: # type: ignore
def __init__(self):
raise Exception("You need asyncio for using Futures")
def set_result(self):
raise Exception("You need asyncio for using Futures")
def set_exception(self):
raise Exception("You need asyncio for using Futures")
def ensure_future(): # type: ignore
raise Exception("ensure_future needs asyncio for executing")
def iscoroutine(obj): # type: ignore
return False
try:
    # Python 2 location of the synchronized queue class.
    from Queue import Queue  # type: ignore # flake8: noqa
except ImportError:
    # Python 3 renamed the module to lowercase "queue".
    from queue import Queue  # type: ignore # flake8: noqa

try:
    # iterate_promise relies on "yield from", which is a SyntaxError on
    # interpreters that predate PEP 380 (Python < 3.3); importing it can
    # therefore raise SyntaxError as well as ImportError.
    from .iterate_promise import iterate_promise
except (SyntaxError, ImportError):
    def iterate_promise(promise):
        # Fallback stub: fail loudly if promise iteration is attempted.
        raise Exception(
            'You need "yield from" syntax for iterate in a Promise.')
e20e1a653bf9813e2805ad343ed123d0380ed481 | 3,579 | py | Python | examples/run_flexible_building_optimal_operation.py | sonercandas/fledge | ac107df1236d898ea31f62a60a568cf00a9bda08 | [
"MIT"
] | 2 | 2020-05-25T03:06:55.000Z | 2020-09-30T11:14:27.000Z | examples/run_flexible_building_optimal_operation.py | sonercandas/fledge | ac107df1236d898ea31f62a60a568cf00a9bda08 | [
"MIT"
] | null | null | null | examples/run_flexible_building_optimal_operation.py | sonercandas/fledge | ac107df1236d898ea31f62a60a568cf00a9bda08 | [
"MIT"
] | null | null | null | """Example script for setting up and solving a flexible building optimal operation problem."""
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pyomo.environ as pyo
import fledge.config
import fledge.database_interface
import fledge.der_models
import fledge.electric_grid_models
import fledge.power_flow_solvers
def main():
    """Set up and solve an optimal-operation problem for one flexible
    building in the scenario, then print and (optionally) plot results."""

    # Settings.
    scenario_name = 'singapore_tanjongpagar'
    plots = True  # If True, script may produce plots.

    # Recreate / overwrite database, to incorporate changes in the CSV files.
    fledge.database_interface.recreate_database()

    # Obtain data.
    scenario_data = fledge.database_interface.ScenarioData(scenario_name)
    der_data = fledge.database_interface.DERData(scenario_name)
    price_data = fledge.database_interface.PriceData(scenario_name)

    # Obtain price timeseries.
    price_name = 'energy'
    price_timeseries = price_data.price_timeseries_dict[price_name]

    # Obtain model.
    der_name = der_data.flexible_buildings['der_name'][0]  # Pick first `der_name`.
    flexible_building_model = fledge.der_models.FlexibleBuildingModel(der_data, der_name)

    # Instantiate optimization problem.
    optimization_problem = pyo.ConcreteModel()

    # Define variables.
    flexible_building_model.define_optimization_variables(optimization_problem)

    # Define constraints.
    flexible_building_model.define_optimization_constraints(optimization_problem)

    # Disable thermal grid connection: force cooling power to zero at
    # every timestep.
    optimization_problem.der_connection_constraints = pyo.ConstraintList()
    for timestep in scenario_data.timesteps:
        optimization_problem.der_connection_constraints.add(
            0.0
            ==
            optimization_problem.output_vector[timestep, der_name, 'grid_thermal_power_cooling']
        )

    # Define objective.
    flexible_building_model.define_optimization_objective(optimization_problem, price_timeseries)

    # Solve optimization problem.
    optimization_solver = pyo.SolverFactory(fledge.config.solver_name)
    optimization_result = optimization_solver.solve(optimization_problem, tee=fledge.config.solver_output)
    try:
        # NOTE(review): `assert` statements are stripped under `python -O`;
        # a plain `if` check would make this validation unconditional.
        assert optimization_result.solver.termination_condition is pyo.TerminationCondition.optimal
    except AssertionError:
        raise AssertionError(f"Solver termination condition: {optimization_result.solver.termination_condition}")
    # optimization_problem.display()

    # Obtain results.
    (
        state_vector,
        control_vector,
        output_vector
    ) = flexible_building_model.get_optimization_results(
        optimization_problem
    )

    # Print results.
    print(f"state_name = \n{state_vector.to_string()}")
    print(f"control_name = \n{control_vector.to_string()}")
    print(f"output_name = \n{output_vector.to_string()}")

    # Plot results.
    if plots:
        # One figure per output, with its admissible band and the optimum.
        for output_name in flexible_building_model.output_names:
            plt.plot(flexible_building_model.output_maximum_timeseries[output_name], label="Maximum", drawstyle='steps-post')
            plt.plot(flexible_building_model.output_minimum_timeseries[output_name], label="Minimum", drawstyle='steps-post')
            plt.plot(output_vector[output_name], label="Optimal", drawstyle='steps-post')
            plt.legend()
            plt.title(f"Output: {output_name}")
            plt.show()
            plt.close()

        plt.plot(price_timeseries['price_value'], drawstyle='steps-post')
        plt.title(f"Price: {price_name}")
        plt.show()
        plt.close()


if __name__ == '__main__':
    main()
| 35.435644 | 125 | 0.734283 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 965 | 0.269628 |
e20e70f3f6889113f4850d284104c41e6354001c | 1,307 | py | Python | Searching_and_Recursion_Model_project/07_csSearchRotatedSortedArray.py | sarahmarie1976/CSPT15_DS_ALGO_SEARCH_RECURSION_GP | 68e0bb01e54727cbc629bf1041190a02724f0d0d | [
"MIT"
] | null | null | null | Searching_and_Recursion_Model_project/07_csSearchRotatedSortedArray.py | sarahmarie1976/CSPT15_DS_ALGO_SEARCH_RECURSION_GP | 68e0bb01e54727cbc629bf1041190a02724f0d0d | [
"MIT"
] | null | null | null | Searching_and_Recursion_Model_project/07_csSearchRotatedSortedArray.py | sarahmarie1976/CSPT15_DS_ALGO_SEARCH_RECURSION_GP | 68e0bb01e54727cbc629bf1041190a02724f0d0d | [
"MIT"
] | null | null | null | """
For a given positive integer n determine if it can be represented as a sum of two Fibonacci numbers (possibly equal).
Example
For n = 1, the output should be
fibonacciSimpleSum2(n) = true.
Explanation: 1 = 0 + 1 = F0 + F1.
For n = 11, the output should be
fibonacciSimpleSum2(n) = true.
Explanation: 11 = 3 + 8 = F4 + F6.
For n = 60, the output should be
fibonacciSimpleSum2(n) = true.
Explanation: 11 = 5 + 55 = F5 + F10.
For n = 66, the output should be
fibonacciSimpleSum2(n) = false.
Input/Output
[execution time limit] 4 seconds (py3)
[input] integer n
Guaranteed constraints:
1 ≤ n ≤ 2 · 109.
[output] boolean
true if n can be represented as Fi + Fj, false otherwise.
"""
def csSearchRotatedSortedArray(nums, target):
    """Binary-search *target* in a rotated ascending array.

    Returns the index of ``target`` in ``nums`` or -1 when absent.
    Runs in O(log n); handles the empty array.

    At each step exactly one half of [start, end] is sorted; we test
    whether the target lies inside that sorted half and discard the
    other half accordingly.  (The previous version compared only
    against nums[start]/nums[end] and missed targets such as 1 in
    [5, 1, 2, 3, 4]; it also printed debug output on every iteration.)
    """
    start = 0
    end = len(nums) - 1
    while start <= end:
        mid = start + (end - start) // 2
        if nums[mid] == target:
            return mid
        if nums[start] <= nums[mid]:
            # Left half [start, mid] is sorted.
            if nums[start] <= target < nums[mid]:
                end = mid - 1
            else:
                start = mid + 1
        else:
            # Right half [mid, end] is sorted.
            if nums[mid] < target <= nums[end]:
                start = mid + 1
            else:
                end = mid - 1
    return -1
| 22.534483 | 117 | 0.553175 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 711 | 0.541921 |
e20f893f4ebed523aabfdbe3b19d3e3678100a96 | 1,291 | py | Python | 2020/day_09/day09.py | d02d33pak/Advent-Of-Code | 765b0302c256ad61864095a537a3f6379901b1c2 | [
"MIT"
] | null | null | null | 2020/day_09/day09.py | d02d33pak/Advent-Of-Code | 765b0302c256ad61864095a537a3f6379901b1c2 | [
"MIT"
] | null | null | null | 2020/day_09/day09.py | d02d33pak/Advent-Of-Code | 765b0302c256ad61864095a537a3f6379901b1c2 | [
"MIT"
] | null | null | null | """
Advent of Code : Day 09
"""
from os import path
def parse_input(filename):
    """Read *filename* (relative to this script's directory) and return
    its lines parsed as a list of ints."""
    file_path = path.join(path.dirname(__file__), filename)
    with open(file_path, "r") as handle:
        return [int(line) for line in handle.read().splitlines()]
def find_pair(target, values):
    """Return True if two entries of *values* sum to *target*.

    Note: a value may pair with itself (e.g. target 4 with values {2}),
    since membership is tested against the full deduplicated set.
    """
    pool = set(values)
    return any(target - candidate in pool for candidate in pool)
# PART 1
def part1(values, window=25):
    """Return the first value that is NOT the sum of two of the
    preceding *window* values, or -1 if every value checks out."""
    for offset, value in enumerate(values[window:]):
        if not find_pair(value, values[offset:offset + window]):
            return value
    return -1
# PART 2
def part2(values, window=25):
    """Find a contiguous run of values summing to the part-1 target and
    return min(run) + max(run)."""
    target = part1(values, window)
    sum_, index, offset = 0, 0, 0
    # Grow the candidate run values[offset : offset + index] until its sum
    # reaches the target; on overshoot, restart the scan one position
    # further right.
    while sum_ != target:
        if sum_ < target:
            sum_ += values[offset + index]
            index += 1
        if sum_ > target:
            offset += 1
            sum_ = 0
            index = 0
    # NOTE(review): if no such run exists this loop never terminates (or
    # raises IndexError) — acceptable for puzzle input, worth confirming.
    min_of_range = min(values[offset : offset + index])
    max_of_range = max(values[offset : offset + index])
    return min_of_range + max_of_range
| 20.492063 | 62 | 0.569326 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 172 | 0.13323 |
e2101c00fb0005b243b277050e3c456984d8e6cc | 1,746 | py | Python | companies/migrations/0003_auto_20210221_1537.py | Ins-V/wc_crm | 5d75907bb48e892328712ed0b2cf96b9083239aa | [
"MIT"
] | null | null | null | companies/migrations/0003_auto_20210221_1537.py | Ins-V/wc_crm | 5d75907bb48e892328712ed0b2cf96b9083239aa | [
"MIT"
] | null | null | null | companies/migrations/0003_auto_20210221_1537.py | Ins-V/wc_crm | 5d75907bb48e892328712ed0b2cf96b9083239aa | [
"MIT"
] | null | null | null | # Generated by Django 3.1.7 on 2021-02-21 13:37
import companies.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1.7: introduces Email and Phone models
    # and attaches both to Company via many-to-many fields.

    dependencies = [
        ('companies', '0002_auto_20210221_1408'),
    ]

    operations = [
        # Email: a labelled e-mail address; verbose names are Russian.
        migrations.CreateModel(
            name='Email',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('owner', models.CharField(max_length=150, verbose_name='владелец')),
                ('address', models.EmailField(max_length=254, verbose_name='адрес электронной почты')),
            ],
            options={
                'verbose_name': 'email',
                'verbose_name_plural': 'emails',
            },
        ),
        # Phone: a labelled phone number validated by the project-level
        # PhoneValidator.
        migrations.CreateModel(
            name='Phone',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('owner', models.CharField(max_length=150, verbose_name='владелец')),
                ('number', models.CharField(max_length=15, validators=[companies.validators.PhoneValidator], verbose_name='номер')),
            ],
            options={
                'verbose_name': 'телефон',
                'verbose_name_plural': 'телефоны',
            },
        ),
        # Link companies to the new models.
        migrations.AddField(
            model_name='company',
            name='emails',
            field=models.ManyToManyField(to='companies.Email', verbose_name='emails'),
        ),
        migrations.AddField(
            model_name='company',
            name='phones',
            field=models.ManyToManyField(to='companies.Phone', verbose_name='phones'),
        ),
    ]
| 35.632653 | 132 | 0.563574 | 1,682 | 0.93289 | 0 | 0 | 0 | 0 | 0 | 0 | 441 | 0.244592 |
e2103297434d37fcf54c7a0433384b0abc0f2a63 | 1,622 | py | Python | Problemset/longest-palindromic-substring/longest-palindromic-substring.py | KivenCkl/LeetCode | fcc97c66f8154a5d20c2aca86120cb37b9d2d83d | [
"MIT"
] | 7 | 2019-05-08T03:41:05.000Z | 2020-12-22T12:39:43.000Z | Problemset/longest-palindromic-substring/longest-palindromic-substring.py | Yuziquan/LeetCode | 303fc1c8af847f783c4020bd731b28b72ed92a35 | [
"MIT"
] | 1 | 2021-07-19T03:48:35.000Z | 2021-07-19T03:48:35.000Z | Problemset/longest-palindromic-substring/longest-palindromic-substring.py | Yuziquan/LeetCode | 303fc1c8af847f783c4020bd731b28b72ed92a35 | [
"MIT"
] | 7 | 2019-05-10T20:43:20.000Z | 2021-02-22T03:47:35.000Z |
# @Title: 最长回文子串 (Longest Palindromic Substring)
# @Author: KivenC
# @Date: 2019-06-12 15:25:33
# @Runtime: 136 ms
# @Memory: 13.1 MB
class Solution:
    """Longest palindromic substring via Manacher's algorithm, O(n)."""

    def longestPalindrome(self, s: str) -> str:
        """Return the longest palindromic substring of ``s``."""
        # Interleave '#' sentinels so odd- and even-length palindromes are
        # handled uniformly; '$' guards the left boundary.
        padded = '$#' + '#'.join(s) + '#'
        radius = [0] * len(padded)
        right, center = 0, 0           # furthest-reaching palindrome seen so far
        best_len, best_center = 0, 0   # best result, in padded coordinates
        for i in range(1, len(padded)):
            # Mirror of i around center gives a lower bound on radius[i],
            # capped by the distance to the known right edge.
            radius[i] = min(radius[2 * center - i], right - i) if right > i else 1
            while (i - radius[i] >= 0 and i + radius[i] < len(padded)
                   and padded[i + radius[i]] == padded[i - radius[i]]):
                radius[i] += 1
            if right < i + radius[i] - 1:
                right = i + radius[i] - 1
                center = i
            if best_len < radius[i]:
                best_len = radius[i]
                best_center = i
        # Map padded coordinates back onto the original string.
        return s[(best_center - best_len + 1) // 2:(best_center + best_len - 1) // 2]
| 33.102041 | 97 | 0.421085 | 1,543 | 0.911939 | 0 | 0 | 0 | 0 | 0 | 0 | 950 | 0.561466 |
e2115b0532e79c3a412eaca3018e8b49164b6980 | 1,176 | py | Python | Tools/SystemDebug/python/tca.py | tomlenth/oneAPI-samples | 67df7461a8484a6b928f41158e9b7f9c38eef7bd | [
"MIT"
] | 1 | 2020-12-07T22:50:13.000Z | 2020-12-07T22:50:13.000Z | Tools/SystemDebug/python/tca.py | tomlenth/oneAPI-samples | 67df7461a8484a6b928f41158e9b7f9c38eef7bd | [
"MIT"
] | 1 | 2020-08-24T19:09:28.000Z | 2020-08-24T19:09:28.000Z | Tools/SystemDebug/python/tca.py | tomlenth/oneAPI-samples | 67df7461a8484a6b928f41158e9b7f9c38eef7bd | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
'''
==============================================================
Copyright © 2019 Intel Corporation
SPDX-License-Identifier: MIT
==============================================================
'''
import intel.tca as tca
# Resolve the target definition identified by "whl_u_cnp_lp".
target = tca.get_target(id="whl_u_cnp_lp")
# Pair each component with its newest known stepping.
components = [(c.component, tca.latest(c.steppings))
              for c in target.components]
component_config = tca.ComponentWithSelectedSteppingList()
for comp in components:
    config_tmp = tca.ComponentWithSelectedStepping()
    config_tmp.component, config_tmp.stepping = comp
    # NOTE(review): config_tmp is built but never added to
    # component_config, so the list appears to stay empty — confirm
    # whether an append/add call is missing here.
supported_connections = target.get_supported_connection_configurations(
    component_config)
def conn_filter(conn: tca.ConnectionConfiguration) -> bool:
    """Accept only IPC connections whose selection mentions "CCA"."""
    if conn.type != tca.ConnectionType_IPC:
        return False
    return "CCA" in conn.ipc_configuration.selection
# First supported connection that passes the filter.
# NOTE(review): next() raises StopIteration when no CCA/IPC connection
# is available — confirm that is the intended failure mode.
connection_config = next(filter(conn_filter, supported_connections))
profile = tca.Profile()
profile.name = "My TCA profile"
profile.target = target
profile.component_configuration = component_config
profile.connection_configuration = connection_config
# Activate the profile and open the connection.
tca.load(profile)
tca.connect()
e212dd1d544f98d4df805c27386f3ef17f1c4a1e | 1,570 | py | Python | src/tests/assembly/structured_config/assembled_config/test_assembled_config.py | fabio-d/fuchsia-stardock | e57f5d1cf015fe2294fc2a5aea704842294318d2 | [
"BSD-2-Clause"
] | 5 | 2022-01-10T20:22:17.000Z | 2022-01-21T20:14:17.000Z | src/tests/assembly/structured_config/assembled_config/test_assembled_config.py | fabio-d/fuchsia-stardock | e57f5d1cf015fe2294fc2a5aea704842294318d2 | [
"BSD-2-Clause"
] | null | null | null | src/tests/assembly/structured_config/assembled_config/test_assembled_config.py | fabio-d/fuchsia-stardock | e57f5d1cf015fe2294fc2a5aea704842294318d2 | [
"BSD-2-Clause"
] | null | null | null | # Copyright 2022 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import pathlib
import subprocess
import sys
from run_assembly import run_product_assembly
def main():
parser = argparse.ArgumentParser(
description=
"Ensure that ffx assembly product adds structured config with the right ffx config."
)
parser.add_argument(
"--ffx-bin",
type=pathlib.Path,
required=True,
help="Path to ffx binary.")
parser.add_argument(
"--product-assembly-config",
type=pathlib.Path,
required=True,
help="Path to product assembly configuration input.")
parser.add_argument(
"--input-bundles-dir",
type=pathlib.Path,
required=True,
help="Path to input bundles directory.")
parser.add_argument(
"--outdir",
type=pathlib.Path,
required=True,
help="Path to output directory.")
parser.add_argument(
"--stamp",
type=pathlib.Path,
required=True,
help="Path to stampfile for telling ninja we're done.")
args = parser.parse_args()
output = run_product_assembly(
ffx_bin=args.ffx_bin,
product=args.product_assembly_config,
input_bundles=args.input_bundles_dir,
outdir=args.outdir,
extra_config=["assembly_example_enabled=true"])
output.check_returncode()
with open(args.stamp, 'w') as f:
pass # creates the file
| 29.074074 | 92 | 0.649682 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 550 | 0.350318 |
e2136cfd1dbd979eb4930b72b614fe5fadbbe82e | 7,388 | py | Python | vaas-app/src/vaas/manager/api.py | allegro/vaas | 3d2d1f1a9dae6ac69a13563a37f9bfdf4f986ae2 | [
"Apache-2.0"
] | 251 | 2015-09-02T10:50:51.000Z | 2022-03-16T08:00:35.000Z | vaas-app/src/vaas/manager/api.py | allegro/vaas | 3d2d1f1a9dae6ac69a13563a37f9bfdf4f986ae2 | [
"Apache-2.0"
] | 154 | 2015-09-02T14:54:08.000Z | 2022-03-16T08:34:17.000Z | vaas-app/src/vaas/manager/api.py | allegro/vaas | 3d2d1f1a9dae6ac69a13563a37f9bfdf4f986ae2 | [
"Apache-2.0"
] | 31 | 2015-09-03T07:51:05.000Z | 2020-09-24T09:02:40.000Z | # -*- coding: utf-8 -*-
import logging
from celery.result import AsyncResult
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS, Resource
from tastypie import fields
from tastypie.fields import ListField
from tastypie.authentication import ApiKeyAuthentication, MultiAuthentication, SessionAuthentication
from vaas.external.api import ExtendedDjangoAuthorization as DjangoAuthorization
from vaas.external.tasty_validation import ModelCleanedDataFormValidation
from vaas.external.serializer import PrettyJSONSerializer
from vaas.cluster.api import DcResource
from vaas.manager.forms import ProbeModelForm, DirectorModelForm, BackendModelForm, TimeProfileModelForm
from vaas.manager.models import Backend, Probe, Director, TimeProfile, ReloadTask
from vaas.monitor.models import BackendStatus
from vaas.external.oauth import VaasMultiAuthentication
logger = logging.getLogger('vaas')
class TimeProfileResource(ModelResource):
class Meta:
queryset = TimeProfile.objects.all()
resource_name = 'time_profile'
serializer = PrettyJSONSerializer()
authorization = DjangoAuthorization()
authentication = VaasMultiAuthentication(ApiKeyAuthentication())
validation = ModelCleanedDataFormValidation(form_class=TimeProfileModelForm)
always_return_data = True
filtering = {
'max_connections': ['exact'],
'connect_timeout': ['exact'],
'first_byte_timeout': ['exact'],
'between_bytes_timeout': ['exact']
}
class ProbeResource(ModelResource):
class Meta:
queryset = Probe.objects.all()
resource_name = 'probe'
serializer = PrettyJSONSerializer()
authorization = DjangoAuthorization()
authentication = VaasMultiAuthentication(ApiKeyAuthentication())
validation = ModelCleanedDataFormValidation(form_class=ProbeModelForm)
always_return_data = True
filtering = {
'name': ['exact'],
'url': ['exact'],
'expected_response': ['exact']
}
class DirectorResource(ModelResource):
probe = fields.ForeignKey(ProbeResource, 'probe', full=True)
time_profile = fields.ForeignKey(TimeProfileResource, 'time_profile', full=True)
backends = fields.ToManyField(
'vaas.manager.api.BackendResource', 'backends', null=True
)
cluster = fields.ToManyField(
'vaas.cluster.api.LogicalClusterResource', 'cluster', null=True, full=True
)
class Meta:
queryset = Director.objects.all()
resource_name = 'director'
serializer = PrettyJSONSerializer()
authorization = DjangoAuthorization()
authentication = VaasMultiAuthentication(ApiKeyAuthentication(), SessionAuthentication())
validation = ModelCleanedDataFormValidation(form_class=DirectorModelForm)
always_return_data = True
filtering = {
'name': ['exact'],
'enabled': ['exact'],
'probe': ALL_WITH_RELATIONS,
'cluster': ALL_WITH_RELATIONS,
'service': ['exact'],
'virtual': ['exact'],
'service_tag': ['exact'],
'reachable_via_service_mesh': ['exact'],
}
def save_m2m(self, bundle):
try:
new_uris = bundle.obj.new_clusters_uris
bundle.obj.new_clusters = [cluster.obj for cluster in bundle.data['cluster']
if cluster.data['resource_uri'] in new_uris]
logger.info("[DirectorResource.save_m2m()] new_clusters = %s", bundle.obj.new_clusters)
except (AttributeError, KeyError):
pass
return super(DirectorResource, self).save_m2m(bundle)
def update_in_place(self, request, original_bundle, new_data):
try:
original_bundle.obj.old_clusters = list(original_bundle.obj.cluster.all())
except AttributeError:
original_bundle.obj.old_clusters = []
logger.info("[DirectorResource.update_in_place()] old_clusters = %s", original_bundle.obj.old_clusters)
try:
original_bundle.obj.new_clusters_uris = new_data['cluster']
except KeyError:
original_bundle.obj.new_clusters_uris = []
original_bundle.obj.new_data = new_data
return super(DirectorResource, self).update_in_place(request, original_bundle, new_data)
class BackendResource(ModelResource):
dc = fields.ForeignKey(DcResource, 'dc', full=True)
director = fields.ForeignKey(DirectorResource, 'director')
tags = ListField()
class Meta:
queryset = Backend.objects.all()
resource_name = 'backend'
serializer = PrettyJSONSerializer()
authorization = DjangoAuthorization()
authentication = VaasMultiAuthentication(ApiKeyAuthentication())
validation = ModelCleanedDataFormValidation(form_class=BackendModelForm)
always_return_data = True
filtering = {
'dc': ALL_WITH_RELATIONS,
'director': ALL_WITH_RELATIONS,
'address': ['exact'],
'port': ['exact']
}
def dehydrate(self, bundle):
status = BackendStatus.objects.filter(address=bundle.data['address'],
port=bundle.data['port'])
if len(status) > 0:
bundle.data['status'] = status[0].status
else:
bundle.data['status'] = "Unknown"
bundle.data['time_profile'] = {
'max_connections': bundle.obj.director.time_profile.max_connections,
'connect_timeout': bundle.obj.director.time_profile.connect_timeout,
'first_byte_timeout': bundle.obj.director.time_profile.first_byte_timeout,
'between_bytes_timeout': bundle.obj.director.time_profile.between_bytes_timeout
}
return bundle
def build_filters(self, filters=None, ignore_bad_filters=False):
if filters is None:
filters = {}
orm_filters = super(BackendResource, self).build_filters(filters, ignore_bad_filters=ignore_bad_filters)
if 'tag' in filters:
orm_filters['tags__name__in'] = filters['tag'].split(',')
return orm_filters
def dehydrate_tags(self, bundle):
return list(map(str, bundle.obj.tags.all()))
def hydrate_tags(self, bundle):
if isinstance(bundle.data.get('tags'), list):
bundle.data['tags'] = ','.join(bundle.data['tags'])
elif bundle.data.get('tags') is None:
bundle.data['tags'] = ''
return bundle
def save_m2m(self, bundle):
tags = bundle.data.get('tags', [])
bundle.obj.tags.set(*tags)
return super(BackendResource, self).save_m2m(bundle)
class ReloadTaskResource(Resource):
status = fields.CharField(attribute='status')
info = fields.CharField(attribute='info')
class Meta:
resource_name = 'task'
list_allowed_methods = ['get']
authorization = DjangoAuthorization()
authentication = VaasMultiAuthentication(ApiKeyAuthentication())
fields = ['status', 'info']
include_resource_uri = True
def obj_get(self, bundle, **kwargs):
task = AsyncResult(kwargs['pk'])
return ReloadTask(kwargs['pk'], task.status, '{}'.format(task.info))
def get_object_list(self, request):
return None
| 38.279793 | 112 | 0.662426 | 6,472 | 0.876015 | 0 | 0 | 0 | 0 | 0 | 0 | 918 | 0.124256 |
e213fa9b060bf34620b9e001f44b9f1d8d1ad80f | 331 | py | Python | Examples/StackImgExample.py | Mohak-CODING-HEAVEN/CVPRO | 09a2cb4a428738c9e77f17b71469d55eff5e3699 | [
"MIT"
] | 5 | 2021-07-24T18:20:11.000Z | 2022-03-23T09:58:27.000Z | Examples/StackImgExample.py | Mohak-CODING-HEAVEN/cvpro | 09a2cb4a428738c9e77f17b71469d55eff5e3699 | [
"MIT"
] | null | null | null | Examples/StackImgExample.py | Mohak-CODING-HEAVEN/cvpro | 09a2cb4a428738c9e77f17b71469d55eff5e3699 | [
"MIT"
] | null | null | null | from cvpro import stackImages
import cv2
# Demo: grab webcam frames and tile colour/grayscale copies on one canvas.
cap = cv2.VideoCapture(0)  # default camera (device 0)
while True:
    success, img = cap.read()
    imgGray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Mix of 3-channel and 1-channel images; presumably stackImages
    # tolerates the channel mismatch when tiling — verify against cvpro.
    imgList = [img, img, imgGray, img, imgGray]
    # Tile into 2 columns, scaled to 50%.
    imgStacked = stackImages(imgList, 2, 0.5)
    cv2.imshow("stackedImg", imgStacked)
    cv2.waitKey(1)  # ~1 ms delay keeps the window responsive
e2151dbd8d042f1a314638218c6335ca02b0727a | 513 | py | Python | tests/unit/test_models.py | Joel-Milligan/physics-tutor-assignment | 9db0b871c947bb19886026c06bcd04cefd852d98 | [
"MIT"
] | null | null | null | tests/unit/test_models.py | Joel-Milligan/physics-tutor-assignment | 9db0b871c947bb19886026c06bcd04cefd852d98 | [
"MIT"
] | null | null | null | tests/unit/test_models.py | Joel-Milligan/physics-tutor-assignment | 9db0b871c947bb19886026c06bcd04cefd852d98 | [
"MIT"
] | null | null | null | from datetime import datetime
from app.models import Assessment, User
def test_new_user():
    """Constructing a User stores the fields and transforms the password."""
    signup = datetime(2020, 1, 1)
    user = User('usernam1', 'password', signup, True)
    assert user.username == 'usernam1'
    # Stored password must differ from the plaintext that was passed in.
    assert user.password != 'password'
    assert user.signup_date == signup
    assert user.is_admin == True
def test_new_assessment():
    """Constructing an Assessment stores question and answer verbatim."""
    assessment = Assessment('question', '56')
    assert assessment.question == 'question'
    assert assessment.answer == '56'
| 30.176471 | 71 | 0.71345 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 68 | 0.132554 |
e21623ec5e7d6ad7295ce59ea422e61906ff7a77 | 2,390 | py | Python | micro/config.py | lastseal/micro-config | e082c329262d1b5613c1641a6cdad98a0be2a5a6 | [
"MIT"
] | null | null | null | micro/config.py | lastseal/micro-config | e082c329262d1b5613c1641a6cdad98a0be2a5a6 | [
"MIT"
] | null | null | null | micro/config.py | lastseal/micro-config | e082c329262d1b5613c1641a6cdad98a0be2a5a6 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*
from datetime import datetime
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
import dataset
import logging
import dotenv
import signal
import sys
import os
class SlackHandler(logging.Handler):
    """Logging handler that forwards ERROR-and-above records to a Slack
    channel via the Slack Web API."""

    def __init__(self, token, channel, username):
        super().__init__()
        # Only errors and worse get pushed to Slack.
        self.setLevel(logging.ERROR)
        self.client = WebClient(token=token)
        self.channel = channel
        self.username = username

    def emit(self, record):
        try:
            message = self.format(record)
            response = self.client.chat_postMessage(
                channel=self.channel,
                username=self.username,
                text=message
            )
            logging.debug(response)
        except SlackApiError as ex:
            # Printed rather than logged — presumably to avoid re-entering
            # the logging machinery on failure; logging.Handler.handleError
            # is the conventional fallback. Confirm before changing.
            print(ex)
# Load environment variables from ./.env (relative to the current
# working directory, not this file).
dotenv_path = os.path.join(os.getcwd(), ".env")
dotenv.load_dotenv(dotenv_path)

LOG_LEVEL = os.getenv("LOG_LEVEL") or 'INFO'

# DEBUG mode adds file/line information to every log record.
if LOG_LEVEL.lower() == "debug":
    logging.basicConfig(
        format='%(asctime)s.%(msecs).03d [%(pathname)s:%(lineno)d] %(levelname)s - %(message)s',
        level=logging.DEBUG
    )
else:
    logging.basicConfig(
        format='%(asctime)s.%(msecs).03d %(levelname)s - %(message)s',
        level=logging.INFO
    )

# Attach the Slack error handler to the root logger only when all three
# settings are present.
SLACK_BOT_TOKEN = os.getenv('SLACK_BOT_TOKEN')
SLACK_CHANNEL = os.getenv('SLACK_CHANNEL')
SLACK_USERNAME = os.getenv('SLACK_USERNAME')

if SLACK_BOT_TOKEN and SLACK_CHANNEL and SLACK_USERNAME:
    logging.getLogger("").addHandler(SlackHandler(SLACK_BOT_TOKEN, SLACK_CHANNEL, SLACK_USERNAME))


def handle_sigint(signum, frame):
    # Ctrl-C: log at info level and exit with status 0.
    logging.info("sigint received (%d)", signum)
    sys.exit(0)


def handle_sigterm(signum, frame):
    # Termination request: log as a warning and exit with status 0.
    logging.warning("sigterm received (%d)", signum)
    sys.exit(0)


signal.signal(signal.SIGINT, handle_sigint)
signal.signal(signal.SIGTERM, handle_sigterm)

# Configuration store: the "config" table of the database addressed by
# CONFIG_URL (defaults to a local SQLite file).
CONFIG_URL = os.getenv('CONFIG_URL') or "sqlite:///mydatabase.db"
db = dataset.connect(CONFIG_URL)
table = db['config']
##
#
def get(name, default=None, type=None):
    """Look up a configuration value by *name*.

    Returns *default* when the name is absent.  Pass ``type=datetime``
    to parse the stored string back into a datetime object.
    """
    row = table.find_one(name=name)
    if row is None:
        return default
    raw = row['value']
    if type is datetime:
        return datetime.strptime(raw, "%Y-%m-%d %H:%M:%S.%f")
    return raw
##
#
def set(name, value):
    """Insert or update the configuration entry *name* (value stored as str)."""
    table.upsert({'name': name, 'value': str(value)}, ['name'])
| 23.663366 | 100 | 0.648954 | 618 | 0.258577 | 0 | 0 | 0 | 0 | 0 | 0 | 367 | 0.153556 |
354d0a0e8ce535bc378cbced25f44f2527b5fa3a | 373 | py | Python | bindings/python/capstone/__init__.py | zouguangxian/capstone | a1818520dfb37596cc5a3f19f3e04412c4c66dca | [
"BSD-3-Clause"
] | 1 | 2021-07-06T23:36:41.000Z | 2021-07-06T23:36:41.000Z | bindings/python/capstone/__init__.py | zouguangxian/capstone | a1818520dfb37596cc5a3f19f3e04412c4c66dca | [
"BSD-3-Clause"
] | null | null | null | bindings/python/capstone/__init__.py | zouguangxian/capstone | a1818520dfb37596cc5a3f19f3e04412c4c66dca | [
"BSD-3-Clause"
] | null | null | null | from capstone import Cs, CsError, cs_disasm_quick, cs_version, CS_API_MAJOR, CS_API_MINOR, CS_ARCH_ARM, CS_ARCH_ARM64, CS_ARCH_MIPS, CS_ARCH_X86, CS_MODE_LITTLE_ENDIAN, CS_MODE_ARM, CS_MODE_THUMB, CS_OPT_SYNTAX, CS_OPT_SYNTAX_INTEL, CS_OPT_SYNTAX_ATT, CS_OPT_DETAIL, CS_OPT_ON, CS_OPT_OFF, CS_MODE_16, CS_MODE_32, CS_MODE_64, CS_MODE_BIG_ENDIAN, CS_MODE_MICRO, CS_MODE_N64
| 186.5 | 372 | 0.86059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
354d83637e37681157c4d9fc862645ada4cc9f0a | 2,841 | py | Python | tweetgen2.py | NISH1001/tweetypie | 334be291e87cac4f5df2d66aec58dd4c9e95b985 | [
"MIT"
] | 4 | 2019-05-08T10:00:20.000Z | 2021-06-09T08:40:40.000Z | tweetgen2.py | NISH1001/tweetypie | 334be291e87cac4f5df2d66aec58dd4c9e95b985 | [
"MIT"
] | null | null | null | tweetgen2.py | NISH1001/tweetypie | 334be291e87cac4f5df2d66aec58dd4c9e95b985 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from collections import defaultdict
import random
import sys
from preprocess import (
preprocess_sentence
)
from data import (
load_df
)
class MarkovChain:
    """Word-level Markov chain text generator.

    Builds an n-gram transition table (``lookback`` words of context)
    from training sentences, then samples new sentences from it.
    """

    def __init__(self, lookback=2):
        # trie maps a context string ("w1 w2") to a mapping of
        # next-word -> count, later normalized to probabilities.
        self.trie = defaultdict(lambda: defaultdict(int))
        self.lookback = lookback
        self.tweets = []

    def train(self, lines):
        """Build (or extend) the Markov model from an iterable of sentences."""
        self.tweets += lines
        print("Building trie...")
        for title in lines:
            tokens = title.split()
            # Sentences shorter than the context window carry no signal.
            if len(tokens) > self.lookback:
                # i runs one past the last token so the final context maps
                # to '', which acts as the end-of-sentence marker.
                for i in range(len(tokens) + 1):
                    a = ' '.join(tokens[max(0, i - self.lookback):i])
                    b = ' '.join(tokens[i:i + 1])
                    self.trie[a][b] += 1
        self._build_probabilities()

    def _build_probabilities(self):
        """Normalize the per-context counts into probabilities in place."""
        print("Building probabilities...")
        for word, following in self.trie.items():
            total = float(sum(following.values()))
            for key in following:
                following[key] /= total

    def _sample(self, items):
        """Weighted-sample one key from (key, probability) pairs.

        Single-pass reservoir-style weighted sampling; returns None when
        *items* is empty.
        """
        next_word = None
        t = 0.0
        for k, v in items:
            t += v
            if t and random.random() < v / t:
                next_word = k
        return next_word

    def generate(self, initial_words=None):
        """Generate one sentence, optionally seeded with *initial_words*.

        The seed list is never mutated.  Generation stops at the
        end-of-sentence marker ('') or when the current context was never
        seen during training (_sample returns None; previously this
        crashed with a TypeError in the final join).
        """
        # None default avoids the mutable-default-argument pitfall.
        initial_words = initial_words or []
        sentence = initial_words[:-1]
        next_word = self._sample(self.trie[''].items()) if not initial_words else initial_words[-1]
        # '' marks end-of-sentence; None means the context is unknown —
        # both are falsy, so either terminates the loop.
        while next_word:
            sentence.append(next_word)
            next_word = self._sample(self.trie[' '.join(sentence[-self.lookback:])].items())
        return ' '.join(sentence)
def main():
    """CLI entry point: train a Markov chain on the tweet corpus and
    print one generated tweet.

    Usage: tweetgen2.py <lookback> <word1> <word2> ...
    """
    tweetfile = "data/tweets/clean/clean.csv"
    # df = load_df(tweetfile)
    # Minimal hand-rolled argument parsing.
    args = sys.argv[1:]
    if len(args) < 2:
        print("LOL. Please input in the format <loopback value> <word1> <word2> ...")
        print("Example: tweetgen2.py 2 my life")
        return
    n = int(args[0])
    initial_words = args[1:]
    mc = MarkovChain(lookback=n)
    mc.train(load_df(tweetfile)['text'].values.tolist())
    # initial_words = ['we', 'tend', 'to']
    # initial_words = ['life', 'is']
    tweet = mc.generate(initial_words)
    print("Generated tweet::\n{}".format(tweet))
    print('-' * 30)
    print("After preprocessing <SENTENCE>::\n{}".format(preprocess_sentence(tweet)))


if __name__ == "__main__":
    main()
| 28.128713 | 99 | 0.551566 | 1,906 | 0.670891 | 0 | 0 | 0 | 0 | 0 | 0 | 719 | 0.25308 |
3550ddafa143f685dd1cca02d3526eff1e6cd743 | 4,042 | py | Python | Projects/VerilogOnline/archive/vo-tools-6/extra/converters/fig2json/archive/test.py | fredmorcos/attic | 0da3b94aa525df59ddc977c32cb71c243ffd0dbd | [
"Unlicense"
] | 2 | 2021-01-24T09:00:51.000Z | 2022-01-23T20:52:17.000Z | Projects/VerilogOnline/archive/vo-tools-6/extra/converters/fig2json/archive/test.py | fredmorcos/attic | 0da3b94aa525df59ddc977c32cb71c243ffd0dbd | [
"Unlicense"
] | 6 | 2020-02-29T01:59:03.000Z | 2022-02-15T10:25:40.000Z | Projects/VerilogOnline/archive/vo-tools-6/extra/converters/fig2json/archive/test.py | fredmorcos/attic | 0da3b94aa525df59ddc977c32cb71c243ffd0dbd | [
"Unlicense"
] | 1 | 2019-03-22T14:41:21.000Z | 2019-03-22T14:41:21.000Z | #!/usr/bin/python2
from pprint import PrettyPrinter
from argparse import ArgumentParser
def parse_image_header(input_file):
    """Parse the fixed header of a FIG-format file.

    Consumes lines from ``input_file`` up to and including the
    resolution/coordinate-system line, leaving the iterator positioned at
    the first object record.

    :param input_file: iterable of text lines (e.g. an open file object).
    :returns: dict with keys ``orientation``, ``justification``, ``units``,
        ``papersize``, ``magnification``, ``multipage``, ``transpcolor``,
        ``comment`` (``None`` if the optional '#' comment lines are absent),
        ``resolution`` and ``coordsystem``.
    """
    comment = None
    for num, line in enumerate(input_file):
        line = line.strip()
        if num == 0:
            # First line, has to be a comment with the version, but we don't care
            continue
        elif num == 1:
            # Orientation = Landscape | Portrait
            orientation = line
        elif num == 2:
            # Justification = Center | Flush Left
            justification = line
        elif num == 3:
            # Units = Metric | Inches
            units = line
        elif num == 4:
            # PaperSize = Letter | Legal | Ledger | Tabloid |
            #             A | B | C | D | E | A4 | A3 | A2 | A1 | A0 | B5
            papersize = line
        elif num == 5:
            # Magnification = FLOAT
            magnification = float(line)
        elif num == 6:
            # MultiPage = Single | Multiple
            multipage = (line == 'Multiple')
        elif num == 7:
            # TransparentColor = -3=bg | -2=none | -1=default |
            #                    0-31=stdcolors | 32-=usercolors
            transpcolor = int(line)
        elif line.startswith('#'):
            # Optional comment line(s).  Lazily switch from None to '' so
            # the concatenation below works (it used to raise TypeError by
            # doing ``None += str``) while callers still see None when no
            # comment was present.
            if comment is None:
                comment = ''
            comment += line
            comment += '\n'
        else:
            # Resolution = units/inch & CoordSystem = 1=LowerLeft | 2=UpperLeft
            tmp_line = line.split(' ')
            resolution = int(tmp_line[0])
            coordsystem = int(tmp_line[1])
            break  # VERY IMPORTANT: stop before the object records
    return {'orientation': orientation,
            'justification': justification,
            'units': units,
            'papersize': papersize,
            'magnification': magnification,
            'multipage': multipage,
            'transpcolor': transpcolor,
            'comment': comment,
            'resolution': resolution,
            'coordsystem': coordsystem}
def parse_objects(line, input_file):
    """Parse FIG object records, starting with the record in ``line``.

    NOTE(review): this looks like an unfinished stub -- the attribute
    fields are decoded but never stored, the point-reading loop is empty,
    and the returned list is always empty.  Confirm against the intended
    converter behaviour before relying on it.
    """
    objects = []
    fields = line.split(' ')
    if int(fields[0]) == 2:  # object class 2: polyline family
        # Decode the attribute fields in record order (values currently
        # unused; kept so malformed records fail the same way).
        sub_type = int(fields[1])        # 1=polyline, 2=box, 3=polygon, 4=arc-box, 5=pic
        line_style = int(fields[2])      # -1=Default, 0=Solid, 1=Dashed, 2=Dotted,
                                         # 3=Dash-dotted, 4=Dash-double-dotted,
                                         # 5=Dash-triple-dotted
        thickness = int(fields[3])
        pen_color = int(fields[4])
        fill_color = int(fields[5])
        depth = int(fields[6])           # 0...999
        pen_style = int(fields[7])       # NOT USED by the FIG format
        area_fill = int(fields[8])       # -1=not filled
        style_val = float(fields[9])
        join_style = int(fields[10])     # 0=Miter, 1=Round, 2=Bevel
        cap_style = int(fields[11])      # polyline only: 0=Butt, 1=Round, 2=Projecting
        radius = int(fields[12])
        forward_arrow = int(fields[13])  # 0=off, 1=on
        backward_arrow = int(fields[14])
        npoints = int(fields[15])
        # Drain the remaining input; the point data is not decoded yet.
        for _ in input_file:
            pass
    return objects
if __name__ == '__main__':
    argparser = ArgumentParser(description='Convert a FIG file')
    argparser.add_argument('input', help='Input FIG file to convert')
    # NOTE(review): 'output' and 'format' are parsed but never used below.
    argparser.add_argument('output', help='Output file')
    argparser.add_argument('format', help='Output format')
    args = argparser.parse_args()
    header = None
    pseudocolors = {}  # pseudo-colour number -> colour value string
    objects = []
    with open(args.input, 'r') as input_file:
        # The header parser consumes lines up to the resolution line, so the
        # loop below starts at the first object record.
        header = parse_image_header(input_file)
        for line in input_file:
            line = line.strip()
            if line.startswith('0'):
                # Object class 0: colour pseudo-object ("0 <number> <value>").
                tmp_line = line.split(' ')
                pseudocolors[int(tmp_line[1])] = tmp_line[2]
            else:
                # NOTE(review): this rebinds 'objects' on every non-colour
                # line instead of accumulating -- confirm intended.
                objects = parse_objects(line, input_file)
    # Dump what was parsed for inspection.
    pp = PrettyPrinter(indent=2)
    pp.pprint(header)
    pp.pprint(pseudocolors)
    pp.pprint(objects)
| 33.131148 | 81 | 0.542306 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,165 | 0.288224 |
3553237caf7967dd42d8b8a89fa00e8bcce96bdc | 834 | py | Python | k8s/python/repo.py | logevents/demo-jenkins | 0393637d71c9ced8dedfdaab1b8fc9d6abad40f0 | [
"Apache-2.0"
] | 2 | 2020-11-24T14:21:56.000Z | 2020-11-26T15:23:33.000Z | k8s/python/repo.py | logevents/demo-jenkins | 0393637d71c9ced8dedfdaab1b8fc9d6abad40f0 | [
"Apache-2.0"
] | null | null | null | k8s/python/repo.py | logevents/demo-jenkins | 0393637d71c9ced8dedfdaab1b8fc9d6abad40f0 | [
"Apache-2.0"
] | null | null | null | from http.server import HTTPServer, SimpleHTTPRequestHandler
class RepoRequestHandler(SimpleHTTPRequestHandler):
    """Tiny artifact-repository handler.

    GET serves files via SimpleHTTPRequestHandler; POST stores the request
    body as a file named after the URL path.
    """

    def _set_headers(self):
        # Reply 200 with a plain-text content type.
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()

    def _encode(self, text):
        return text.encode('utf8')

    def do_POST(self):
        length = int(self.headers['Content-Length'])
        payload = self.rfile.read(length)
        # NOTE(review): the file name is taken straight from the URL path
        # with no sanitisation, so '../' traversal is possible -- confirm
        # this only ever runs in a trusted environment.
        target = self.path[1:]
        with open(target, "w+b") as out:
            out.write(payload)
        self._set_headers()
        self.wfile.write(self._encode(f'{target} stored'))
# Bind the repo server on localhost only; serve_forever() blocks until the
# process is interrupted.
host = 'localhost'
port = 8080
print(f'simple artifact repo running on {host}:{port}')
httpd = HTTPServer((host, port), RepoRequestHandler)
httpd.serve_forever()
| 26.903226 | 61 | 0.657074 | 604 | 0.724221 | 0 | 0 | 0 | 0 | 0 | 0 | 133 | 0.159472 |
3553f25af58c7c7fe60ba270a87ee11946f7ca62 | 719 | py | Python | ExerciciosPYTHON/NovPython/012.py | Samuel-Melo890/Python-Desafios | 2abc7734d6a6c1f5ab67421f792d6889d93bac94 | [
"MIT"
] | null | null | null | ExerciciosPYTHON/NovPython/012.py | Samuel-Melo890/Python-Desafios | 2abc7734d6a6c1f5ab67421f792d6889d93bac94 | [
"MIT"
] | 2 | 2022-03-18T16:06:07.000Z | 2022-03-18T16:55:29.000Z | ExerciciosPYTHON/NovPython/012.py | Samuel-Melo890/Python-Desafios | 2abc7734d6a6c1f5ab67421f792d6889d93bac94 | [
"MIT"
] | null | null | null | print('='*8,'Inscritos','='*8)
from module.interface import *
from time import sleep
# Render the header/menu for the subscriber list.
menu('Lista de Inscritos')
# Print each registered name with its 1-based index, colourised via ANSI codes.
with open('Inscritos.txt') as arq:
    for o, n in enumerate(arq):
        print(f'\033[35m{o + 1}\033[m \033[36m{n.title()}\033[m')
        sleep(0.4)
print('-' * 40)
# Interactive loop: keep appending names until the user answers 'N'.
while True:
    r = ' '
    while r not in 'SN':
        # NOTE(review): pressing Enter with no text makes the [0] index
        # raise IndexError, and an empty string would satisfy `in 'SN'` --
        # consider guarding the input.
        r = str(input('Deseja adicionar uma pessoa? [S/N] ')).strip().upper()[0]
    if r in 'N':
        break
    # Append the new name on its own line.
    with open('Inscritos.txt', 'a') as arq:
        nome = str(input('Qual o nome da pessoa? ')).strip().title()
        arq.write(f'\n{nome}')
        sleep(1.5)
        print('\033[32mPessoa adicionada com SUCESSO!\033[m')
        sleep(0.5)
print('\033[36mPrograma Finalizado!\033[m')
| 28.76 | 80 | 0.582754 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 288 | 0.400556 |
355571e66551b980f466860ffabf578c8cf61baa | 1,771 | py | Python | pulse2percept/datasets/tests/test_nanduri2012.py | narenberg/pulse2percept | ca3aaf66672ccf3c9ee6a9a9d924184cdc6f031d | [
"BSD-3-Clause"
] | 40 | 2019-11-01T14:09:34.000Z | 2022-02-28T19:08:01.000Z | pulse2percept/datasets/tests/test_nanduri2012.py | narenberg/pulse2percept | ca3aaf66672ccf3c9ee6a9a9d924184cdc6f031d | [
"BSD-3-Clause"
] | 277 | 2019-11-22T03:30:31.000Z | 2022-03-28T00:11:03.000Z | pulse2percept/datasets/tests/test_nanduri2012.py | narenberg/pulse2percept | ca3aaf66672ccf3c9ee6a9a9d924184cdc6f031d | [
"BSD-3-Clause"
] | 31 | 2020-01-22T06:36:36.000Z | 2022-01-20T09:54:25.000Z | import pandas as pd
import numpy.testing as npt
from pulse2percept.datasets import load_nanduri2012
def test_load_nanduri2012():
    """Smoke-test the Nanduri et al. (2012) dataset loader."""
    data = load_nanduri2012(shuffle=False)
    npt.assert_equal(isinstance(data, pd.DataFrame), True)
    expected_columns = ['subject', 'implant', 'electrode', 'task',
                        'stim_class', 'freq', 'amp_factor', 'ref_stim_class',
                        'ref_amp_factor', 'ref_freq', 'brightness', 'size',
                        'pulse_dur', 'interphase_dur', 'pulse_type',
                        'varied_param']
    for column in expected_columns:
        npt.assert_equal(column in data.columns, True)
    npt.assert_equal(data.shape, (128, 17))
    npt.assert_equal(data.subject.unique(), ['S06'])
    # Shuffling keeps the index as a plain range but permutes the rows:
    data = load_nanduri2012(shuffle=True, random_state=42)
    npt.assert_equal(data.loc[0, 'subject'], 'S06')
    npt.assert_equal(data.loc[0, 'electrode'], 'B1')
    npt.assert_equal(data.loc[94, 'subject'], 'S06')
    npt.assert_equal(data.loc[94, 'electrode'], 'A4')
    # Electrode selection:
    data = load_nanduri2012(electrodes='A2')
    npt.assert_equal(data.shape, (16, 17))
    npt.assert_equal(data.electrode.unique(), 'A2')
    npt.assert_equal(data.subject.unique(), 'S06')
    data = load_nanduri2012(electrodes=['A1', 'A9'])  # 'A9' doesn't exist
    npt.assert_equal(data.shape, (0, 17))
    npt.assert_equal(data.electrode.unique(), 'A1')
    npt.assert_equal(data.subject.unique(), 'S06')
    # Task selection:
    data = load_nanduri2012(task='rate')
    npt.assert_equal(data.shape, (88, 17))
    npt.assert_equal(data.task.unique(), 'rate')
    data = load_nanduri2012(task='size')
    npt.assert_equal(data.shape, (40, 17))
    npt.assert_equal(data.task.unique(), 'size')
| 39.355556 | 79 | 0.671372 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 423 | 0.238848 |
355886956231c917bdbdf9d2101a62ff5e3cd95d | 293 | py | Python | arcsecond/api/endpoints/satellites.py | onekiloparsec/arcsecond.python | e4b22bf055c7f089ca9f0d6c4bda6314350878e0 | [
"MIT"
] | 7 | 2018-08-29T15:31:25.000Z | 2022-01-08T14:08:39.000Z | arcsecond/api/endpoints/satellites.py | onekiloparsec/arcsecond-python | e4b22bf055c7f089ca9f0d6c4bda6314350878e0 | [
"MIT"
] | 2 | 2018-10-21T07:42:26.000Z | 2020-02-24T10:11:22.000Z | arcsecond/api/endpoints/satellites.py | onekiloparsec/arcsecond-python | e4b22bf055c7f089ca9f0d6c4bda6314350878e0 | [
"MIT"
] | null | null | null | from ._base import APIEndPoint
class SatellitesAPIEndPoint(APIEndPoint):
    """API endpoint exposing satellite resources."""
    name = 'satellites'

    def _list_url(self, **filters):
        """Build the list URL, forwarding any query filters."""
        return self._build_url('satellites', **filters)

    def _detail_url(self, norad_number):
        """Build the detail URL for one satellite by NORAD number."""
        return self._build_url('satellites', norad_number)
| 24.416667 | 58 | 0.713311 | 259 | 0.883959 | 0 | 0 | 0 | 0 | 0 | 0 | 36 | 0.122867 |
35589980547cf3bbd203b94d5ac8dbe125b385c2 | 1,096 | py | Python | dask/TestNB2.py | mlkimmins/scalingpythonml | 517c6d3e14ce4eb331ab0fd3b0368e0bf10d9986 | [
"Apache-2.0"
] | 13 | 2020-02-09T16:03:10.000Z | 2022-03-19T14:08:16.000Z | dask/TestNB2.py | mlkimmins/scalingpythonml | 517c6d3e14ce4eb331ab0fd3b0368e0bf10d9986 | [
"Apache-2.0"
] | 3 | 2020-10-31T16:20:05.000Z | 2020-11-04T01:17:02.000Z | dask/TestNB2.py | mlkimmins/scalingpythonml | 517c6d3e14ce4eb331ab0fd3b0368e0bf10d9986 | [
"Apache-2.0"
] | 4 | 2020-12-21T22:23:16.000Z | 2022-03-29T20:25:28.000Z | #!/usr/bin/env python
# coding: utf-8
# Notebook-exported script: deploy a Dask cluster on Kubernetes and run a
# few demo computations.  The "# In[n]:" markers are the original cell
# boundaries.
# In[1]:
import dask
from dask_kubernetes import KubeCluster
import numpy as np
# In[ ]:
#tag::remote_lb_deploy[]
# In[2]:
# Specify a remote deployment using a load balancer, necessary for
# communication with the notebook from the cluster.
dask.config.set({"kubernetes.scheduler-service-type": "LoadBalancer"})
# In[4]:
cluster = KubeCluster.from_yaml('worker-spec.yaml', namespace='dask', deploy_mode='remote')
# In[ ]:
#end::remote_lb_deploy[]
# In[5]:
# Autoscale between 1 and 100 workers based on load.
cluster.adapt(minimum=1, maximum=100)
# In[6]:
# Example usage
from dask.distributed import Client
import dask.array as da
# Connect Dask to the cluster
client = Client(cluster)
# In[7]:
client.scheduler_comm.comm.handshake_info()
# In[8]:
# Create a large array and calculate the mean
array = da.ones((1000, 1000, 1000))
print(array.mean().compute())  # Should print 1.0
# In[9]:
print(array.mean().compute())
# In[10]:
print(array.sum().compute())
# In[13]:
dir(array)
# In[18]:
np.take(array, indices=[0, 10]).sum().compute()
# In[15]:
# In[ ]:
| 10.640777 | 107 | 0.666971 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 504 | 0.459854 |
3559247cc27efd7aa5a74724da6869a1c6747c97 | 1,329 | py | Python | api_v1/tests/test_models.py | andela-akiura/yonder | 1e7c2e113b9188b69459b2443e548d83baeb24e2 | [
"MIT"
] | 1 | 2017-09-04T11:45:32.000Z | 2017-09-04T11:45:32.000Z | api_v1/tests/test_models.py | andela-akiura/pixlr | 1e7c2e113b9188b69459b2443e548d83baeb24e2 | [
"MIT"
] | 4 | 2021-06-08T19:30:05.000Z | 2022-03-11T23:17:41.000Z | api_v1/tests/test_models.py | andela-akiura/khali | 1e7c2e113b9188b69459b2443e548d83baeb24e2 | [
"MIT"
] | null | null | null | from django.test import TestCase
from factories import ImageFactory, ThumbnailImageFactory, ThumbnailFilterFactory
from faker import Faker
from django.contrib.auth.models import User
# Shared Faker instance; tests re-seed it so factory-generated values are
# reproducible.
fake = Faker()
class UserModelTest(TestCase):
    """Placeholder for user-model tests (none implemented yet)."""
    pass
class ImageModelTest(TestCase):
    """Checks default field values on a factory-built Image."""

    def setUp(self):
        self.image = ImageFactory()

    def test_image_name(self):
        # Re-seed so fake.word() reproduces the name the factory generated.
        fake.seed(1738)
        expected_name = fake.word()
        self.assertEqual(self.image.image_name, expected_name)

    def test_filter_name_is_none(self):
        fake.seed(1738)
        self.assertEqual(self.image.filter_name, 'NONE')

    def test_created_by(self):
        creator = User.objects.get(username='fake')
        self.assertEqual(self.image.created_by, creator)
class ThumbImageModelTest(TestCase):
    """Checks the stored path on a factory-built thumbnail image."""

    def setUp(self):
        self.thumb = ThumbnailImageFactory()

    def test_thumbnail_name(self):
        expected_path = 'images/thumbnails/example.jpg'
        self.assertEqual(self.thumb.thumbnail.name, expected_path)
class ThumbFilterTest(TestCase):
    """Checks path and filter name on a factory-built filtered thumbnail."""

    def setUp(self):
        self.thumb_filter = ThumbnailFilterFactory()

    def test_thumbnail_name(self):
        expected_path = 'images/thumbnails/example.jpg'
        self.assertEqual(self.thumb_filter.filtered_thumbnail.name,
                         expected_path)

    def test_filter_name(self):
        self.assertEqual(self.thumb_filter.filter_name, 'BLUR')
| 27.122449 | 81 | 0.686983 | 1,120 | 0.842739 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.060196 |
355960a148237ed0b98deb243186406c35a92e89 | 1,189 | py | Python | turbustat/tests/test_rfft_to_fft.py | CFD-UTSA/Turbulence-stars | 354d02e38d15e3b0d1f751b43f430dbd3a14c250 | [
"MIT"
] | 42 | 2016-04-07T20:49:59.000Z | 2022-03-28T12:54:13.000Z | turbustat/tests/test_rfft_to_fft.py | CFD-UTSA/Turbulence-stars | 354d02e38d15e3b0d1f751b43f430dbd3a14c250 | [
"MIT"
] | 131 | 2015-03-05T21:42:27.000Z | 2021-07-22T14:59:04.000Z | turbustat/tests/test_rfft_to_fft.py | CFD-UTSA/Turbulence-stars | 354d02e38d15e3b0d1f751b43f430dbd3a14c250 | [
"MIT"
] | 21 | 2015-06-10T17:10:06.000Z | 2022-02-28T15:59:42.000Z | # Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
import pytest
from ..statistics.rfft_to_fft import rfft_to_fft
from ._testing_data import dataset1
import numpy as np
import numpy.testing as npt
# Optional dependency: pyfftw accelerates FFTs.  Record availability so the
# pyfftw-specific test below can be skipped when it isn't installed.
try:
    import pyfftw
    PYFFTW_INSTALLED = True
except ImportError:
    PYFFTW_INSTALLED = False
def test_rfft_to_rfft():
    """rfft_to_fft should reproduce |rfftn| on the half-plane it covers."""
    cube = dataset1['moment0'][0]
    comp_rfft = rfft_to_fft(cube)
    reference = np.abs(np.fft.rfftn(cube))
    half_width = reference.shape[-1]
    npt.assert_allclose(reference, comp_rfft[:, :half_width])
def test_fft_to_rfft():
    """rfft_to_fft should equal the magnitude of the full fftn."""
    cube = dataset1['moment0'][0]
    reconstructed = rfft_to_fft(cube)
    npt.assert_allclose(np.abs(np.fft.fftn(cube)), reconstructed)
@pytest.mark.skipif("not PYFFTW_INSTALLED")
def test_fftw():
    """The pyfftw-backed path should agree with numpy and the default path."""
    cube = dataset1['moment0'][0]
    comp_rfft = rfft_to_fft(cube)
    comp_rfft_fftw = rfft_to_fft(cube, use_pyfftw=True,
                                 threads=1)
    npt.assert_allclose(np.abs(np.fft.fftn(cube)), comp_rfft_fftw)
    npt.assert_allclose(comp_rfft, comp_rfft_fftw)
| 23.78 | 73 | 0.714886 | 0 | 0 | 0 | 0 | 393 | 0.33053 | 0 | 0 | 142 | 0.119428 |
3559772c07898e5d9fdc896cb93ca4b4066c9dc9 | 2,344 | py | Python | Penrose2.py | whitegreen/quasicrystal | 4be39dd8bc769fc2e210502270ab4ab12a2247d3 | [
"MIT"
] | 1 | 2021-09-12T11:36:12.000Z | 2021-09-12T11:36:12.000Z | Penrose2.py | whitegreen/quasicrystal | 4be39dd8bc769fc2e210502270ab4ab12a2247d3 | [
"MIT"
] | null | null | null | Penrose2.py | whitegreen/quasicrystal | 4be39dd8bc769fc2e210502270ab4ab12a2247d3 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
# projection method: Chapter 3, Grimm & Schreiber, 2002
basis = []
for i in range(5):
a = np.pi * 0.4 * i
basis.append([np.cos(a), np.sin(a), np.cos(2 * a), np.sin(2 * a), np.sqrt(0.5)])
basis = np.transpose(basis)
def lattice(en=2): # 5D lattice
s = (en - 1) / 2
lps = []
for i in range(en):
for j in range(en):
for m in range(en):
for n in range(en):
for l in range(en):
lps.append([i - s, j - s, m - s, n - s, l-s])
return np.array(lps)
def polytope(): # of 20 faces
c0 = [27, 30, 23, 29, 15]
c1 = [1, 4, 16, 2, 8]
c2 = [26, 18, 22, 20, 21, 5, 13, 9, 11, 10]
a = np.concatenate([np.full(5, 31), np.full(5, 0), c2])
b = np.concatenate([c0, c1, np.roll(c2, 1)])
c = np.concatenate([np.roll(c0,1), np.roll(c1,1), np.roll(c2, 2)])
v= np.vstack((a,b,c))
return v.transpose()
def normalize(v):
norm = np.linalg.norm(v)
return v / norm
def window(allpoints):
m_orth = basis[2:]
pps= lattice().dot(m_orth.transpose())
fs = polytope()
face_ps = []
face_ns = [] # each (face_ps[i], face_ns[i]) pair describes a face of the 3D polytope
for f in fs:
ps = pps[f] # fansy index
face_ps.append(ps[1])
nor = normalize(np.cross(ps[0] - ps[1], ps[2] - ps[1]))
if np.dot(ps[1], nor) > 0:
nor = nor* -1
face_ns.append(nor)
inside = []
for ps in allpoints:
pp = np.dot(m_orth, ps)
flag = True
for j in range(len(fs)):
if np.dot(pp - face_ps[j], face_ns[j]) < 0:
flag = False
break
inside.append(flag)
return inside
ps = lattice(3) #3 is very fast, 5 takes 10 seconds, lattice points (5D)
nodesize = ps.shape[0]
edges = []
for i in range(nodesize):
for j in range(i + 1, nodesize):
dis = np.linalg.norm(ps[i] - ps[j])
if 0.99 < dis < 1.01:
edges.append([i, j])
inside = window(ps)
pro = basis[:2]
for ida, idb in edges: # project all inside points & windowing the 5D lattice
if inside[ida] and inside[idb]:
a = pro.dot(ps[ida])
b = pro.dot(ps[idb])
plt.plot([a[0], b[0]], [a[1], b[1]], color='C0')
plt.axis('scaled')
plt.show()
| 29.3 | 92 | 0.526877 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 287 | 0.12244 |
355a7ea85ce6fa2501f960ef2b4246c7c49191cc | 872 | py | Python | ProgrammingChallenges#Book/Chapter 1/03.TheTrip/solutions/TheTrip.py | xergioalex/programmingContests | 37e8621e6849d53b20e97cf60353a39f63ded67a | [
"MIT"
] | null | null | null | ProgrammingChallenges#Book/Chapter 1/03.TheTrip/solutions/TheTrip.py | xergioalex/programmingContests | 37e8621e6849d53b20e97cf60353a39f63ded67a | [
"MIT"
] | null | null | null | ProgrammingChallenges#Book/Chapter 1/03.TheTrip/solutions/TheTrip.py | xergioalex/programmingContests | 37e8621e6849d53b20e97cf60353a39f63ded67a | [
"MIT"
] | null | null | null | from sys import stdin
# Main program
def main():
    """Solve "The Trip": read expense lists from stdin and, for each trip,
    print the minimum total amount that must change hands (to two-digit
    accuracy) so everyone pays the average.

    Input: each trip starts with a count n (0 terminates), followed by n
    expense lines.
    """
    for line in stdin:
        count = int(line)
        if count == 0:
            break
        costs = [float(stdin.readline()) for _ in range(count)]
        average = sum(costs) / count
        give = 0    # total owed by above-average spenders
        take = 0    # total owed to below-average spenders (non-positive)
        for cost in costs:
            delta = cost - average
            # Truncate toward zero to two decimal places before summing.
            delta = float(int(delta * 100.0) / 100.0)
            if delta > 0:
                give += delta
            else:
                take += delta
        exchanged = -take if -take > give else give
        print("$%.2f" % exchanged)
main()
| 20.761905 | 52 | 0.481651 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 59 | 0.067661 |
355a7eabdfa94479feee25369d31df7eb197c6b2 | 548 | py | Python | webserver/contest/context_processors.py | theSage21/judge-interface | e1d631f2623c032ee89576e745ce4acb371bc816 | [
"MIT"
] | 3 | 2015-11-07T15:17:42.000Z | 2021-03-22T23:07:00.000Z | webserver/contest/context_processors.py | theSage21/judge-interface | e1d631f2623c032ee89576e745ce4acb371bc816 | [
"MIT"
] | 2 | 2016-12-02T20:27:26.000Z | 2018-09-18T07:07:39.000Z | webserver/contest/context_processors.py | theSage21/judge-interface | e1d631f2623c032ee89576e745ce4acb371bc816 | [
"MIT"
] | 3 | 2015-09-20T09:38:50.000Z | 2018-04-12T14:18:13.000Z | from contest import models
from django.utils import timezone
from contest.functions import is_contest_on, contest_phase
def contest_time(request):
    """Template context processor exposing contest timing state.

    Supplies the next relevant deadline (start before the contest, end
    during it, None afterwards) plus the on/off flag and current phase.
    """
    now = timezone.now()
    contest = models.ContestControl.objects.first()
    if now < contest.start:
        deadline = contest.start
    elif now <= contest.end:  # already know now >= contest.start here
        deadline = contest.end
    else:
        deadline = None
    return {
        'contest_time': deadline,
        'contest_on': is_contest_on(),
        'contest_phase': contest_phase(),
    }
| 27.4 | 58 | 0.680657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 41 | 0.074818 |
355ac1914c73eb19b95d9073487e7c446179f0d7 | 17,852 | py | Python | python/calico/felix/frules.py | a0x8o/felix | fb431cc4a5482f1013bcbef89954d93551c8fec6 | [
"Apache-2.0"
] | 6 | 2016-10-18T04:04:25.000Z | 2016-10-18T04:06:49.000Z | python/calico/felix/frules.py | axbaretto/felix | fb431cc4a5482f1013bcbef89954d93551c8fec6 | [
"Apache-2.0"
] | 1 | 2021-06-01T21:45:37.000Z | 2021-06-01T21:45:37.000Z | python/calico/felix/frules.py | axbaretto/felix | fb431cc4a5482f1013bcbef89954d93551c8fec6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 Tigera, Inc. All rights reserved.
# Copyright (c) 2015 Cisco Systems. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
felix.frules
~~~~~~~~~~~~
Functions for generating iptables rules. This covers our top-level
chains as well as low-level conversion from our datamodel rules to
iptables format.
iptables background
~~~~~~~~~~~~~~~~~~~
iptables configuration is split into multiple tables, which each support
different types of rules. Each table contains multiple chains, which
are sequences of rules. At certain points in packet processing the
packet is handed to one of the always-present kernel chains in a
particular table. The kernel chains have default behaviours but they
can be modified to add or remove rules, including inserting a jump to
another chain.
Felix is mainly concerned with the "filter" table, which is used for
imposing policy rules. There are multiple kernel chains in the filter
table. After the routing decision has been made, packets enter
* the INPUT chain if they are destined for the host itself
* the OUTPUT chain if they are being sent by the host itself
* the FORWARD chain if they are to be forwarded between two interfaces.
Note: packets that are being forwarded do not traverse the INPUT or
OUTPUT chains at all. INPUT and OUTPUT are only used for packets that
the host itself is to receive/send.
Packet paths
~~~~~~~~~~~~
There are a number of possible paths through the filter chains that we
care about:
* Packets from a local workload to another local workload traverse the
FORWARD chain only. Felix must ensure that those packets have *both*
the outbound policy of the sending workload and the inbound policy
of the receiving workload applied.
* Packets from a local workload to a remote address traverse the FORWARD
chain only. Felix must ensure that those packets have the outbound
policy of the local workload applied.
* Packets from a remote address to a local workload traverse the FORWARD
chain only. Felix must apply the inbound policy of the local workload.
* Packets from a local workload to the host itself traverse the INPUT
chain. Felix must apply the outbound policy of the workload.
Chain structure
~~~~~~~~~~~~~~~
Rather than adding many rules to the kernel chains, which are a shared
resource (and hence difficult to unpick), Felix creates its own delegate
chain for each kernel chain and inserts a single jump rule into the
kernel chain:
* INPUT -> felix-INPUT
* FORWARD -> felix-FORWARD
The top-level felix-XXX chains are static and configured at start-of-day.
The felix-FORWARD chain sends packet that arrive from a local workload to
the felix-FROM-ENDPOINT chain, which applies inbound policy. Packets that
are denied by policy are dropped immediately. However, accepted packets
are returned to the felix-FORWARD chain in case they need to be processed
further. felix-FORWARD then directs packets that are going to local
endpoints to the felix-TO-ENDPOINT chain, which applies inbound policy.
Similarly, felix-TO-ENDPOINT either drops or returns the packet. Finally,
if both the FROM-ENDPOINT and TO-ENDPOINT chains allow the packet,
felix-FORWARD accepts the packet and allows it through.
The felix-INPUT sends packets from local workloads to the (shared)
felix-FROM-ENDPOINT chain, which applies outbound policy. Then it
(optionally) accepts packets that are returned.
Since workloads come and go, the TO/FROM-ENDPOINT chains are dynamic and
consist of dispatch tables based on device name. Those chains are managed
by dispatch.py.
The dispatch chains direct packets to per-endpoint ("felix-to/from")
chains, which are responsible for policing IP addresses. Those chains are
managed by endpoint.py. Since the actual policy rules can be shared by
multiple endpoints, we put each set of policy rules in its own chain and
the per-endpoint chains send packets to the relevant policy
(felix-p-xxx-i/o) chains in turn. Policy profile chains are managed by
profilerules.py.
Since an endpoint may be in multiple profiles and we execute the policy
chains of those profiles in sequence, the policy chains need to
communicate three different "return values"; for this we use the packet
Accept MARK (a configured bit in the MARK space):
* Packet was matched by a deny rule. In this case the packet is immediately
dropped.
* Packet was matched by an allow rule. In this case the packet is returned
with Accept MARK==1. The calling chain can then return the packet to its
caller for further processing.
* Packet was not matched at all. In this case, the packet is returned with
Accept MARK==0. The calling chain can then send the packet through the next
profile chain.
"""
import logging
import time
import netaddr
from calico.felix import devices
from calico.felix import futils
from calico.felix.futils import FailedSystemCall
from calico.felix.ipsets import HOSTS_IPSET_V4
# Module-level logger.
_log = logging.getLogger(__name__)
# Common prefix for every chain Felix owns.
FELIX_PREFIX = "felix-"
# Maximum number of port entries in a "multiport" match rule.  Ranges count for
# 2 entries.
MAX_MULTIPORT_ENTRIES = 15
# Name of the global, stateless IP-in-IP device name.
IP_IN_IP_DEV_NAME = "tunl0"
# Rule to catch packets that are being sent down the IPIP tunnel from an
# incorrect local IP address of the host.  This happens if:
#
# - the user explicitly binds their socket to the wrong source IP accidentally
# - the user sends traffic to, for example, a Kubernetes service IP, which is
#   implemented via NAT instead of routing, leading the kernel to choose the
#   wrong source IP.
#
# We NAT the source of the packet to use the tunnel IP.  We assume that
# non-local IPs have been correctly routed.  Since Calico-assigned IPs are
# non-local (because they're down a veth), they won't get caught by the rule.
# Other remote sources will only reach the tunnel if they're being NATted
# already (for example, a Kubernetes "NodePort").  The kernel will then
# choose the correct source on its own.
POSTROUTING_LOCAL_NAT_FRAGMENT = (
    "POSTROUTING "
    # Only match if the packet is going out via the tunnel.
    "--out-interface %s "
    # Match packets that don't have the correct source address.  This matches
    # local addresses (i.e. ones assigned to this host) limiting the match to
    # the output interface (which we matched above as the tunnel).  Avoiding
    # embedding the IP address lets us use a static rule, which is easier to
    # manage.
    "-m addrtype ! --src-type LOCAL --limit-iface-out "
    # Only match if the IP is also some local IP on the box.  This prevents
    # us from matching packets from workloads, which are remote as far as the
    # routing table is concerned.
    "-m addrtype --src-type LOCAL "
    # NAT them to use the source IP of the tunnel.  Using MASQUERADE means
    # the kernel chooses the source automatically.
    "-j MASQUERADE" % IP_IN_IP_DEV_NAME
)
# Chain names
# Dispatch chains to and from workload endpoints.
CHAIN_TO_ENDPOINT = FELIX_PREFIX + "TO-ENDPOINT"
CHAIN_FROM_ENDPOINT = FELIX_PREFIX + "FROM-ENDPOINT"
CHAIN_TO_LEAF = FELIX_PREFIX + "TO-EP-PFX"
CHAIN_FROM_LEAF = FELIX_PREFIX + "FROM-EP-PFX"
# Maps dispatch-chain roles to the workload chain names above.
WORKLOAD_DISPATCH_CHAINS = {
    "to_root": CHAIN_TO_ENDPOINT,
    "from_root": CHAIN_FROM_ENDPOINT,
    "to_leaf": CHAIN_TO_LEAF,
    "from_leaf": CHAIN_FROM_LEAF,
}
# Ditto for host endpoints.
CHAIN_TO_IFACE = FELIX_PREFIX + "TO-HOST-IF"
CHAIN_FROM_IFACE = FELIX_PREFIX + "FROM-HOST-IF"
CHAIN_TO_IFACE_LEAF = FELIX_PREFIX + "TO-IF-PFX"
CHAIN_FROM_IFACE_LEAF = FELIX_PREFIX + "FROM-IF-PFX"
HOST_DISPATCH_CHAINS = {
    "to_root": CHAIN_TO_IFACE,
    "from_root": CHAIN_FROM_IFACE,
    "to_leaf": CHAIN_TO_IFACE_LEAF,
    "from_leaf": CHAIN_FROM_IFACE_LEAF,
}
# Failsafe whitelist chains.
CHAIN_FAILSAFE_IN = FELIX_PREFIX + "FAILSAFE-IN"
CHAIN_FAILSAFE_OUT = FELIX_PREFIX + "FAILSAFE-OUT"
# Per-endpoint/interface chain prefixes.
CHAIN_TO_PREFIX = FELIX_PREFIX + "to-"
CHAIN_FROM_PREFIX = FELIX_PREFIX + "from-"
# Top-level felix chains.
CHAIN_PREROUTING = FELIX_PREFIX + "PREROUTING"
CHAIN_POSTROUTING = FELIX_PREFIX + "POSTROUTING"
CHAIN_INPUT = FELIX_PREFIX + "INPUT"
CHAIN_OUTPUT = FELIX_PREFIX + "OUTPUT"
CHAIN_FORWARD = FELIX_PREFIX + "FORWARD"
# NAT chains used for floating-IP (DNAT/SNAT) support.
CHAIN_FIP_DNAT = FELIX_PREFIX + 'FIP-DNAT'
CHAIN_FIP_SNAT = FELIX_PREFIX + 'FIP-SNAT'
def load_nf_conntrack():
    """
    Try to force the nf_conntrack_netlink kernel module to be loaded.

    Best-effort: failures are logged but not raised, so startup continues
    even if the module cannot be loaded.
    """
    _log.info("Running conntrack command to force load of "
              "nf_conntrack_netlink module.")
    try:
        # Run a conntrack command to trigger it to load the kernel module if
        # it's not already compiled in.  We list rules with a randomly-chosen
        # link local address.  That makes it very unlikely that we generate
        # any wasteful output.  We used to use "-S" (show stats) here but it
        # seems to be bugged on some platforms, generating an error.
        futils.check_call(["conntrack", "-L", "-s", "169.254.45.169"])
    except FailedSystemCall:
        # Swallow the failure deliberately; later conntrack use may fail.
        _log.exception("Failed to execute conntrack command to force load of "
                       "nf_conntrack_netlink module. conntrack commands may "
                       "fail later.")
def install_global_rules(config, filter_updater, nat_updater, ip_version,
                         raw_updater=None):
    """
    Set up global iptables rules. These are rules that do not change with
    endpoint, and are expected never to change (such as the rules that send all
    traffic through the top level Felix chains).
    This method therefore :
    - ensures that all the required global tables are present;
    - applies any changes required.

    :param config: global Felix configuration object.
    :param filter_updater: iptables updater for the filter table.
    :param nat_updater: iptables updater for the nat table.
    :param ip_version: 4 or 6; selects the version-specific rules.
    :param raw_updater: optional iptables updater for the raw table; when
        supplied, anti-spoofing rules are installed there.
    """
    # If enabled, create the IP-in-IP device, but only for IPv4
    if ip_version == 4:
        if config.IP_IN_IP_ENABLED:
            _log.info("IP-in-IP enabled, ensuring device exists.")
            try:
                _configure_ipip_device(config)
            except FailedSystemCall:
                # We've seen this fail occasionally if the kernel is
                # concurrently starting the tunl0 device.  Retry once.
                _log.exception("Failed to configure IPIP device, retrying...")
                time.sleep(1)
                _configure_ipip_device(config)
        if config.IP_IN_IP_ENABLED and config.IP_IN_IP_ADDR:
            # Add a rule to catch packets originated by this host that are
            # going down the tunnel with the wrong source address.  NAT them
            # to use the address of the tunnel device instead.  See comment
            # on the constant for more details.
            _log.info("IPIP enabled and tunnel address set: inserting "
                      "MASQUERADE rule to ensure tunnelled packets have "
                      "correct source.")
            nat_updater.ensure_rule_inserted(POSTROUTING_LOCAL_NAT_FRAGMENT,
                                             async=False)
        else:
            # Clean up the rule that we insert above if IPIP is enabled.
            _log.info("IPIP disabled or no tunnel address set: removing "
                      "MASQUERADE rule.")
            nat_updater.ensure_rule_removed(POSTROUTING_LOCAL_NAT_FRAGMENT,
                                            async=False)
    # Ensure that Calico-controlled IPv6 hosts cannot spoof their IP addresses.
    # (For IPv4, this is controlled by a per-interface sysctl.)
    iptables_generator = config.plugins["iptables_generator"]
    if raw_updater:
        raw_prerouting_chain, raw_prerouting_deps = (
            iptables_generator.raw_rpfilter_failed_chain(ip_version=ip_version)
        )
        raw_updater.rewrite_chains({CHAIN_PREROUTING: raw_prerouting_chain},
                                   {CHAIN_PREROUTING: raw_prerouting_deps},
                                   async=False)
        for iface_prefix in config.IFACE_PREFIX:
            # The interface matching string; for example,
            # if interfaces start "tap" then this string is "tap+".
            iface_match = iface_prefix + '+'
            # Divert packets that fail reverse-path filtering to our
            # felix-PREROUTING chain in the raw table.
            raw_updater.ensure_rule_inserted(
                "PREROUTING --in-interface %s --match rpfilter --invert "
                "--jump %s" %
                (iface_match, CHAIN_PREROUTING),
                async=False)
    # Both IPV4 and IPV6 nat tables need felix-PREROUTING,
    # felix-POSTROUTING and felix-OUTPUT, along with the dependent
    # DNAT and SNAT tables required for NAT/floating IP support.
    prerouting_chain, prerouting_deps = (
        iptables_generator.nat_prerouting_chain(ip_version=ip_version)
    )
    postrouting_chain, postrouting_deps = (
        iptables_generator.nat_postrouting_chain(ip_version=ip_version)
    )
    output_chain, output_deps = (
        iptables_generator.nat_output_chain(ip_version=ip_version)
    )
    nat_updater.rewrite_chains({CHAIN_PREROUTING: prerouting_chain,
                                CHAIN_POSTROUTING: postrouting_chain,
                                CHAIN_OUTPUT: output_chain,
                                CHAIN_FIP_DNAT: [],
                                CHAIN_FIP_SNAT: []},
                               {CHAIN_PREROUTING: prerouting_deps,
                                CHAIN_POSTROUTING: postrouting_deps,
                                CHAIN_OUTPUT: output_deps},
                               async=False)
    # Jump from the kernel nat chains into the felix-owned ones.
    nat_updater.ensure_rule_inserted(
        "PREROUTING --jump %s" % CHAIN_PREROUTING, async=False)
    nat_updater.ensure_rule_inserted(
        "POSTROUTING --jump %s" % CHAIN_POSTROUTING, async=False)
    nat_updater.ensure_rule_inserted(
        "OUTPUT --jump %s" % CHAIN_OUTPUT, async=False)
    # Now the filter table. This needs to have felix-FORWARD and felix-INPUT
    # chains, which we must create before adding any rules that send to them.
    if ip_version == 4 and config.IP_IN_IP_ENABLED:
        hosts_set_name = HOSTS_IPSET_V4.set_name
        HOSTS_IPSET_V4.ensure_exists()
    else:
        hosts_set_name = None
    input_chain, input_deps = (
        iptables_generator.filter_input_chain(ip_version, hosts_set_name)
    )
    output_chain, output_deps = (
        iptables_generator.filter_output_chain(ip_version)
    )
    forward_chain, forward_deps = (
        iptables_generator.filter_forward_chain(ip_version)
    )
    failsafe_in_chain, failsafe_in_deps = (
        iptables_generator.failsafe_in_chain()
    )
    failsafe_out_chain, failsafe_out_deps = (
        iptables_generator.failsafe_out_chain()
    )
    filter_updater.rewrite_chains(
        {
            CHAIN_FORWARD: forward_chain,
            CHAIN_INPUT: input_chain,
            CHAIN_OUTPUT: output_chain,
            CHAIN_FAILSAFE_IN: failsafe_in_chain,
            CHAIN_FAILSAFE_OUT: failsafe_out_chain,
        },
        {
            CHAIN_FORWARD: forward_deps,
            CHAIN_INPUT: input_deps,
            CHAIN_OUTPUT: output_deps,
            CHAIN_FAILSAFE_IN: failsafe_in_deps,
            CHAIN_FAILSAFE_OUT: failsafe_out_deps,
        },
        async=False)
    # Finally, jump from the kernel filter chains into the felix chains.
    filter_updater.ensure_rule_inserted(
        "INPUT --jump %s" % CHAIN_INPUT,
        async=False)
    filter_updater.ensure_rule_inserted(
        "OUTPUT --jump %s" % CHAIN_OUTPUT,
        async=False)
    filter_updater.ensure_rule_inserted(
        "FORWARD --jump %s" % CHAIN_FORWARD,
        async=False)
def _configure_ipip_device(config):
    """Creates and enables the IPIP tunnel device.

    Idempotent: creates the device only when it is missing, always
    (re)applies the configured MTU and IP address, and brings the link up
    if it is down.

    :param config: Felix config object; reads IP_IN_IP_MTU and IP_IN_IP_ADDR.
    :raises FailedSystemCall on failure.
    """
    if not devices.interface_exists(IP_IN_IP_DEV_NAME):
        # Make sure the IP-in-IP device exists; since we use the global
        # device, this command actually creates it as a side-effect of
        # initialising the kernel module rather than explicitly creating
        # it.
        _log.info("Tunnel device didn't exist; creating.")
        futils.check_call(["ip", "tunnel", "add", IP_IN_IP_DEV_NAME,
                           "mode", "ipip"])
    # Apply the MTU unconditionally so config changes take effect on restart.
    futils.check_call(["ip", "link", "set", IP_IN_IP_DEV_NAME, "mtu",
                       str(config.IP_IN_IP_MTU)])
    if not devices.interface_up(IP_IN_IP_DEV_NAME):
        _log.info("Tunnel device wasn't up; enabling.")
        futils.check_call(["ip", "link", "set", IP_IN_IP_DEV_NAME, "up"])
    # Allow an IP address to be added to the tunnel. This is useful to
    # allow the host to have an IP on a private IPIP network so that it can
    # originate traffic and have it routed correctly.
    _log.info("Setting IPIP device IP to %s", config.IP_IN_IP_ADDR)
    # When no address is configured we pass an empty set — presumably
    # set_interface_ips then removes any existing addresses; confirm its
    # semantics before relying on that.
    tunnel_addrs = [netaddr.IPAddress(config.IP_IN_IP_ADDR)] if config.IP_IN_IP_ADDR else []
    devices.set_interface_ips(futils.IPV4, IP_IN_IP_DEV_NAME,
                              set(tunnel_addrs))
    _log.info("Configured IPIP device.")
def interface_to_chain_suffix(config, iface_name):
    """Return the chain-name suffix for an interface.

    Strips the longest matching configured interface prefix, then uniquely
    shortens the remainder to at most 16 characters.

    :param config: Felix config; reads the IFACE_PREFIX collection.
    :param iface_name: The interface name.
    :returns string: the suffix (shortened if necessary)
    """
    suffix = iface_name
    # Sort prefixes in reverse so the longest candidate is tried first.
    matched_prefix = next(
        (p for p in sorted(config.IFACE_PREFIX, reverse=True)
         if suffix.startswith(p)),
        None,
    )
    if matched_prefix is not None:
        suffix = suffix[len(matched_prefix):]
    return futils.uniquely_shorten(suffix, 16)
| 41.906103 | 92 | 0.698969 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10,810 | 0.605534 |
355af8d5ae4552973efc6c0ce81832474cc5e594 | 2,716 | py | Python | browse.py | Thorsten-Sick/tags_for_media_ccc_de | ad1a117ea1dfc2b508d287854ba9b2f5c5a438ca | [
"MIT"
] | 1 | 2018-01-11T15:46:56.000Z | 2018-01-11T15:46:56.000Z | browse.py | Thorsten-Sick/tags_for_media_ccc_de | ad1a117ea1dfc2b508d287854ba9b2f5c5a438ca | [
"MIT"
] | 1 | 2018-11-04T18:42:57.000Z | 2018-11-18T22:14:49.000Z | browse.py | Thorsten-Sick/tags_for_media_ccc_de | ad1a117ea1dfc2b508d287854ba9b2f5c5a438ca | [
"MIT"
] | 1 | 2018-11-24T19:17:31.000Z | 2018-11-24T19:17:31.000Z | #!/usr/bin/env python3
# TODO: Write a command line tool to browser and search in the database
# TODO: Define a command set to search for strings, tags, similar talks, mark talks as seen, mark talks as irrelevant, mark talks as relevant, open a browser and watch, show details, quit
# https://opensource.com/article/17/5/4-practical-python-libraries
# TODO: Maybe use fuzzyfinder
# TODO: use prompt_toolkit autocompletion, auto suggestion and history
# TODO: Use pygments for syntax highlighting https://pygments.org/
from prompt_toolkit import prompt
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.completion import NestedCompleter
from dropdata import MediaTagger
import argparse
def printHelp():
    """Print the interactive command reference to stdout.

    Lines prefixed with TODO describe commands that are planned but not
    yet implemented in the main loop.
    """
    print("""
tags: list tags
TODO tags + tag: list all talks containing a specific tag
TODO similar: Find similar content
TODO seen: Mark talks as seen
TODO irrelevant: Mark talks as irrelevant
TODO relevant: Mark talks as relevant
TODO show: Show content in browser
TODO details: Show details
quit: quit
help: get help
""")
def getCompleter():
    """Build the nested tab-completer for the interactive prompt.

    :return: a NestedCompleter offering the help/quit/tags/similar commands,
        where ``tags`` additionally completes known tag names.
    """
    tagger = MediaTagger(frab=False, subtitles=False, default=False, offline=True)
    # Each known tag (plus the empty string) is a terminal completion.
    tag_choices = dict.fromkeys(tagger.list_tags() + [""])
    commands = {
        'help': None,          # show help
        'quit': None,          # quit
        'tags': tag_choices,   # search for tags
        'similar': None,       # find similar content using k-nearest
    }
    return NestedCompleter.from_nested_dict(commands)
if __name__=="__main__":
    ### Parsing args
    parser = argparse.ArgumentParser()
    parser.add_argument("--data", help="Database file name", default = "frab.json", type = str)
    args = parser.parse_args()
    ### Load data
    ### Logic
    # Build the completer first (it loads its own MediaTagger instance).
    BrowserCompleter = getCompleter()
    mt = MediaTagger(frab=False, subtitles=False, default=False, offline=True)
    mt.read_file(args.data)
    # Interactive REPL: prompt with history/auto-suggest until "quit".
    while 1:
        user_input = prompt('> ',
                            history=FileHistory("history.txt"),
                            auto_suggest=AutoSuggestFromHistory(),
                            completer=BrowserCompleter,
                            )
        # Commands are matched case-insensitively.
        user_input = user_input.lower()
        if user_input == "quit":
            break
        elif user_input == "help":
            printHelp()
        elif user_input == "tags":
            # pure tags, list them
            print(",".join(mt.list_tags()))
        else:
            # Unrecognised input is simply echoed back.
            print(user_input)
| 32.722892 | 187 | 0.610088 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,172 | 0.431517 |
355b54f8b2fba95e01f01d6e3b0468747cbcfa07 | 587 | py | Python | Curso-Em-Video-Python/1Materias/08_Utilizando_Modulos/#08 - Utilizando Módulos C random.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | [
"MIT"
] | null | null | null | Curso-Em-Video-Python/1Materias/08_Utilizando_Modulos/#08 - Utilizando Módulos C random.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | [
"MIT"
] | null | null | null | Curso-Em-Video-Python/1Materias/08_Utilizando_Modulos/#08 - Utilizando Módulos C random.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | [
"MIT"
] | null | null | null | import random
# num = random.random() para numeros de 0 e 1
num = random.randint(1, 10)
print(num)
'''import random 'choice'
n1 = str(input('Primeiro aluno: '))
n2 = str(input('Segundo aluno: '))
n3 = str(input('Terceiro aluno: '))
n4 = str(input('Quarto aluno: '))
lista = [n1, n2, n3, n4]
escolha = random.choice(lista)
print(escolha)'''
'''import random 'shuffle'
n1 = str(input('Aluno: '))
n2 = str(input('Aluno: '))
n3 = str(input('Aluno: '))
n4 = str(input('Aluno: '))
lista = [n1, n2, n3, n4]
sorteio = random.shuffle(lista)
print('A ordem de apresentação é ')
print(lista)''' | 24.458333 | 45 | 0.645656 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 532 | 0.901695 |
355d9f89110de4ad691f2cde310c459a0094cdbf | 1,520 | py | Python | help.py | TarikCinar/python-sesli-asistan | 1a29a8d3081b67ff352cf03f7b01ac01b7118deb | [
"MIT"
] | 1 | 2021-05-28T17:27:50.000Z | 2021-05-28T17:27:50.000Z | help.py | TarikCinar/python-sesli-asistan | 1a29a8d3081b67ff352cf03f7b01ac01b7118deb | [
"MIT"
] | null | null | null | help.py | TarikCinar/python-sesli-asistan | 1a29a8d3081b67ff352cf03f7b01ac01b7118deb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'help.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """Auto-generated PyQt5 UI class for the Help window (from help.ui).

    Do not edit by hand: regenerate from the .ui file instead (see the
    header warning at the top of this file).
    """

    def setupUi(self, Form):
        """Build the fixed 400x450 window with a styled text browser."""
        Form.setObjectName("Form")
        Form.resize(400, 450)
        # Min == max size pins the window to exactly 400x450.
        Form.setMinimumSize(QtCore.QSize(400, 450))
        Form.setMaximumSize(QtCore.QSize(400, 450))
        Form.setStyleSheet("\n"
"background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgb(40,40,211) , stop:1 rgb(99,136,153) );")
        self.textBrowser = QtWidgets.QTextBrowser(Form)
        self.textBrowser.setGeometry(QtCore.QRect(10, 20, 381, 421))
        self.textBrowser.setMinimumSize(QtCore.QSize(10, 10))
        self.textBrowser.setMaximumSize(QtCore.QSize(121121, 325235))
        self.textBrowser.setStyleSheet("#textBrowser{\n"
"\n"
"font: 12pt \"Consolas\";\n"
"}")
        self.textBrowser.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.textBrowser.setObjectName("textBrowser")
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Apply translatable strings (window title only)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Help"))
if __name__ == "__main__":
    # Standalone preview: attach the generated UI to a bare QWidget and run
    # the Qt event loop until the window is closed.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
| 31.666667 | 122 | 0.675 | 1,080 | 0.710526 | 0 | 0 | 0 | 0 | 0 | 0 | 403 | 0.265132 |
35604318c4baa0bc21434d140ab47982045b6541 | 1,729 | py | Python | tarea2c/mod/game_over.py | camilo-nb/CC3501-tareas | b27cb01c8bcb88d7530bb337b1a1dab1c0f8f34e | [
"MIT"
] | null | null | null | tarea2c/mod/game_over.py | camilo-nb/CC3501-tareas | b27cb01c8bcb88d7530bb337b1a1dab1c0f8f34e | [
"MIT"
] | null | null | null | tarea2c/mod/game_over.py | camilo-nb/CC3501-tareas | b27cb01c8bcb88d7530bb337b1a1dab1c0f8f34e | [
"MIT"
] | null | null | null | import os
from collections import deque
import numpy as np
from OpenGL.GL import *
import lib.basic_shapes as bs
import lib.easy_shaders as es
import lib.transformations as tr
class GameOver:
    """Animated textured quad shown when the game ends.

    Cycles through four "game over" texture frames while spinning the quad;
    the spin rate decays over time (see update()).
    """

    def __init__(self):
        # Four texture frames, cycled by rotating this deque in update().
        self.GPU = deque([
            es.toGPUShape(bs.createTextureCube(os.path.join('mod', 'tex', 'game_over_1.png')), GL_REPEAT, GL_NEAREST),
            es.toGPUShape(bs.createTextureCube(os.path.join('mod', 'tex', 'game_over_2.png')), GL_REPEAT, GL_NEAREST),
            es.toGPUShape(bs.createTextureCube(os.path.join('mod', 'tex', 'game_over_3.png')), GL_REPEAT, GL_NEAREST),
            es.toGPUShape(bs.createTextureCube(os.path.join('mod', 'tex', 'game_over_4.png')), GL_REPEAT, GL_NEAREST)
        ])
        # World position of the quad (fixed); units follow the scene's
        # conventions — confirm against the caller.
        self.x, self.y, self.z = 0, 310, 100
        self.phi = 0   # current rotation angle
        self.tick = 0  # animation clock that drives phi
        self.s = 10    # quad scale
        # Near-zero z scale flattens the cube into a quad-like billboard.
        self.transform = tr.matmul([tr.translate(self.x, self.y, self.z), tr.scale(self.s, self.s, 0.0001)])

    def draw(self, pipeline, projection, view):
        """Upload model/projection/view matrices and draw the current frame."""
        glUseProgram(pipeline.shaderProgram)
        glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "model"), 1, GL_TRUE, self.transform)
        glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "projection"), 1, GL_TRUE, projection)
        glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "view"), 1, GL_TRUE, view)
        pipeline.drawShape(self.GPU[0])

    def update(self):
        """Advance the animation by one step.

        Rotates the texture queue, decays the animation clock and recomputes
        the model transform with the new rotation angles.
        """
        self.GPU.append(self.GPU.popleft())
        # tick decreases ever faster; phi = exp(tick) therefore decays
        # towards 0, settling the spin.
        self.tick -= 0.1 * np.exp(self.tick/10)
        self.phi = np.exp(self.tick)
        self.transform = tr.matmul([tr.translate(self.x, self.y, self.z), tr.rotationX(2 * self.phi), tr.rotationZ(self.phi), tr.scale(self.s, self.s, 0.0001)])
| 46.72973 | 161 | 0.664546 | 1,549 | 0.895894 | 0 | 0 | 0 | 0 | 0 | 0 | 133 | 0.076923 |
35610507ab819d86ea042c5778fb91378bad6012 | 4,571 | py | Python | src/pycity_scheduling/classes/electrical_heater.py | ElsevierSoftwareX/SOFTX-D-20-00087 | d2d3f1effda2c0499cb05abf87435375a21379e3 | [
"MIT"
] | 4 | 2021-11-01T15:13:27.000Z | 2022-01-16T18:01:06.000Z | src/pycity_scheduling/classes/electrical_heater.py | ElsevierSoftwareX/SOFTX-D-20-00087 | d2d3f1effda2c0499cb05abf87435375a21379e3 | [
"MIT"
] | 2 | 2021-11-18T05:58:00.000Z | 2022-01-19T16:46:20.000Z | src/pycity_scheduling/classes/electrical_heater.py | ElsevierSoftwareX/SOFTX-D-20-00087 | d2d3f1effda2c0499cb05abf87435375a21379e3 | [
"MIT"
] | 5 | 2021-11-01T15:13:35.000Z | 2022-02-03T21:28:48.000Z | """
The pycity_scheduling framework
Copyright (C) 2022,
Institute for Automation of Complex Power Systems (ACS),
E.ON Energy Research Center (E.ON ERC),
RWTH Aachen University
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import numpy as np
import pyomo.environ as pyomo
import pycity_base.classes.supply.electrical_heater as eh
from pycity_scheduling.util.generic_constraints import LowerActivationLimit
from pycity_scheduling.classes.thermal_entity_heating import ThermalEntityHeating
from pycity_scheduling.classes.electrical_entity import ElectricalEntity
class ElectricalHeater(ThermalEntityHeating, ElectricalEntity, eh.ElectricalHeater):
    """
    Extension of pyCity_base class ElectricalHeater for scheduling purposes.

    Parameters
    ----------
    environment : pycity_scheduling.classes.Environment
        Common to all other objects. Includes time and weather instances.
    p_th_nom : float
        Nominal thermal power output in [kW].
    eta : float, optional
        Efficiency of the electrical heater. Defaults to one.
    lower_activation_limit : float, optional (only adhered to in integer mode)
        Must be in [0, 1]. Lower activation limit of the electrical heater
        as a percentage of the rated power. When the electrical heater is
        in operation, its power must be zero or between the lower activation
        limit and its rated power.

        - `lower_activation_limit = 0`: Linear behavior
        - `lower_activation_limit = 1`: Two-point controlled

    Notes
    -----
    - EHs offer sets of constraints for operation. In the `convex` mode the
      following constraints and bounds are generated by the EH:

    .. math::
        0 \\geq p_{th\\_heat} &\\geq& -p_{th\\_nom} \\\\
        \\eta * p_{el} &=& - p_{th\\_heat}

    - See also:
        - pycity_scheduling.util.generic_constraints.LowerActivationLimit: Generates additional constraints for the
          `lower_activation_limit` in `integer` mode.
    """

    def __init__(self, environment, p_th_nom, eta=1, lower_activation_limit=0):
        # The parent class expects the nominal power in W (p_th_nom is kW)
        # and a maximum temperature of 85.
        # NOTE(review): the previous comment here claimed a "flow temperature
        # of 55 C", but the value passed has always been 85.
        super().__init__(environment, p_th_nom*1000, eta, 85, lower_activation_limit)
        self._long_id = "EH_" + self._id_string
        self.p_th_nom = p_th_nom
        self.activation_constr = LowerActivationLimit(self, "p_th_heat", lower_activation_limit, -p_th_nom)

    def populate_model(self, model, mode="convex"):
        """
        Add device block to pyomo ConcreteModel.

        Call parent's `populate_model` method and set thermal variables upper
        bounds to `self.p_th_nom`. Also add constraint to bind electrical
        demand to thermal output.

        Parameters
        ----------
        model : pyomo.ConcreteModel
        mode : str, optional
            Specifies which set of constraints to use.

            - `convex` : Use linear constraints
            - `integer`  : Use integer variables representing discrete control decisions

        Raises
        ------
        ValueError
            If `mode` is neither "convex" nor "integer".
        """
        super().populate_model(model, mode)
        m = self.model

        # BUG FIX: this test was previously `mode == "convex" or "integer"`,
        # which is always truthy because the non-empty string "integer"
        # short-circuits the `or`; the ValueError branch was unreachable and
        # invalid modes were silently accepted.
        if mode in ("convex", "integer"):
            # Thermal output is non-positive (heat delivered) and bounded by
            # the nominal power.
            m.p_th_heat_vars.setlb(-self.p_th_nom)
            m.p_th_heat_vars.setub(0.0)

            def p_coupl_rule(model, t):
                # Couple electrical demand to thermal output via efficiency.
                return - model.p_th_heat_vars[t] == self.eta * model.p_el_vars[t]
            m.p_coupl_constr = pyomo.Constraint(m.t, rule=p_coupl_rule)

            self.activation_constr.apply(m, mode)
        else:
            raise ValueError(
                "Mode %s is not implemented by class ElectricalHeater." % str(mode)
            )
35632b004edf0889b947ff8dab8238164166f87d | 13,394 | py | Python | klasses/api.py | mitodl/bootcamp-ecommerce | ba7d6aefe56c6481ae2a5afc84cdd644538b6d50 | [
"BSD-3-Clause"
] | 2 | 2018-06-20T19:37:03.000Z | 2021-01-06T09:51:40.000Z | klasses/api.py | mitodl/bootcamp-ecommerce | ba7d6aefe56c6481ae2a5afc84cdd644538b6d50 | [
"BSD-3-Clause"
] | 1,226 | 2017-02-23T14:52:28.000Z | 2022-03-29T13:19:54.000Z | klasses/api.py | mitodl/bootcamp-ecommerce | ba7d6aefe56c6481ae2a5afc84cdd644538b6d50 | [
"BSD-3-Clause"
] | 3 | 2017-03-20T03:51:27.000Z | 2021-03-19T15:54:31.000Z | """
API functionality for bootcamps
"""
import logging
from datetime import datetime, timedelta
import pytz
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.db.models import Sum
from applications.constants import AppStates
from ecommerce.models import Line, Order
from klasses.constants import DATE_RANGE_MONTH_FMT, ENROLL_CHANGE_STATUS_DEFERRED
from klasses.models import BootcampRun, BootcampRunEnrollment
from main import features
from novoed import tasks as novoed_tasks
log = logging.getLogger(__name__)
def deactivate_run_enrollment(
    *, run_enrollment=None, user=None, bootcamp_run=None, change_status=None
):
    """
    Deactivate a BootcampRunEnrollment.

    Accepts either the enrollment itself, or a user and bootcamp run with
    which the enrollment can be looked up. If the NovoEd integration is
    enabled and the run has a NovoEd stub, an async unenrollment task is
    queued as well.

    Args:
        run_enrollment (Optional[BootcampRunEnrollment]): The bootcamp run enrollment to deactivate
        user (Optional[User]): The enrolled user (only required if run_enrollment is not provided)
        bootcamp_run (Optional[BootcampRun]): The enrolled bootcamp run (only required if run_enrollment
            is not provided)
        change_status (Optional[str]): The change status to set on the enrollment when deactivating

    Returns:
        Optional[BootcampRunEnrollment]: The updated enrollment (or None if the enrollment doesn't exist)

    Raises:
        ValueError: If neither run_enrollment nor the (user, bootcamp_run) pair is given.
    """
    if run_enrollment is None:
        if user is None or bootcamp_run is None:
            raise ValueError("Must provide run_enrollment, or both user and bootcamp_run")
        run_enrollment = BootcampRunEnrollment.objects.filter(
            user=user, bootcamp_run=bootcamp_run
        ).first()
        if run_enrollment is None:
            # Nothing to deactivate.
            return None
    run_enrollment.active = False
    run_enrollment.change_status = change_status
    run_enrollment.save()
    if (
        features.is_enabled(features.NOVOED_INTEGRATION)
        and run_enrollment.bootcamp_run.novoed_course_stub
    ):
        novoed_tasks.unenroll_user_from_novoed_course.delay(
            user_id=run_enrollment.user.id,
            novoed_course_stub=run_enrollment.bootcamp_run.novoed_course_stub,
        )
    return run_enrollment
def adjust_app_state_for_new_price(user, bootcamp_run, new_price=None):
    """
    Given a new price for a bootcamp run, updated user's bootcamp application if (a) it exists, and (b) the new price
    is such that the bootcamp application state is no longer valid (e.g.: the new price is greater than the
    amount that the user has paid, but the application is in the "complete" state)

    Args:
        user (User): The user whose application may be affected
        bootcamp_run (BootcampRun): The bootcamp run of the application that may be affected
        new_price (Optional[Any[int, Decimal]]): The new total price of the bootcamp run (if None, the bootcamp run's
            normal price will be used)

    Returns:
        Optional[BootcampApplication]: The bootcamp application for the user/run referred to by the personal price
            if it was modified (otherwise, None will be returned)
    """
    # Total this user has paid toward this run, across all of their orders.
    total_paid_qset = Line.objects.filter(
        order__user=user, bootcamp_run=bootcamp_run
    ).aggregate(aggregate_total_paid=Sum("price"))
    # aggregate() yields None when there are no matching lines; treat as 0.
    total_paid = total_paid_qset["aggregate_total_paid"] or 0
    new_price = new_price if new_price is not None else bootcamp_run.price
    needs_payment = total_paid < new_price
    application = user.bootcamp_applications.filter(
        bootcamp_run=bootcamp_run,
        # The state needs to change if (a) it's currently complete and now needs more payment, or (b) it's currently
        # awaiting payment and the new price means they don't need to pay any more.
        state=(
            AppStates.COMPLETE.value
            if needs_payment
            else AppStates.AWAITING_PAYMENT.value
        ),
    ).first()
    if application is None:
        # No application, or its state is already consistent with the new
        # price — nothing to change.
        return
    # Drive the application state machine to the now-correct state and save.
    if needs_payment:
        application.await_further_payment()
    else:
        application.complete()
    application.save()
    log.info(
        "Personal price update caused application state change (user: %s, run: '%s', new state: %s)",
        user.email,
        bootcamp_run.title,
        application.state,
    )
    return application
def _parse_formatted_date_range(date_range_str):
    """
    Parse a display-formatted date range (e.g.: "May 1, 2020 - Jan 30, 2021").

    Also accepts single dates ("May 1, 2020"), day-only second dates
    ("May 1 - 3, 2020") and a first date with the year omitted (in which
    case the second date's year is used).

    Args:
        date_range_str (str): A string representing a date range

    Returns:
        Tuple[datetime.datetime, Optional[datetime.datetime]]: The parsed
            UTC datetimes (the second is None for a single date)
    """
    def _build_utc(month_name, day, year):
        # Month names are decoded with the shared display format constant.
        month_num = datetime.strptime(month_name, DATE_RANGE_MONTH_FMT).month
        return datetime(year=year, month=month_num, day=day, tzinfo=pytz.UTC)

    if "-" in date_range_str:
        first_str, second_str = date_range_str.split("-")
    else:
        first_str, second_str = date_range_str, None

    first_parts = first_str.split(",")
    first_tokens = first_parts[0].strip().split(" ")
    first_month = first_tokens[0]
    first_day = int(first_tokens[1])

    if not second_str:
        # Single date: the year must be present in the first (only) date.
        first_year = int(first_parts[1].strip())
        return _build_utc(first_month, first_day, first_year), None

    second_parts = second_str.split(",")
    second_tokens = second_parts[0].strip().split(" ")
    second_year = int(second_parts[1].strip())
    # If the first date omits its year, inherit it from the second date.
    first_year = (
        second_year if len(first_parts) < 2 else int(first_parts[1].strip())
    )
    if len(second_tokens) < 2:
        # Day-only second date ("May 1 - 3, 2020"): reuse the first month.
        second_month, second_day = first_month, int(second_tokens[0])
    else:
        second_month, second_day = second_tokens[0], int(second_tokens[1])
    return (
        _build_utc(first_month, first_day, first_year),
        _build_utc(second_month, second_day, second_year),
    )
def fetch_bootcamp_run(run_property):
    """
    Fetches a bootcamp run that has a field value (id, title, etc.) that matches the given property

    Matching is attempted in order: numeric id, exact run title, bootcamp
    title, then a parsed "display title" of the form
    "<bootcamp title>, <formatted date range>".

    Args:
        run_property (str): A string representing some field value for a specific bootcamp run

    Returns:
        BootcampRun: The bootcamp run matching the given property
            (NOTE(review): a falsy run_property falls through the id check
            and may still hit the title queries with an empty value —
            confirm callers never pass an empty string)
    """
    if run_property:
        # All-digit strings are treated as primary keys.
        if run_property.isdigit():
            return BootcampRun.objects.get(id=run_property)
    # Exact run title match.
    run = BootcampRun.objects.filter(title=run_property).first()
    if run is not None:
        return run
    # If run_property is a string and didn't match a title, it might be a 'display_title' property value.
    # Attempt to parse that and match it to a run.
    if run is None and "," not in run_property:
        # Without a comma it can't be "<title>, <dates>"; try bootcamp title.
        return BootcampRun.objects.get(bootcamp__title=run_property)
    potential_bootcamp_title, potential_date_range = run_property.split(
        ",", maxsplit=1
    )
    potential_start_date, potential_end_date = _parse_formatted_date_range(
        potential_date_range
    )
    run_filters = dict(bootcamp__title=potential_bootcamp_title)
    # Match each parsed date against the whole calendar day it falls on
    # (stored dates carry a time component, hence the one-day window).
    if potential_start_date:
        run_filters.update(
            dict(
                start_date__gte=potential_start_date,
                start_date__lt=potential_start_date + timedelta(days=1),
            )
        )
    else:
        run_filters["start_date"] = None
    if potential_end_date:
        run_filters.update(
            dict(
                end_date__gte=potential_end_date,
                end_date__lt=potential_end_date + timedelta(days=1),
            )
        )
    else:
        run_filters["end_date"] = None
    try:
        return BootcampRun.objects.get(**run_filters)
    except BootcampRun.DoesNotExist as exc:
        # Re-raise with the filters included to ease debugging.
        raise BootcampRun.DoesNotExist(
            "Could not find BootcampRun with the following filters: {}".format(
                run_filters
            )
        ) from exc
def create_run_enrollment(user, run, order=None):
    """
    Record a user's enrollment in a bootcamp run (creating or reactivating
    it) and, when the NovoEd integration is enabled, queue an async task to
    enroll them on NovoEd.

    Args:
        user (User): The user to enroll
        run (BootcampRun): The bootcamp run to enroll in
        order (ecommerce.models.Order or None): The order associated with this enrollment

    Returns:
        (BootcampRunEnrollment): enrollment object that was created/updated
    """
    enrollment, _ = BootcampRunEnrollment.objects.update_or_create(
        user=user,
        bootcamp_run=run,
        defaults={"active": True, "change_status": None},
    )
    # Mark the profile so the user may skip application steps; some users
    # have no profile, which is simply ignored.
    try:
        profile = user.profile
    except ObjectDoesNotExist:
        pass
    else:
        profile.can_skip_application_steps = True
        profile.save()
    if order:
        # Point the order's application at the run that was just enrolled.
        application = order.application
        application.bootcamp_run = run
        application.save()
    if features.is_enabled(features.NOVOED_INTEGRATION):
        novoed_stub = enrollment.bootcamp_run.novoed_course_stub
        if novoed_stub:
            novoed_tasks.enroll_users_in_novoed_course.delay(
                user_ids=[enrollment.user.id],
                novoed_course_stub=novoed_stub,
            )
    return enrollment
def create_run_enrollments(user, runs, order=None):
    """
    Create enrollment records for a user across several bootcamp runs,
    delegating each one to create_run_enrollment.

    Failures are logged and skipped so one bad run does not abort the rest.

    Args:
        user (User): The user to enroll
        runs (iterable of BootcampRun): The bootcamp runs to enroll in
        order (ecommerce.models.Order or None): The order associated with these enrollments

    Returns:
        (list of BootcampRunEnrollment): A list of enrollment objects that were successfully
            created
    """
    successful = []
    for bootcamp_run in runs:
        try:
            successful.append(create_run_enrollment(user, bootcamp_run, order))
        except:  # pylint: disable=bare-except
            # Deliberate best-effort: record the failure and continue.
            log.exception(
                "Failed to create/update enrollment record (user: %s, run: %s, order: %s)",
                user,
                bootcamp_run.bootcamp_run_id,
                order.id if order else None,
            )
    return successful
def defer_enrollment(
    user, from_bootcamp_run_id, to_bootcamp_run_id, order_id, force=False
):
    """
    Deactivates a user's existing enrollment in one bootcamp run and enrolls the user in another.

    Args:
        user (User): The enrolled user
        from_bootcamp_run_id (str): The bootcamp_run_id value of the currently enrolled BootcampRun
        to_bootcamp_run_id (str): The bootcamp_run_id value of the desired BootcampRun
        order_id (int): The order_id value for an user's order ID
        force (bool): If True, the deferral will be completed even if the current enrollment is inactive
            or the desired enrollment is in a different bootcamp

    Returns:
        (BootcampRunEnrollment, BootcampRunEnrollment): The deactivated enrollment paired with the
            new enrollment that was the target of the deferral

    Raises:
        ValidationError: If a deferral precondition fails — inactive source
            enrollment (without force), same target run, target run outside
            its enrollment period, different bootcamp (without force), or a
            missing order.
    """
    from_enrollment = BootcampRunEnrollment.objects.get(
        user=user, bootcamp_run__bootcamp_run_id=from_bootcamp_run_id
    )
    if not force and not from_enrollment.active:
        raise ValidationError(
            "Cannot defer from inactive enrollment (id: {}, run: {}, user: {}). "
            "Set force=True to defer anyway.".format(
                from_enrollment.id,
                from_enrollment.bootcamp_run.bootcamp_run_id,
                user.email,
            )
        )
    to_run = BootcampRun.objects.get(bootcamp_run_id=to_bootcamp_run_id)
    if from_enrollment.bootcamp_run == to_run:
        raise ValidationError(
            "Cannot defer to the same bootcamp run (run: {})".format(
                to_run.bootcamp_run_id
            )
        )
    if not to_run.is_not_beyond_enrollment:
        raise ValidationError(
            "Cannot defer to a bootcamp run that is outside of its enrollment period (run: {}).".format(
                to_run.bootcamp_run_id
            )
        )
    if not force and from_enrollment.bootcamp_run.bootcamp != to_run.bootcamp:
        raise ValidationError(
            "Cannot defer to a bootcamp run of a different bootcamp ('{}' -> '{}'). "
            "Set force=True to defer anyway.".format(
                from_enrollment.bootcamp_run.bootcamp.title, to_run.bootcamp.title
            )
        )
    try:
        # (Removed a commented-out, dead filter on application__bootcamp_run.)
        order = Order.objects.get(id=order_id, user=user)
    except ObjectDoesNotExist as exc:
        # Chain the underlying DoesNotExist so the traceback keeps its cause
        # (consistent with fetch_bootcamp_run's error handling).
        raise ValidationError(
            "Order (order: {}) does not exist for user (User: {}) against bootcamp run = (run: {})".format(
                order_id, user, from_bootcamp_run_id
            )
        ) from exc
    # Enroll in the target run first, then deactivate the source enrollment.
    to_enrollment = create_run_enrollment(user, to_run, order=order)
    from_enrollment = deactivate_run_enrollment(
        run_enrollment=from_enrollment, change_status=ENROLL_CHANGE_STATUS_DEFERRED
    )
    return from_enrollment, to_enrollment
| 37.729577 | 117 | 0.657011 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,190 | 0.387487 |
35643bae4260d8b9d3b851012048c6639e6d322b | 1,745 | py | Python | news/management/commands/fetch_planet.py | SIBSIND/PHPMYADMINWEBSITE | e2112f0fb43f042be551ecaadb05b1cc79ba5360 | [
"MIT"
] | 31 | 2015-05-26T23:13:06.000Z | 2022-03-10T12:03:33.000Z | news/management/commands/fetch_planet.py | SIBSIND/PHPMYADMINWEBSITE | e2112f0fb43f042be551ecaadb05b1cc79ba5360 | [
"MIT"
] | 136 | 2015-01-15T23:30:23.000Z | 2022-03-31T00:59:01.000Z | news/management/commands/fetch_planet.py | SIBSIND/PHPMYADMINWEBSITE | e2112f0fb43f042be551ecaadb05b1cc79ba5360 | [
"MIT"
] | 158 | 2015-01-15T23:25:26.000Z | 2022-02-09T01:47:20.000Z | # -*- coding: UTF-8 -*-
# vim: set expandtab sw=4 ts=4 sts=4:
#
# phpMyAdmin web site
#
# Copyright (C) 2008 - 2016 Michal Cihar <michal@cihar.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from dateutil import parser
from news.management.commands import FeedCommand
from news.models import Planet
URL = 'https://planet.phpmyadmin.net/rss20.xml'


class Command(FeedCommand):
    """Management command that imports phpMyAdmin planet RSS posts."""

    help = 'Imports planet posts'
    url = URL

    def process_feed(self, feed):
        """Store each feed entry as a Planet row keyed by its URL.

        New entries are created already populated with title/date; existing
        entries have those fields refreshed when the feed content changed.
        """
        for entry in feed.entries:
            params = {
                'title': entry.title,
                'date': parser.parse(entry.published),
            }
            planet, created = Planet.objects.get_or_create(
                url=entry.link,
                defaults=params
            )
            if created:
                # BUG FIX: this previously read `if not created: continue`,
                # which skipped existing rows (so they were never refreshed)
                # while running the sync loop only on rows just initialised
                # from `params` (so it never changed anything). Newly created
                # rows already carry `params` — skip those instead.
                continue
            modified = False
            for key in params:
                if getattr(planet, key) != params[key]:
                    setattr(planet, key, params[key])
                    modified = True
            if modified:
                planet.save()
| 32.314815 | 73 | 0.634384 | 728 | 0.417192 | 0 | 0 | 0 | 0 | 0 | 0 | 911 | 0.522063 |
356456d1a91cf05134c753d550fbf53265ec7f7c | 17,082 | py | Python | src/convert_dataset_video_to_mouth_img.py | iglaweb/HippoYD | da2c40be8017c43a7b7b6c029e2df30cf7d54932 | [
"Apache-2.0"
] | 7 | 2021-07-02T03:57:20.000Z | 2022-03-20T13:23:32.000Z | src/convert_dataset_video_to_mouth_img.py | filipul1s/HippoYD | da2c40be8017c43a7b7b6c029e2df30cf7d54932 | [
"Apache-2.0"
] | null | null | null | src/convert_dataset_video_to_mouth_img.py | filipul1s/HippoYD | da2c40be8017c43a7b7b6c029e2df30cf7d54932 | [
"Apache-2.0"
] | 3 | 2021-07-02T16:07:28.000Z | 2022-03-20T13:23:33.000Z | import collections
import csv
import os
import sys
from enum import Enum
from pathlib import Path
# adapt paths for jupyter
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path:
sys.path.append(module_path)
import face_alignment
from yawn_train.src.blazeface_detector import BlazeFaceDetector
import cv2
import dlib
import numpy as np
from imutils import face_utils
from yawn_train.src.ssd_face_detector import SSDFaceDetector
# define one constants, for mouth aspect ratio to indicate open mouth
from yawn_train.src import download_utils, detect_utils, inference_utils
from yawn_train.src.model_config import MOUTH_AR_THRESH, MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT
class ImageResult:
    """Outcome of handling a single video frame."""

    def __init__(self, is_processed, is_opened_image):
        # Whether the frame was handled at all.
        self.is_processed = is_processed
        # Whether the frame was classified as an opened mouth.
        self.is_opened_image = is_opened_image

    @staticmethod
    def not_processed():
        """Factory for a frame that could not be processed."""
        return ImageResult(is_processed=False, is_opened_image=False)
class VideoResult:
    """Per-video processing statistics.

    The three detector counters track hits per backend (dlib / Caffe SSD /
    BlazeFace — semantics follow the field names; confirm at call sites).
    """

    def __init__(self, total_frames, dlib_counter, caffe_counter, blazeface_counter, opened_counter, closed_counter):
        self.total_frames = total_frames
        self.dlib_counter = dlib_counter
        self.caffe_counter = caffe_counter
        self.blazeface_counter = blazeface_counter
        self.opened_counter = opened_counter
        self.closed_counter = closed_counter

    @staticmethod
    def empty():
        """Factory for a zeroed statistics record."""
        return VideoResult(0, 0, 0, 0, 0, 0)
class FACE_TYPE(Enum):
    """Supported face-detector backends."""
    BLAZEFACE = 0
    DLIB = 1
    CAFFE = 2

    @classmethod
    def has_value(cls, value):
        """Return True if *value* is the numeric value of some member."""
        return value in cls._value2member_map_

    def get_next(self):
        """Return the member following this one, wrapping to the first."""
        candidate = self.value + 1
        return FACE_TYPE(candidate) if FACE_TYPE.has_value(candidate) else FACE_TYPE(0)
class LNDMR_TYPE(Enum):
    """Facial-landmark backend selector (dlib vs face_alignment)."""
    DLIB = 0
    FACEALIGN = 1
# --- Output dataset layout -------------------------------------------------
COLOR_IMG = False  # keep crops in color instead of grayscale
MOUTH_FOLDER = "./mouth_state_new10" + ("_color" if COLOR_IMG else "")
MOUTH_OPENED_FOLDER = os.path.join(MOUTH_FOLDER, 'opened')
MOUTH_CLOSED_FOLDER = os.path.join(MOUTH_FOLDER, 'closed')
TEMP_FOLDER = "./temp"  # downloaded model files are cached here
# https://ieee-dataport.org/open-access/yawdd-yawning-detection-dataset#files
YAWDD_DATASET_FOLDER = "./YawDD dataset"
CSV_STATS = 'video_stat.csv'
# Global counters accumulated across all processed videos.
read_mouth_open_counter = 0
read_mouth_close_counter = 0
saved_mouth_open_counter = 0
saved_mouth_close_counter = 0
# Save every Nth encountered frame per class (closed frames are subsampled
# more aggressively to balance the dataset).
SAMPLE_STEP_IMG_OPENED = 1
SAMPLE_STEP_IMG_CLOSED = 4
# Index range of the mouth points within the 68-point dlib landmark model.
(mStart, mEnd) = face_utils.FACIAL_LANDMARKS_IDXS["mouth"]
Path(MOUTH_FOLDER).mkdir(parents=True, exist_ok=True)
Path(MOUTH_OPENED_FOLDER).mkdir(parents=True, exist_ok=True)
Path(MOUTH_CLOSED_FOLDER).mkdir(parents=True, exist_ok=True)
dlib_landmarks_file = download_utils.download_and_unpack_dlib_68_landmarks(TEMP_FOLDER)
# dlib predictor for 68pts, mouth
predictor = dlib.shape_predictor(dlib_landmarks_file)
# initialize dlib's face detector (HOG-based)
detector = dlib.get_frontal_face_detector()
caffe_weights, caffe_config = download_utils.download_caffe(TEMP_FOLDER)
# Reads the network model stored in Caffe framework's format.
face_model = cv2.dnn.readNetFromCaffe(caffe_config, caffe_weights)
ssd_face_detector = SSDFaceDetector(face_model)
# TensorFlow is imported here (not at the top) so the heavy import happens
# only after the lighter detectors are ready.
import tensorflow as tf
bf_model = download_utils.download_blazeface(TEMP_FOLDER)
blazeface_tf = tf.keras.models.load_model(bf_model, compile=False)
blazefaceDetector = BlazeFaceDetector(blazeface_tf)
# img = cv2.imread(
#     '/Users/igla/Desktop/Screenshot 2021-01-14 at 12.29.25.png', cv2.IMREAD_GRAYSCALE)
# ultrafacedetector = UltraFaceDetector("/Users/igla/Downloads/version-RFB-320_simplified.onnx")
"""
Take mouth ratio only from dlib rect. Use dnn frame for output
"""
def should_process_video(video_name: str) -> bool:
    """Decide whether a YawDD clip should be processed.

    Sunglasses clips are rejected because their facial landmarks are
    unreliable; otherwise only Normal/Talking/Yawning clips are accepted.
    """
    if video_name.rfind('SunGlasses') != -1:
        # inaccurate landmarks in sunglasses
        print('Video contains sunglasses. Skip', video_name)
        return False
    # endswith accepts a tuple of suffixes: one call covers all accepted kinds
    return video_name.endswith(('-Normal.avi', '-Talking.avi', '-Yawning.avi'))
# Named slices into the 68-point landmark array; `color` is an RGBA tuple
# (presumably for plotting — only the slices are used below).
pred_type = collections.namedtuple('prediction_type', ['slice', 'color'])
pred_types = {'face': pred_type(slice(0, 17), (0.682, 0.780, 0.909, 0.5)),
              'eyebrow1': pred_type(slice(17, 22), (1.0, 0.498, 0.055, 0.4)),
              'eyebrow2': pred_type(slice(22, 27), (1.0, 0.498, 0.055, 0.4)),
              'nose': pred_type(slice(27, 31), (0.345, 0.239, 0.443, 0.4)),
              'nostril': pred_type(slice(31, 36), (0.345, 0.239, 0.443, 0.4)),
              'eye1': pred_type(slice(36, 42), (0.596, 0.875, 0.541, 0.3)),
              'eye2': pred_type(slice(42, 48), (0.596, 0.875, 0.541, 0.3)),
              'lips': pred_type(slice(48, 60), (0.596, 0.875, 0.541, 0.3)),
              'teeth': pred_type(slice(60, 68), (0.596, 0.875, 0.541, 0.4))
              }
# face_alignment backend: 'sfd' detector, 3D landmarks, CPU inference.
face_detector = 'sfd'
# NOTE(review): face_detector_kwargs is defined but never passed to
# FaceAlignment below — confirm whether the threshold was meant to apply.
face_detector_kwargs = {
    "filter_threshold": 0.8
}
fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._3D, flip_input=True, device='cpu',
                                  face_detector=face_detector)
def get_mouth_opened(frame, start_x, start_y, end_x, end_y) -> tuple:
    """Classify the mouth state inside the given face rectangle.

    Computes a mouth aspect ratio (MAR) twice — from the dlib 68-point
    predictor and from the face_alignment 3D model — and cross-checks them.
    When both agree, the dlib ratio is returned; otherwise the face_alignment
    verdict and ratio win (treated here as the more accurate model).

    :param frame: image the face was detected in (grayscale or BGR)
    :param start_x, start_y, end_x, end_y: face bounding box in `frame`
    :return: (is_opened: bool, mouth_ratio: float, source: LNDMR_TYPE)
    """
    # dlib landmarks over the full frame, restricted to the face rectangle
    mouth_shape = predictor(frame, dlib.rectangle(start_x, start_y, end_x, end_y))
    mouth_shape = face_utils.shape_to_np(mouth_shape)
    mouth_arr = mouth_shape[mStart:mEnd]
    mouth_mar_dlib = detect_utils.mouth_aspect_ratio(mouth_arr)
    mouth_mar_dlib = round(mouth_mar_dlib, 2)
    # print(mouth_mar_dlib)
    face_roi_dlib = frame[start_y:end_y, start_x:end_x]
    height_frame, width_frame = face_roi_dlib.shape[:2]
    # swapping the read and green channels
    # https://stackoverflow.com/a/56933474/1461625
    # face_alignment gets the whole crop as one candidate face box
    detected_faces = []
    detected_faces.append([0, 0, width_frame, height_frame])
    preds = fa.get_landmarks_from_image(face_roi_dlib, detected_faces)[-1]
    pred_type = pred_types['lips']
    X = preds[pred_type.slice, 0]
    Y = preds[pred_type.slice, 1]
    mouth_shape_3ddfa = []
    for x, y in zip(X, Y):
        mouth_shape_3ddfa.append((x, y))
    # shape = []
    # for idx, pred_type in enumerate(pred_types.values()):
    #    X = preds[pred_type.slice, 0]
    #    Y = preds[pred_type.slice, 1]
    #    for x, y in zip(X, Y):
    #        shape.append((x, y))
    mouth_mar_3ddfa = detect_utils.mouth_aspect_ratio(mouth_shape_3ddfa)
    mouth_mar_3ddfa = round(mouth_mar_3ddfa, 2)
    # print(mouth_mar_3ddfa)
    # 0.75 threshold for the 3D model; MOUTH_AR_THRESH for dlib
    is_opened_mouth_3ddfa = mouth_mar_3ddfa >= 0.75
    is_opened_mouth_dlib = mouth_mar_dlib >= MOUTH_AR_THRESH
    if is_opened_mouth_3ddfa == is_opened_mouth_dlib:
        return is_opened_mouth_3ddfa, mouth_mar_dlib, LNDMR_TYPE.DLIB  # correct, same as dlib, return dlib ratio
    else:
        return is_opened_mouth_3ddfa, mouth_mar_3ddfa, LNDMR_TYPE.FACEALIGN  # return 3ddfa, as it's more accurate
def recognize_image(video_id: int, video_path: str, frame, frame_id: int, face_type: FACE_TYPE, face_rect_dlib,
                    face_rect_dnn=None) -> ImageResult:
    """Crop the detected face, decide mouth state, and save the crop to disk.

    The dlib rectangle drives the mouth-ratio decision (get_mouth_opened),
    but when a DNN rectangle is supplied its crop is the one written out.
    Saved files are thinned by SAMPLE_STEP_IMG_OPENED/CLOSED; opened-mouth
    frames from Normal/Talking clips are dropped (only yawns count as opened).

    :return: ImageResult telling whether a crop was saved and its label
    """
    (start_x, start_y, end_x, end_y) = face_rect_dlib
    start_x = max(start_x, 0)
    start_y = max(start_y, 0)
    if start_x >= end_x or start_y >= end_y:
        print('Invalid detection. Skip', face_rect_dlib)
        return ImageResult.not_processed()
    face_roi_dlib = frame[start_y:end_y, start_x:end_x]
    if face_roi_dlib is None:
        print('Cropped face is None. Skip')
        return ImageResult.not_processed()
    height_frame, width_frame = face_roi_dlib.shape[:2]
    if height_frame < 50 or width_frame < 50:  # some images have invalid dlib face rect
        print('Too small face. Skip')
        return ImageResult.not_processed()
    # https://pyimagesearch.com/wp-content/uploads/2017/04/facial_landmarks_68markup.jpg
    is_mouth_opened, open_mouth_ratio, lndmk_type = get_mouth_opened(frame, start_x, start_y, end_x, end_y)
    # skip frames in normal and talking, containing opened mouth (we detect only yawn)
    video_name = os.path.basename(video_path)
    is_video_no_yawn = video_name.endswith('-Normal.avi') or \
                       video_name.endswith('-Talking.avi')
    if is_mouth_opened and is_video_no_yawn:
        # some videos may contain opened mouth, skip these situations
        return ImageResult.not_processed()
    # file-name prefix records which detector supplied the saved rectangle
    prefix = 'dlib'
    target_face_roi = None
    if face_rect_dnn is not None:
        (start_x, start_y, end_x, end_y) = face_rect_dnn
        start_x = max(start_x, 0)
        start_y = max(start_y, 0)
        if start_x < end_x and start_y < end_y:
            face_roi_dnn = frame[start_y:end_y, start_x:end_x]
            target_face_roi = face_roi_dnn
            prefix = face_type.name.lower()
    if target_face_roi is None:
        # fall back to the dlib crop when the DNN rect is absent or degenerate
        target_face_roi = face_roi_dlib
    if len(frame.shape) == 2 or COLOR_IMG:  # single channel
        gray_img = target_face_roi
    else:
        gray_img = cv2.cvtColor(target_face_roi, cv2.COLOR_BGR2GRAY)
    gray_img = detect_utils.resize_img(gray_img, MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT)
    lndmk_type_name = lndmk_type.name.lower()
    if is_mouth_opened:
        global read_mouth_open_counter
        read_mouth_open_counter = read_mouth_open_counter + 1
        # reduce img count
        if read_mouth_open_counter % SAMPLE_STEP_IMG_OPENED != 0:
            return ImageResult.not_processed()
        global saved_mouth_open_counter
        saved_mouth_open_counter = saved_mouth_open_counter + 1
        file_name = os.path.join(MOUTH_OPENED_FOLDER,
                                 f'{read_mouth_open_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg')
        cv2.imwrite(file_name, gray_img)
        return ImageResult(is_processed=True, is_opened_image=True)
    else:
        global read_mouth_close_counter
        read_mouth_close_counter = read_mouth_close_counter + 1
        # reduce img count
        if read_mouth_close_counter % SAMPLE_STEP_IMG_CLOSED != 0:
            return ImageResult.not_processed()
        global saved_mouth_close_counter
        saved_mouth_close_counter = saved_mouth_close_counter + 1
        file_name = os.path.join(MOUTH_CLOSED_FOLDER,
                                 f'{read_mouth_close_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg')
        cv2.imwrite(file_name, gray_img)
        return ImageResult(is_processed=True, is_opened_image=False)
def detect_faces_complex(frame):
    """Run detectors in priority order: dlib HOG, then Caffe SSD, then BlazeFace.

    Returns (face_list, FACE_TYPE member) for the first detector that finds
    at least one face, or ([], None) when none of them fires.
    """
    grayscale = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    dlib_faces = inference_utils.detect_face_dlib(detector, grayscale)
    if len(dlib_faces) > 0:
        return dlib_faces, FACE_TYPE.DLIB
    caffe_faces = ssd_face_detector.detect_face(frame)
    if len(caffe_faces) > 0:
        return caffe_faces, FACE_TYPE.CAFFE
    blaze_faces = blazefaceDetector.detect_face(frame)
    if len(blaze_faces) > 0:
        return blaze_faces, FACE_TYPE.BLAZEFACE
    return [], None
def process_video(video_id, video_path) -> VideoResult:
    """Run face detection + mouth-state extraction over every frame of a video.

    Saved crops are rotated round-robin over the detectors
    (DLIB -> CAFFE -> BLAZEFACE) so the dataset mixes rectangle sources.

    :param video_id: numeric id embedded in the output file names
    :param video_path: path to the .avi clip to process
    :return: VideoResult with frame count, per-detector and per-label counters
    """
    video_name = os.path.basename(video_path)
    if should_process_video(video_name) is False:
        print('Video should not be processed', video_path)
        return VideoResult.empty()
    cap = cv2.VideoCapture(video_path)
    if cap.isOpened() is False:
        print('Video is not opened', video_path)
        return VideoResult.empty()
    face_dlib_counter = 0
    face_caffe_counter = 0
    face_blazeface_counter = 0
    opened_img_counter = 0
    closed_img_counter = 0
    frame_id = 0
    face_type = FACE_TYPE.DLIB
    while True:
        ret, frame = cap.read()
        if ret is False:
            break
        if frame is None:
            print('No images left in', video_path)
            break
        if np.shape(frame) == ():
            print('Empty image. Skip')
            continue
        frame_id = frame_id + 1
        face_list, f_type = detect_faces_complex(frame)
        if len(face_list) == 0:
            # skip images not recognized by dlib or other detectors
            continue
        gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        recognize_frame = frame if COLOR_IMG else gray_frame
        if face_type == FACE_TYPE.DLIB:
            image_result = recognize_image(video_id, video_path, recognize_frame, frame_id, face_type,
                                           face_list[0])
            is_processed = image_result.is_processed
            if is_processed:
                face_type = face_type.get_next()
                face_dlib_counter = face_dlib_counter + 1
                if image_result.is_opened_image:
                    opened_img_counter = opened_img_counter + 1
                else:
                    closed_img_counter = closed_img_counter + 1
            continue
        if face_type == FACE_TYPE.CAFFE:
            face_list_dnn = ssd_face_detector.detect_face(frame)
            if len(face_list_dnn) == 0:
                face_type = face_type.get_next()
                print('Face not found with Caffe DNN')
                continue
            image_result = recognize_image(video_id, video_path, recognize_frame, frame_id, face_type,
                                           face_list[0],
                                           face_list_dnn[0])
            is_processed = image_result.is_processed
            if is_processed:
                face_type = face_type.get_next()
                face_caffe_counter = face_caffe_counter + 1
                if image_result.is_opened_image:
                    opened_img_counter = opened_img_counter + 1
                else:
                    closed_img_counter = closed_img_counter + 1
            # BUGFIX: without this `continue` a successfully processed CAFFE
            # frame fell through into the BLAZEFACE branch below (face_type had
            # just been rotated to BLAZEFACE) and was processed/counted twice.
            continue
        if face_type == FACE_TYPE.BLAZEFACE:
            face_list_dnn = blazefaceDetector.detect_face(frame)
            if len(face_list_dnn) == 0:
                face_type = face_type.get_next()
                print('Face not found with Blazeface')
                continue
            image_result = recognize_image(video_id, video_path, recognize_frame, frame_id, face_type,
                                           face_list[0],
                                           face_list_dnn[0])
            is_processed = image_result.is_processed
            if is_processed:
                face_type = face_type.get_next()
                face_blazeface_counter = face_blazeface_counter + 1
                if image_result.is_opened_image:
                    opened_img_counter = opened_img_counter + 1
                else:
                    closed_img_counter = closed_img_counter + 1
    print(
        f"Total images: {face_dlib_counter + face_caffe_counter + face_blazeface_counter}"
        f', dlib: {face_dlib_counter} images'
        f', blazeface: {face_blazeface_counter} images'
        f', caffe: {face_caffe_counter} images in video {video_name}'
    )
    cap.release()
    # The function is not implemented. Rebuild the library with Windows, GTK+ 2.x or Cocoa support. If you are on
    # Ubuntu or Debian, install libgtk2.0-dev and pkg-config, then re-run cmake or configure script in function
    # 'cvDestroyAllWindows'
    try:
        cv2.destroyAllWindows()
    except Exception:
        print('No destroy windows')
    # BUGFIX: arguments must follow VideoResult(total_frames, dlib, caffe,
    # blazeface, opened, closed); the caffe/blazeface counters were swapped.
    return VideoResult(
        frame_id,
        face_dlib_counter,
        face_caffe_counter,
        face_blazeface_counter,
        opened_img_counter,
        closed_img_counter
    )
def write_csv_stat(filename, video_count, video_result: VideoResult):
    """Append one per-video statistics row to the CSV report.

    Creates the file with a header row on first use, then appends.

    :param filename: path of the processed video (written into the row)
    :param video_count: running index of the video
    :param video_result: counters gathered by process_video
    """
    video_stat_dict_path = os.path.join(MOUTH_FOLDER, CSV_STATS)
    if os.path.isfile(video_stat_dict_path) is False:
        # BUGFIX: the csv module requires newline='' on the file object,
        # otherwise extra blank rows appear on Windows.
        with open(video_stat_dict_path, 'w', newline='') as f:
            w = csv.writer(f)
            w.writerow(['Video id', 'File name', 'Total frames', 'Image saved', 'Opened img', 'Closed img'])
    # mode 'a' append
    with open(video_stat_dict_path, 'a', newline='') as f:
        w = csv.writer(f)
        img_counter = video_result.caffe_counter + video_result.dlib_counter + video_result.blazeface_counter
        w.writerow((
            video_count,
            filename,
            video_result.total_frames,
            img_counter,
            video_result.opened_counter,
            video_result.closed_counter
        ))
def process_videos():
    """Walk the YawDD dataset folder and process every .avi clip found.

    Per-video statistics are appended to the CSV report; global saved-image
    counters (updated inside recognize_image) are summarized at the end.
    """
    video_count = 0
    total_frames = 0
    for root, dirs, files in os.walk(YAWDD_DATASET_FOLDER):
        for file in files:
            if file.endswith(".avi"):
                video_count = video_count + 1
                file_name = os.path.join(root, file)
                print('Current video', file_name)
                video_result = process_video(video_count, file_name)
                total_frames = total_frames + video_result.total_frames
                write_csv_stat(file_name, video_count, video_result)
    print(f'Videos processed: {video_count}')
    print(f'Total read images: {total_frames}')
    print(f'Total saved images: {saved_mouth_open_counter + saved_mouth_close_counter}')
    print(f'Saved opened mouth images: {saved_mouth_open_counter}')
    print(f'Saved closed mouth images: {saved_mouth_close_counter}')


if __name__ == '__main__':
    process_videos()
| 37.625551 | 135 | 0.669652 | 1,098 | 0.064278 | 0 | 0 | 244 | 0.014284 | 0 | 0 | 3,048 | 0.178433 |
35665e1b39e67d688ac135c0ce7cb34d35d57e66 | 1,223 | py | Python | homeassistant/components/launch_library/diagnostics.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 30,023 | 2016-04-13T10:17:53.000Z | 2020-03-02T12:56:31.000Z | homeassistant/components/launch_library/diagnostics.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 24,710 | 2016-04-13T08:27:26.000Z | 2020-03-02T12:59:13.000Z | homeassistant/components/launch_library/diagnostics.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 11,956 | 2016-04-13T18:42:31.000Z | 2020-03-02T09:32:12.000Z | """Diagnostics support for Launch Library."""
from __future__ import annotations
from typing import Any
from pylaunches.objects.event import Event
from pylaunches.objects.launch import Launch
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import LaunchLibraryData
from .const import DOMAIN
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant,
    entry: ConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator: DataUpdateCoordinator[LaunchLibraryData] = hass.data[DOMAIN]
    if coordinator.data is None:
        return {}

    def _head(items: list[Launch | Event]) -> dict[str, Any] | None:
        # raw payload of the first item, or None when the list is empty
        return items[0].raw_data_contents if items else None

    starship = coordinator.data["starship_events"].upcoming
    return {
        "next_launch": _head(coordinator.data["upcoming_launches"]),
        "starship_launch": _head(starship.launches),
        "starship_event": _head(starship.events),
    }
| 29.829268 | 77 | 0.72036 | 0 | 0 | 0 | 0 | 0 | 0 | 793 | 0.648406 | 188 | 0.15372 |
3567e722e33bfee718b3bdecb716ef40a5ef9cda | 2,894 | py | Python | py_tests/test_vision_pipeline_manager.py | machine2learn/mlpiot.base | da0b77fccbb0e42d1ddbb6dbc490313433dc7575 | [
"Apache-2.0"
] | 1 | 2021-03-30T20:49:54.000Z | 2021-03-30T20:49:54.000Z | py_tests/test_vision_pipeline_manager.py | machine2learn/mlpiot.base | da0b77fccbb0e42d1ddbb6dbc490313433dc7575 | [
"Apache-2.0"
] | null | null | null | py_tests/test_vision_pipeline_manager.py | machine2learn/mlpiot.base | da0b77fccbb0e42d1ddbb6dbc490313433dc7575 | [
"Apache-2.0"
] | null | null | null | """Tests for mlpiot.base.vision_pipeline_manager"""
import unittest
from mlpiot.base.action_executor import ActionExecutor
from mlpiot.base.event_extractor import EventExtractor
from mlpiot.base.scene_descriptor import SceneDescriptor
from mlpiot.base.trainer import Trainer
from mlpiot.base.vision_pipeline_manager import VisionPipelineManager
from mlpiot.proto import \
Image, ImageWithHelpers, \
VisionPipelineData, VisionPipelineManagerMetadata
class DummySceneDescriptor(SceneDescriptor):
    """No-op SceneDescriptor stub used to exercise the pipeline wiring."""
    def initialize(self, environ):
        pass

    def prepare_for_describing(self, output_metadata):
        pass

    def describe_scene(self, input_image, output_scene_description):
        pass
class DummyEventExtractor(EventExtractor):
    """No-op EventExtractor stub used to exercise the pipeline wiring."""
    def initialize(self, environ):
        pass

    def prepare_for_event_extraction(self, output_metadata):
        pass

    def extract_events(
            self, input_scene_description, output_event_extraction):
        pass
class DummyActionExecutor(ActionExecutor):
    """No-op ActionExecutor stub used to exercise the pipeline wiring."""
    def initialize(self, environ):
        pass

    def prepare_for_action_execution(self, output_metadata):
        pass

    def execute_action(
            self, input_event_extraction, output_action_execution):
        pass
class DummyTrainer(Trainer):
    """No-op Trainer stub used to exercise the pipeline wiring."""
    def initialize(self, environ):
        pass

    def prepare_for_training(self, output_metadata):
        pass

    def train(self, dataset, validation_dataset=None):
        pass
class TestVisionPipelineManager(unittest.TestCase):
    """Test mlpiot.base.vision_pipeline_manager.VisionPipelineManager"""

    def test_smoke(self):
        "A simple test to check if everything is importable"
        # wire a manager from the no-op stubs defined above
        dummy_scene_descriptor = DummySceneDescriptor()
        dummy_event_extractor = DummyEventExtractor()
        dummy_action_executor = DummyActionExecutor()
        dummy_trainer = DummyTrainer()
        vision_pipeline_manager = VisionPipelineManager(
            dummy_scene_descriptor,
            dummy_event_extractor,
            [dummy_action_executor],
            dummy_trainer)
        vpmm = VisionPipelineManagerMetadata()
        vision_pipeline_manager.initialize({}, vpmm)
        # run one minimal 1x1x1 image through the full pipeline
        with vision_pipeline_manager.\
                prepare_for_running_pipeline() as pipeline_runner:
            input_image_proto = Image()
            input_image_proto.height = 1
            input_image_proto.width = 1
            input_image_proto.channels = 1
            input_image = ImageWithHelpers(input_image_proto)
            vision_pipeline_data = VisionPipelineData()
            vision_pipeline_data.id = 1001
            pipeline_runner.run_pipeline(input_image, vision_pipeline_data)
        # feed the produced pipeline data back into the trainer
        initialized_trainer = vision_pipeline_manager.managed_trainer
        with initialized_trainer.prepare_for_training() as ready_runner:
            ready_runner.train([vision_pipeline_data])
| 29.232323 | 75 | 0.717346 | 2,420 | 0.836213 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 0.059088 |
3567ef1a903012178ca2379e19f67a46e3d91e09 | 1,176 | py | Python | day04/code2.py | jfdahl/Advent-of-Code-2019 | 3e5100e77eddc09f361c07a3c860a0fd97ecaa78 | [
"MIT"
] | null | null | null | day04/code2.py | jfdahl/Advent-of-Code-2019 | 3e5100e77eddc09f361c07a3c860a0fd97ecaa78 | [
"MIT"
] | null | null | null | day04/code2.py | jfdahl/Advent-of-Code-2019 | 3e5100e77eddc09f361c07a3c860a0fd97ecaa78 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import re
import numpy as np
start = 168630
stop = 718098
double = re.compile(r'(\d)\1')
triple = re.compile(r'(\d)\1\1')
def is_decreasing(num):
    """Return True if any adjacent digit pair in num's decimal form decreases."""
    digits = str(num)
    return any(left > right for left, right in zip(digits, digits[1:]))


v_is_decreasing = np.vectorize(is_decreasing)
def has_doubles(num):
    """Return True if num's decimal form contains two equal adjacent digits."""
    return double.search(str(num)) is not None


v_has_doubles = np.vectorize(has_doubles)
def remove_triples(num):
    """Return True if num has a repeated-digit pair not swallowed by a triple.

    A digit counts only if it appears as a double somewhere while never
    appearing as a (3+)-run of the same digit.

    BUGFIX: the original returned int(num) for a match and False otherwise,
    so np.vectorize could produce an *integer* array; using that array as
    `data[v_remove_triples(data)]` then performs fancy indexing (raising
    IndexError for out-of-range values) instead of boolean masking, and the
    dtype even depended on the first element evaluated. Returning a bool
    keeps the vectorized result a valid boolean mask in all cases.
    """
    num = str(num)
    dbs = set(double.findall(num))
    tps = set(triple.findall(num))
    return bool(dbs - tps)


v_remove_triples = np.vectorize(remove_triples)
# NOTE(review): the masks below rely on the vectorized predicates returning
# boolean arrays — verify v_remove_triples yields bools, not ints.
data = np.arange(start, stop)  # Create the initial data set
data = data[~v_is_decreasing(data)]  # Remove the items containing decreasing sequences
data = data[v_has_doubles(data)]  # Remove the items not containing doubles
print(f'Part 1: {len(data)}')
data = data[v_remove_triples(data)]  # Remove the items containing triplets without doubles
print(f'Part 2: {len(data)}')
| 27.348837 | 94 | 0.661565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 260 | 0.221088 |
35680159b6dd0a0218a24c732bdcc58c5848a6a2 | 4,170 | py | Python | layint_api/models/stats_history_inner.py | LayeredInsight/layint_api_python | a5c9a5b24098bd823c5102b7ab9e4745432f19b4 | [
"Apache-2.0"
] | null | null | null | layint_api/models/stats_history_inner.py | LayeredInsight/layint_api_python | a5c9a5b24098bd823c5102b7ab9e4745432f19b4 | [
"Apache-2.0"
] | null | null | null | layint_api/models/stats_history_inner.py | LayeredInsight/layint_api_python | a5c9a5b24098bd823c5102b7ab9e4745432f19b4 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Layered Insight Assessment, Compliance, Witness & Control
LI Assessment & Compliance performs static vulnerability analysis, license and package compliance. LI Witness provides deep insight and analytics into containerized applications. Control provides dynamic runtime security and analytics for containerized applications. You can find out more about the Layered Insight Suite at [http://layeredinsight.com](http://layeredinsight.com).
OpenAPI spec version: 0.10
Contact: help@layeredinsight.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class StatsHistoryInner(object):
    """Swagger-generated model: one dated vulnerability-statistics entry.

    `swagger_types` maps attribute names to their Swagger types and
    `attribute_map` maps them to the JSON keys used on the wire.
    """

    swagger_types = {
        'date': 'str',
        'vulnerabilites': 'list[StatsHistoryInnerVulnerabilites]'
    }

    attribute_map = {
        'date': 'Date',
        'vulnerabilites': 'Vulnerabilites'
    }

    def __init__(self, date=None, vulnerabilites=None):
        """Create a StatsHistoryInner; unset fields stay None."""
        self._date = None
        self._vulnerabilites = None
        if date is not None:
            self.date = date
        if vulnerabilites is not None:
            self.vulnerabilites = vulnerabilites

    @property
    def date(self):
        """str: the date of this entry."""
        return self._date

    @date.setter
    def date(self, date):
        self._date = date

    @property
    def vulnerabilites(self):
        """list[StatsHistoryInnerVulnerabilites]: vulnerability counts."""
        return self._vulnerabilites

    @vulnerabilites.setter
    def vulnerabilites(self, vulnerabilites):
        self._vulnerabilites = vulnerabilites

    def to_dict(self):
        """Return the model properties as a dict, recursing into sub-models."""
        def _convert(value):
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in value.items()}
            return value

        return {name: _convert(getattr(self, name))
                for name in self.swagger_types}

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Equal when the other object is the same type with equal attributes."""
        if not isinstance(other, StatsHistoryInner):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inequality is the negation of __eq__."""
        return not self == other
| 27.8 | 383 | 0.586091 | 3,492 | 0.83741 | 0 | 0 | 1,022 | 0.245084 | 0 | 0 | 2,188 | 0.5247 |
3568fe33f1a70bcbcc1a0632feb3804b638fd10e | 6,056 | py | Python | powerapi/database/influxdb2.py | jorgermurillo/powerapi | 0636a693db2a0f3491ffd45a623de98a563fbd4d | [
"BSD-3-Clause"
] | null | null | null | powerapi/database/influxdb2.py | jorgermurillo/powerapi | 0636a693db2a0f3491ffd45a623de98a563fbd4d | [
"BSD-3-Clause"
] | null | null | null | powerapi/database/influxdb2.py | jorgermurillo/powerapi | 0636a693db2a0f3491ffd45a623de98a563fbd4d | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2018, INRIA
# Copyright (c) 2018, University of Lille
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
try:
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS
#from influxdb import InfluxDBClient
from requests.exceptions import ConnectionError
except ImportError:
logging.getLogger().info("influx_client is not installed.")
from typing import List
from powerapi.database import BaseDB, DBError
from powerapi.report import Report
from powerapi.report_model import ReportModel
class CantConnectToInfluxDB2Exception(DBError):
    """Raised when the InfluxDB v2 server cannot be reached during connect()."""
    pass
class InfluxDB2(BaseDB):
    """
    InfluxDB v2 database adapter, derived from BaseDB.

    Allow to handle a InfluxDB database in reading or writing.
    """

    def __init__(self, uri: str, port: int, token: str, org: str, bucket: str):
        """
        :param str uri: hostname of the InfluxDB server
        :param int port: port of the InfluxDB server
        :param str token: access token needed to connect to the influxdb instance
        :param str org: organization that holds the data
        :param str bucket: bucket where the data is going to be stored
        """
        BaseDB.__init__(self)
        self.uri = uri
        self.port = port
        self.complete_url = "http://%s:%s" % (self.uri, str(self.port))
        self.token = token
        self.org = org
        self.org_id = None  # resolved from the org name in connect()
        self.bucket = bucket
        self.client = None
        self.write_api = None

    def _ping_client(self):
        # Newer influxdb clients expose health(); fall back to a raw ping.
        if hasattr(self.client, 'health'):
            self.client.health()
        else:
            self.client.request(url="ping", method='GET', expected_response_code=204)

    def connect(self):
        """
        Override from BaseDB.

        Create the connection to the influxdb database with the current
        configuration (hostname/port/org), then check if the connection has
        been created without failure. Creates the target bucket if missing.

        :raises CantConnectToInfluxDB2Exception: if the server is unreachable
        """
        # close connection if reload
        if self.client is not None:
            self.client.close()
        self.client = InfluxDBClient(url=self.complete_url, token=self.token, org=self.org)
        # retrieve the org_id matching the configured org name
        org_api = self.client.organizations_api()
        for org_response in org_api.find_organizations():
            if org_response.name == self.org:
                self.org_id = org_response.id
        self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
        try:
            self._ping_client()
        except ConnectionError:
            raise CantConnectToInfluxDB2Exception('connexion error')
        bucket_api = self.client.buckets_api()
        if bucket_api.find_bucket_by_name(self.bucket) is None:
            # The bucket does not exist yet: create it (org_id required here).
            bucket_api.create_bucket(bucket_name=self.bucket, org_id=self.org_id)

    def save(self, report: Report, report_model: ReportModel):
        """
        Override from BaseDB.

        :param report: Report to save
        :param report_model: ReportModel used to serialize the report
        """
        data = report_model.to_influxdb(report.serialize())
        # BUGFIX: the original used `this.bucket`, which is a NameError at
        # runtime — Python uses `self`, not `this`.
        self.write_api.write(bucket=self.bucket, record=data)

    def save_many(self, reports: List[Report], report_model: ReportModel):
        """
        Save a batch of data.

        :param reports: Batch of data.
        :param report_model: ReportModel used to serialize each report
        """
        data_list = [report_model.to_influxdb(r.serialize()) for r in reports]
        self.write_api.write(bucket=self.bucket, record=data_list)
| 37.153374 | 91 | 0.662483 | 4,020 | 0.663804 | 0 | 0 | 0 | 0 | 0 | 0 | 3,551 | 0.586361 |
356a02e72caa007b811a18656a6e2973c6b20aee | 3,700 | py | Python | client/verta/verta/_tracking/organization.py | coutureai/CoutureModelDB | a799c0e3d6239bf79ac1462a936742af03492607 | [
"Apache-2.0"
] | null | null | null | client/verta/verta/_tracking/organization.py | coutureai/CoutureModelDB | a799c0e3d6239bf79ac1462a936742af03492607 | [
"Apache-2.0"
] | null | null | null | client/verta/verta/_tracking/organization.py | coutureai/CoutureModelDB | a799c0e3d6239bf79ac1462a936742af03492607 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from .._protos.public.uac import Organization_pb2 as _Organization
from .._protos.public.common import CommonService_pb2 as _CommonCommonService
class CollaboratorType:
    """Bundle of collaborator-permission settings for creating an Organization.

    Each attribute holds a permission name such as "READ_ONLY" or
    "READ_WRITE", or None to fall back to the backend default.
    """

    def __init__(self, global_collaborator_type=None, default_repo_collaborator_type=None,
                 default_endpoint_collaborator_type=None, default_dataset_collaborator_type=None):
        # plain attribute assignments; None means "use server default"
        self.default_dataset_collaborator_type = default_dataset_collaborator_type
        self.default_endpoint_collaborator_type = default_endpoint_collaborator_type
        self.default_repo_collaborator_type = default_repo_collaborator_type
        self.global_collaborator_type = global_collaborator_type
class Organization:
    """
    Object representing an Organization.
    """

    def __init__(self, conn, msg):
        """
        :param conn: client connection used for all backend API calls
        :param msg: Organization protobuf message backing this object
        """
        self.conn = conn
        self.msg = msg
        self.id = msg.id
        self.name = msg.name

    @classmethod
    def _create(cls, conn, name, desc=None, collaborator_type=None, global_can_deploy=False):
        """Create a new organization on the backend and return it."""
        Message = _Organization.SetOrganization
        msg = cls._create_msg(name, desc, collaborator_type, global_can_deploy)
        response = conn.make_proto_request("POST",
                                           "/api/v1/uac-proxy/organization/setOrganization",
                                           body=Message(organization=msg))
        org = conn.must_proto_response(response, Message.Response).organization
        print("created new Organization: {}".format(org.name))
        return cls(conn, org)

    @classmethod
    def _create_msg(cls, name, desc, collaborator_type, global_can_deploy):
        """Build the Organization protobuf message used by _create.

        :raises ValueError: when a collaborator type is not READ_ONLY/READ_WRITE
        """
        Message = _Organization.Organization
        if not collaborator_type:
            collaborator_type = CollaboratorType()
        if global_can_deploy:
            can_deploy_value = _CommonCommonService.TernaryEnum.Ternary.TRUE
        else:
            can_deploy_value = _CommonCommonService.TernaryEnum.Ternary.FALSE
        msg = Message(name=name, description=desc, global_can_deploy=can_deploy_value)
        # map each CollaboratorType attribute onto the matching proto field
        for key in collaborator_type.__dict__:
            try:
                attr = getattr(collaborator_type, key)
                if not attr:
                    value = _CommonCommonService.CollaboratorTypeEnum.CollaboratorType.READ_ONLY
                else:
                    value = _CommonCommonService.CollaboratorTypeEnum.CollaboratorType.Value(attr)
                setattr(msg, key, value)
            except ValueError:
                unknown_value_error = "Unknown value specified for {}. Possible values are READ_ONLY, READ_WRITE."
                raise ValueError(unknown_value_error.format(key))
        return msg

    @classmethod
    def _get_by_name(cls, conn, name):
        """Fetch an existing organization by its name."""
        Message = _Organization.GetOrganizationByName
        msg = Message(org_name=name)
        response = conn.make_proto_request("GET",
                                           "/api/v1/uac-proxy/organization/getOrganizationByName",
                                           params=msg)
        org = conn.must_proto_response(response, Message.Response).organization
        return cls(conn, org)

    def add_member(self, share_with):
        """
        Adds member to an organization

        Parameters
        ----------
        share_with : str
            Represents email or username.
        """
        # BUGFIX: this docstring previously sat at class level (a no-op string
        # statement) instead of documenting add_member. The response status is
        # now also returned so callers can inspect it (was computed and dropped).
        Message = _Organization.AddUser
        response = self.conn.make_proto_request("POST",
                                                "/api/v1/uac-proxy/organization/addUser",
                                                body=Message(org_id=self.id, share_with=share_with))
        status = self.conn.must_proto_response(response, Message.Response).status
        return status
356afbbf16bb44c8c36e543c3986db0e42688ac8 | 25,770 | py | Python | experiment/core/utmLib/ml/BN.py | LeonDong1993/TractableDE-ContCNet | 30e050eeef802308f4124bf56a161bae7f2e11c4 | [
"MIT"
] | null | null | null | experiment/core/utmLib/ml/BN.py | LeonDong1993/TractableDE-ContCNet | 30e050eeef802308f4124bf56a161bae7f2e11c4 | [
"MIT"
] | null | null | null | experiment/core/utmLib/ml/BN.py | LeonDong1993/TractableDE-ContCNet | 30e050eeef802308f4124bf56a161bae7f2e11c4 | [
"MIT"
] | null | null | null | # coding: utf-8
import numpy as np
from copy import deepcopy
from functools import partial
from utmLib import utils
from utmLib.ml.graph import Node, Graph
from pdb import set_trace
class BayesianNetwork:
def __init__(self,g,ev):
assert(g.digraph), "Only directed graph allowed"
for i in range(g.N):
parents = g.find_parents(i)
assert(len(parents) <=1), "At most one parent is allowed for each node"
self.graph = g
self.ev = ev
    @staticmethod
    def chowliu_tree(data):
        '''
        Learn a Chow-Liu tree structure from the given data.

        Estimates single and pairwise empirical marginals (with add-one
        smoothing), computes pairwise mutual information, and returns the
        maximum spanning tree directed away from a middle root node.

        data: S*N numpy array, where S is #samples, N is #RV (Discrete)
        returns: a directed Graph whose nodes carry a `domain` attribute
        '''
        _,D = data.shape
        marginals = {}
        # compute single r.v. marginals
        for i in range(D):
            values, counts = np.unique(data[:,i], return_counts=True)
            marginals[i] = dict(zip(values, counts))
        # compute joint marginal for each pair
        for i,j in utils.halfprod(range(D)):
            values, counts = np.unique(data[:,(i,j)], axis=0 ,return_counts=True)
            values = list(map(lambda x:tuple(x),values))
            marginals[i,j] = dict(zip(values, counts))
            # fill in unseen value combinations with zero counts
            allcomb = utils.crossprod(list(marginals[i].keys()),list(marginals[j].keys()))
            for v in allcomb:
                if v not in marginals[i,j]: marginals[i,j][v] = 0
        # normalize all marginals
        for key in marginals:
            dist = marginals[key]
            summation = sum(dist.values())
            for k in dist: dist[k] = (dist[k]+1) / float(summation) # 1- correction
        mutual = {}
        # compute mutual information
        for i,j in utils.halfprod(range(D)):
            mutual[i,j] = 0
            for vi,vj in marginals[i,j]:
                mutual[i,j] += np.log(marginals[i,j][vi,vj] / (marginals[i][vi] * marginals[j][vj])) * marginals[i,j][vi,vj]
        # find the maximum spanning tree
        G = Graph(digraph=False)
        for i in range(D):
            node = Node('N{}'.format(i))
            node.domain = list(marginals[i].keys())
            G.add_vertice(node)
        for i,j in mutual:
            G.add_edge(i,j,weight = mutual[i,j])
        G = G.max_spanning_tree()
        # root the tree at a middle node and orient all edges away from it
        root = int(D/2)
        G = G.todirect(root)
        return G
    def fit(self,traindata):
        '''
        MLE learning of node and edge potentials (empirical distribution
        with add-one smoothing).

        traindata: S*N numpy array, where S is #samples, N is #RV (discrete).
        Populates self.cpt: cpt[i] is a node marginal for roots (uniform
        placeholder for non-roots), cpt[i, j] is P(i | parent j).
        Returns self.
        '''
        _,D = traindata.shape
        assert(self.graph.N == D), "Input data not valid"
        self.cpt = {}
        for i in range(self.graph.N):
            domain = self.graph.V[i].domain
            parents = self.graph.find_parents(i)
            if len(parents) == 0: # root node
                # learn the node potential from raw counts
                values, counts = np.unique(traindata[:,i], return_counts=True)
                dist = dict(zip(values, counts))
                for v in domain:
                    if v not in dist: dist[v] = 1 # 1-correction
                # normalize into a probability distribution
                summation = sum(dist.values())
                for k in dist:dist[k] /= float(summation)
                self.cpt[i] = dist
            else:
                # create uniform node potential (acts as a neutral factor
                # during message passing in predict)
                self.cpt[i] = dict(zip(domain, [1]*len(domain) ))
                # learn the edge potential P(i | j) from pair counts
                dist = {}
                assert(len(parents) == 1), "Each vertice can only have at most one parent!"
                j = parents[0]
                jdomain = self.graph.V[j].domain
                values, counts = np.unique(traindata[:,(i,j)], axis=0 ,return_counts=True)
                values = list(map(lambda x:tuple(x),values))
                dist= dict(zip(values, counts))
                # add-one smoothing for unseen (child, parent) combinations
                allcomb = utils.crossprod(domain,jdomain)
                for v in allcomb:
                    if v not in dist: dist[v] = 1 #1-correction
                # normalize each parent-value column into a conditional
                for vj in jdomain:
                    summation = sum(map(lambda vi:dist[vi,vj],domain))
                    for vi in domain:
                        dist[vi,vj] /= float(summation)
                self.cpt[i,j] = dist
        return self
    def predict(self,testdata):
        '''
        Predict the values of non-evidence RVs given the evidence values,
        via two-pass max-product belief propagation on the tree.

        testdata: 1*N list / numpy array, N is #RV; evidence positions are
        read, non-evidence positions are overwritten with the MAP-style
        estimate. Returns a copy with predictions filled in.
        '''
        # first pass: leaves to root
        order = self.graph.toposort(reverse = True)
        message = {}
        for i in order:
            parents = self.graph.find_parents(i)
            children = self.graph.find_children(i)
            if len(parents) == 0:
                continue
            msg = {}
            assert(len(parents) == 1)
            j = parents[0]
            # no need to send a message into an observed parent
            if j in self.ev:
                continue
            ivals = self.graph.V[i].domain
            jvals = self.graph.V[j].domain
            if i not in self.ev:
                # max over this node's values of (node pot * edge pot *
                # product of messages from its children)
                for vj in jvals:
                    msg[vj] = []
                    for vi in ivals:
                        production = 1.0
                        for c in children:
                            production *= message[c,i][vi]
                        msg[vj].append(self.cpt[i][vi] * self.cpt[i,j][vi,vj] * production)
                    msg[vj] = max(msg[vj])
            else:
                # observed node: message is just the edge potential at the
                # observed value
                for vj in jvals:
                    msg[vj] = self.cpt[i,j][testdata[i], vj]
            # normalize message for numerical stability
            summation = sum(msg.values())
            for k in msg: msg[k] /= float(summation)
            message[i,j] = msg
        # second pass: root to leaves
        order.reverse()
        for i in order:
            parents = self.graph.find_parents(i)
            children = self.graph.find_children(i)
            ivals = self.graph.V[i].domain
            for j in children:
                if j in self.ev:
                    continue
                jvals = self.graph.V[j].domain
                msg = {}
                if i not in self.ev:
                    for vj in jvals:
                        msg[vj] = []
                        for vi in ivals:
                            production = 1.0
                            # combine messages from all neighbors except the
                            # recipient j
                            for p in parents:
                                production *= message[p,i][vi]
                            for c in children:
                                if c == j:continue
                                production *= message[c,i][vi]
                            msg[vj].append(self.cpt[i][vi] * self.cpt[j,i][vj,vi] * production)
                        msg[vj] = max(msg[vj])
                else:
                    for vj in jvals:
                        msg[vj] = self.cpt[j,i][vj,testdata[i]]
                # normalize message
                summation = sum(msg.values())
                for k in msg: msg[k] /= float(summation)
                message[i,j] = msg
        # combine node potential with all incoming messages and pick the
        # highest-belief value for each unobserved node
        prediction = deepcopy(testdata)
        for i in range(self.graph.N):
            if i not in self.ev:
                belief = {}
                parents = self.graph.find_parents(i)
                children = self.graph.find_children(i)
                nodes = parents + children
                for v in self.graph.V[i].domain:
                    belief[v] = self.cpt[i][v]
                    for n in nodes:
                        belief[v] *= message[n,i][v]
                prediction[i] = max(belief.items(),key= lambda x:x[1])[0]
        return prediction
class Clique(Node):
    """A junction-tree clique: remembers which variable ids it covers and
    builds a human-readable label from the nodes added to it."""

    def __init__(self):
        # deliberately does not call Node.__init__; only these two fields
        # are used by the junction-tree code
        self.ids = []
        self.desc = '|'

    def add_node(self, nid, n):
        """Record variable id `nid` and append node `n` to the label."""
        self.ids.append(nid)
        self.desc = self.desc + '{}|'.format(n)
def moralize(g):
    """Return the moral (undirected) graph of the DAG `g`.

    Every node is connected to its parents and the parents are "married"
    to each other. Edge weights are not preserved.
    """
    assert g.digraph
    undirected = deepcopy(g)
    undirected.digraph = False
    undirected.remove_all_edges()
    for node in range(g.N):
        # family = parents plus the node itself; connect every pair
        family = g.find_parents(node)
        family.append(node)
        for a, b in utils.halfprod(family):
            undirected.add_edge(a, b)
    return undirected
def find_small_clique(X):
    """Return (i, j) for the first clique i whose id set is contained in a
    different clique j, or None when every clique is maximal."""
    for i, ci in enumerate(X):
        for j, cj in enumerate(X):
            if i == j:
                continue
            if utils.allin(ci.ids, cj.ids):
                return (i, j)
    return None
def get_elim_order(g,alg = 'min-fill',preserve=None):
    """Greedy variable-elimination ordering on an undirected graph.

    alg: 'min-degree' (fewest unmarked neighbors) or 'min-fill' (fewest
    fill-in edges). If `preserve` (a directed graph) is given, a node may
    only be eliminated once all of its children in `preserve` have been
    eliminated — this keeps the order consistent with that DAG.
    Returns (elim_order, cliques) where cliques[k] is the clique formed
    when elim_order[k] was eliminated.
    """
    assert(not g.digraph)
    assert(alg in ['min-degree','min-fill'])
    unmarked = list(range(g.N))
    edges = g.get_edges()
    elim_order = []
    cliques = []
    while len(unmarked) > 0:
        # score every remaining node under the chosen heuristic
        cost = [0 for x in unmarked]
        for i,v in enumerate(unmarked):
            unmarked_neighbor = list(filter(lambda x: (v,x) in edges,unmarked))
            if alg == 'min-degree':
                cost[i] = len(unmarked_neighbor)
            if alg == 'min-fill':
                # potential fill edges minus those already present
                cost[i] = len(unmarked_neighbor)*(len(unmarked_neighbor)-1)/2
                for s,t in utils.halfprod(unmarked_neighbor):
                    if (s,t) in edges:
                        cost[i] -= 1
        besti = None
        bestv = None
        if preserve == None:
            besti = cost.index(min(cost))
            bestv = unmarked[besti]
        else:
            # pick the cheapest node whose children in `preserve` are all
            # already eliminated
            tmp = list(zip(unmarked,cost))
            tmp.sort(key = lambda x:x[1])
            for v,_ in tmp:
                children = preserve.find_children(v)
                marked = list(filter(lambda x:x not in unmarked ,list(range(g.N))))
                if utils.allin(children,marked):
                    bestv = v
                    besti = unmarked.index(bestv)
                    break
            # NOTE(review): if no candidate satisfies the constraint,
            # besti/bestv stay None and the code below fails — assumes
            # `preserve` is acyclic so a candidate always exists; verify
        elim_order.append(bestv)
        # connect the eliminated node's remaining neighbors (fill-in)
        best_neighbor = list(filter(lambda x: (bestv,x) in edges,unmarked))
        for s,t in utils.diffprod(best_neighbor):
            if (s,t) not in edges:
                edges.append( (s,t) )
        best_neighbor.append(bestv)
        cliques.append(best_neighbor)
        unmarked.pop(besti)
    return elim_order,cliques
def get_junction_tree(g, preserve = None):
    """Build a junction tree for the undirected graph `g`.

    Runs greedy elimination (optionally constrained by `preserve`), turns
    the elimination cliques into Clique nodes, merges cliques subsumed by
    larger ones, and returns the maximum spanning tree of the clique graph
    weighted by separator size.
    """
    assert(not g.digraph)
    order,cliques = get_elim_order(g,preserve = preserve)
    CLIQUE = []
    for i,elem in enumerate(cliques):
        cq = Clique()
        for rid in elem:
            cq.add_node(rid,g.V[rid])
        # remember which variable(s) were eliminated at this clique
        cq.elim = [order[i]]
        CLIQUE.append(cq)
    # absorb non-maximal cliques into their supersets, merging elim lists
    while 1:
        pair = find_small_clique(CLIQUE)
        if pair == None:
            break
        i,j = pair
        for eid in CLIQUE[i].elim:
            CLIQUE[j].elim.append(eid)
        CLIQUE.pop(i)
    # find the maximum spanning tree over the clique graph
    newg = Graph(digraph = False)
    for c in CLIQUE:
        newg.add_vertice(c)
    vertices = range(newg.N)
    for (i,j) in utils.halfprod(vertices):
        cinodes = newg.V[i].ids
        cjnodes = newg.V[j].ids
        # weight = size of the separator (shared variables)
        weight = sum(map(lambda x:x in cinodes,cjnodes))
        if weight>0: newg.add_edge(i,j,weight=weight)
    newg = newg.max_spanning_tree()
    return newg
class Potential:
    """Plain container for a factor: `ids` lists the variable scope and
    `P` holds the corresponding (numpy) table. Attributes are assigned by
    the code that creates the potential."""

    def __repr__(self):
        return f'ids:{self.ids} P:{self.P}'
class DynamicBayesianNetwork:
    def __init__(self,g,ev,intercnt='default'):
        '''
        Build a DBN from a single-slice directed graph.

        g        : directed Graph describing one time slice.
        ev       : evidence (observation) node indices; all other nodes are
                   state variables (self.SV).
        intercnt : the connection between two time slices; 'default' means a
                   1-to-1 connection between all the state variables.

        Also builds the two-slice graph self.G2, where each state variable
        gets a "former-slice" copy (mapped via self.M / inverse self.rM),
        and allocates zeroed CPT tables (self.CPT) plus initial-slice CPTs
        (self.ICPT).
        '''
        assert(g.digraph), "Only directed graph allowed"
        sv = list(filter(lambda x:x not in ev,range(g.N)))
        self.G = g
        self.EV = ev
        self.SV = sv
        self.CPT = {}
        self.ICPT = {}
        # NOTE(review): filter and predict are identical aliases of
        # smooth(smooth=False); possibly predict was meant to differ — verify
        self.filter = partial(self.smooth,smooth = False)
        self.predict = partial(self.smooth,smooth = False)
        # construct the two-time-slice graph G2: copy each state node as a
        # "former" node (desc prefixed 'f-') appended after the originals
        mapping = {}
        G2 = deepcopy(self.G)
        for i in self.SV:
            node = deepcopy(self.G.V[i])
            node.desc = 'f-' + node.desc
            nid = G2.add_vertice(node)
            mapping[i]=nid
        if intercnt == 'default':
            intercnt = [(i,i) for i in self.SV]
        # temporal edges: former copy of i -> current j
        for i,j in intercnt:
            G2.add_edge(mapping[i],j)
        self.G2 = G2
        self.M = mapping
        # replicate intra-slice SV-SV edges between the former copies
        edges = self.G.get_edges()
        for i,j in edges:
            if (i in self.SV) and (j in self.SV):
                a,b = self.M[i],self.M[j]
                self.G2.add_edge(a,b)
        # reverse mapping used for backward message passing
        self.rM = {}
        for k,v in self.M.items():
            self.rM[v] = k
        # allocate zeroed CPTs: ICPT for the initial slice (parents from G),
        # CPT for the transition model (parents from G2, incl. former nodes)
        for i in self.SV:
            parents = self.G.find_parents(i)
            self.ICPT[i] = self.init_CPT(i,parents)
        for i in range(self.G.N):
            parents = self.G2.find_parents(i)
            self.CPT[i] = self.init_CPT(i,parents)
def logprob(self,sequence):
T,F = sequence.shape
assert(F == self.G.N), "Invalid input data"
sum_log_prob = 0.0
for t in range(T):
if t == 0:
cur_slice = sequence[0,:]
for i in range(F):
if i in self.EV:
potential = self.CPT[i]
else:
potential = self.ICPT[i]
ind = tuple(cur_slice[potential.ids])
sum_log_prob += np.log(potential.P[ind])
else:
slice_two = sequence[(t-1):(t+1),:]
ex_slice = slice_two.flatten()
for i in range(F):
potential = self.CPT[i]
ind = tuple(ex_slice[potential.ids])
sum_log_prob += np.log(potential.P[ind])
avg_log_prob = sum_log_prob/float(T)
return avg_log_prob
def init_CPT(self,i,parents):
cpt = Potential()
ids = [i] + parents
table_size = tuple(map(lambda x: len(self.G2.V[x].domain) ,ids))
cpt.ids = ids
cpt.P = np.zeros(table_size)
return cpt
def min_clique(self,G,ids):
# find the minimum clique in G contains all id in ids
candidates = []
for i in range(G.N):
nids = G.V[i].ids
if utils.allin(ids,nids):
candidates.append( (i,len(nids)) )
best = min(candidates,key=lambda x:x[1])
return best[0]
def init_potential(self,ids):
potential = Potential()
table_size = tuple(map(lambda x: len(self.G2.V[x].domain) ,ids))
potential.ids = ids
potential.P = np.ones(table_size)
return potential
def init_message(self,G,ret):
for i in range(G.N):
clique = G.V[i]
ret[i] = self.init_potential(clique.ids)
return
    def multiply_potential(self,p1,p2):
        """Multiply two potentials; the scope of the smaller one must be a
        subset of the larger one's scope.

        Equal scopes: element-wise product (NOT normalized). Otherwise the
        smaller table is broadcast into a copy of the larger one and the
        result is normalized to sum to 1.
        NOTE(review): the asymmetric normalization between the two branches
        looks deliberate but is worth confirming.
        """
        if p1.ids == p2.ids:
            newp = self.init_potential(p1.ids)
            newp.P = p1.P * p2.P
            return newp
        # pb = bigger scope, ps = smaller scope
        if len(p1.ids) >= len(p2.ids):
            pb = p1; ps = p2
        else:
            pb = p2; ps = p1
        assert(utils.allin(ps.ids,pb.ids))
        pt = deepcopy(pb)
        # manual broadcasting: for every cell of the small table, scale the
        # matching (sliced) sub-array of the big table
        for npi,npv in np.ndenumerate(ps.P):
            idx = []
            for v in pt.ids:
                if v in ps.ids:
                    idx.append( npi[ps.ids.index(v)] )
                else:
                    idx.append( slice(None) )
            idx = tuple(idx)
            pt.P[idx] *= npv
        pt.P = pt.P/np.sum(pt.P)
        return pt
    def multiply_CPT(self,G,E,ret,init=False):
        """Absorb the model CPTs and the evidence `E` into the clique
        potentials `ret` (modified in place).

        G    : junction tree whose cliques carry an `elim` list (variables
               whose CPT is assigned to that clique).
        E    : current-slice observation vector, indexed by node id.
        init : use the initial-slice CPTs (ICPT) for state variables.
        After multiplication, evidence variables are conditioned out, so
        the resulting potentials range over unobserved variables only.
        """
        for i in range(G.N):
            clique = G.V[i]
            assert(ret[i].ids == clique.ids)
            for eid in clique.elim:
                if eid in self.SV:
                    if init:
                        ret[i] = self.multiply_potential(ret[i],self.ICPT[eid])
                    else:
                        ret[i] = self.multiply_potential(ret[i],self.CPT[eid])
                if eid in self.EV:
                    ret[i] = self.multiply_potential(ret[i],self.CPT[eid])
                # former-slice node ids (>= G.N of the base graph) carry no
                # CPT and are skipped by both tests above
            # condition out the evidence variables: slice the table at the
            # observed values E[v] and keep only the unobserved axes
            newids = list(filter(lambda x:x not in self.EV,clique.ids))
            if len(newids) < len(clique.ids):
                potential = self.init_potential(newids)
                for npi,_ in np.ndenumerate(potential.P):
                    idx = [-1 for v in clique.ids]
                    for si,v in enumerate(clique.ids):
                        if v in self.EV:
                            idx[si] = E[v]
                        else:
                            idx[si] = npi[newids.index(v)]
                    idx = tuple(idx)
                    potential.P[npi] = ret[i].P[idx]
                ret[i] = potential
def marginalize(self,pt,ids):
if pt.ids == ids:
newp = deepcopy(pt)
newp.P = newp.P/np.sum(newp.P)
return newp
newp = deepcopy(pt)
sumout = list(filter(lambda v:v not in ids,pt.ids))
for s in sumout:
dim = newp.ids.index(s)
if self.mode == 'max':
newp.P = np.amax(newp.P,axis = dim)
else:
assert(self.mode == 'sum')
newp.P = np.sum(newp.P,axis = dim)
newp.ids.remove(s)
return newp
    def get_message(self,p1,p2,timestep = 0):
        """Marginalize p1 down to the variables shared with p2 and return
        the resulting message.

        timestep =  0 : both potentials live in the same slice; keep the
                        plain intersection of scopes.
        timestep =  1 : p1 is a previous-slice potential; keep state vars
                        whose *mapped* (former) id appears in p2, then
                        relabel the message ids through self.M.
        timestep = -1 : symmetric case for backward messages, relabeled
                        through self.rM.
        Evidence variables must already have been conditioned out.
        """
        assert(timestep in [-1,0,1])
        # get the message pass from p1 -> p2
        ids = []
        if timestep > 0:
            for i in p1.ids:
                assert(i not in self.EV)
                if (i in self.SV) and (self.M[i] in p2.ids): ids.append(i)
        elif timestep <0:
            for i in p1.ids:
                assert(i not in self.EV)
                if (i not in self.SV) and (self.rM[i] in p2.ids): ids.append(i)
        else:
            ids = list(filter(lambda x:x in p2.ids,p1.ids))
        msg = self.marginalize(p1,ids)
        # relabel across the slice boundary so ids match the recipient
        if timestep > 0:
            msg.ids = list(map(lambda x:self.M[x],msg.ids))
        if timestep < 0:
            msg.ids = list(map(lambda x:self.rM[x],msg.ids))
        return msg
    def calculate_msg(self,G,npt):
        """Run a full two-pass message-passing sweep over junction tree G.

        npt maps clique index -> clique potential (already containing CPTs
        and evidence). Returns a dict message[(src, dst)] for every directed
        tree edge, after an upward (leaves-to-root) then downward pass.
        """
        message = {}
        # orient the tree at clique 0 to get a well-defined pass order
        g = G.todirect(0)
        order = g.toposort(reverse = True)
        # upward pass: leaves to root
        for i in order:
            parents = g.find_parents(i)
            children = g.find_children(i)
            if len(parents) == 0: continue
            assert(len(parents) == 1)
            j = parents[0]
            msg = npt[i]
            # combine own potential with all child messages, then project
            # onto the separator with the parent
            for c in children:
                msg = self.multiply_potential(msg,message[c,i])
            message[i,j] = self.get_message(msg,npt[j])
        # downward pass: root to leaves
        order.reverse()
        for i in order:
            parents = g.find_parents(i)
            children = g.find_children(i)
            for j in children:
                msg = npt[i]
                # include messages from every neighbor except recipient j
                for p in parents:
                    msg = self.multiply_potential(msg,message[p,i])
                for c in children:
                    if c == j: continue
                    msg = self.multiply_potential(msg,message[c,i])
                message[i,j] = self.get_message(msg,npt[j])
        return message
def collect_msg(self,G,r,npt,msg):
neignbors = G.find_neighbor(r)
pt = npt[r]
for n in neignbors:
pt = self.multiply_potential(pt,msg[n,r])
return pt
    def smooth(self,data,numnodes=4,smooth=True):
        """Approximate MAP state estimation over a sequence (max-product).

        data     : (T, N) observation matrix (evidence columns read, state
                   columns overwritten in the returned copy).
        numnodes : Boyen-Koller-style cluster size; the state variables are
                   partitioned into contiguous groups of at most `numnodes`
                   whose joint distribution is carried between slices.
        smooth   : True -> use forward AND backward messages (smoothing);
                   False -> forward only (filtering/prediction; this is how
                   self.filter / self.predict call it).
        Note the parameter `smooth` shadows the method name inside the body.
        Returns a copy of `data` with state entries replaced by argmax
        assignments.
        """
        self.mode = 'max'
        assert(numnodes > 1)
        # partition the state variables into contiguous BK clusters
        st = 0
        appro = []
        while st < len(self.SV):
            ed = st + numnodes
            if ed > len(self.SV):
                ed = len(self.SV)
            appro.append(self.SV[st:ed])
            st = ed
        # create junction tree J1 for the initial slice; each cluster is
        # made fully connected so some clique contains it whole
        T1G = deepcopy(self.G)
        T1G = moralize(T1G)
        for bkc in appro:
            for s,t in utils.halfprod(bkc):
                T1G.add_edge(s,t)
        self.J1 = get_junction_tree(T1G,preserve=self.G)
        # "out" cliques export a cluster's joint to the next slice; in J1
        # the same cliques also receive ("come") — no former nodes yet
        self.J1.out = []
        for bkc in appro:
            self.J1.out.append( self.min_clique(self.J1,bkc) )
        self.J1.come = deepcopy(self.J1.out)
        # create junction tree Jt for the two-slice transition graph
        T2G = moralize(self.G2)
        for bkc in appro:
            for s,t in utils.halfprod(bkc):
                T2G.add_edge(s,t)
            fbkc = list(map(lambda x:self.M[x],bkc))
            for s,t in utils.halfprod(fbkc):
                T2G.add_edge(s,t)
        self.J2 = get_junction_tree(T2G,preserve = self.G2)
        # out: clique holding the current-slice cluster; come: clique
        # holding the former-slice copies of the cluster
        self.J2.out = []
        for bkc in appro:
            self.J2.out.append( self.min_clique(self.J2,bkc) )
        self.J2.come = []
        for bkc in appro:
            fbkc = list(map(lambda x:self.M[x],bkc))
            self.J2.come.append( self.min_clique(self.J2,fbkc) )
        T,N = data.shape
        assert(N == self.G.N)
        # ---------------- forward pass ----------------
        fmsg = {}
        for t in range(T):
            fmsg[t] = {}
            evidence = data[t,:]
            if t==0:
                self.init_message(self.J1,fmsg[t])
                self.multiply_CPT(self.J1,evidence,fmsg[t],init=True)
                # collect message to out node for each bk cluster
                npt = deepcopy(fmsg[t])
                message = self.calculate_msg(self.J1,npt)
                for i in self.J1.out:
                    fmsg[t][i] = self.collect_msg(self.J1,i,npt,message)
            else:
                pt = t-1
                self.init_message(self.J2,fmsg[t])
                self.multiply_CPT(self.J2,evidence,fmsg[t])
                # absorb each cluster's forward message from the previous
                # time slice; also cache it under key (outid, -1)
                for i,inid in enumerate(self.J2.come):
                    if pt == 0:
                        outid = self.J1.out[i]
                    else:
                        outid = self.J2.out[i]
                    msg = self.get_message(fmsg[pt][outid],fmsg[t][inid],timestep = 1)
                    fmsg[pt][outid,-1] = msg
                    fmsg[t][inid] = self.multiply_potential(msg,fmsg[t][inid])
                npt = deepcopy(fmsg[t])
                message = self.calculate_msg(self.J2,npt)
                for i in self.J2.out:
                    fmsg[t][i] = self.collect_msg(self.J2,i,npt,message)
            if t==(T-1):
                # last slice has no successor; synthesize its outgoing
                # message so the prediction loop can read (outid, -1)
                # NOTE(review): for T == 1 this indexes fmsg[0] with J2
                # clique ids built from J1 — assumes T >= 2; verify
                for i,outid in enumerate(self.J2.out):
                    inid = self.J2.come[i]
                    fmsg[t][outid,-1] = self.get_message(fmsg[t][outid],fmsg[t][inid],timestep = 1)
        # ---------------- backward pass (smoothing only) ----------------
        if smooth:
            endtime = -1
        else:
            endtime = T
        bmsg = {}
        for t in range(T-1,endtime,-1):
            bmsg[t] = {}
            evidence = data[t,:]
            if t==(T-1):
                curG = self.J2
                self.init_message(curG,bmsg[t])
                self.multiply_CPT(curG,evidence,bmsg[t])
                npt = deepcopy(bmsg[t])
                message = self.calculate_msg(curG,npt)
                for i,inid in enumerate(curG.come):
                    bmsg[t][inid] = self.collect_msg(curG,inid,npt,message)
                    outid = curG.out[i]
                    # no later slice: neutral (all-ones) backward message
                    bmsg[t][-1,outid] = self.init_potential(appro[i])
            if t<(T-1):
                nt = t+1
                curG = self.J2
                if t==0:
                    curG = self.J1
                # initialize message
                self.init_message(curG,bmsg[t])
                if t==0:
                    self.multiply_CPT(curG,evidence,bmsg[t],init=True)
                else:
                    self.multiply_CPT(curG,evidence,bmsg[t])
                # absorb the backward message from the next time slice;
                # cache it under key (-1, outid)
                for i,outid in enumerate(curG.out):
                    inid = self.J2.come[i]
                    msg = self.get_message(bmsg[nt][inid],bmsg[t][outid],timestep = -1)
                    bmsg[t][-1,outid] = msg
                    bmsg[t][outid] = self.multiply_potential(msg,bmsg[t][outid])
                npt = deepcopy(bmsg[t])
                message = self.calculate_msg(curG,npt)
                for i in curG.come:
                    bmsg[t][i] = self.collect_msg(curG,i,npt,message)
        # ---------------- decode per-cluster argmax ----------------
        prediction = deepcopy(data)
        for t in range(T):
            if t==0:
                tg = self.J1
            else:
                tg = self.J2
            for bki,outid in enumerate(tg.out):
                fP = fmsg[t][outid,-1]
                # forward message ids are former-slice labels; map back
                fP.ids = list(map(lambda x:self.rM[x],fP.ids))
                potential = fP
                if smooth:
                    bP = bmsg[t][-1,outid]
                    potential = self.multiply_potential(potential,bP)
                P = potential.P/np.sum(potential.P)
                idx = np.unravel_index(P.argmax(), P.shape)
                for v in appro[bki]:
                    prediction[t,v] = idx[fP.ids.index(v)]
        return prediction
    def condLL(self,data,numnodes=4,smooth=False):
        """Average conditional log-likelihood of the state assignments in
        `data` under the cluster beliefs (sum-product analogue of smooth).

        NOTE(review): the junction-tree construction and forward/backward
        passes below duplicate smooth() almost line for line (only
        self.mode and the final read-out differ); a shared helper would be
        the natural refactor. Kept duplicated here to preserve behavior.
        Returns sum_t log( prod_clusters P(cluster values at t) ) / T.
        """
        self.mode = 'sum'
        assert(numnodes > 1)
        # partition the state variables into contiguous BK clusters
        st = 0
        appro = []
        while st < len(self.SV):
            ed = st + numnodes
            if ed > len(self.SV):
                ed = len(self.SV)
            appro.append(self.SV[st:ed])
            st = ed
        # create junction tree J1 for the initial slice
        T1G = deepcopy(self.G)
        T1G = moralize(T1G)
        for bkc in appro:
            for s,t in utils.halfprod(bkc):
                T1G.add_edge(s,t)
        self.J1 = get_junction_tree(T1G,preserve=self.G)
        # find come and out cliques per cluster
        self.J1.out = []
        for bkc in appro:
            self.J1.out.append( self.min_clique(self.J1,bkc) )
        self.J1.come = deepcopy(self.J1.out)
        # create junction tree Jt for the two-slice transition graph
        T2G = moralize(self.G2)
        for bkc in appro:
            for s,t in utils.halfprod(bkc):
                T2G.add_edge(s,t)
            fbkc = list(map(lambda x:self.M[x],bkc))
            for s,t in utils.halfprod(fbkc):
                T2G.add_edge(s,t)
        self.J2 = get_junction_tree(T2G,preserve = self.G2)
        # find come and out cliques per cluster
        self.J2.out = []
        for bkc in appro:
            self.J2.out.append( self.min_clique(self.J2,bkc) )
        self.J2.come = []
        for bkc in appro:
            fbkc = list(map(lambda x:self.M[x],bkc))
            self.J2.come.append( self.min_clique(self.J2,fbkc) )
        T,N = data.shape
        assert(N == self.G.N)
        # ---------------- forward pass ----------------
        fmsg = {}
        for t in range(T):
            fmsg[t] = {}
            evidence = data[t,:]
            if t==0:
                self.init_message(self.J1,fmsg[t])
                self.multiply_CPT(self.J1,evidence,fmsg[t],init=True)
                # collect message to out node for each bk cluster
                npt = deepcopy(fmsg[t])
                message = self.calculate_msg(self.J1,npt)
                for i in self.J1.out:
                    fmsg[t][i] = self.collect_msg(self.J1,i,npt,message)
            else:
                pt = t-1
                self.init_message(self.J2,fmsg[t])
                self.multiply_CPT(self.J2,evidence,fmsg[t])
                # absorb message from the previous time slice
                for i,inid in enumerate(self.J2.come):
                    if pt == 0:
                        outid = self.J1.out[i]
                    else:
                        outid = self.J2.out[i]
                    msg = self.get_message(fmsg[pt][outid],fmsg[t][inid],timestep = 1)
                    fmsg[pt][outid,-1] = msg
                    fmsg[t][inid] = self.multiply_potential(msg,fmsg[t][inid])
                npt = deepcopy(fmsg[t])
                message = self.calculate_msg(self.J2,npt)
                for i in self.J2.out:
                    fmsg[t][i] = self.collect_msg(self.J2,i,npt,message)
            if t==(T-1):
                # synthesize the last slice's outgoing message for read-out
                for i,outid in enumerate(self.J2.out):
                    inid = self.J2.come[i]
                    fmsg[t][outid,-1] = self.get_message(fmsg[t][outid],fmsg[t][inid],timestep = 1)
        # ---------------- backward pass (only if smooth=True) ----------------
        if smooth:
            endtime = -1
        else:
            endtime = T
        bmsg = {}
        for t in range(T-1,endtime,-1):
            bmsg[t] = {}
            evidence = data[t,:]
            if t==(T-1):
                curG = self.J2
                self.init_message(curG,bmsg[t])
                self.multiply_CPT(curG,evidence,bmsg[t])
                npt = deepcopy(bmsg[t])
                message = self.calculate_msg(curG,npt)
                for i,inid in enumerate(curG.come):
                    bmsg[t][inid] = self.collect_msg(curG,inid,npt,message)
                    outid = curG.out[i]
                    bmsg[t][-1,outid] = self.init_potential(appro[i])
            if t<(T-1):
                nt = t+1
                curG = self.J2
                if t==0:
                    curG = self.J1
                # initialize message
                self.init_message(curG,bmsg[t])
                if t==0:
                    self.multiply_CPT(curG,evidence,bmsg[t],init=True)
                else:
                    self.multiply_CPT(curG,evidence,bmsg[t])
                # absorb the backward message from the next time slice
                for i,outid in enumerate(curG.out):
                    inid = self.J2.come[i]
                    msg = self.get_message(bmsg[nt][inid],bmsg[t][outid],timestep = -1)
                    bmsg[t][-1,outid] = msg
                    bmsg[t][outid] = self.multiply_potential(msg,bmsg[t][outid])
                npt = deepcopy(bmsg[t])
                message = self.calculate_msg(curG,npt)
                for i in curG.come:
                    bmsg[t][i] = self.collect_msg(curG,i,npt,message)
        # ---------------- evaluate the data rows under the beliefs ----------------
        logprob = 0.0
        for t in range(T):
            prob = 1.0
            row = data[t,:]
            if t==0:
                tg = self.J1
            else:
                tg = self.J2
            for bki,outid in enumerate(tg.out):
                fP = fmsg[t][outid,-1]
                fP.ids = list(map(lambda x:self.rM[x],fP.ids))
                potential = fP
                if smooth:
                    bP = bmsg[t][-1,outid]
                    potential = self.multiply_potential(potential,bP)
                P = potential.P/np.sum(potential.P)
                # look up the probability of the actual cluster assignment
                idx = tuple(row[potential.ids])
                prob *= P[idx]
            logprob += np.log(prob)
        avg_logprob = logprob/T
        return avg_logprob
def get_domain(self,nids):
n0 = self.G2.V[nids[0]]
D = n0.domain
for i in nids[1:]:
node = self.G2.V[i]
D = utils.crossprod(D,node.domain)
return D
    def norm_CPT(self,cpt):
        """Smooth and normalize a count table into a (conditional)
        distribution, in place.

        Adds max(1, 0.1% of the total count) to every cell, then divides:
        a single-variable table becomes a plain distribution; otherwise each
        parent configuration's column over cpt.ids[0] is normalized.
        Assumes node state values are 0..k-1 so they can be used directly
        as numpy indices — TODO confirm against the encoding used by fit.
        """
        ratio = 1e-3
        X = np.sum(cpt.P)
        addv = int(X*ratio)
        addv += int(addv==0)
        cpt.P += addv
        newP = deepcopy(cpt.P)
        if len(cpt.ids) == 1:
            newP = newP/np.sum(cpt.P)
        else:
            n = cpt.ids[0]
            domain = self.get_domain(cpt.ids[1:])
            for v in domain:
                if not isinstance(v,tuple): v = tuple([v])
                # advanced index: all child values x this parent config
                index = tuple([self.G.V[n].domain]) + v
                summation = np.sum(cpt.P[index])
                assert( summation!=0 )
                newP[index] /= summation
        # replace the raw counts with the normalized table
        cpt.P = newP
def fit(self,traindata):
# traindata - list of 2D numpy array
M = len(traindata)
for i in range(M):
data = traindata[i]
T,N = data.shape
assert(N == self.G.N)
# basically learning the empirical distribution
for t in range(T):
now = data[t,:]
if t == 0:
for i in self.SV:
idx = tuple(now[self.ICPT[i].ids])
self.ICPT[i].P[idx] += 1
else:
prev = data[t-1,:]
exnow = np.append(now,[0 for i in self.SV])
for k,v in self.M.items():
exnow[v] = prev[k]
for i in self.SV:
idx = tuple(exnow[self.CPT[i].ids])
self.CPT[i].P[idx] += 1
for i in self.EV:
idx = tuple(now[self.CPT[i].ids])
self.CPT[i].P[idx] += 1
# normalize all CPT
for i in range(self.G.N):
self.norm_CPT(self.CPT[i])
for i in self.SV:
self.norm_CPT(self.ICPT[i])
return self
| 26.677019 | 113 | 0.60326 | 22,857 | 0.886962 | 0 | 0 | 1,578 | 0.061234 | 0 | 0 | 2,333 | 0.090532 |
356bb777f77682e18da9872000b9c4effea83dc3 | 20,411 | py | Python | examples/pybullet/examples/quadruped.py | felipeek/bullet3 | 6a59241074720e9df119f2f86bc01765917feb1e | [
"Zlib"
] | 9,136 | 2015-01-02T00:41:45.000Z | 2022-03-31T15:30:02.000Z | examples/pybullet/examples/quadruped.py | felipeek/bullet3 | 6a59241074720e9df119f2f86bc01765917feb1e | [
"Zlib"
] | 2,424 | 2015-01-05T08:55:58.000Z | 2022-03-30T19:34:55.000Z | examples/pybullet/examples/quadruped.py | felipeek/bullet3 | 6a59241074720e9df119f2f86bc01765917feb1e | [
"Zlib"
] | 2,921 | 2015-01-02T10:19:30.000Z | 2022-03-31T02:48:42.000Z | import pybullet as p
import time
import math
import pybullet_data
def drawInertiaBox(parentUid, parentLinkIndex, color):
  """Visualize a link's inertia as a wireframe box of debug lines.

  For a solid box of mass m, Ixx = m/12*(dy^2+dz^2) (and permutations),
  so the half-extents are recovered as 0.5*sqrt(6*(Izz+Iyy-Ixx)/m) etc.
  Links with zero mass (static geometry) are skipped.

  Args:
    parentUid: body unique id returned by p.loadURDF.
    parentLinkIndex: link index, or -1 for the base.
    color: [r, g, b] color for the debug lines.
  """
  dyn = p.getDynamicsInfo(parentUid, parentLinkIndex)
  mass = dyn[0]
  inertia = dyn[2]  # diagonal of the local inertia tensor; dyn[1] (friction) unused
  if (mass > 0):
    Ixx = inertia[0]
    Iyy = inertia[1]
    Izz = inertia[2]
    boxScaleX = 0.5 * math.sqrt(6 * (Izz + Iyy - Ixx) / mass)
    boxScaleY = 0.5 * math.sqrt(6 * (Izz + Ixx - Iyy) / mass)
    boxScaleZ = 0.5 * math.sqrt(6 * (Ixx + Iyy - Izz) / mass)

    halfExtents = [boxScaleX, boxScaleY, boxScaleZ]
    # the 8 corners of the box in the link's local frame
    pts = [[halfExtents[0], halfExtents[1], halfExtents[2]],
           [-halfExtents[0], halfExtents[1], halfExtents[2]],
           [halfExtents[0], -halfExtents[1], halfExtents[2]],
           [-halfExtents[0], -halfExtents[1], halfExtents[2]],
           [halfExtents[0], halfExtents[1], -halfExtents[2]],
           [-halfExtents[0], halfExtents[1], -halfExtents[2]],
           [halfExtents[0], -halfExtents[1], -halfExtents[2]],
           [-halfExtents[0], -halfExtents[1], -halfExtents[2]]]

    # the 12 edges of the box (top face, vertical struts, bottom face),
    # in the same order as the original 12 addUserDebugLine calls
    edges = [(0, 1), (1, 3), (3, 2), (2, 0),
             (0, 4), (1, 5), (2, 6), (3, 7),
             (4, 5), (5, 7), (7, 6), (6, 4)]
    for s, t in edges:
      p.addUserDebugLine(pts[s],
                         pts[t],
                         color,
                         1,
                         parentObjectUniqueId=parentUid,
                         parentLinkIndex=parentLinkIndex)
# ---- simulation configuration ----
toeConstraint = True            # close the leg linkages with point constraints
useMaximalCoordinates = False   # maximal-coordinate bodies need smaller steps
useRealTime = 0
#the fixedTimeStep and numSolverIterations are the most important parameters to trade-off quality versus performance
fixedTimeStep = 1. / 100
numSolverIterations = 50
if (useMaximalCoordinates):
  fixedTimeStep = 1. / 500
  numSolverIterations = 200

# gait / motor parameters (speed, amplitude, jump_amp presumably consumed
# by the truncated control loop further down — confirm against full file)
speed = 10
amplitude = 0.8
jump_amp = 0.5
maxForce = 3.5
kneeFrictionForce = 0
kp = 1
kd = .5
maxKneeForce = 1000

# connect to an already-running GUI server if possible, else start one
physId = p.connect(p.SHARED_MEMORY_GUI)
if (physId < 0):
  p.connect(p.GUI)
#p.resetSimulation()
p.setAdditionalSearchPath(pybullet_data.getDataPath())
# tilt the ground plane by `angle` radians around Y
angle = 0  # pick in range 0..0.2 radians
orn = p.getQuaternionFromEuler([0, angle, 0])
p.loadURDF("plane.urdf", [0, 0, 0], orn)
p.setPhysicsEngineParameter(numSolverIterations=numSolverIterations)
# log joint states/torques of all robots to a binary file
p.startStateLogging(p.STATE_LOGGING_GENERIC_ROBOT,
                    "genericlogdata.bin",
                    maxLogDof=16,
                    logFlags=p.STATE_LOG_JOINT_TORQUES)
p.setTimeOut(4000000)
# gravity stays off while the robot is posed; enabled later
p.setGravity(0, 0, 0)
p.setTimeStep(fixedTimeStep)

orn = p.getQuaternionFromEuler([0, 0, 0.4])
p.setRealTimeSimulation(0)
# load the Minitaur quadruped slightly above the ground, yawed by `orn`
quadruped = p.loadURDF("quadruped/minitaur_v1.urdf", [1, -1, .3],
                       orn,
                       useFixedBase=False,
                       useMaximalCoordinates=useMaximalCoordinates,
                       flags=p.URDF_USE_IMPLICIT_CYLINDER)
nJoints = p.getNumJoints(quadruped)

# map joint names (bytes in jointInfo[1]) to joint indices
jointNameToId = {}
for i in range(nJoints):
  jointInfo = p.getJointInfo(quadruped, i)
  jointNameToId[jointInfo[1].decode('UTF-8')] = jointInfo[0]

# resolve every motor/hip/knee joint index by name; each leg is a five-bar
# linkage with an L and an R branch
motor_front_rightR_joint = jointNameToId['motor_front_rightR_joint']
motor_front_rightL_joint = jointNameToId['motor_front_rightL_joint']
knee_front_rightL_link = jointNameToId['knee_front_rightL_link']
hip_front_rightR_link = jointNameToId['hip_front_rightR_link']
knee_front_rightR_link = jointNameToId['knee_front_rightR_link']
motor_front_rightL_link = jointNameToId['motor_front_rightL_link']

motor_front_leftR_joint = jointNameToId['motor_front_leftR_joint']
hip_front_leftR_link = jointNameToId['hip_front_leftR_link']
knee_front_leftR_link = jointNameToId['knee_front_leftR_link']
motor_front_leftL_joint = jointNameToId['motor_front_leftL_joint']
motor_front_leftL_link = jointNameToId['motor_front_leftL_link']
knee_front_leftL_link = jointNameToId['knee_front_leftL_link']

motor_back_rightR_joint = jointNameToId['motor_back_rightR_joint']
hip_rightR_link = jointNameToId['hip_rightR_link']
knee_back_rightR_link = jointNameToId['knee_back_rightR_link']
motor_back_rightL_joint = jointNameToId['motor_back_rightL_joint']
motor_back_rightL_link = jointNameToId['motor_back_rightL_link']
knee_back_rightL_link = jointNameToId['knee_back_rightL_link']

motor_back_leftR_joint = jointNameToId['motor_back_leftR_joint']
hip_leftR_link = jointNameToId['hip_leftR_link']
knee_back_leftR_link = jointNameToId['knee_back_leftR_link']
motor_back_leftL_joint = jointNameToId['motor_back_leftL_joint']
motor_back_leftL_link = jointNameToId['motor_back_leftL_link']
knee_back_leftL_link = jointNameToId['knee_back_leftL_link']
#fixtorso = p.createConstraint(-1,-1,quadruped,-1,p.JOINT_FIXED,[0,0,0],[0,0,0],[0,0,0])

# per-leg motor direction signs (left legs mirrored vs right legs)
motordir = [-1, -1, -1, -1, 1, 1, 1, 1]
halfpi = 1.57079632679
twopi = 4 * halfpi  # 4 * (pi/2) == 2*pi
kneeangle = -2.1834

dyn = p.getDynamicsInfo(quadruped, -1)
mass = dyn[0]
friction = dyn[1]
localInertiaDiagonal = dyn[2]
print("localInertiaDiagonal", localInertiaDiagonal)

#this is a no-op, just to show the API
p.changeDynamics(quadruped, -1, localInertiaDiagonal=localInertiaDiagonal)

#for i in range (nJoints):
# p.changeDynamics(quadruped,i,localInertiaDiagonal=[0.000001,0.000001,0.000001])

# draw the base inertia box in red, every link's inertia box in green
drawInertiaBox(quadruped, -1, [1, 0, 0])
#drawInertiaBox(quadruped,motor_front_rightR_joint, [1,0,0])
for i in range(nJoints):
  drawInertiaBox(quadruped, i, [0, 1, 0])
# Pose the legs into the start configuration. Maximal-coordinate bodies
# cannot be teleported with resetJointState, so the motors are ramped to
# the target pose over `steps` simulation steps instead; reduced-coordinate
# bodies are set instantly.
if (useMaximalCoordinates):
  steps = 400
  for aa in range(steps):
    # ramp motor joints toward +/- pi/2 (sign from motordir)
    p.setJointMotorControl2(quadruped, motor_front_leftL_joint, p.POSITION_CONTROL,
                            motordir[0] * halfpi * float(aa) / steps)
    p.setJointMotorControl2(quadruped, motor_front_leftR_joint, p.POSITION_CONTROL,
                            motordir[1] * halfpi * float(aa) / steps)
    p.setJointMotorControl2(quadruped, motor_back_leftL_joint, p.POSITION_CONTROL,
                            motordir[2] * halfpi * float(aa) / steps)
    p.setJointMotorControl2(quadruped, motor_back_leftR_joint, p.POSITION_CONTROL,
                            motordir[3] * halfpi * float(aa) / steps)
    p.setJointMotorControl2(quadruped, motor_front_rightL_joint, p.POSITION_CONTROL,
                            motordir[4] * halfpi * float(aa) / steps)
    p.setJointMotorControl2(quadruped, motor_front_rightR_joint, p.POSITION_CONTROL,
                            motordir[5] * halfpi * float(aa) / steps)
    p.setJointMotorControl2(quadruped, motor_back_rightL_joint, p.POSITION_CONTROL,
                            motordir[6] * halfpi * float(aa) / steps)
    p.setJointMotorControl2(quadruped, motor_back_rightR_joint, p.POSITION_CONTROL,
                            motordir[7] * halfpi * float(aa) / steps)
    # ramp knee joints toward the folded knee angle (some offset by 2*pi)
    p.setJointMotorControl2(quadruped, knee_front_leftL_link, p.POSITION_CONTROL,
                            motordir[0] * (kneeangle + twopi) * float(aa) / steps)
    p.setJointMotorControl2(quadruped, knee_front_leftR_link, p.POSITION_CONTROL,
                            motordir[1] * kneeangle * float(aa) / steps)
    p.setJointMotorControl2(quadruped, knee_back_leftL_link, p.POSITION_CONTROL,
                            motordir[2] * kneeangle * float(aa) / steps)
    p.setJointMotorControl2(quadruped, knee_back_leftR_link, p.POSITION_CONTROL,
                            motordir[3] * (kneeangle + twopi) * float(aa) / steps)
    p.setJointMotorControl2(quadruped, knee_front_rightL_link, p.POSITION_CONTROL,
                            motordir[4] * (kneeangle) * float(aa) / steps)
    p.setJointMotorControl2(quadruped, knee_front_rightR_link, p.POSITION_CONTROL,
                            motordir[5] * (kneeangle + twopi) * float(aa) / steps)
    p.setJointMotorControl2(quadruped, knee_back_rightL_link, p.POSITION_CONTROL,
                            motordir[6] * (kneeangle + twopi) * float(aa) / steps)
    p.setJointMotorControl2(quadruped, knee_back_rightR_link, p.POSITION_CONTROL,
                            motordir[7] * kneeangle * float(aa) / steps)
    p.stepSimulation()
    #time.sleep(fixedTimeStep)
else:
  # reduced coordinates: snap every joint straight to its target angle
  p.resetJointState(quadruped, motor_front_leftL_joint, motordir[0] * halfpi)
  p.resetJointState(quadruped, knee_front_leftL_link, motordir[0] * kneeangle)
  p.resetJointState(quadruped, motor_front_leftR_joint, motordir[1] * halfpi)
  p.resetJointState(quadruped, knee_front_leftR_link, motordir[1] * kneeangle)
  p.resetJointState(quadruped, motor_back_leftL_joint, motordir[2] * halfpi)
  p.resetJointState(quadruped, knee_back_leftL_link, motordir[2] * kneeangle)
  p.resetJointState(quadruped, motor_back_leftR_joint, motordir[3] * halfpi)
  p.resetJointState(quadruped, knee_back_leftR_link, motordir[3] * kneeangle)
  p.resetJointState(quadruped, motor_front_rightL_joint, motordir[4] * halfpi)
  p.resetJointState(quadruped, knee_front_rightL_link, motordir[4] * kneeangle)
  p.resetJointState(quadruped, motor_front_rightR_joint, motordir[5] * halfpi)
  p.resetJointState(quadruped, knee_front_rightR_link, motordir[5] * kneeangle)
  p.resetJointState(quadruped, motor_back_rightL_joint, motordir[6] * halfpi)
  p.resetJointState(quadruped, knee_back_rightL_link, motordir[6] * kneeangle)
  p.resetJointState(quadruped, motor_back_rightR_joint, motordir[7] * halfpi)
  p.resetJointState(quadruped, knee_back_rightR_link, motordir[7] * kneeangle)
#p.getNumJoints(1)
# Close each leg's parallel linkage: pin the R-knee toe to the matching
# L-knee toe with a point-to-point constraint (the URDF alone leaves the
# four-bar loop open). Replaces four copy-pasted create/change pairs.
if (toeConstraint):
    for knee_r, knee_l in ((knee_front_leftR_link, knee_front_leftL_link),
                           (knee_front_rightR_link, knee_front_rightL_link),
                           (knee_back_leftR_link, knee_back_leftL_link),
                           (knee_back_rightR_link, knee_back_rightL_link)):
        cid = p.createConstraint(quadruped, knee_r, quadruped, knee_l,
                                 p.JOINT_POINT2POINT, [0, 0, 0],
                                 [0, 0.005, 0.1], [0, 0.01, 0.1])
        p.changeConstraint(cid, maxForce=maxKneeForce)
# Apply a small passive friction torque (velocity control toward zero
# velocity with `kneeFrictionForce` as the max force) on every knee joint,
# so the unactuated knees are damped. The original issued the back-left
# pair twice; those duplicate calls were redundant (the second call just
# re-applied the same motor settings) and are dropped. The no-op `if (1):`
# wrapper is removed as well.
for knee in (knee_front_leftL_link, knee_front_leftR_link,
             knee_front_rightL_link, knee_front_rightR_link,
             knee_back_leftL_link, knee_back_leftR_link,
             knee_back_rightL_link, knee_back_rightR_link):
    p.setJointMotorControl(quadruped, knee, p.VELOCITY_CONTROL, 0,
                           kneeFrictionForce)
p.setGravity(0, 0, -10)

#use the Minitaur leg numbering
# Motor joints in Minitaur order: front-left L/R, back-left L/R,
# front-right L/R, back-right L/R.
legnumbering = [
    motor_front_leftL_joint, motor_front_leftR_joint, motor_back_leftL_joint,
    motor_back_leftR_joint, motor_front_rightL_joint, motor_front_rightR_joint,
    motor_back_rightL_joint, motor_back_rightR_joint
]
for i in range(8):
    print(legnumbering[i])

# Drive every motor to its neutral stance angle (1.57 rad, sign-corrected
# per joint by motordir) under PD position control. Replaces eight
# copy-pasted setJointMotorControl2 calls with one loop; order preserved.
for i in range(8):
    p.setJointMotorControl2(bodyIndex=quadruped,
                            jointIndex=legnumbering[i],
                            controlMode=p.POSITION_CONTROL,
                            targetPosition=motordir[i] * 1.57,
                            positionGain=kp,
                            velocityGain=kd,
                            force=maxForce)
#stand still
# Let the robot settle into its neutral stance for 15 simulated seconds
# before the gait begins, then snapshot the world and start state logging.
p.setRealTimeSimulation(useRealTime)
t = 0.0
t_end = t + 15
ref_time = time.time()
while t < t_end:
    p.setGravity(0, 0, -10)
    if useRealTime:
        # Real-time mode: the physics server steps itself; just track
        # wall-clock time since the reference point.
        t = time.time() - ref_time
    else:
        # Fixed-step mode: advance the clock and the simulation manually,
        # sleeping one step to roughly pace it to real time.
        t += fixedTimeStep
        p.stepSimulation()
        time.sleep(fixedTimeStep)
print("quadruped Id = ")
print(quadruped)
p.saveWorld("quadru.py")
# Handle to the Minitaur-format state log (binary file written by pybullet).
logId = p.startStateLogging(p.STATE_LOGGING_MINITAUR, "quadrupedLog.bin", [quadruped])
#jump
# Jump gait: every motor tracks the same sinusoid around the neutral angle
# (1.57 rad), amplitude `jump_amp`, frequency scaled by `speed`, with
# per-joint sign correction via motordir.
t = 0.0
t_end = t + 100  # NOTE(review): unused — the loop below never terminates.
ref_time = time.time()
while 1:
    if useRealTime:
        t = time.time() - ref_time
    else:
        t = t + fixedTimeStep
    target = math.sin(t * speed) * jump_amp + 1.57
    # One loop replaces eight copy-pasted setJointMotorControl2 calls
    # (the original's always-true `if (True):` wrapper and unused `i = 0`
    # are dropped; command order is preserved).
    for i in range(8):
        p.setJointMotorControl2(bodyIndex=quadruped,
                                jointIndex=legnumbering[i],
                                controlMode=p.POSITION_CONTROL,
                                targetPosition=motordir[i] * target,
                                positionGain=kp,
                                velocityGain=kd,
                                force=maxForce)
    if useRealTime == 0:
        p.stepSimulation()
        time.sleep(fixedTimeStep)
| 43.42766 | 116 | 0.598109 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,244 | 0.060948 |
356cc44803c63961c21e1852560af9b7a53ad16e | 7,163 | py | Python | tensormonk/architectures/gans_esrgan.py | Tensor46/TensorMONK | 67617d3fdf8fde072ba9cab42de7d67c79b17494 | [
"MIT"
] | 29 | 2018-07-06T23:57:23.000Z | 2022-03-08T20:38:57.000Z | tensormonk/architectures/gans_esrgan.py | sparupat/TensorMONK | 7a2699a28299a89b186e0eb17ed6e9feaea5429e | [
"MIT"
] | 3 | 2018-12-14T22:21:26.000Z | 2020-06-19T02:13:34.000Z | tensormonk/architectures/gans_esrgan.py | sparupat/TensorMONK | 7a2699a28299a89b186e0eb17ed6e9feaea5429e | [
"MIT"
] | 8 | 2018-07-06T23:58:03.000Z | 2021-04-12T01:35:54.000Z | """ TensorMONK's :: architectures :: ESRGAN """
__all__ = ["Generator", "Discriminator", "VGG19"]
import torch
import torch.nn as nn
import torchvision
from ..layers import Convolution
class DenseBlock(nn.Module):
    r"""Residual dense block (DenseNet-style feature reuse,
    https://arxiv.org/pdf/1608.06993.pdf): each convolution sees the
    concatenation of the block input and all previous outputs, and the
    final output is added back to the input scaled by ``beta``."""

    def __init__(self, tensor_size: tuple, filter_size: int = 3,
                 activation: str = "lklu", normalization: str = None,
                 n_blocks: int = 5, beta: float = 0.2, **kwargs):
        super(DenseBlock, self).__init__()
        n, c, h, w = tensor_size
        # The i-th convolution consumes c*(i+1) channels (block input plus
        # the i previous outputs) and emits c channels; the last one has no
        # activation.
        self.cnns = nn.ModuleList(
            Convolution((1, c * (i + 1), h, w), filter_size, out_channels=c,
                        strides=1,
                        activation=None if i + 1 == n_blocks else activation,
                        normalization=normalization, lklu_negslope=0.1)
            for i in range(n_blocks))
        self.tensor_size = tensor_size
        # Residual scaling factor, as in https://arxiv.org/pdf/1602.07261.pdf
        self.beta = beta

    def forward(self, tensor: torch.Tensor):
        r"""Residual dense block with scaling."""
        features = tensor
        out = None
        for idx, cnn in enumerate(self.cnns):
            if idx > 0:
                features = torch.cat((features, out), 1)
            out = cnn(features)
        return tensor + out * self.beta
class RRDB(nn.Module):
    r"""Residual-in-Residual Dense Block: ``n_dense`` DenseBlocks chained
    sequentially, with the chain's output scaled by ``beta`` and added back
    to the block input."""

    def __init__(self, tensor_size: tuple, filter_size: int = 3,
                 activation: str = "lklu", normalization: str = None,
                 n_dense: int = 3, n_blocks: int = 5, beta: float = 0.2,
                 **kwargs):
        super(RRDB, self).__init__()
        self.cnn = nn.Sequential(*[
            DenseBlock(tensor_size, filter_size, activation, normalization,
                       n_blocks, beta, **kwargs) for _ in range(n_dense)])
        self.tensor_size = tensor_size
        # Residual scaling factor, as in https://arxiv.org/pdf/1602.07261.pdf
        self.beta = beta

    def forward(self, tensor: torch.Tensor):
        r"""Residual-in-Residual Dense Block with scaling (beta)."""
        return tensor + self.cnn(tensor) * self.beta
class Generator(nn.Module):
    r"""ESRGAN generator network using Residual-in-Residual Dense Blocks.

    Paper: ESRGAN
    URL: https://arxiv.org/pdf/1809.00219.pdf

    Args:
        tensor_size (tuple, required): Shape of tensor in
            (None/any integer >0, channels, height, width).
        n_filters (int): The number of filters used through out the network,
            however, DenseBlock will have multiples of n_filters.
            default = 64
        n_rrdb (int): The number of Residual-in-Residual Dense Block (RRDB).
            default = 16
        n_dense (int): The number of dense blocks in RRDB.
            default = 3
        n_blocks (int): The number of convolutions in dense blocks.
            default = 5
        n_upscale (int): Number of 2x pixel-shuffle upscales applied to the
            input shape.
            default = 2
        beta (float): The scale factor of output before adding to any residue.
            default = 0.2
    """

    def __init__(self,
                 tensor_size: tuple = (1, 3, 32, 32),
                 n_filters: int = 64,
                 n_rrdb: int = 16,
                 n_dense: int = 3,
                 n_blocks: int = 5,
                 n_upscale: int = 2,
                 beta: float = 0.2,
                 **kwargs):
        super(Generator, self).__init__()
        # Shallow feature extraction (linear convolution).
        self.initial = Convolution(
            tensor_size, 3, n_filters, 1, activation=None)
        t_size = self.initial.tensor_size

        # Trunk: n_rrdb RRDB blocks followed by one linear convolution.
        trunk = [RRDB(t_size, 3, "lklu", n_dense=n_dense,
                      n_blocks=n_blocks, beta=beta) for _ in range(n_rrdb)]
        trunk.append(Convolution(t_size, 3, n_filters, 1, activation=None))
        self.rrdbs = nn.Sequential(*trunk)

        # Tail: n_upscale (conv to 4x channels + PixelShuffle(2)) stages,
        # then two convolutions mapping back to the input channel count.
        tail = []
        for _ in range(n_upscale):
            tail.append(
                Convolution(t_size, 3, n_filters * 4, 1, activation="lklu"))
            tail.append(nn.PixelShuffle(upscale_factor=2))
            # PixelShuffle(2): 4*n_filters -> n_filters channels, H and W
            # doubled, so the channel count in t_size stays n_filters.
            t_size = (t_size[0], t_size[1], t_size[2] * 2, t_size[3] * 2)
        tail.append(Convolution(t_size, 3, n_filters, 1, activation="lklu"))
        tail.append(Convolution(t_size, 3, tensor_size[1], activation=None))
        self.upscale = nn.Sequential(*tail)

        self.tensor_size = tensor_size
        self.initialize()

    def forward(self, tensor: torch.Tensor):
        r"""Expects normalized tensor (mean = 0.5 and std = 0.25)."""
        o = self.initial(tensor)
        return self.upscale(o + self.rrdbs(o))

    def enhance(self, tensor: torch.Tensor):
        r"""Inference helper: super-resolve and map back into [0, 1]."""
        with torch.no_grad():
            return self(tensor).mul_(0.25).add_(0.5).clamp_(0, 1)

    def initialize(self):
        r"""Scaled Kaiming init, as in https://arxiv.org/pdf/1809.00219.pdf."""
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
                nn.init.kaiming_normal_(m.weight)
                m.weight.data.mul_(0.1)
class Discriminator(nn.Module):
    r"""ESRGAN discriminator network.

    Paper: ESRGAN
    URL: https://arxiv.org/pdf/1809.00219.pdf

    Args:
        tensor_size (tuple, required): Shape of tensor in
            (None/any integer >0, channels, height, width).
    """

    def __init__(self, tensor_size: tuple = (1, 3, 128, 128), **kwargs):
        super(Discriminator, self).__init__()
        self.t_size = tensor_size
        self.tensor_size = None, 1
        stages = []
        t_size = self.t_size
        for oc in (64, 128, 256, 512):
            # Stride-1 convolution; the very first stage skips normalization.
            stages.append(Convolution(
                t_size, 3, oc, 1, normalization=None if oc == 64 else "batch",
                activation="lklu", lklu_negslope=0.2))
            t_size = stages[-1].tensor_size
            # Stride-2 convolution halves the spatial resolution.
            stages.append(Convolution(
                t_size, 3, oc, 2, normalization="batch",
                activation="lklu", lklu_negslope=0.2))
            t_size = stages[-1].tensor_size
        self.discriminator = nn.Sequential(*stages)

    def forward(self, tensor: torch.Tensor):
        r"""Expects normalized tensor (mean = 0.5 and std = 0.25)."""
        return self.discriminator(tensor)

    def initialize(self):
        r"""Scaled Kaiming init, as in https://arxiv.org/pdf/1809.00219.pdf.

        NOTE(review): unlike Generator, __init__ does not call this —
        presumably callers invoke it explicitly; confirm before relying on it.
        """
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
                nn.init.kaiming_normal_(m.weight)
                m.weight.data.mul_(0.1)
class VGG19(nn.Module):
    r"""Feature extractor built from torchvision's pretrained VGG-19
    (first 35 layers of its ``features`` stack)."""

    def __init__(self, **kwargs):
        super(VGG19, self).__init__()
        self.vgg19 = torchvision.models.vgg19(pretrained=True).features[:35]

    def forward(self, tensor: torch.Tensor):
        r"""Expects normalized tensor (mean = 0.5 and std = 0.25)."""
        return self.vgg19(tensor)
class ESRGAN:
    r"""Convenience namespace bundling the ESRGAN building blocks."""
    Generator = Generator
    Discriminator = Discriminator
    VGG19 = VGG19
| 35.285714 | 79 | 0.580064 | 6,958 | 0.971381 | 0 | 0 | 0 | 0 | 0 | 0 | 1,973 | 0.275443 |
356d07de30049e8cf0f6aad0e41acef8b5f0d9cb | 1,291 | py | Python | ingenico/connect/sdk/domain/product/device_fingerprint_request.py | festicket/connect-sdk-python3 | c399c6443789dd978f319c89e1ebd387c812a77b | [
"MIT"
] | 12 | 2016-09-26T21:46:31.000Z | 2020-12-23T18:44:54.000Z | ingenico/connect/sdk/domain/product/device_fingerprint_request.py | festicket/connect-sdk-python3 | c399c6443789dd978f319c89e1ebd387c812a77b | [
"MIT"
] | 3 | 2020-05-02T16:53:02.000Z | 2020-06-02T12:49:51.000Z | ingenico/connect/sdk/domain/product/device_fingerprint_request.py | festicket/connect-sdk-python3 | c399c6443789dd978f319c89e1ebd387c812a77b | [
"MIT"
] | 11 | 2017-07-16T00:55:28.000Z | 2021-09-24T17:00:49.000Z | # -*- coding: utf-8 -*-
#
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
from ingenico.connect.sdk.data_object import DataObject
class DeviceFingerprintRequest(DataObject):
    """Request payload for starting a device-fingerprint collection."""

    # Name-mangled private backing field for the collector_callback property.
    __collector_callback = None

    @property
    def collector_callback(self):
        """
        | You can supply a JavaScript function call that will be called after the device fingerprint data collecting using the provided JavaScript snippet is finished. This will then be added to the snippet that is returned in the property html.

        Type: str
        """
        return self.__collector_callback

    @collector_callback.setter
    def collector_callback(self, value):
        self.__collector_callback = value

    def to_dictionary(self):
        """Serialize to a dict; the callback is included only when set."""
        dictionary = super().to_dictionary()
        callback = self.collector_callback
        if callback is not None:
            dictionary['collectorCallback'] = callback
        return dictionary

    def from_dictionary(self, dictionary):
        """Populate fields from a dict and return self for chaining."""
        super().from_dictionary(dictionary)
        if 'collectorCallback' in dictionary:
            self.collector_callback = dictionary['collectorCallback']
        return self
| 34.891892 | 245 | 0.7134 | 1,081 | 0.837335 | 0 | 0 | 490 | 0.379551 | 0 | 0 | 491 | 0.380325 |
356fae5d80236a3dbf63128736f174a103162b77 | 3,418 | py | Python | tests/nnapi/specs/skip/V1_2/space_to_batch_v1_2.mod.py | periannath/ONE | 61e0bdf2bcd0bc146faef42b85d469440e162886 | [
"Apache-2.0"
] | 255 | 2020-05-22T07:45:29.000Z | 2022-03-29T23:58:22.000Z | tests/nnapi/specs/skip/V1_2/space_to_batch_v1_2.mod.py | periannath/ONE | 61e0bdf2bcd0bc146faef42b85d469440e162886 | [
"Apache-2.0"
] | 5,102 | 2020-05-22T07:48:33.000Z | 2022-03-31T23:43:39.000Z | test/cts/tool/CTSConverter/src/nn/specs/V1_2/space_to_batch_v1_2.mod.py | ibelem/webml-polyfill | aaf1ba4f5357eaf6e89bf9990f5bdfb543cd2bc2 | [
"Apache-2.0"
] | 120 | 2020-05-22T07:51:08.000Z | 2022-02-16T19:08:05.000Z | #
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Each TEST below declares one SPACE_TO_BATCH_ND case for the NNAPI test
# generator DSL: a float32 input, a {2, 2} paddings parameter (before/after
# padding per spatial dimension), the expected output, plus quantized and
# NCHW / relaxed / float16 variations of the same case.
layout = BoolScalar("layout", False) # NHWC

# TEST 1: SPACE_TO_BATCH_NCHW_1, block_size = [2, 2]
# 2x2 spatial input, no padding: each spatial position becomes its own batch.
i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
pad1 = Parameter("paddings", "TENSOR_INT32", "{2, 2}", [0, 0, 0, 0])
o1 = Output("op4", "TENSOR_FLOAT32", "{4, 1, 1, 2}")
Model().Operation("SPACE_TO_BATCH_ND", i1, [2, 2], pad1, layout).To(o1)

# Additional data type
quant8 = DataTypeConverter().Identify({
    i1: ("TENSOR_QUANT8_ASYMM", 0.1, 0),
    o1: ("TENSOR_QUANT8_ASYMM", 0.1, 0)
})

# Instantiate an example
example = Example({
    i1: [1.4, 2.3, 3.2, 4.1, 5.4, 6.3, 7.2, 8.1],
    o1: [1.4, 2.3, 3.2, 4.1, 5.4, 6.3, 7.2, 8.1]
}).AddNchw(i1, o1, layout).AddVariations("relaxed", "float16", quant8)

# TEST 2: SPACE_TO_BATCH_NCHW_2, block_size = [2, 2]
# 4x4 spatial input, no padding (reuses pad1): rearranged into 4 batches
# of 2x2.
i2 = Input("op1", "TENSOR_FLOAT32", "{1, 4, 4, 1}")
o2 = Output("op4", "TENSOR_FLOAT32", "{4, 2, 2, 1}")
Model().Operation("SPACE_TO_BATCH_ND", i2, [2, 2], pad1, layout).To(o2)

# Additional data type
quant8 = DataTypeConverter().Identify({
    i2: ("TENSOR_QUANT8_ASYMM", 0.5, 0),
    o2: ("TENSOR_QUANT8_ASYMM", 0.5, 0)
})

# Instantiate an example
example = Example({
    i2: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
    o2: [1, 3, 9, 11, 2, 4, 10, 12, 5, 7, 13, 15, 6, 8, 14, 16]
}).AddNchw(i2, o2, layout).AddVariations("relaxed", "float16", quant8)

# TEST 3: SPACE_TO_BATCH_NCHW_3, block_size = [3, 2]
# Non-square block with asymmetric padding ([1, 0] on height, [2, 0] on
# width); zero-point 128 for the quantized variation.
i3 = Input("op1", "TENSOR_FLOAT32", "{1, 5, 2, 1}")
pad3 = Parameter("paddings", "TENSOR_INT32", "{2, 2}", [1, 0, 2, 0])
o3 = Output("op4", "TENSOR_FLOAT32", "{6, 2, 2, 1}")
Model().Operation("SPACE_TO_BATCH_ND", i3, [3, 2], pad3, layout).To(o3)

# Additional data type
quant8 = DataTypeConverter().Identify({
    i3: ("TENSOR_QUANT8_ASYMM", 0.5, 128),
    o3: ("TENSOR_QUANT8_ASYMM", 0.5, 128)
})

# Instantiate an example
example = Example({
    i3: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
    o3: [0, 0, 0, 5, 0, 0, 0, 6, 0, 1, 0, 7,
         0, 2, 0, 8, 0, 3, 0, 9, 0, 4, 0, 10]
}).AddNchw(i3, o3, layout).AddVariations("relaxed", "float16", quant8)

# TEST 4: SPACE_TO_BATCH_NCHW_4, block_size = [3, 2]
# Larger asymmetric padding ([1, 1] on height, [2, 4] on width).
i4 = Input("op1", "TENSOR_FLOAT32", "{1, 4, 2, 1}")
pad4 = Parameter("paddings", "TENSOR_INT32", "{2, 2}", [1, 1, 2, 4])
o4 = Output("op4", "TENSOR_FLOAT32", "{6, 2, 4, 1}")
Model().Operation("SPACE_TO_BATCH_ND", i4, [3, 2], pad4, layout).To(o4)

# Additional data type
quant8 = DataTypeConverter().Identify({
    i4: ("TENSOR_QUANT8_ASYMM", 0.25, 128),
    o4: ("TENSOR_QUANT8_ASYMM", 0.25, 128)
})

# Instantiate an example
example = Example({
    i4: [1, 2, 3, 4, 5, 6, 7, 8],
    o4: [0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0,
         0, 1, 0, 0, 0, 7, 0, 0, 0, 2, 0, 0, 0, 8, 0, 0,
         0, 3, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0]
}).AddNchw(i4, o4, layout).AddVariations("relaxed", "float16", quant8)
| 35.978947 | 74 | 0.614687 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,687 | 0.493563 |
357015263198b1279e9ef9410456f9db3904f6f9 | 559 | py | Python | print_dict_results.py | ofirtal/alice_google_wordcount | 0a0d648e688b80c3f34fd0250ae464bcd5d0de05 | [
"Apache-2.0"
] | 1 | 2020-09-29T03:08:35.000Z | 2020-09-29T03:08:35.000Z | print_dict_results.py | ofirtal/alice_google_wordcount | 0a0d648e688b80c3f34fd0250ae464bcd5d0de05 | [
"Apache-2.0"
] | null | null | null | print_dict_results.py | ofirtal/alice_google_wordcount | 0a0d648e688b80c3f34fd0250ae464bcd5d0de05 | [
"Apache-2.0"
] | null | null | null | class PrintDictResults:
def __init__(self, dict_of_sorted_words):
self.list_of_sorted_words = dict_of_sorted_words
def print_items(self, counter):
print(f'{counter + 1}. "{self.list_of_sorted_words[counter][1]}" : {self.list_of_sorted_words[counter][0]}')
def get_all_words(self):
for item in self.list_of_sorted_words:
self.print_items((self.list_of_sorted_words.index(item)))
def get_top(self, number_of_wanted_items):
for i in range(0, number_of_wanted_items):
self.print_items(i)
| 37.266667 | 116 | 0.697674 | 558 | 0.998211 | 0 | 0 | 0 | 0 | 0 | 0 | 101 | 0.18068 |
35702e98a69fbbc4d367cef348aeb837ce0c6f47 | 20,594 | py | Python | Scripts/8Bit_Characters_Binary.py | Ifelsethendo/Blender-Game-Engine-Templates | b4cd92ee7e1c4e9ac8d10b4d843392856285f6ba | [
"Apache-2.0"
] | 5 | 2021-12-11T02:27:43.000Z | 2022-01-08T08:51:21.000Z | Scripts/8Bit_Characters_Binary.py | Ifelsethendo/Blender-Game-Engine-Templates | b4cd92ee7e1c4e9ac8d10b4d843392856285f6ba | [
"Apache-2.0"
] | null | null | null | Scripts/8Bit_Characters_Binary.py | Ifelsethendo/Blender-Game-Engine-Templates | b4cd92ee7e1c4e9ac8d10b4d843392856285f6ba | [
"Apache-2.0"
] | 2 | 2021-12-29T05:48:58.000Z | 2022-03-05T15:31:33.000Z | import bpy
try:
    # bge (Blender Game Engine API) only exists inside the BGE runtime;
    # degrade gracefully when the script runs elsewhere.
    import bge
except ImportError:
    # Narrowed from a bare `except:` so unrelated errors are not swallowed.
    print("no bge module")
## This array of binary strings defines each of 256 characters
binary=["0011110000100000001000000111100000100000011000001010110000000000","0100010001001000010100000010110001000100000010000001110000000000","0100010001001000010100000010110001010100000111000000010000000000","0110010000101000010100000010110001010100000111000000010000000000","0000000000010000000000000111110000000000000100000000000000000000","0011110001000010100110011010000110100001100110010100001000111100","0000000000000100000000101111111111111111000000100000010000000000","0000000000100000010000001111111111111111010000000010000000000000","0001100000111100010110100001100000011000000110000001100000011000","0001100000011000000110000001100000011000010110100011110000011000","0000111100000111000011110001110100111000011100000010000000000000","0000000000000100000011100001110010111000111100001110000011110000","0000000000100000011100000011100000011101000011110000011100001111","1111000011100000111100001011100000011100000011100000010000000000","0000000000111100001111000000000001111110111111111111111111111111","1111110011111100001111000011000000110000001100000011000000110000","1111111100111100001111000011110000111100000000000000000000000000","0111111101111111011110000001100000011000000110000001100000011000","0011110000111100001111000011110000111100001111000011110000111100","0000000000111100001111000000000001111110111111111011110111011011","0111111000111100011001100110011011100111110000111100001111000011","0000000000111000001111000000000000111000011110000111110001111111","0111110000111110000110110001111000011100000110000011100000111000","0000000000111000001111000000000000111001011110011101111111011100","0111110000111111000111110011101111110011110000111000000000000000","0001100000111100011001100010010011100111101111011001100111011011","0000000000000000000000000000000000001111000011110000111100001111","0000111100001111000011110000111100000000000000000000000000000000","0000000000000000000000000000000011110000111100001111000011110000","111100001111000011110000111100000000000000000000
0000000000000000","1111000011110000111100001111000000001111000011110000111100001111","0000000000000000000000000000000011111111111111111111111111111111","0000000000000000000000000000000000000000000000000000000000000000","0001000000010000000100000001000000010000000000000001000000000000","0010100000101000001010000000000000000000000000000000000000000000","0010100000101000011111000010100001111100001010000010100000000000","0001000000111100010100000011100000010100011110000001000000000000","0110000001100100000010000001000000100000010011000000110000000000","0010000001010000010100000010000001010100010010000011010000000000","0000100000001000000100000000000000000000000000000000000000000000","0001000000100000010000000100000001000000001000000001000000000000","0001000000001000000001000000010000000100000010000001000000000000","0001000001010100001110000001000000111000010101000001000000000000","0000000000010000000100000111110000010000000100000000000000000000","0000000000000000000000000000000000010000000100000010000000000000","0000000000000000000000000111110000000000000000000000000000000000","0000000000000000000000000000000000000000000000000001000000000000","0000000000000100000010000001000000100000010000000000000000000000","0011100001000100010011000101010001100100010001000011100000000000","0001000000110000000100000001000000010000000100000011100000000000","0011100001000100000001000001100000100000010000000111110000000000","0111110000000100000010000001100000000100010001000011100000000000","0000100000011000001010000100100001111000000010000000100000000000","0111110001000000011110000000010000000100010001000011100000000000","0001110000100000010000000111100001000100010001000011100000000000","0111110000000100000010000001000000100000001000000010000000000000","0011100001000100010001000011100001000100010001000011100000000000","0011100001000100010001000011110000000100000010000111000000000000","0000000000000000000100000000000000010000000000000000000000000000","00000000000000000001000000000000000100
00000100000010000000000000","0000100000010000001000000100000000100000000100000000100000000000","0000000000000000011111000000000001111100000000000000000000000000","0010000000010000000010000000010000001000000100000010000000000000","0011100001000100000010000001000000010000000000000001000000000000","0011100001000100010101000101110001011000010000000011100000000000","0001000000101000010001000100010001111100010001000100010000000000","0111100001000100010001000111100001000100010001000111100000000000","0011100001000100010000000100000001000000010001000011100000000000","0111100001000100010001000100010001000100010001000111100000000000","0111110001000000010000000111100001000000010000000111110000000000","0111110001000000010000000111100001000000010000000100000000000000","0011110001000000010000000100000001001100010001000011110000000000","0100010001000100010001000111110001000100010001000100010000000000","0011100000010000000100000001000000010000000100000011100000000000","0000010000000100000001000000010000000100010001000011100000000000","0100010001001000010100000110000001010000010010000100010000000000","0100000001000000010000000100000001000000010000000111110000000000","0100010001101100010101000101010001000100010001000100010000000000","0100010001000100011001000101010001001100010001000100010000000000","0011100001000100010001000100010001000100010001000011100000000000","0111100001000100010001000111100001000000010000000100000000000000","0011100001000100010001000100010001010100010010000011010000000000","0111100001000100010001000111100001010000010010000100010000000000","0011100001000100010000000011100000000100010001000011100000000000","0111110000010000000100000001000000010000000100000001000000000000","0100010001000100010001000100010001000100010001000011100000000000","0100010001000100010001000100010001000100001010000001000000000000","0100010001000100010001000101010001010100011011000100010000000000","0100010001000100001010000001000000101000010001000100010000000000","0100010001000100001010000001
000000010000000100000001000000000000","0111110000000100000010000001000000100000010000000111110000000000","0111110001100000011000000110000001100000011000000111110000000000","0000000001000000001000000001000000001000000001000000000000000000","0111110000001100000011000000110000001100000011000111110000000000","0000000000000000000100000010100001000100000000000000000000000000","0000000000000000000000000000000000000000000000000111110000000000","0010000000100000000100000000000000000000000000000000000000000000","0000000000000000001101000100110001000100010011000011010000000000","0100000001000000010110000110010001000100011001000101100000000000","0000000000000000000111000010000000100000001000000001110000000000","0000010000000100001101000100110001000100010011000011010000000000","0000000000000000001110000100010001111100010000000011100000000000","0000100000010000000100000011100000010000000100000001000000000000","0000000000000000001101000100110001000100001111000000010000000000","0100000001000000011110000100010001000100010001000100010000000000","0001000000000000001100000001000000010000000100000011100000000000","0000100000000000000010000000100000001000000010000000100000000000","0100000001000000010010000101000001110000010010000100010000000000","0011000000010000000100000001000000010000000100000011100000000000","0000000000000000011011000101001001010010010100100101001000000000","0000000000000000011110000100010001000100010001000100010000000000","0000000000000000001110000100010001000100010001000011100000000000","0000000000000000010110000110010001000100011001000101100001000000","0000000000000000001101000100110001000100010011000011010000000110","0000000000000000010110000110000001000000010000000100000000000000","0000000000000000001111000100000000111000000001000111100000000000","0001000000010000011111000001000000010000000100000001000000000000","0000000000000000010001000100010001000100010001000011110000000000","0000000000000000010001000100010000101000001010000001000000000000","000000000000000001
0100100101001001010010010100100010110000000000","0000000000000000010001000010100000010000001010000100010000000000","0000000000000000001001000010010000100100001111000000010000111000","0000000000000000011111000000100000010000001000000111110000000000","0000110000010000000100000010000000010000000100000000110000000000","0001000000010000000100000000000000010000000100000001000000000000","0110000000010000000100000000100000010000000100000110000000000000","0000000000000000000001000011100001000000000000000000000000000000","1111111111111111111111111111111111111111111111111111111111111111","0000000011111111111111111111111111111111111111111111111111111111","1000000010000000100000001000000010000000100000001000000010000000","0000000000011100001111000000000000011100000111100011111011111110","0011111001111100110110000111100000111000000110000001110000011100","0000000000000000000000000000000010101010010101011010101001010101","1010000001010000101000000101000010100000010100001010000001010000","1010101001010101101010100101010110101010010101011010101001010101","0000000000011000001111000111111001111110001111000001100000000000","0000000000000000000000000000000000000000000000001111111111111111","0000000000000000111111111111111111111111111111111111111111111111","0001100000011000001111000111111011111111110110110001100000111100","0011110000011000110110111111111101111110001111000001100000011000","0000000000011100001111000000000010011100100111101111101100111011","0011111011111100111110001101110011001111110000110000000100000000","1100000011110000111111001111111111111111111111001111000011000000","0001100000011000001111000011110001111110011111101111111111111111","0000000000000000000000000000000000000000000000000000000011111111","1111111011111110111111101111111011111110111111101111111011111110","0011110001010010001111001000000010111100111111110011110100111101","0011110001001010001111000000000100111101111111111011110010111100","1010101001010101101010100101010000000000000000000000000000000000","00001010
00000101000010100000010100001010000001010000101000000101","0011110001111110111111111111111111111111111111110111111000111100","1100000011000000110000001100000011000000110000001100000011000000","1110000011100000111000001110000011100000111000001110000011100000","1111000011110000111100001111000011110000111100001111000011110000","0011000000111000100111001111111111111111100111000011100000110000","0000110000011100001110011111111111111111001110010001110000001100","0000000001100110011111100100001011000011111111110001100000000000","0000000000011000111111111100001101000010011111100110011000000000","0000001100001111001111111111111111111111001111110000111100000011","1111111111111111011111100111111000111100001111000001100000011000","0000000000000000000000000000000000000000000000000000000000000000","1111000011110000111100000000000000000000000000000000000000000000","0000111100001111000011110000000000000000000000000000000000000000","1111111111111111111111110000000000000000000000000000000000000000","0000000000000000000000001111000011110000000000000000000000000000","1111000011110000111100001111000011110000000000000000000000000000","0000111100001111000011111111000011110000000000000000000000000000","1111111111111111111111111111000011110000000000000000000000000000","0000000000000000000000000000111100001111000000000000000000000000","1111000011110000111100000000111100001111000000000000000000000000","0000111100001111000011110000111100001111000000000000000000000000","1111111111111111111111110000111100001111000000000000000000000000","0000000000000000000000001111111111111111000000000000000000000000","1111000011110000111100001111111111111111000000000000000000000000","0000111100001111000011111111111111111111000000000000000000000000","1111111111111111111111111111111111111111000000000000000000000000","0000000000000000000000000000000000000000111100001111000011110000","1111000011110000111100000000000000000000111100001111000011110000","0000111100001111000011110000000000000000111100001111000011110000"
,"1111111111111111111111110000000000000000111100001111000011110000","0000000000000000000000001111000011110000111100001111000011110000","1111000011110000111100001111000011110000111100001111000011110000","0000111100001111000011111111000011110000111100001111000011110000","1111111111111111111111111111000011110000111100001111000011110000","0000000000000000000000000000111100001111111100001111000011110000","1111000011110000111100000000111100001111111100001111000011110000","0000111100001111000011110000111100001111111100001111000011110000","1111111111111111111111110000111100001111111100001111000011110000","0000000000000000000000001111111111111111111100001111000011110000","1111000011110000111100001111111111111111111100001111000011110000","0000111100001111000011111111111111111111111100001111000011110000","1111111111111111111111111111111111110000111100001111000011110000","0000000100000011000001110000111100011111001111110111111111111111","1000000011000000111000001111000011111000111111001111111011111111","1111111111111111011111100011110000000000000000000000000000000000","1111110011111100111111001111110011111100111111001111110011111100","0000000000000000001111000011110000111100001111000000000000000000","0001100000111100011111101111111111111111011111100011110000011000","0000000000000000000000000001100000011000000000000000000000000000","0000111100001111000001110000001100000000000000000000000000000000","0001100000011000000110001111111111111111000110000001100000011000","0000000000000000000000000000000011000000111000001111000011110000","0000000100000010000001000000100000010000001000000100000010000000","0000001100000111000011110000111100001111000011110000011100000011","0001100000011000000110001111111111111111000000000000000000000000","0001100000011000000110000001111100011111000110000001100000011000","0000000000000000000000001111100011111000000110000001100000011000","0001100000011000000110000001111100011111000000000000000000000000","0000100100100000000001001000000000010001010000000000100
000000010","0101001001000100001011011100010000010001101101000010001101001010","0000000000000000000000000000000000111100011111101111111111111111","0000000000010000001011000011101001011100001101000000010000000000","0110011011111111111111111111111101111110001111000001100000011000","0001100000111100000110000100001011100111010000100001100000111100","0001100000011000000110000001100000011000000110000001100000011000","0000000000000000000000000000000000000011000001110001111100011111","1000000101000010001001000001100000011000001001000100001010000001","1111000011110000111000001100000000000000000000000000000000000000","1000000001000000001000000001000000001000000001000000001000000001","1100000011100000111100001111000011110000111100001110000011000000","0000000000000000000000001111111111111111000110000001100000011000","0001100000011000000110001111100011111000000110000001100000011000","0000000000000000000000000001111100011111000110000001100000011000","0001100000011000000110001111100011111000000000000000000000000000","0000000000000000000000000000000000001111000011110000111100001111","1111000011110000111100000000000000000000000011110000111100001111","0000111100001111000011110000000000000000000011110000111100001111","1111111111111111111111110000000000000000000011110000111100001111","0000000000000000000000001111000011110000000011110000111100001111","1111000011110000111100001111000011110000000011110000111100001111","0000111100001111000011111111000011110000000011110000111100001111","1111111111111111111111111111000011110000000011110000111100001111","0000000000000000000000000000111100001111000011110000111100001111","1111000011110000111100000000111100001111000011110000111100001111","0000111100001111000011110000111100001111000011110000111100001111","1111111111111111111111110000111100001111000011110000111100001111","0000000000000000000000001111111111111111000011110000111100001111","1111000011110000111100001111111111111111000011110000111100001111","000011110000111100001111111111111111111100001
1110000111100001111","1111111111111111111111111111111111111111000011110000111100001111","0000000000000000000000000000000000000000111111111111111111111111","1111000011110000111100000000000000000000111111111111111111111111","0000111100001111000011110000000000000000111111111111111111111111","1111111111111111111111110000000000000000111111111111111111111111","0000000000000000000000001111000011110000111111111111111111111111","1111000011110000111100001111000011110000111111111111111111111111","0000111100001111000011111111000011110000111111111111111111111111","1111111111111111111111111111000011110000111111111111111111111111","0000000000000000000000000000111100001111111111111111111111111111","1111000011110000111100000000111100001111111111111111111111111111","0000111100001111000011110000111100001111111111111111111111111111","1111111111111111111111110000111100001111111111111111111111111111","0000000000000000000000001111111111111111111111111111111111111111","1111000011110000111100001111111111111111111111111111111111111111","0000111100001111000011111111111111111111111111111111111111111111","1111111111111111111111111111111111111111111111111111111111111111"]
## run the print command to see how they look as character blocks
def print_8bit_Binary(data=None):
    """Print character bitmaps as 8-bit-wide text blocks.

    Each entry of ``data`` is a string of ``'0'``/``'1'`` characters; it is
    printed as rows of 8 bits, preceded by a blank separator line so the
    8x8 character shapes are visible in the console.

    :param data: iterable of bit strings; defaults to the module-level
        ``binary`` table defined above.
    """
    if data is None:
        data = binary
    for bits in data:
        print()  # blank line separates consecutive characters
        # emit the bitmap one 8-bit row at a time
        for start in range(0, len(bits), 8):
            print(bits[start:start + 8])
## create individual mesh groups from binary data
def parse_8bit_Binary_To_Mesh():
    """Build one mesh group per character from the ``binary`` bitmap table.

    For every 64-bit character string a parent null mesh named ``chr_<n>``
    is created, and one unit plane is added per set bit, laid out on an
    8-wide grid (rows grow downwards, hence the negated y).
    """
    global currentCharacter
    for index, bits in enumerate(binary):
        # empty object that will act as parent for this character's planes
        currentCharacter = make_nullMesh("chr_" + str(index))
        col = 0
        row = 0
        for bit in bits:
            col += 1
            if int(bit) == 1:
                make_plane(col, row * -1, 0, index)
            # each bitmap row is 8 bits wide
            if col == 8:
                col = 0
                row += 1
## use an empty mesh as parent to the bits
def make_nullMesh(nameString):
    """Create an empty (geometry-less) mesh object to use as a parent.

    The object is linked into the default "Collection" and returned so the
    caller can parent the per-bit planes to it.
    """
    # verts/faces are only needed by the commented-out from_pydata call below
    verts = [(0,0,0),(0,0.0,0),(0.0,0.0,0),(0.0,0,0)]
    faces = [(0,1,2,3)]
    meshName = nameString
    mymesh = bpy.data.meshes.new(meshName)
    myobject = bpy.data.objects.new(meshName,mymesh)
    # NOTE(review): the bpy.data.objects[meshName] lookup assumes no other
    # object already uses this name (Blender renames duplicates) -- confirm
    myobject.location = bpy.data.objects[meshName].location = (0,0,0) #x,y,z
    bpy.data.collections["Collection"].objects.link(myobject)
    #mymesh.from_pydata(verts,[],faces) ## no need for polygons here
    #mymesh.update(calc_edges=True)
    return myobject
## draw a mesh plane
def make_plane(x,y,z,n):
    """Create a unit square plane at (x, y, 0) for bit (x, y) of character n.

    The mesh name encodes character index and grid position; the new object
    is parented to the module-level ``currentCharacter`` null mesh.
    Note: the ``z`` argument is accepted but the location always uses 0.
    """
    verts = [(0,0,0),(0,1,0),(1,1,0),(1,0,0)]
    faces = [(0,1,2,3)]
    meshName = str(n) + "_"+str(x) +"_"+ str(y)
    mymesh = bpy.data.meshes.new(meshName)
    myobject = bpy.data.objects.new(meshName,mymesh)
    # NOTE(review): the bpy.data.objects[meshName] lookup assumes the name is
    # unique in the file (Blender renames duplicates) -- confirm
    myobject.location = bpy.data.objects[meshName].location = (x,y,0) #x,y,z
    bpy.data.collections["Collection"].objects.link(myobject)
    mymesh.from_pydata(verts,[],faces)
    mymesh.update(calc_edges=True)
    myobject.parent = currentCharacter
## this is slow
## merge all children into the first mesh of each group
def unify_mesh():
    """Merge each character's bit planes into its parent mesh via boolean UNION.

    For every ``chr_<i>`` parent (256 characters), each child plane is folded
    into the parent with a BOOLEAN/UNION modifier and then deleted.
    Warning: this uses bpy.ops per child and is very slow (see note above).
    """
    for i in range(256):
        # 'chr' shadows the builtin; kept as-is to avoid behavior changes
        chr = "chr_" + str(i)
        #P is for Parent
        P = bpy.data.scenes['Scene'].objects[chr]
        N = len(P.children)
        # some characters are empty
        if N > 0:
            for bit in P.children:
                currentBit = bit.name
                ## bool the meshes together with modifier
                if P.name != currentBit:
                    # the operator below applies to the active object, so make
                    # the parent active first
                    active_obj = bpy.context.window.scene.objects[P.name]
                    bpy.context.view_layer.objects.active = active_obj
                    bool_mod = active_obj.modifiers.new(name=P.name +"+" +currentBit, type='BOOLEAN')
                    bool_mod.operation = 'UNION'
                    bool_mod.object = bit
                    bpy.ops.object.modifier_apply(modifier=active_obj.modifiers.active.name)
                    ## remove the extra mesh now that it is merged in
                    object_to_delete = bpy.data.objects[currentBit]
                    bpy.data.objects.remove(object_to_delete, do_unlink=True)
## first parse the bits to mesh objects
#parse_8bit_Binary_To_Mesh()
''' then unify all the bits to their parent mesh
WARNING IT TAKES LIKE 30 minutes '''
#unify_mesh() | 185.531532 | 17,160 | 0.891036 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 17,814 | 0.865009 |
3570648818248f506b4acc069aadab42f128c69d | 2,732 | py | Python | examples/steps/test_calc.py | bigbirdcode/pt_gh | 2c1775c6141eee4594f27028d0c87cc783ee08af | [
"MIT"
] | null | null | null | examples/steps/test_calc.py | bigbirdcode/pt_gh | 2c1775c6141eee4594f27028d0c87cc783ee08af | [
"MIT"
] | null | null | null | examples/steps/test_calc.py | bigbirdcode/pt_gh | 2c1775c6141eee4594f27028d0c87cc783ee08af | [
"MIT"
] | null | null | null | """Example for Pytest-Gherkin"""
import ast
from pytest import approx
from pt_gh import step, value_options
operator = value_options("add", "subtract")
@step("I have {num1:d} and {num2:d}")
def given_numbers_i(num1, num2, context):
    """Given-step: store two integers and reset the running answer.

    pt_gh converts the arguments to int via the ``:d`` annotations;
    ``context`` is a pytest fixture (shared dict)."""
    context["nums"] = []
    context["nums"].append(num1)
    context["nums"].append(num2)
    context["ans"] = 0
@step("I have floats {num1:f} and {num2:f}")
def given_numbers_f(num1, num2, context):
    """Given-step: store two floats and reset the running answer.

    pt_gh converts the arguments to float via the ``:f`` annotations;
    ``context`` is a pytest fixture (shared dict)."""
    context["nums"] = []
    context["nums"].append(num1)
    context["nums"].append(num2)
    context["ans"] = 0.0
@step("I have list of floats {float_list}")
def i_have_list_of_floats(float_list, context):
    """Given-step: parse a Python-literal list of floats into the context."""
    # the step argument arrives as text, e.g. "[1.0, 2.5]"
    float_list = ast.literal_eval(float_list)
    context["nums"] = float_list
    context["ans"] = 0.0
@step("I {operator:operator} them", dict(operator=operator))
def i_en_de_crypt(operator, context):
    """When-step: fold the stored numbers into context["ans"].

    ``operator`` is validated against the ``value_options`` declared above
    ("add" / "subtract"); ``context`` is a pytest fixture."""
    if operator == "add":
        for num in context["nums"]:
            context["ans"] += num
    else:
        # subtract: first number minus all following ones
        context["ans"] = context["nums"][0]
        for num in context["nums"][1:]:
            context["ans"] -= num
@step("I have {result:d} as result")
def i_get_answer_i(result, context):
    """Then-step: assert the integer result computed by the earlier steps."""
    assert context["ans"] == result
@step("I have float {result:f} as result")
def i_get_answer_f(result, context):
    """Then-step: assert the float result, using pytest.approx for tolerance."""
    assert context["ans"] == approx(result)
@step("I have a matrix:")
def i_have_a_matrix(data_table, context):
    """Given-step: store a Gherkin data table as a 2D int matrix.

    ``data_table`` is a 2D Python list of strings built by pt_gh from the
    Gherkin table; the step converts the cells to int itself."""
    context["matrix"] = [[int(x) for x in row] for row in data_table]
@step("I sum all rows")
def i_sum_all_rows(context):
    """When-step: compute the per-row sums of the stored matrix."""
    context["vector"] = [sum(row) for row in context["matrix"]]
@step("I have a vector:")
def i_have_a_vector(data_table, context):
    """Then-step: assert the row sums equal the single-column data table.

    ``data_table`` cells arrive as strings, so each first column value is
    converted to int before comparing."""
    assert context["vector"] == [int(x[0]) for x in data_table]
| 30.355556 | 69 | 0.673499 | 0 | 0 | 0 | 0 | 2,548 | 0.93265 | 0 | 0 | 1,406 | 0.514641 |
35708b1a267084c409e146c96cce29f59f8a1bdd | 8,901 | py | Python | runtime/opt/taupage/init.d/02-register-td-agent.py | a1exsh/taupage | aa3f31ec84d28db0f63b0300b5aabeb081bfcb35 | [
"Apache-2.0"
] | 49 | 2015-04-14T13:55:10.000Z | 2020-02-14T22:55:43.000Z | runtime/opt/taupage/init.d/02-register-td-agent.py | a1exsh/taupage | aa3f31ec84d28db0f63b0300b5aabeb081bfcb35 | [
"Apache-2.0"
] | 538 | 2015-04-01T10:53:09.000Z | 2020-04-17T08:43:36.000Z | runtime/opt/taupage/init.d/02-register-td-agent.py | a1exsh/taupage | aa3f31ec84d28db0f63b0300b5aabeb081bfcb35 | [
"Apache-2.0"
] | 67 | 2015-05-05T19:48:30.000Z | 2020-11-04T04:59:00.000Z | #!/usr/bin/env python3
import logging
import subprocess
import re
import boto.utils
from jinja2 import Environment, FileSystemLoader
from taupage import get_config
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
TPL_NAME = 'td-agent.conf.jinja2'
TD_AGENT_TEMPLATE_PATH = '/etc/td-agent/templates/'
TD_AGENT_OUTPUT_PATH = '/etc/td-agent/td-agent.conf'
def restart_td_agent_process():
    """Restart the td-agent (Fluentd) service via the init system.

    Waits up to 5 seconds for the restart command to finish and raises if
    it exits non-zero (a TimeoutExpired propagates if it hangs).
    """
    proc = subprocess.Popen(['service', 'td-agent', 'restart'])
    returncode = proc.wait(timeout=5)
    if returncode:
        raise Exception("'service td-agent restart' failed with exit code: {0}".format(returncode))
def get_scalyr_api_key():
    """Read the Scalyr API key from the Taupage config, KMS-decrypting it
    when prefixed with ``aws:kms:``.

    Looks up ``logging.scalyr_account_key`` first, falling back to the
    top-level ``scalyr_account_key``. Exits the process if decryption fails.
    Returns None when no key is configured.
    """
    main_config = get_config()
    config = main_config.get('logging')
    scalyr_api_key = config.get('scalyr_account_key', main_config.get('scalyr_account_key'))
    if scalyr_api_key:
        # If scalyr_api_key starts with "aws:kms:" then decrypt key
        match_kms_key = re.search('aws:kms:', scalyr_api_key, re.IGNORECASE)
        if match_kms_key:
            # strip the prefix and hand the ciphertext to the decrypt helper
            scalyr_api_key = re.sub(r'aws:kms:', '', scalyr_api_key)
            try:
                scalyr_api_key = subprocess.check_output(['python3',
                                                          '/opt/taupage/bin/decrypt-kms.py',
                                                          scalyr_api_key]).decode('UTF-8').strip()
            except Exception:
                logger.error('Failed to run /opt/taupage/bin/decrypt-kms.py')
                raise SystemExit()
            # the helper reports failure via its stdout rather than exit code
            if scalyr_api_key == "Invalid KMS key.":
                logger.error('Failed to decrypt KMS Key')
                raise SystemExit(1)
    return scalyr_api_key
def update_configuration_from_template(s3_default):
    """Render the td-agent (Fluentd) configuration from the Jinja template.

    Gathers application/AWS metadata and the ``logging`` section of the
    Taupage config, resolves per-log-type destinations, optionally fetches
    the Scalyr key and installs an S3 IAM-check cron job, then writes the
    rendered template to /etc/td-agent/td-agent.conf.

    NOTE(review): the ``s3_default`` parameter is currently unused inside
    this function -- confirm whether it was meant to influence rendering.
    """
    # which sinks end up enabled, keyed by destination name
    fluentd_destinations = dict(scalyr=False, s3=False, rsyslog=False, scalyr_s3=False)
    config = get_config()
    logging_config = config.get('logging', {})
    application_id = config.get('application_id')
    application_version = config.get('application_version')
    stack = config.get('notify_cfn', {}).get('stack')
    source = config.get('source')
    image = config.get('source').split(':', 1)[0]
    # instance identity document provides region and account id
    instance_data = boto.utils.get_instance_identity()['document']
    aws_region = instance_data['region']
    aws_account = instance_data['accountId']
    hostname = boto.utils.get_instance_metadata()['local-hostname'].split('.')[0]
    customlog = config.get('mount_custom_log')
    if config.get('rsyslog_aws_metadata'):
        scalyr_syslog_log_parser = 'systemLogMetadata'
    else:
        scalyr_syslog_log_parser = 'systemLog'
    scalyr_application_log_parser = logging_config.get('scalyr_application_log_parser', 'slf4j')
    scalyr_custom_log_parser = logging_config.get('scalyr_custom_log_parser', 'slf4j')
    # per-log-type destinations default to the global log_destination ('s3')
    fluentd_log_destination = logging_config.get('log_destination', 's3')
    fluentd_syslog_destination = logging_config.get('syslog_destination', fluentd_log_destination)
    fluentd_applog_destination = logging_config.get('applog_destination', fluentd_log_destination)
    fluentd_authlog_destination = logging_config.get('authlog_destination', fluentd_log_destination)
    fluentd_customlog_destination = logging_config.get('customlog_destination', fluentd_log_destination)
    fluentd_applog_filter_exclude = logging_config.get('applog_filter_exclude', None)
    fluentd_customlog_filter_exclude = logging_config.get('customlog_filter_exclude', None)
    fluentd_loglevel = logging_config.get('fluentd_loglevel', 'error')
    fluentd_s3_raw_log_format = logging_config.get('s3_raw_log_format', 'true')
    fluentd_s3_region = logging_config.get('s3_region', aws_region)
    fluentd_s3_bucket = logging_config.get('s3_bucket', 'zalando-logging-'+aws_account+'-'+aws_region)
    fluentd_s3_timekey = logging_config.get('s3_timekey', '5m')
    fluentd_s3_acl = logging_config.get('s3_acl', 'bucket-owner-full-control')
    fluentd_rsyslog_host = logging_config.get('rsyslog_host')
    fluentd_rsyslog_port = logging_config.get('rsyslog_port', '514')
    fluentd_rsyslog_protocol = logging_config.get('rsyslog_protocol', 'tcp')
    fluentd_rsyslog_severity = logging_config.get('rsyslog_severity', 'notice')
    fluentd_rsyslog_program = logging_config.get('rsyslog_program', 'fluentd')
    fluentd_rsyslog_hostname = logging_config.get('rsyslog_hostname', hostname)
    # mark every destination that at least one log type uses
    for destination in (fluentd_applog_destination,
                        fluentd_authlog_destination,
                        fluentd_customlog_destination,
                        fluentd_syslog_destination):
        fluentd_destinations[destination] = True
    # Get Scalyr key only if configured
    if fluentd_destinations.get('scalyr') or fluentd_destinations.get('scalyr_s3'):
        scalyr_api_key = get_scalyr_api_key()
    else:
        scalyr_api_key = None
    # install a cron job that periodically verifies S3 bucket access
    if fluentd_destinations.get('s3') or fluentd_destinations.get('scalyr_s3'):
        try:
            with open('/etc/cron.d/s3-iam-check', 'w') as file:
                file.write('#!/bin/bash\n')
                file.write('PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\n')
                file.write('*/5 * * * * root /opt/taupage/bin/s3-iam-check.py test {!s}\n'.format(fluentd_s3_bucket))
        except Exception:
            logger.exception('Failed to write file /etc/cron.d/s3-iam-check')
            raise SystemExit(1)
    env = Environment(loader=FileSystemLoader(TD_AGENT_TEMPLATE_PATH), trim_blocks=True)
    template_data = env.get_template(TPL_NAME).render(
        scalyr_api_key=scalyr_api_key,
        application_id=application_id,
        application_version=application_version,
        stack=stack,
        source=source,
        image=image,
        aws_region=aws_region,
        aws_account=aws_account,
        customlog=customlog,
        scalyr_application_log_parser=scalyr_application_log_parser,
        scalyr_syslog_log_parser=scalyr_syslog_log_parser,
        scalyr_custom_log_parser=scalyr_custom_log_parser,
        fluentd_syslog_destination=fluentd_syslog_destination,
        fluentd_applog_destination=fluentd_applog_destination,
        fluentd_applog_filter_exclude=fluentd_applog_filter_exclude,
        fluentd_authlog_destination=fluentd_authlog_destination,
        fluentd_customlog_destination=fluentd_customlog_destination,
        fluentd_customlog_filter_exclude=fluentd_customlog_filter_exclude,
        fluentd_loglevel=fluentd_loglevel,
        fluentd_s3_raw_log_format=fluentd_s3_raw_log_format,
        fluentd_s3_region=fluentd_s3_region,
        fluentd_s3_bucket=fluentd_s3_bucket,
        fluentd_s3_timekey=fluentd_s3_timekey,
        fluentd_s3_acl=fluentd_s3_acl,
        fluentd_rsyslog_host=fluentd_rsyslog_host,
        fluentd_rsyslog_port=fluentd_rsyslog_port,
        fluentd_rsyslog_protocol=fluentd_rsyslog_protocol,
        fluentd_rsyslog_severity=fluentd_rsyslog_severity,
        fluentd_rsyslog_program=fluentd_rsyslog_program,
        fluentd_rsyslog_hostname=fluentd_rsyslog_hostname,
        fluentd_destinations=fluentd_destinations
    )
    try:
        with open(TD_AGENT_OUTPUT_PATH, 'w') as f:
            f.write(template_data)
    except Exception:
        logger.exception('Failed to write file td-agent.conf')
        raise SystemExit(1)
if __name__ == '__main__':
    # Entry point: decide whether Fluentd should run, set up metrics
    # reporting, render the td-agent config and restart the service.
    hostname = boto.utils.get_instance_metadata()['local-hostname'].split('.')[0]
    config = get_config()
    logging_config = config.get('logging')
    s3_default = False
    if logging_config:
        # a logging section exists but Fluentd was not explicitly enabled
        if not logging_config.get('fluentd_enabled'):
            logger.info('Fluentd disabled; skipping Fluentd initialization')
            raise SystemExit()
    if not logging_config:
        # no logging section at all: fall back to default S3 logging and
        # expose that fact as a Prometheus textfile metric
        logger.info('Found no logging section in senza.yaml; enable default logging to s3')
        s3_default = True
        try:
            with open('/var/local/textfile_collector/fluentd_default_s3.prom', 'w') as file:
                file.write('fluentd_default_s3_logging{{tag=\"td-agent\",hostname=\"{!s}\"}} 1.0\n'
                           .format(hostname))
        except Exception:
            logger.exception('Failed to write file /var/local/textfile_collector/fluentd_default_s3.prom')
            raise SystemExit(1)
    # cron job that scrapes Fluentd metrics every minute
    try:
        with open('/etc/cron.d/get_fluentd_metrics', 'w') as file:
            file.write('#!/bin/bash\n')
            file.write('PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\n')
            file.write('* * * * * root /opt/taupage/bin/get-fluentd-metrics.sh\n')
    except Exception:
        logger.exception('Failed to write file /etc/cron.d/get_fluentd_metrics')
        raise SystemExit(1)
    update_configuration_from_template(s3_default)
    restart_td_agent_process()
| 47.854839 | 117 | 0.700371 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,260 | 0.253904 |
3573a03f2f0531d7422c7f22c5b3d8a3458c39f0 | 9,323 | py | Python | yagocd/resources/property.py | 1and1/yagocd | 4c75336ae6f107c8723d37b15e52169151822127 | [
"ISC"
] | null | null | null | yagocd/resources/property.py | 1and1/yagocd | 4c75336ae6f107c8723d37b15e52169151822127 | [
"ISC"
] | null | null | null | yagocd/resources/property.py | 1and1/yagocd | 4c75336ae6f107c8723d37b15e52169151822127 | [
"ISC"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import csv
from six import StringIO
from yagocd.resources import BaseManager
from yagocd.util import RequireParamMixin, since
@since('14.3.0')
class PropertyManager(BaseManager, RequireParamMixin):
    """
    The properties API allows managing of job properties.

    `Official documentation. <https://api.go.cd/current/#properties>`_

    :versionadded: 14.3.0.

    This class implements dictionary like methods for similar use.
    """

    # URL template for a specific job's properties endpoint
    RESOURCE_PATH = '{base_api}/properties/{pipeline_name}/{pipeline_counter}/{stage_name}/{stage_counter}/{job_name}'
    # constructor/method parameters that fill RESOURCE_PATH placeholders
    PATH_PARAMETERS = ['pipeline_name', 'pipeline_counter', 'stage_name', 'stage_counter', 'job_name']

    def __init__(
        self,
        session,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Constructs instance of ``PropertyManager``.

        Parameters to the constructor and methods of the class could be duplicated. That is because of two use cases
        of this class:

        1. When the class being instantiated from :class:`yagocd.client.Client`, we don't know all the necessary
        parameters yet, but we need an instance to work with. So we skip parameters instantiation in constructor,
        but require them for each method.
        2. When the class being used from :class:`yagocd.resources.job.JobInstance` - in this case we already
        know all required parameters, so we can instantiate `PropertyManager` with them.

        :param session: session object from client.
        :type session: yagocd.session.Session.
        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        """
        super(PropertyManager, self).__init__(session)

        # properties endpoints live at the server root, not under /api
        self.base_api = self._session.base_api(api_path='')

        self._pipeline_name = pipeline_name
        self._pipeline_counter = pipeline_counter
        self._stage_name = stage_name
        self._stage_counter = stage_counter
        self._job_name = job_name

    def __len__(self):
        """Number of properties on the configured job."""
        return len(self.list())

    def __iter__(self):
        """
        Method for iterating over all properties.

        :return: iterator over property names.
        """
        return iter(self.list())

    def __getitem__(self, name):
        """
        Method for accessing to specific property in array-like manner by name.

        :param name: name of property to get.
        :return: value of the requested property.
        """
        return self.get(name=name)

    def __contains__(self, key):
        """True if the job defines a property with the given name."""
        return key in self.list()

    def keys(self):
        """Property names, dict-style."""
        return self.list().keys()

    def values(self):
        """Property values, dict-style."""
        return self.list().values()

    def items(self):
        """(name, value) pairs, dict-style."""
        return self.list().items()

    def list(
        self,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Lists all job properties.

        :versionadded: 14.3.0.

        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        :return: dictionary of properties.
        :rtype: dict[str, str]
        """
        # fall back to constructor-supplied values for any missing argument
        func_args = locals()
        parameters = {p: self._require_param(p, func_args) for p in self.PATH_PARAMETERS}

        response = self._session.get(
            path=self.RESOURCE_PATH.format(base_api=self.base_api, **parameters),
            headers={'Accept': 'application/json'},
        )

        # response body is CSV: first row names, second row values
        text = StringIO(response.text)
        parsed = list(csv.reader(text))
        properties = dict(zip(parsed[0], parsed[1]))
        return properties

    def get(
        self,
        name,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Gets a property value by its name.

        :info: You can use keyword `latest` as a pipeline counter or a stage counter.

        :versionadded: 14.3.0.

        :param name: name of property to get.
        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        :return: value of requested property, or None when it does not exist.
        """
        func_args = locals()
        parameters = {p: self._require_param(p, func_args) for p in self.PATH_PARAMETERS}

        response = self._session.get(
            path=self._session.urljoin(self.RESOURCE_PATH, name).format(base_api=self.base_api, **parameters),
            headers={'Accept': 'application/json'},
        )

        # CSV body: header row then a single value row; missing -> None
        text = StringIO(response.text)
        parsed = list(csv.reader(text))
        try:
            return parsed[1][0]
        except IndexError:
            return None

    def historical(self, pipeline_name=None, stage_name=None, job_name=None, limit_pipeline=None, limit_count=None):
        """
        Get historical properties.

        :info: `limitPipeline` and `limitCount` are optional parameters. The default value of
        `limitPipeline` is latest pipeline instance’s counter. The default value of `limitCount` is `100`.

        :versionadded: 14.3.0.

        :param pipeline_name: name of the pipeline.
        :param stage_name: name of the stage.
        :param job_name: name of the job.
        :param limit_pipeline: pipeline limit for returned properties.
        :param limit_count: count limit for returned properties.
        :return: list of dictionaries as historical values.
        """
        func_args = locals()
        parameters = {
            'pipelineName': self._require_param('pipeline_name', func_args),
            'stageName': self._require_param('stage_name', func_args),
            'jobName': self._require_param('job_name', func_args),
        }
        # only send the limits the caller actually supplied
        if limit_pipeline is not None:
            parameters['limitPipeline'] = limit_pipeline
        if limit_count is not None:
            parameters['limitCount'] = limit_count

        response = self._session.get(
            path='{base_api}/properties/search'.format(base_api=self.base_api),
            params=parameters,
            headers={'Accept': 'application/json'},
        )

        # CSV body with a header row; each subsequent row becomes a dict
        text = StringIO(response.text)
        result = list(csv.DictReader(text))
        return result

    def create(
        self,
        name,
        value,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Defines a property on a specific job instance.

        :versionadded: 14.3.0.

        :param name: name of property.
        :param value: value of property.
        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        :return: an acknowledgement that the property was created.
        """
        func_args = locals()
        parameters = {p: self._require_param(p, func_args) for p in self.PATH_PARAMETERS}

        response = self._session.post(
            path=self._session.urljoin(self.RESOURCE_PATH, name).format(base_api=self.base_api, **parameters),
            data={'value': value},
            headers={
                'Accept': 'application/json',
                'Confirm': 'true'
            },
        )

        return response.text
| 34.150183 | 118 | 0.627909 | 7,850 | 0.841823 | 0 | 0 | 7,867 | 0.843646 | 0 | 0 | 5,411 | 0.580268 |
3575d5d1e8e3f443d0861039be6502d8ce1a15c4 | 1,313 | py | Python | bb2cogs/tasks.py | Team-EG/j-bot | 2e160707d13cc4988f370713fc9f57c7cff3f5bb | [
"MIT"
] | 2 | 2020-07-07T01:15:15.000Z | 2021-08-15T19:49:32.000Z | bb2cogs/tasks.py | Team-EG/j-bot | 2e160707d13cc4988f370713fc9f57c7cff3f5bb | [
"MIT"
] | null | null | null | bb2cogs/tasks.py | Team-EG/j-bot | 2e160707d13cc4988f370713fc9f57c7cff3f5bb | [
"MIT"
] | 1 | 2020-04-08T04:23:10.000Z | 2020-04-08T04:23:10.000Z | import discord
import asyncio
import json
import asyncpg
from discord.ext import commands
from discord.ext import tasks
class Tasks(commands.Cog):
def __init__(self, client):
self.client = client
print(f'{__name__} 로드 완료!')
self.change_status.add_exception_type(asyncpg.PostgresConnectionError)
self.change_status.start()
def cog_unload(self):
self.change_status.cancel()
@tasks.loop()
async def change_status(self):
with open('botsetup.json', 'r') as f:
data = json.load(f)
prefix = data['default prefix']
await self.client.change_presence(status=discord.Status.online, activity=discord.Game(f'"{prefix}도움" 이라고 말해보세요!'))
await asyncio.sleep(5)
await self.client.change_presence(status=discord.Status.online, activity=discord.Game(f'{len(self.client.guilds)}개 서버에서 작동'))
await asyncio.sleep(5)
await self.client.change_presence(status=discord.Status.online, activity=discord.Game(f'유저 {len(list(self.client.get_all_members()))}명과 함께 '))
await asyncio.sleep(5)
@change_status.before_loop
async def before_change_status(self):
await self.client.wait_until_ready()
def setup(client):
client.add_cog(Tasks(client))
| 33.666667 | 151 | 0.672506 | 1,176 | 0.860278 | 0 | 0 | 842 | 0.615947 | 791 | 0.578639 | 225 | 0.164594 |
3575f69fbe1be7943d3fbe746ae637c360e21a03 | 1,103 | py | Python | controllers/api_error.py | elandcloud/python-api | fcdc0abe3e3273d5ec0aac6b9bdefbf00ca0f58b | [
"Apache-2.0"
] | null | null | null | controllers/api_error.py | elandcloud/python-api | fcdc0abe3e3273d5ec0aac6b9bdefbf00ca0f58b | [
"Apache-2.0"
] | 1 | 2021-03-31T19:27:16.000Z | 2021-03-31T19:27:16.000Z | controllers/api_error.py | elandcloud/python-api | fcdc0abe3e3273d5ec0aac6b9bdefbf00ca0f58b | [
"Apache-2.0"
] | 1 | 2020-01-02T02:33:39.000Z | 2020-01-02T02:33:39.000Z | from controllers.type_result import Error
def unknownError(err):
return Error(10001,"Unknown error",err)
def invalidParamError(field,condition,err):
return Error(10007, "Invalid field(%s: %s)"%(field, condition), err)
def parameterParsingError(err):
return Error(10008, "Parameter parsing error", err)
def missRequiredParamError(v):
return Error(10009,"'%s' is required parameter" % (v), None)
def notFoundError():
return Error(10010, "Resource is not found", None)
def notAuthorizedError():
return Error(10011, "Resource is not authorized", None)
def notAuthorizedActionError():
return Error(10012, "Action is not authorized", None)
def statusError(v):
return Error(10013, "'%s', Status not Allowed"% (v), None)
def notUpdatedError():
return Error(10014, "Resource is not updated", None)
def notDeletedError():
return Error(10015, "Resource is not deleted", None)
def notCreatedError():
return Error(10016, "Resource is not created", None)
def invalidFieldError(field):
return Error(10018, "Invalid fields [ %v ]" % field, None)
# print(statusError(111).code) | 26.902439 | 72 | 0.728921 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 322 | 0.291931 |
3576496a708959dfe8356700f2ca28d993c28dc4 | 3,357 | py | Python | Optimization/optimize.py | cty123/TriNet | 9c43ae4bd3bae7308289ba1e1980f693d692585b | [
"MIT"
] | 10 | 2020-02-12T20:54:10.000Z | 2022-03-17T03:18:54.000Z | Optimization/optimize.py | cty123/TriNet | 9c43ae4bd3bae7308289ba1e1980f693d692585b | [
"MIT"
] | 2 | 2020-04-24T22:18:41.000Z | 2021-12-27T05:22:09.000Z | Optimization/optimize.py | chuzcjoe/TriNet | d2689de7362f1845b5c399f1f9f66f42eb4df23f | [
"MIT"
] | 2 | 2020-02-12T20:54:37.000Z | 2020-03-26T05:17:33.000Z | import numpy as np
import math
from scipy.optimize import minimize
class Optimize():
    """Fit an orthonormal (left, back, front) vector triple to a possibly
    non-orthogonal input triple.

    The fit minimises the sum of squared angles between the candidate
    orthonormal triple (parameterised by Euler angles) and the measured
    vectors, using Nelder-Mead simplex optimisation.
    """
    def __init__(self):
        # Degree/radian conversion constants; not used internally,
        # presumably kept for callers.
        self.c_rad2deg = 180.0 / np.pi
        self.c_deg2rad = np.pi / 180.0
    def isRotationMatrix(self, R) :
        """Return True if R is numerically orthogonal (R^T R ~ I within 1e-6)."""
        Rt = np.transpose(R)
        shouldBeIdentity = np.dot(Rt, R)
        I = np.identity(3, dtype = R.dtype)
        n = np.linalg.norm(I - shouldBeIdentity)
        # print('n: ' + str(n))
        return n < 1e-6
    def Rot_Matrix_2_Euler_Angles(self, R):
        """Decompose rotation matrix R into Euler angles.

        Returns np.array([roll, pitch, yaw]).

        NOTE(review): Get_Init_Guess feeds this result directly into the
        optimiser whose Objective reads x as [rx, ry, rz] = [pitch, yaw,
        roll] (see Euler_Angles_2_Vectors) — the angle ordering looks
        inconsistent between the two conventions; confirm intended order.
        """
        assert(self.isRotationMatrix(R))
        pitch = -math.asin(R[1, 2])
        roll = -math.atan2(R[1, 0], R[1, 1])
        yaw = -math.atan2(R[0, 2], R[2, 2])
        return np.array([roll, pitch, yaw])
    def Get_Init_Guess(self, l_vec, b_vec, f_vec):
        """Build an initial Euler-angle guess for the optimiser.

        Orthonormalises the triple via cross products (b is kept, f and l
        are re-derived), assembles the rotation matrix mapping the unit
        axes onto the triple, and returns its Euler decomposition.
        Note: the f_vec argument is overwritten immediately, so only its
        name — not its value — is used.
        """
        f_vec = np.cross(b_vec, l_vec)
        l_vec = np.cross(f_vec, b_vec)
        l_norm = np.linalg.norm(l_vec)
        l_vec /= l_norm
        b_norm = np.linalg.norm(b_vec)
        b_vec /= b_norm
        f_norm = np.linalg.norm(f_vec)
        f_vec /= f_norm
        # Outer products column*row accumulate the rotation matrix whose
        # columns are the orthonormalised triple.
        l_vec = l_vec.reshape(3, 1)
        b_vec = b_vec.reshape(3, 1)
        f_vec = f_vec.reshape(3, 1)
        l = np.array([1, 0, 0]).reshape(1, 3)
        b = np.array([0, 1, 0]).reshape(1, 3)
        f = np.array([0, 0, 1]).reshape(1, 3)
        R = l_vec @ l + b_vec @ b + f_vec @ f
        assert (R.shape == (3, 3))
        roll, pitch, yaw = self.Rot_Matrix_2_Euler_Angles(R)
        return np.array([roll, pitch, yaw])
    def Euler_Angles_2_Vectors(self, rx, ry, rz):
        '''
        rx: pitch
        ry: yaw
        rz: roll

        Returns np.array([l_vec, b_vec, f_vec]): the images of the unit
        x/y/z axes under the rotation R_y(-ry) @ R_x(rx) @ R_z(-rz).
        '''
        # Yaw and roll are negated to match the sign convention used in
        # Rot_Matrix_2_Euler_Angles above.
        ry *= -1
        rz *= -1
        R_x = np.array([[1.0, 0.0, 0.0],
                        [0.0, np.cos(rx), -np.sin(rx)],
                        [0.0, np.sin(rx), np.cos(rx)]])
        R_y = np.array([[np.cos(ry), 0.0, np.sin(ry)],
                        [0.0, 1.0, 0.0],
                        [-np.sin(ry), 0.0, np.cos(ry)]])
        R_z = np.array([[np.cos(rz), -np.sin(rz), 0.0],
                        [np.sin(rz), np.cos(rz), 0.0],
                        [0.0, 0.0, 1.0]])
        R = R_y @ R_x @ R_z
        l_vec = R @ np.array([1, 0, 0])
        b_vec = R @ np.array([0, 1, 0])
        f_vec = R @ np.array([0, 0, 1])
        return np.array([l_vec, b_vec, f_vec])
    def Objective(self, x, l_vec, b_vec, f_vec):
        """Sum of squared angles between the triple implied by Euler state
        x = [rx, ry, rz] and the measured (l, b, f) vectors.

        Dot products are clipped to [-1, 1] so acos never sees values that
        drift outside its domain through rounding.
        """
        rx = x[0]
        ry = x[1]
        rz = x[2]
        l_hat, b_hat, f_hat = self.Euler_Angles_2_Vectors(rx, ry, rz)
        l_vec_dot = np.clip(l_hat[0] * l_vec[0] + l_hat[1] * l_vec[1] + l_hat[2] * l_vec[2], -1, 1)
        b_vec_dot = np.clip(b_hat[0] * b_vec[0] + b_hat[1] * b_vec[1] + b_hat[2] * b_vec[2], -1, 1)
        f_vec_dot = np.clip(f_hat[0] * f_vec[0] + f_hat[1] * f_vec[1] + f_hat[2] * f_vec[2], -1, 1)
        return math.acos(l_vec_dot) ** 2 + math.acos(b_vec_dot) ** 2 + math.acos(f_vec_dot) ** 2
    def Get_Ortho_Vectors(self, l_vec, b_vec, f_vec):
        """Return the orthonormal triple np.array([l, b, f]) closest (in the
        Objective sense) to the measured input vectors, found via
        Nelder-Mead starting from Get_Init_Guess."""
        x0 = self.Get_Init_Guess(l_vec, b_vec, f_vec)
        sol = minimize(self.Objective, x0, args=(l_vec, b_vec, f_vec), method='nelder-mead', options={'xatol': 1e-7, 'disp': False})
        pitch_rad, yaw_rad, roll_rad = sol.x
        v1, v2, v3 = self.Euler_Angles_2_Vectors(pitch_rad, yaw_rad, roll_rad)
        return np.array([v1, v2, v3])
3578741287ad3fdaadbb1724717ecc78eefadf0d | 9,133 | py | Python | freyr/utils/agents.py | gutogirardon/freyer-stocks-api | 38dd69066e41cabfb2977a151ebf063003252fb0 | [
"MIT"
] | 3 | 2021-04-08T03:08:52.000Z | 2021-07-08T01:09:21.000Z | freyr/utils/agents.py | webclinic017/freyr-stocks-api | 38dd69066e41cabfb2977a151ebf063003252fb0 | [
"MIT"
] | null | null | null | freyr/utils/agents.py | webclinic017/freyr-stocks-api | 38dd69066e41cabfb2977a151ebf063003252fb0 | [
"MIT"
] | 2 | 2021-09-24T13:35:32.000Z | 2021-09-26T18:44:52.000Z | """
Freyr - A Free stock API
"""
import random
import requests.utils
# Pool of real-world browser User-Agent strings (Firefox, Chrome, Safari,
# Edge and Opera on macOS, Windows and Linux) that default_user_agent()
# below samples from at random for each outgoing request.
header = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:84.0) Gecko/20100101 Firefox/84.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.16; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.2 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.1 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.1 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.2 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.1 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36 OPR/72.0.3815.320",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.193 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 Edg/86.0.622.69",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 OPR/72.0.3815.400",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.101 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36 Edg/87.0.664.41",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36 Edg/87.0.664.47",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36 Edg/87.0.664.52",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36 Edg/87.0.664.55",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 Edg/87.0.664.57",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 Edg/87.0.664.60",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0",
    "Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 OPR/72.0.3815.400",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.3; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.92 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0",
    "Mozilla/5.0 (X11; Linux x86_64; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (X11; Linux x86_64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0",
]
def default_user_agent():
    """
    Monkey-patch requests so every request uses a random user agent
    drawn from the header pool.
    :return:
    """
    def _pick_random_agent():
        return random.choice(header)

    requests.utils.default_user_agent = _pick_random_agent
    return requests.utils.default_user_agent
| 96.136842 | 140 | 0.698894 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8,472 | 0.927625 |
3578b0766410e85213ce6ed8b351520882df04bf | 8,581 | py | Python | mixcoatl/resource_utils.py | zomGreg/mixcoatl | dd8d7e206682955b251d7f858fffee56b11df8c6 | [
"Apache-2.0"
] | null | null | null | mixcoatl/resource_utils.py | zomGreg/mixcoatl | dd8d7e206682955b251d7f858fffee56b11df8c6 | [
"Apache-2.0"
] | null | null | null | mixcoatl/resource_utils.py | zomGreg/mixcoatl | dd8d7e206682955b251d7f858fffee56b11df8c6 | [
"Apache-2.0"
] | null | null | null | from mixcoatl.admin.billing_code import BillingCode
from mixcoatl.geography.region import Region
from mixcoatl.admin.group import Group
from mixcoatl.admin.user import User
def get_servers(servers, **kwargs):
    """ Returns a list of servers

    Filters are applied sequentially, so combining keyword arguments acts
    as a logical AND.

    Arguments:

    :param servers: a list of servers that needs to be filtered.

    Keyword arguments:

    :param account_user_id: owning user's account user ID.
    :param vm_login_id: owning user's VM login ID.
    :param email: owning user's email address.
    :param group_id: owning group's group ID.
    :param budget_id: budget ID.

    :returns: a list of filtered servers.
    :rtype: list
    """
    filtered_servers = servers

    account_user_id = kwargs.get('account_user_id')
    if account_user_id is not None:
        filtered_servers = [server for server in filtered_servers
                            if hasattr(server, 'owning_user') and
                            'account_user_id' in server.owning_user and
                            server.owning_user['account_user_id'] == account_user_id]

    vm_login_id = kwargs.get('vm_login_id')
    if vm_login_id is not None:
        filtered_servers = [server for server in filtered_servers
                            if hasattr(server, 'owning_user') and
                            'vm_login_id' in server.owning_user and
                            server.owning_user['vm_login_id'] == vm_login_id]

    email = kwargs.get('email')
    if email is not None:
        # Fix: was owning_user.has_key('email'), which is Python-2-only
        # (dict.has_key was removed in Python 3); use the 'in' operator
        # like the other branches.
        filtered_servers = [server for server in filtered_servers
                            if hasattr(server, 'owning_user') and
                            'email' in server.owning_user and
                            server.owning_user['email'] == email]

    group_id = kwargs.get('group_id')
    if group_id is not None:
        filtered_servers = [server for server in filtered_servers
                            if hasattr(server, 'owning_groups')
                            for group in server.owning_groups
                            if group['group_id'] == int(group_id)]

    budget_id = kwargs.get('budget_id')
    if budget_id is not None:
        filtered_servers = [server for server in filtered_servers
                            if hasattr(server, 'budget') and
                            server.budget == int(budget_id)]

    return filtered_servers
def get_snapshots(snapshots, **kwargs):
    """ Returns a list of snapshots

    Arguments:

    :param snapshots: a list of snapshots that needs to be filtered.

    Keyword arguments:

    :param group_id: owning group's group ID.
    :param budget_id: budget ID.

    :returns: a list of filtered snapshots.
    :rtype: list
    """
    selection = snapshots

    group_id = kwargs.get('group_id')
    if group_id is not None:
        selection = [snap for snap in selection
                     if hasattr(snap, 'owning_groups')
                     for grp in snap.owning_groups
                     if grp['group_id'] == int(group_id)]

    budget_id = kwargs.get('budget_id')
    if budget_id is not None:
        selection = [snap for snap in selection
                     if hasattr(snap, 'budget') and snap.budget == int(budget_id)]

    return selection
def get_volumes(volumes, **kwargs):
    """ Returns a list of volumes

    Arguments:

    :param volumes: a list of volumes that needs to be filtered.

    Keyword arguments:

    :param vm_login_id: owning user's VM login ID.
    :param email: owning user's email address.
    :param group_id: owning group's group ID.
    :param budget_id: budget ID.
    :param size: minimum size of the volume.

    :returns: a list of filtered volumes.
    :rtype: list
    """
    selection = volumes

    vm_login_id = kwargs.get('vm_login_id')
    if vm_login_id is not None:
        selection = [vol for vol in selection
                     if hasattr(vol, 'owning_user') and
                     'vm_login_id' in vol.owning_user and
                     vol.owning_user['vm_login_id'] == vm_login_id]

    email = kwargs.get('email')
    if email is not None:
        selection = [vol for vol in selection
                     if hasattr(vol, 'owning_user') and
                     'email' in vol.owning_user and
                     vol.owning_user['email'] == email]

    group_id = kwargs.get('group_id')
    if group_id is not None:
        selection = [vol for vol in selection
                     if hasattr(vol, 'owning_groups')
                     for grp in vol.owning_groups
                     if grp['group_id'] == int(group_id)]

    budget_id = kwargs.get('budget_id')
    if budget_id is not None:
        selection = [vol for vol in selection
                     if hasattr(vol, 'budget') and vol.budget == int(budget_id)]

    size = kwargs.get('size')
    if size is not None:
        selection = [vol for vol in selection if vol.size_in_gb >= int(size)]

    return selection
def get_user(users, **kwargs):
    """ Returns a user that matches with arguments.

    vm_login_id takes precedence over email when both are supplied; when
    no filter matches, the original input is returned unchanged.

    Arguments:

    :param users: a list of users that needs to be filtered.

    Keyword arguments:

    :param vm_login_id: owning user's VM login ID.
    :param email: owning user's email address.

    :returns: the matching user (or the unfiltered input when nothing matches).
    """
    match = users

    vm_login_id = kwargs.get('vm_login_id')
    email = kwargs.get('email')
    if vm_login_id is not None:
        for candidate in users:
            if getattr(candidate, 'vm_login_id', None) == vm_login_id:
                match = candidate
    elif email is not None:
        for candidate in users:
            if getattr(candidate, 'email', None) == email:
                match = candidate

    return match
def get_account_user_id(**kwargs):
    """ Returns account_user_id from arguments

    Keyword arguments:

    :param vm_login_id: user's VM login ID like p100
    :param email: user's E-Mail address

    :returns: account_user_id
    :rtype: int
    :raises ValueError: if neither vm_login_id nor email is supplied.
    """
    # Fix: the original fell through with an UnboundLocalError on
    # selected_user when no recognised keyword was given.
    if 'vm_login_id' in kwargs:
        selected_user = get_user(User.all(), vm_login_id=kwargs['vm_login_id'])
    elif 'email' in kwargs:
        selected_user = get_user(User.all(), email=kwargs['email'])
    else:
        raise ValueError("get_account_user_id requires 'vm_login_id' or 'email'")
    return selected_user.account_user_id
def get_vm_login_id(**kwargs):
    """ Returns vm_login_id from arguments

    Keyword arguments:

    :param email: user's E-Mail address

    :returns: vm_login_id
    :rtype: str
    :raises ValueError: if the email keyword argument is missing.
    """
    # Fix: the original raised UnboundLocalError on selected_user when the
    # 'email' keyword was absent; fail with a clear message instead.
    if 'email' not in kwargs:
        raise ValueError("get_vm_login_id requires 'email'")
    selected_user = get_user(User.all(), email=kwargs['email'])
    return selected_user.vm_login_id
def get_budget_id(budget_name):
    """ Returns budget_id from arguments

    Arguments:

    :param budget_name: budget name

    :returns: budget_id
    :rtype: int
    :raises ValueError: if no budget with that name exists.
    """
    selected_budget = None
    for budget in BillingCode.all(detail='basic'):
        if hasattr(budget, 'name') and budget.name == budget_name:
            # Keep scanning: preserves the original last-match semantics.
            selected_budget = budget
    if selected_budget is None:
        # Fix: the original raised UnboundLocalError here when nothing matched.
        raise ValueError("no budget named '%s'" % budget_name)
    return selected_budget.billing_code_id
def get_group_id(group_name):
    """ Returns a group ID from group name

    Arguments:

    :param group_name: name of the group

    :returns: group_id
    :rtype: int
    :raises ValueError: if no group with that name exists.
    """
    selected_group = None
    for group in Group.all(detail='basic'):
        if hasattr(group, 'name') and group.name == group_name:
            # Keep scanning: preserves the original last-match semantics.
            selected_group = group
    if selected_group is None:
        # Fix: the original raised UnboundLocalError here when nothing matched.
        raise ValueError("no group named '%s'" % group_name)
    return selected_group.group_id
def get_region_id(region_pid):
    """ Returns a region ID from provider_id such as us-east-1.

    Arguments:

    :param region_pid: provider ID of the region such as us-east-1

    :returns: region_id such as 19343
    :rtype: int
    :raises ValueError: if no region with that provider ID exists.
    """
    selected_region = None
    for region in Region.all(detail='basic'):
        if hasattr(region, 'provider_id') and region.provider_id == region_pid:
            # Keep scanning: preserves the original last-match semantics.
            selected_region = region
    if selected_region is None:
        # Fix: the original raised UnboundLocalError here when nothing matched.
        raise ValueError("no region with provider id '%s'" % region_pid)
    return selected_region.region_id
| 35.754167 | 110 | 0.646778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,142 | 0.366158 |
3579494bf1e826f12f91e8fb3c0875d2c71c341e | 8,443 | py | Python | PWGJE/EMCALJetTasks/Tracks/analysis/old/ComparePeriodsTriggerToMB.py | maroozm/AliPhysics | 22ec256928cfdf8f800e05bfc1a6e124d90b6eaf | [
"BSD-3-Clause"
] | 114 | 2017-03-03T09:12:23.000Z | 2022-03-03T20:29:42.000Z | PWGJE/EMCALJetTasks/Tracks/analysis/old/ComparePeriodsTriggerToMB.py | maroozm/AliPhysics | 22ec256928cfdf8f800e05bfc1a6e124d90b6eaf | [
"BSD-3-Clause"
] | 19,637 | 2017-01-16T12:34:41.000Z | 2022-03-31T22:02:40.000Z | PWGJE/EMCALJetTasks/Tracks/analysis/old/ComparePeriodsTriggerToMB.py | maroozm/AliPhysics | 22ec256928cfdf8f800e05bfc1a6e124d90b6eaf | [
"BSD-3-Clause"
] | 1,021 | 2016-07-14T22:41:16.000Z | 2022-03-31T05:15:51.000Z | #! /usr/bin/env python
from ROOT import TCanvas, TGraphErrors, TLegend, TPaveText
from ROOT import kBlack, kBlue, kRed
from Helper import Frame, ReadHistList
from Graphics import Style
from SpectrumContainer import DataContainer
from copy import deepcopy
class PeriodComparisonPlot:
    """Two-pad ROOT canvas comparing triggered spectra from several data
    periods against a common min.-bias reference: raw spectra on the left
    pad, triggered/min-bias ratios on the right pad."""
    def __init__(self):
        self.__comparisons = []   # list of TriggerComparison entries
        self.__canvas = None      # TCanvas, created lazily in Draw()
        self.__frames = {}        # axis frames, keyed "Spectra"/"Ratios"
        self.__legend = None      # shared TLegend, created in Draw()
    def AddComparison(self, comp):
        """Register a TriggerComparison to be included in the plot."""
        self.__comparisons.append(comp)
    def SetPlotRange(self, min ,max):
        """Propagate the pt plot range to every registered comparison."""
        for comp in self.__comparisons:
            comp.SetPlotRange(min, max)
    def Draw(self):
        """Build the canvas: spectra (log-log) on pad 1, ratios on pad 2.

        Assumes at least one comparison was added; the trigger name of the
        first comparison labels the canvas and axes.
        NOTE(review): sorted(self.__comparisons) relies on
        TriggerComparison.__cmp__, which only orders under Python 2.
        """
        self.__canvas = TCanvas("comparison%s" %(self.__comparisons[0].GetTriggerName()), "Comparison of different periods for trigger %s" %(self.__comparisons[0].GetTriggerName()), 1000, 600)
        self.__canvas.Divide(2,1)
        self.__legend = TLegend(0.15, 0.15, 0.45, 0.45)
        self.__legend.SetBorderSize(0)
        self.__legend.SetFillStyle(0)
        self.__legend.SetTextFont(42)
        specpad = self.__canvas.cd(1)
        specpad.SetGrid(False,False)
        specpad.SetLogx(True)
        specpad.SetLogy(True)
        self.__frames["Spectra"] = Frame("axisSpec%s" %(self.__comparisons[0].GetTriggerName()), 0, 100, 1e-10, 100)
        self.__frames["Spectra"].SetXtitle("p_{t} (GeV/c)")
        self.__frames["Spectra"].SetYtitle("1/N_{event} 1/(#Delta p_{t}) dN/dp_{t} ((GeV/c)^{-2})")
        self.__frames["Spectra"].Draw()
        # Min. bias reference is drawn once, from the first comparison.
        self.__comparisons[0].DrawMinBiasSpectrum()
        self.__comparisons[0].AddMBtoLegend(self.__legend)
        for comp in sorted(self.__comparisons):
            comp.DrawTriggeredSpectrum()
            comp.AddTriggeredSpectrumToLegend(self.__legend)
        self.__legend.Draw()
        # Keep a reference so ROOT does not garbage-collect the label.
        self.__label = self.__comparisons[0].CreateLabel(0.5, 0.75, 0.89, 0.85)
        self.__label.Draw()
        rpad = self.__canvas.cd(2)
        rpad.SetGrid(False, False)
        self.__frames["Ratios"] = Frame("axisRatio%s" %(self.__comparisons[0].GetTriggerName()), 0, 100, 0, 2000)
        self.__frames["Ratios"].SetXtitle("p_{t} (GeV/c)")
        self.__frames["Ratios"].SetYtitle("%s / Min. Bias" %(self.__comparisons[0].GetTriggerName()))
        self.__frames["Ratios"].Draw()
        for comp in sorted(self.__comparisons):
            comp.DrawRatioTriggeredMinBias()
        self.__canvas.cd()
    def SaveAs(self, filenamebase):
        """
        Save plot as image file

        Writes one file per format (eps, pdf, jpeg, gif, png); Draw() must
        have been called first so the canvas exists.
        """
        types = ["eps", "pdf", "jpeg", "gif", "png"]
        for t in types:
            self.__canvas.SaveAs("%s.%s" %(filenamebase, t))
class TriggerComparison:
    """One dataset's triggered spectrum together with the min.-bias
    reference and their ratio; drawable pieces for PeriodComparisonPlot."""
    def __init__(self, trgspec, mbspec, triggername, dataname):
        self.__triggeredspectrum = trgspec
        self.__minbiasspectrum = mbspec
        # Ratio triggered/min-bias, drawn with the triggered spectrum's style.
        self.__ratiospectra = self.__triggeredspectrum.MakeRatio(self.__minbiasspectrum)
        self.__ratiospectra.SetStyle(self.__triggeredspectrum.GetStyle())
        self.__triggername = triggername
        self.__dataname = dataname
    def __cmp__(self, other):
        """Order comparisons alphabetically by dataset name.

        NOTE(review): __cmp__ is Python-2-only; under Python 3,
        sorted() over TriggerComparison objects would need __lt__/__eq__.
        """
        othername = other.GetDataName()
        if self.__dataname == othername:
            return 0
        elif self.__dataname < othername:
            return -1
        else:
            return 1
    def SetPlotRange(self, min, max):
        """Apply the pt plot range to spectrum, reference and ratio."""
        self.__triggeredspectrum.SetPlotRange(min, max)
        self.__minbiasspectrum.SetPlotRange(min, max)
        self.__ratiospectra.SetPlotRange(min, max)
    def GetTriggerName(self):
        """Return the trigger class name (e.g. for canvas/axis labels)."""
        return self.__triggername
    def GetDataName(self):
        """Return the dataset/period name used for legends and ordering."""
        return self.__dataname
    def DrawTriggeredSpectrum(self):
        """Draw the triggered spectrum on the current pad."""
        self.__triggeredspectrum.Draw()
    def DrawMinBiasSpectrum(self):
        """Draw the min.-bias reference spectrum on the current pad."""
        self.__minbiasspectrum.Draw()
    def DrawRatioTriggeredMinBias(self):
        """Draw the triggered/min-bias ratio on the current pad."""
        self.__ratiospectra.Draw()
    def AddMBtoLegend(self, leg):
        """Add the min.-bias reference to *leg* under the label "MinBias"."""
        self.__minbiasspectrum.AddToLegend(leg, "MinBias")
    def AddTriggeredSpectrumToLegend(self, leg):
        """Add the triggered spectrum to *leg* under the dataset name."""
        self.__triggeredspectrum.AddToLegend(leg, self.__dataname)
    def CreateLabel(self, xmin, ymin, xmax, ymax):
        """Create a borderless TPaveText naming the trigger, at the given
        NDC coordinates; caller is responsible for drawing/keeping it."""
        label = TPaveText(xmin, ymin, xmax, ymax, "NDC")
        label.SetBorderSize(0)
        label.SetFillStyle(0)
        label.SetTextFont(42)
        label.AddText("Trigger: %s" %(self.__triggername))
        return label
class GraphicsObject:
    """Base wrapper around a histogram that converts it to a TGraphErrors
    on demand, applying an optional pt plot range, a draw style, and
    legend handling."""
    def __init__(self, data, name):
        self._data = data                              # underlying histogram
        self._graphics = None                          # lazily built TGraphErrors
        self._style = Style(kBlack, 20)                # default draw style
        self._plotrange = {"Min":None, "Max":None}     # x-range limits (None = open)
        self._name = name
    def SetPlotRange(self, min, max):
        """Restrict the x-range converted into the graph.

        Fix: the original did self._plotrange[min] = min (using the
        *values* as dict keys), so Draw() and MakeRatio(), which read the
        "Min"/"Max" keys, never saw the requested range.
        """
        self._plotrange["Min"] = min
        self._plotrange["Max"] = max
    def SetStyle(self, style):
        """Set the marker/line style used when drawing."""
        self._style = style
    def SetName(self, name):
        """Set the name used as the default legend label."""
        self._name = name
    def GetData(self):
        """Return the underlying histogram."""
        return self._data
    def GetGraphics(self):
        """Return the TGraphErrors (None until Draw() has been called)."""
        return self._graphics
    def GetStyle(self):
        """Return the current draw style."""
        return self._style
    def Draw(self):
        """Draw the data as a TGraphErrors on the current pad.

        The graph is built once (bins outside the plot range are skipped)
        and restyled on every call.
        """
        if not self._graphics:
            self._graphics = TGraphErrors()
            np = 0
            for bin in range(1, self._data.GetXaxis().GetNbins()+1):
                if self._plotrange["Min"] and self._data.GetXaxis().GetBinLowEdge(bin) < self._plotrange["Min"]:
                    continue
                if self._plotrange["Max"] and self._data.GetXaxis().GetBinUpEdge(bin) > self._plotrange["Max"]:
                    break
                self._graphics.SetPoint(np, self._data.GetXaxis().GetBinCenter(bin), self._data.GetBinContent(bin))
                self._graphics.SetPointError(np, self._data.GetXaxis().GetBinWidth(bin)/2., self._data.GetBinError(bin))
                np = np + 1
        self._graphics.SetMarkerColor(self._style.GetColor())
        self._graphics.SetLineColor(self._style.GetColor())
        self._graphics.SetMarkerStyle(self._style.GetMarker())
        self._graphics.Draw("epsame")
    def AddToLegend(self, legend, title = None):
        """Add the drawn graph to *legend* (no-op before Draw());
        *title* overrides the object's name as the label."""
        if self._graphics:
            tit = self._name
            if title:
                tit = title
            legend.AddEntry(self._graphics, tit, "lep")
class Spectrum(GraphicsObject):
    """Graphics object for a normalised pt spectrum."""

    def __init__(self, data, name):
        GraphicsObject.__init__(self, data, name)

    def MakeRatio(self, denominator):
        """Divide this spectrum by *denominator* (another GraphicsObject)
        and return the result wrapped in a Ratio, inheriting this
        spectrum's plot range when one was set."""
        numerator = deepcopy(self._data)
        numerator.Divide(denominator.GetData())
        ratio = Ratio(numerator)
        pmin = self._plotrange["Min"]
        pmax = self._plotrange["Max"]
        if pmin or pmax:
            ratio.SetPlotRange(pmin, pmax)
        return ratio
class Ratio(GraphicsObject):
    """Graphics object holding a triggered/min-bias ratio histogram."""

    def __init__(self, data, name=None):
        GraphicsObject.__init__(self, data, name)
def ReadSpectra(filename, trigger):
    """
    Read the event and track histograms for one trigger class from the
    PtEMCalTriggerTask list in the given root file.
    Returns a DataContainer holding both histograms.
    """
    histlist = ReadHistList(filename, "PtEMCalTriggerTask")
    eventhist = histlist.FindObject("hEventHist%s" %(trigger))
    trackhist = histlist.FindObject("hTrackHist%s" %(trigger))
    return DataContainer(eventHist = eventhist, trackHist = trackhist)
def MakeNormalisedSpectrum(inputdata, name):
    """
    Normalise spectrum by the number of events and by the bin width

    Applies the standard event selection (|z_vtx| < 10 cm, pile-up
    rejection, track cuts 1) before projecting onto pt.
    """
    inputdata.SetVertexRange(-10., 10.)
    inputdata.SetPileupRejection(True)
    inputdata.SelectTrackCuts(1)
    projectionname = "ptSpectrum%s" %(name)
    return inputdata.MakeProjection(0, projectionname, "p_{t} (GeV/c)", "1/N_{event} 1/(#Delta p_{t}) dN/dp_{t} ((GeV/c)^{-2})")
def ComparePeriods(filea, fileb, filemb, namea, nameb, trigger):
    """
    Compare the triggered spectra of two data periods (filea/fileb) to a
    common min.-bias reference (filemb) and return the assembled, drawn
    PeriodComparisonPlot.

    :param filea: root file of the first period
    :param fileb: root file of the second period
    :param filemb: root file providing the min. bias reference
    :param namea: legend label for the first period
    :param nameb: legend label for the second period
    :param trigger: trigger class name (e.g. EMCJHigh)
    """
    # (removed unused local 'triggers = {}')
    dataA = ReadSpectra(filea, trigger)
    dataB = ReadSpectra(fileb, trigger)
    dataMB = ReadSpectra(filemb, "MinBias")
    specA = Spectrum(MakeNormalisedSpectrum(dataA, namea), namea)
    specA.SetStyle(Style(kBlue, 24))
    specB = Spectrum(MakeNormalisedSpectrum(dataB, nameb), nameb)
    specB.SetStyle(Style(kRed, 25))
    specMB = Spectrum(MakeNormalisedSpectrum(dataMB, "MinBias"), "MinBias")
    specMB.SetStyle(Style(kBlack, 25))
    plot = PeriodComparisonPlot()
    plot.AddComparison(TriggerComparison(specA, specMB, trigger, namea))
    plot.AddComparison(TriggerComparison(specB, specMB, trigger, nameb))
    plot.SetPlotRange(2., 100.)
    plot.Draw()
    return plot
| 36.549784 | 192 | 0.627384 | 6,604 | 0.782186 | 0 | 0 | 0 | 0 | 0 | 0 | 825 | 0.097714 |