text stringlengths 8 6.05M |
|---|
# coding: utf-8
## Enthought Python Worksheet
# In[7]:
get_ipython().magic(u'matplotlib inline')
from bs4 import BeautifulSoup
import matplotlib.pyplot as plt
import numpy as np

# Sample a sine wave on [0, 10] and overlay a lightly noised copy of it.
x = np.linspace(0, 10, 30)
y = np.sin(x)
mu, sigma = 0, 1
z = y + np.random.normal(mu, sigma, 30) * 0.1
plt.clf()
plt.grid()
plt.plot(x, y, 'ro-')
plt.plot(x, z, 'b-')
plt.legend(('Sine wave', 'Noisy wave'), loc='best', fancybox=True, shadow=True)
plt.xlabel('X axis')
plt.ylabel('Y axis')

# In[1]:
# Demonstrate dict views and iteration. The original mixed Python-2
# `print x` statements with print() calls; parenthesized single-argument
# print() behaves the same under both interpreters.
elmt = dict(day="Friday", month=2, year=3012)
print(elmt.keys())
print(elmt.items())
print(elmt.values())
for i in elmt:
    print(elmt[i])

# In[ ]:
|
# -*- coding: utf-8 -*-
class Interval:
    """Closed interval [start, end] used by Solution.insert."""

    def __init__(self, s=0, e=0):
        self.start = s
        self.end = e

    def __eq__(self, other):
        # Two intervals are equal when both endpoints match.
        return self.start == other.start and self.end == other.end

    def __repr__(self):
        # Readable representation so failed assertions show the endpoints
        # instead of bare object addresses.
        return "Interval({0}, {1})".format(self.start, self.end)
class Solution:
    def insert(self, intervals, newInterval):
        """Insert newInterval into a sorted, disjoint interval list, merging overlaps."""
        merged = []
        remaining = list(intervals)
        # Keep every interval that ends strictly before the new one starts.
        while remaining and remaining[0].end < newInterval.start:
            merged.append(remaining.pop(0))
        # Absorb every interval that overlaps the new one into a single span.
        while remaining and remaining[0].start <= newInterval.end:
            overlapping = remaining.pop(0)
            newInterval = Interval(
                min(newInterval.start, overlapping.start),
                max(newInterval.end, overlapping.end),
            )
        merged.append(newInterval)
        # Whatever remains starts after the merged interval ends.
        merged.extend(remaining)
        return merged
if __name__ == "__main__":
    solution = Solution()
    # [2,5] overlaps [1,3]; the two merge into [1,5].
    result = solution.insert([Interval(1, 3), Interval(6, 9)], Interval(2, 5))
    assert [Interval(1, 5), Interval(6, 9)] == result
    # [4,8] bridges [3,5], [6,7] and [8,10] into a single [3,10].
    existing = [
        Interval(1, 2),
        Interval(3, 5),
        Interval(6, 7),
        Interval(8, 10),
        Interval(12, 16),
    ]
    result = solution.insert(existing, Interval(4, 8))
    assert [Interval(1, 2), Interval(3, 10), Interval(12, 16)] == result
|
""" Playstore-Analysis Project """
import pandas as pd
import pygal as pg
from pygal.style import Style, CleanStyle, LightColorizedStyle, BlueStyle, RedBlueStyle
def main():
    """ main function for data and making charts """
    """
    Pulling Data from CSV file
    """
    data = pd.read_csv("googleplaystore.csv", encoding="ISO-8859-1")  # read the csv file
    name_data = data["App"]  # application names
    rating_data = data["Rating"]  # rating of each application
    category_data = data["Category"]  # category of each application
    review_data = data["Reviews"]  # review count of each application
    size_data = data["Size"]  # size of each application
    install_data = data["Installs"]  # install count of each application
    """
    Editing data into readable data
    """
    size_data = [i.replace("M", "").replace("Varies with device", "0").replace("+", "").replace(',', '') for i in size_data]  # strip units/placeholders so every Size value is numeric text
    for i in range(len(size_data)):  # normalise all sizes to the same unit (MB)
        if 'k' in size_data[i]:
            # kB entries: drop the suffix and convert kB -> MB.
            size_data[i] = size_data[i].replace('k', "")
            size_data[i] = int(float(size_data[i]))
            size_data[i] /= 1024
    size_data = [float(i) for i in size_data]
    install_data = [i.replace("+", "").replace(",", "") for i in install_data]  # make install counts purely numeric
    install_data = [int(i) for i in install_data]
    """
    Using function and returning data in to variables
    """
    category_review_data = category_review(category_data, review_data)
    category_rating_data = category_rating(category_data, rating_data)
    name_install_data = name_install(name_data, install_data)
    category_size_data = category_size(category_data, size_data)
    """
    Making an Average reviews of each Categories chart
    """
    category_review_chart = pg.Bar(style=CleanStyle)
    category_review_chart.title = "Average Reviews of each Categories"
    for i in category_review_data:
        category_review_chart.add(i, category_review_data[i])
    category_review_chart.render_to_file("category_review_chart.svg")
    """
    Making an Average ratings of each Categories chart
    """
    category_rating_chart = pg.HorizontalBar(style=LightColorizedStyle)
    category_rating_chart.title = "Average Ratings of each Categories"
    for i in category_rating_data:
        category_rating_chart.add(i, category_rating_data[i])
    category_rating_chart.render_to_file("category_rating_chart.svg")
    """
    Making an Average sizes of each Categories chart
    """
    category_size_chart = pg.Bar(style=BlueStyle)
    category_size_chart.title = "Average Sizes of Application of each Categories(MBs)"
    for i in category_size_data.keys():
        category_size_chart.add(i, category_size_data[i])
    category_size_chart.render_to_file("category_size_chart.svg")
    """
    Making a Most install application chart
    """
    name_install_chart = pg.SolidGauge(inner_radius=0.7, style=RedBlueStyle)
    name_install_chart.title = "The Most Installs Applications on Google Playstore"
    for i in name_install_data:
        name_install_chart.add(i, [{"value": name_install_data[i], 'max_value': 1000}])
    # NOTE(review): '100%'.format(x) ignores x (there is no replacement field),
    # so every gauge label renders as the literal '100%'. Presumably
    # '{}%'.format(x) was intended — confirm before changing.
    percent_formatter = lambda x: '100%'.format(x)
    name_install_chart.legend_at_bottom = True
    name_install_chart.value_formatter = percent_formatter
    name_install_chart.render_to_file("name_install_chart.svg")
def category_rating(category, rating):
    """Average the ratings of each category.

    Args:
        category: sequence of category labels, parallel to ``rating``.
        rating: sequence of numeric ratings.

    Returns:
        dict mapping each category label to its mean rating.
    """
    totals = {}
    counts = {}
    # Accumulate a running sum and count per category (zip replaces the
    # original hand-maintained index; the dead index_2 counter is removed).
    for cat, value in zip(category, rating):
        if cat not in totals:
            totals[cat] = value
            counts[cat] = 1
        else:
            totals[cat] += value
            counts[cat] += 1
    # Convert the per-category sums into means in place.
    for cat in counts:
        totals[cat] /= counts[cat]
    return totals
def category_review(category, review):
    """Average the review counts of each category.

    Args:
        category: sequence of category labels, parallel to ``review``.
        review: sequence of numeric review counts.

    Returns:
        dict mapping each category label to its mean review count.
    """
    # Sum and count directly per category instead of building an
    # intermediate list of pairs and lists of values as before.
    sums = {}
    counts = {}
    for cat, value in zip(category, review):
        sums[cat] = sums.get(cat, 0) + value
        counts[cat] = counts.get(cat, 0) + 1
    return {cat: sums[cat] / counts[cat] for cat in sums}
def category_size(category, size):
    """Average the application sizes (MB) of each category.

    Args:
        category: sequence of category labels, parallel to ``size``.
        size: sequence of numeric sizes in MB.

    Returns:
        dict mapping each category label to its mean size.
    """
    totals = {}
    counts = {}
    # Accumulate a running sum and count per category (zip replaces the
    # original hand-maintained index; the dead index_2 counter is removed).
    for cat, value in zip(category, size):
        if cat not in totals:
            totals[cat] = value
            counts[cat] = 1
        else:
            totals[cat] += value
            counts[cat] += 1
    # Convert the per-category sums into means in place.
    for cat in counts:
        totals[cat] /= counts[cat]
    return totals
def name_install(name, install, threshold=500000000):
    """Map application names to install counts above ``threshold``.

    Args:
        name: sequence of application names, parallel to ``install``.
        install: sequence of integer install counts.
        threshold: minimum (exclusive) install count to be included;
            defaults to the original hard-coded 500,000,000.

    Returns:
        dict of {application name: install count} for apps above the threshold.
    """
    return {app: count for app, count in zip(name, install) if count > threshold}
main()
|
import re
def displaymatch(match):
    """Format a regex match object for display; returns None when there is no match."""
    if match is None:
        return None
    matched_text = match.group()
    captured_groups = match.groups()
    return '<Match: {0!r}, groups={1!r}>'.format(matched_text, captured_groups)
data = "";
with open ("C:/applog/logigng.log", "r+") as myfile:
data=myfile.read().replace('\n', '')
#print(data)
#prog = re.compile("^select*$")
#result = prog.match(data)
#for var in result:
# print(var)
print("Lenght of the string ",len(data))
#print(displaymatch(re.search("^([\nA-Za-z0-9_\.\s,=?>()']*select[\nA-Za-z0-9_\.\s,=?>()']*group by[\nA-Za-z\s_\.a-z,0-9]*)",data,re.MULTILINE)))
searches = re.findall(r"(/[\s]*select.+?group by)",data,re.DOTALL)
#match=re.search("select[\n\S\s]*group",data,re.MULTILINE)
count =1
for selected in searches:
fo = open("C:/applog/foo"+str(count)+".txt", "w+")
fo.write(selected.strip())
fo.close()
#fo.write(match.group())
#fo.write("\n");
#with open("C:/applog/logigng.log") as f:
# print re.findall("^(select[\nA-Za-z0-9_\.\s,=?>\(\)\']*)$",f.read(),re.MULTILINE)
|
#!/usr/bin/env python
####
# Script to make PostgreSQL constraints deferrable to avoid foreign key issues
####
import re
import yaml
from sqlalchemy import create_engine
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.ext.declarative.api import DeclarativeMeta
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.orm.relationships import RelationshipProperty
class Database(object):
    """Thin wrapper around an SQLAlchemy engine for the target PostgreSQL DB."""

    def __init__(self, config):
        """Build the connection URL from ``config`` and reflect the schema.

        Expected config keys: type, user, pass, host, database and,
        optionally, schema.
        """
        self.url = "{0}://{1}:{2}@{3}/{4}".format(config['type'],
                                                  config['user'],
                                                  config['pass'],
                                                  config['host'],
                                                  config['database'])
        # AUTOCOMMIT so each DDL statement takes effect immediately.
        self.engine = create_engine(self.url, isolation_level='AUTOCOMMIT')
        self.automap = automap_base()
        self.automap.prepare(self.engine, schema=config.get('schema'), reflect=True)

    def execute(self, sql):
        """Echo and run a raw SQL statement."""
        # Parenthesized print keeps this valid under both Python 2 and 3
        # (the original Python-2-only `print sql` statement broke on 3).
        print(sql)
        self.engine.execute(sql)

    def truncate_migrate_table(self):
        """Reset the migration bookkeeping table."""
        self.execute("""TRUNCATE TABLE "migrate_version";""")

    def make_constraints_deferrable(self):
        """Mark every reflected foreign-key constraint as DEFERRABLE."""
        for table_name, table in self.automap.classes.items():
            for prop_name, prop in ((name, getattr(table, name)) for name in dir(table)):
                # Only column attributes (not relationships) carry foreign keys.
                if isinstance(prop, InstrumentedAttribute) and not isinstance(prop.prop, RelationshipProperty):
                    for fk in prop.property.columns[0].foreign_keys:
                        fk_name = fk.constraint.name
                        self.execute("""ALTER TABLE "{0}" ALTER CONSTRAINT "{1}" DEFERRABLE;""".format(table_name, fk_name))

    def process(self):
        """Run the whole fix-up: truncate bookkeeping, then alter constraints."""
        self.truncate_migrate_table()
        self.make_constraints_deferrable()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-c", help="config", type=str, default="config.yaml", dest="config")
args = parser.parse_args()
db = Database(yaml.load(file(args.config, 'r'))['postgresql'])
db.process()
|
# Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
from glareclient.osc.v1 import artifacts as osc_art
from glareclient.tests.unit.osc.v1 import fakes
from glareclient.v1 import artifacts as api_art
from osc_lib.tests.utils import ParserException
class TestArtifacts(fakes.TestArtifacts):
    """Base class: wires up the mocked artifact client shared by all tests."""

    def setUp(self):
        super(TestArtifacts, self).setUp()
        # Shortcut to the fake artifact manager installed by the fakes module.
        self.artifact_mock = \
            self.app.client_manager.artifact.artifacts
        self.http = mock.MagicMock()
        # Default field set expected from commands that show one artifact.
        self.COLUMNS = set(['id', 'name', 'owner',
                            'status', 'version', 'visibility'])
class TestListArtifacts(TestArtifacts):
    """Tests for the `artifact list` command (osc_art.ListArtifacts)."""

    def setUp(self):
        super(TestListArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.ListArtifacts(self.app, None)
        # List output uses capitalized column headers.
        self.COLUMNS = ['Id', 'Name', 'Version',
                        'Owner', 'Visibility', 'Status']

    def test_artifact_list(self):
        arglist = ['images']
        verify = [('type_name', 'images')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, columns)
        self.check_parser(self.cmd, arglist, verify)

    def test_artifact_list_all(self):
        # Listing type 'all' adds a 'Type name' column to the output.
        arglist = ['all']
        verify = [('type_name', 'all')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        self.assertEqual(['Id', 'Name', 'Version', 'Type name',
                          'Owner', 'Visibility', 'Status'], columns)
        self.check_parser(self.cmd, arglist, verify)

    def test_artifact_list_with_multifilters(self):
        # Repeated --filter flags accumulate into a list.
        arglist = ['images',
                   '--filter', 'spam:spam',
                   '--filter', 'maps:maps']
        verify = [('type_name', 'images'),
                  ('filter', ['spam:spam', 'maps:maps'])]
        self.check_parser(self.cmd, arglist, verify)

    def test_artifact_list_with_sort(self):
        arglist = ['sample_artifact', '--sort', 'name:asc']
        verify = [('type_name', 'sample_artifact'),
                  ('sort', 'name:asc')]
        self.check_parser(self.cmd, arglist, verify)

    def test_artifact_list_with_multisort(self):
        # A repeated --sort takes the last value.
        arglist = ['images',
                   '--sort', 'name:desc',
                   '--sort', 'name:asc']
        verify = [('type_name', 'images'),
                  ('sort', 'name:asc')]
        self.check_parser(self.cmd, arglist, verify)

    def test_artifact_list_page_size(self):
        arglist = ['images', '--page-size', '1']
        verify = [('type_name', 'images'),
                  ('page_size', 1)]
        self.check_parser(self.cmd, arglist, verify)

    def test_artifact_list_limit(self):
        arglist = ['images', '--limit', '2']
        verify = [('type_name', 'images'),
                  ('limit', 2)]
        self.check_parser(self.cmd, arglist, verify)

    def test_artifact_list_multilimit(self):
        # A repeated --limit takes the last value.
        arglist = ['images', '--limit', '2', '--limit', '1']
        verify = [('type_name', 'images'),
                  ('limit', 1)]
        self.check_parser(self.cmd, arglist, verify)
class TestShowArtifacts(TestArtifacts):
    """Tests for the `artifact show` command (osc_art.ShowArtifact)."""

    def setUp(self):
        super(TestShowArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.ShowArtifact(self.app, None)

    def test_artifact_show(self):
        # Show by type + id returns the full field set of the fake artifact.
        arglist = ['images', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba']
        verify = [('type_name', 'images')]
        COLUMNS = set(['blob', 'environment', 'id', 'image',
                       'name', 'owner', 'package', 'status',
                       'template', 'version', 'visibility'])
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(COLUMNS, name_fields)

    def test_artifact_show_without_id(self):
        # The artifact id/name positional is required.
        arglist = ['images']
        verify = [('type_name', 'images')]
        with testtools.ExpectedException(ParserException):
            self.check_parser(self.cmd, arglist, verify)

    def test_artifact_show_without_type_id(self):
        # The type name positional is required as well.
        arglist = ['fc15c365-d4f9-4b8b-a090-d9e230f1f6ba']
        verify = [('type_name', 'images')]
        with testtools.ExpectedException(ParserException):
            self.check_parser(self.cmd, arglist, verify)

    def test_artifact_show_by_name(self):
        # Lookup by name (id=False) returns the same field set.
        arglist = ['images', 'name1']
        verify = [('type_name', 'images'), ('id', False)]
        COLUMNS = set(['blob', 'environment', 'id', 'image',
                       'name', 'owner', 'package', 'status',
                       'template', 'version', 'visibility'])
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(COLUMNS, name_fields)
class TestCreateArtifacts(TestArtifacts):
    """Tests for the `artifact create` command (osc_art.CreateArtifact)."""

    def setUp(self):
        super(TestCreateArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.CreateArtifact(self.app, None)

    def test_create_artifact(self):
        arglist = ['images', 'art',
                   '--artifact-version', '0.2.4',
                   '--property', 'blah=10']
        verify = [('type_name', 'images'),
                  ('property', ['blah=10']),
                  ('artifact_version', '0.2.4')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)

    def test_create_artifact_list_prop(self):
        # --list values are split on ',' before reaching the client call.
        arglist = ['images', 'art',
                   '--artifact-version', '0.2.4',
                   '--list', 'l=10,11,12']
        verify = [('type_name', 'images'),
                  ('list', ['l=10,11,12']),
                  ('artifact_version', '0.2.4')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        with mock.patch.object(
                self.app.client_manager.artifact.artifacts,
                'create') as patched_create:
            self.cmd.take_action(parsed_args)
            patched_create.assert_called_once_with(
                'art',
                l=['10', '11', '12'],
                type_name='images',
                version='0.2.4')

    def test_create_artifact_dict_prop(self):
        # --dict values are parsed into {key: value} pairs.
        arglist = ['images', 'art',
                   '--artifact-version', '0.2.4',
                   '--dict', 'd=a:10,b:11,c:12']
        verify = [('type_name', 'images'),
                  ('dict', ['d=a:10,b:11,c:12']),
                  ('artifact_version', '0.2.4')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        with mock.patch.object(
                self.app.client_manager.artifact.artifacts,
                'create') as patched_create:
            self.cmd.take_action(parsed_args)
            patched_create.assert_called_once_with(
                'art',
                d={'a': '10', 'c': '12', 'b': '11'},
                type_name='images',
                version='0.2.4')

    def test_create_artifact_multiproperty(self):
        # Repeated --property flags accumulate into a list.
        arglist = ['images', 'art',
                   '--artifact-version', '0.2.4',
                   '--property', 'blah=1',
                   '--property', 'blag=2']
        verify = [('type_name', 'images'),
                  ('property', ['blah=1', 'blag=2']),
                  ('artifact_version', '0.2.4')]
        self.check_parser(self.cmd, arglist, verify)

    def test_create_artifact_multiversion(self):
        # A repeated --artifact-version takes the last value.
        arglist = ['images', 'art',
                   '--artifact-version', '0.2.4',
                   '--artifact-version', '0.2.5']
        verify = [('type_name', 'images'),
                  ('artifact_version', '0.2.5')]
        self.check_parser(self.cmd, arglist, verify)
class TestUpdateArtifacts(TestArtifacts):
    """Tests for the `artifact update` command (osc_art.UpdateArtifact)."""

    def setUp(self):
        super(TestUpdateArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.UpdateArtifact(self.app, None)

    def test_artifact_update(self):
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                   '--property', 'blah=1',
                   '--property', 'blag=2']
        verify = [('type_name', 'images'),
                  ('property', ['blah=1', 'blag=2'])]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)

    def test_update_artifact_list_prop(self):
        # --list values are split on ',' before reaching the client call.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                   '--list', 'l=10,11,12']
        verify = [('type_name', 'images'),
                  ('list', ['l=10,11,12'])]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        with mock.patch.object(
                self.app.client_manager.artifact.artifacts,
                'update') as patched_update:
            self.cmd.take_action(parsed_args)
            patched_update.assert_called_once_with(
                'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                l=['10', '11', '12'],
                remove_props=[],
                type_name='images')

    def test_update_artifact_dict_prop(self):
        # --dict values are parsed into {key: value} pairs.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                   '--dict', 'd=a:10,b:11,c:12']
        verify = [('type_name', 'images'),
                  ('dict', ['d=a:10,b:11,c:12'])]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        with mock.patch.object(
                self.app.client_manager.artifact.artifacts,
                'update') as patched_update:
            self.cmd.take_action(parsed_args)
            patched_update.assert_called_once_with(
                'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                d={'a': '10', 'c': '12', 'b': '11'},
                remove_props=[],
                type_name='images')

    def test_artifact_update_bad(self):
        # A --property without '=' cannot be parsed into a key/value pair.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                   '--property', 'blah',
                   '--property', 'blah'
                   ]
        verify = [('type_name', 'images')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        with testtools.ExpectedException(ValueError):
            self.cmd.take_action(parsed_args)

    def test_artifact_update_multiremove_prop(self):
        # Repeated --remove-property flags accumulate into a list.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                   '--remove-property', 'prop1',
                   '--remove-property', 'prop2']
        verify = [('type_name', 'images'),
                  ('remove_property', ['prop1', 'prop2'])]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)
class TestDeleteArtifacts(TestArtifacts):
    """Tests for the `artifact delete` command (osc_art.DeleteArtifact)."""

    def setUp(self):
        super(TestDeleteArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.DeleteArtifact(self.app, None)

    def test_artifact_delete(self):
        # Delete produces no output data.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba', '--id']
        verify = [('type_name', 'images'),
                  ('name', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba'),
                  ('id', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        self.assertIsNone(self.cmd.take_action(parsed_args))
class TestActivateArtifacts(TestArtifacts):
    """Tests for the `artifact activate` command (osc_art.ActivateArtifact)."""

    def setUp(self):
        super(TestActivateArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.ActivateArtifact(self.app, None)

    def test_artifact_activate(self):
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba',
                   '--id']
        verify = [('type_name', 'images'),
                  ('name', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba'),
                  ('id', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)
class TestDeactivateArtifacts(TestArtifacts):
    """Tests for the `artifact deactivate` command (osc_art.DeactivateArtifact)."""

    def setUp(self):
        super(TestDeactivateArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.DeactivateArtifact(self.app, None)

    def test_artifact_deactivate(self):
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba', '--id']
        verify = [('type_name', 'images'),
                  ('name', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba'),
                  ('id', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)
class TestReactivateArtifacts(TestArtifacts):
    """Tests for the `artifact reactivate` command (osc_art.ReactivateArtifact)."""

    def setUp(self):
        super(TestReactivateArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.ReactivateArtifact(self.app, None)

    def test_artifact_reactivate(self):
        # Renamed from the misspelled `test_artifact_rectivate`; nothing
        # references test methods by name, so discovery is unaffected.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba', '--id']
        verify = [('type_name', 'images'),
                  ('name', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba'),
                  ('id', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)
class TestAddTag(TestArtifacts):
    """Tests for the `artifact tag add` command (osc_art.AddTag)."""

    def setUp(self):
        super(TestAddTag, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.AddTag(self.app, None)

    def test_artifact_add_tag(self):
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba', '--id',
                   '123']
        verify = [('type_name', 'images'),
                  ('name', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba'),
                  ('id', True),
                  ('tag', '123')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)
class TestRemoveTag(TestArtifacts):
    """Tests for the `artifact tag remove` command (osc_art.RemoveTag)."""

    def setUp(self):
        super(TestRemoveTag, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.RemoveTag(self.app, None)

    def test_artifact_remove_tag(self):
        # Renamed from the copy-pasted `test_artifact_add_tag` so the test
        # name matches the command under test; discovery is unaffected.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba', '--id',
                   '123']
        verify = [('type_name', 'images'),
                  ('name', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba'),
                  ('id', True),
                  ('tag', '123')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)
class TestPublishArtifacts(TestArtifacts):
    """Tests for the `artifact publish` command (osc_art.PublishArtifact)."""

    def setUp(self):
        super(TestPublishArtifacts, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.PublishArtifact(self.app, None)

    def test_artifact_publish(self):
        # Renamed from the copy-pasted `test_publish_delete`; this test
        # exercises publish, not delete. Discovery is unaffected.
        arglist = ['images',
                   'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba', '--id']
        verify = [('type_name', 'images'),
                  ('name', 'fc15c365-d4f9-4b8b-a090-d9e230f1f6ba'),
                  ('id', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        name_fields = set([column[0] for column in data])
        # Check that columns are correct
        self.assertEqual(self.COLUMNS, name_fields)
class TypeSchema(TestArtifacts):
    """Tests for the `artifact type-schema` command (osc_art.TypeSchema)."""

    def setUp(self):
        super(TypeSchema, self).setUp()
        self.artifact_mock.call.return_value = \
            api_art.Controller(self.http, type_name='images')
        # Command to test
        self.cmd = osc_art.TypeSchema(self.app, None)

    def test_get_schema(self):
        arglist = ['images']
        verify = [('type_name', 'images')]
        parsed_args = self.check_parser(self.cmd, arglist, verify)
        columns, data = self.cmd.take_action(parsed_args)
        exp_columns = ['Name', 'Glare_type', 'Mutable', 'Required',
                       'Sortable', 'Filters', 'Available_values']
        # Expected schema rows: (name, type, mutable, required, sortable,
        # allowed filter operators, allowed values).
        exp_data = [
            (u'image', u'Blob', False, False, False, [], ''),
            (u'updated_at', u'DateTime', False, True, True,
             [u'eq', u'neq', u'in', u'gt', u'gte', u'lt', u'lte'], ''),
            (u'owner', u'String', False, False, True,
             [u'eq', u'neq', u'in'], ''),
            (u'provided_by', u'StringDict', False, False, False,
             [u'eq', u'neq', u'in'], ''),
            (u'id', u'String', False, True, True, [u'eq', u'neq', u'in'], ''),
            (u'environment', u'Blob', False, True, False, [], ''),
            (u'version', u'String', False, False, True,
             [u'eq', u'neq', u'in', u'gt', u'gte', u'lt', u'lte'], ''),
            (u'blob', u'Blob', True, False, False, [], ''),
            (u'template', u'Blob', False, True, False, [], ''),
            (u'metadata', u'StringDict', False, False, False,
             [u'eq', u'neq'], ''),
            (u'status', u'String', False, True, True, [u'eq', u'neq', u'in'],
             [u'drafted', u'active', u'deactivated', u'deleted']),
            (u'description', u'String', True, False, False,
             [u'eq', u'neq', u'in'], ''),
            (u'tags', u'StringList', True, False, False,
             [u'eq', u'neq', u'in'], ''),
            (u'activated_at', u'DateTime', False, False, True,
             [u'eq', u'neq', u'in', u'gt', u'gte', u'lt', u'lte'], ''),
            (u'supported_by', u'StringDict', False, False, False,
             [u'eq', u'neq', u'in'], ''),
            (u'visibility', u'String', False, True, True, [u'eq'], ''),
            (u'icon', u'Blob', False, False, False, [], ''),
            (u'name', u'String', False, False, True,
             [u'eq', u'neq', u'in'], ''),
            (u'license', u'String', False, False, False,
             [u'eq', u'neq', u'in'], ''),
            (u'package', u'Blob', False, False, False, [], ''),
            (u'created_at', u'DateTime', False, True, True,
             [u'eq', u'neq', u'in', u'gt', u'gte', u'lt', u'lte'], ''),
            (u'license_url', u'String', False, False, False,
             [u'eq', u'neq', u'in'], ''),
            (u'release', u'StringList', False, False, False,
             [u'eq', u'neq', u'in'], '')]
        # Sort both sides by field name so the comparison is order-independent.
        data.sort(key=lambda x: x[0])
        exp_data.sort(key=lambda x: x[0])
        # Check that columns are correct
        self.assertEqual(exp_columns, columns)
        self.assertEqual(exp_data, data)
|
import os
import sys
sys.path.insert(0, 'tools/plotters')
sys.path.insert(0, 'tools/families')
import plot_line
import plot_histogram
import saved_metrics
import missing_data_estimation
def get_metrics(datadir):
    """Return the per-species coverage metrics stored under ``datadir``.

    When the metrics are missing or unreadable, compute them with
    missing_data_estimation and reload. (The original used a bare
    ``raise`` plus a blanket ``except`` as control flow and had an
    unreachable ``return None``.)
    """
    try:
        metrics = saved_metrics.get_metrics(datadir, "species_coverage")
        if metrics is not None:
            return metrics
    except Exception:
        # Treat any read failure the same as missing data.
        pass
    print("Failed to find species coverage information, computing it...")
    missing_data_estimation.estimate(datadir)
    return saved_metrics.get_metrics(datadir, "species_coverage")
def plot(datadir):
    """Plot the species coverage distribution of ``datadir`` to an SVG file."""
    output = os.path.basename(os.path.normpath(datadir)) + "_species_coverage.svg"
    metrics = get_metrics(datadir)
    # One x position per species (the hand-maintained counter is gone);
    # y holds the corresponding coverage values in iteration order.
    x = list(range(len(metrics)))
    y = [float(metrics[species]) for species in metrics]
    title = None
    xcaption = None
    ycaption = None
    line_captions = None
    plot_line.plot_line(x, [y], title, xcaption, ycaption, output, line_captions, sort_y=True)
    print("Output file: " + output)
if __name__ == "__main__":
    # Expect exactly one argument: the data directory to plot.
    if len(sys.argv) != 2:
        print("Syntax python " + os.path.basename(__file__) + " datadir")
        sys.exit(1)
    plot(sys.argv[1])
|
# marksheet programm
# Collect the five subject marks, then grade on the average percentage.
subjects = ("maths", "physics", "chemistry", "communication", "computer")
marks = [int(input("Enter marks obtain in " + subject + " :")) for subject in subjects]
total = sum(marks)
percent = total / 5
if percent >= 60:
    print("Congratulation you have got first grade.")
elif percent > 50 and percent < 60:
    print("You have got second grade, you need to work harder.")
elif percent >= 35 and percent <= 50:
    print("You have got third grade, really poor work.")
else:
    print("You are fail, you are not allowed to sit in new class.")
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import tensorflow.contrib.slim as slim
def rconv_layer(name, glimpse, refresh, decay_ratio=1.0):
    """
    rconv, mix new features with older memories

    Args:
        name: a string, memory variable is named as name+'_memory'
        glimpse: a tensor, new feature
        refresh: a tf.bool variable, if True the memory restarts as zero
        decay_ratio: a scalar indicating the decay rate of memory.
            The original body referenced this but never declared it in the
            signature (NameError at runtime); default 1.0 leaves the memory
            undecayed — TODO confirm the intended rate.
    Returns:
        belief: a tensor, mixed input tensor
    """
    initializer = tf.constant_initializer([0.])
    shape = glimpse.get_shape().as_list()
    memory = tf.get_variable(name=name + '_memory', shape=shape,
                             initializer=initializer, trainable=False)
    # decay memory or refresh memory when new stream starts
    memory = tf.assign(memory,
                       tf.cond(refresh, lambda: tf.zeros_like(glimpse, tf.float32),
                               lambda: memory * tf.cast(decay_ratio, tf.float32)),
                       validate_shape=False)
    # update memory: fold the new glimpse into the (decayed) memory
    memory_update = tf.assign(memory, glimpse + memory, validate_shape=False)
    # update belief. The original had an empty `belief =` statement here
    # (a SyntaxError); the updated memory tensor is the mixed belief.
    belief = memory_update
    belief = tf.reshape(belief, shape)
    return belief
|
import threading
from queue import Queue
from general import *
from domain import *
from spider import Spider
# Crawler configuration for the target site.
PROJECT_NAME = 'The_manit'
HOME_PAGE = 'http://www.manit.ac.in/'
DOMAIN_NAME = get_sub_domain_name(HOME_PAGE)
# The crawl frontier and history are persisted as flat text files.
QUEUE_FILE = PROJECT_NAME + '/queue.txt'
CRAWLED_FILE = PROJECT_NAME + '/crawled.txt'
NUMBER_OF_THREADS = 4
# Shared job queue consumed by the worker threads.
queue = Queue()
# Instantiating Spider initializes the project files and crawls the home page.
Spider(PROJECT_NAME, HOME_PAGE, DOMAIN_NAME)
# Create Workers for multi threading
def create_workers():
    """Spawn NUMBER_OF_THREADS daemon threads running the work() loop."""
    for _unused in range(NUMBER_OF_THREADS):
        worker = threading.Thread(target=work)
        # Daemon threads die with the main thread instead of blocking exit.
        worker.daemon = True
        worker.start()
# Do the next job in the queue
def work():
    """Worker loop: pull URLs off the shared queue and crawl them forever."""
    while True:
        next_url = queue.get()
        Spider.crawl_page(threading.current_thread().name, next_url)
        # Let queue.join() know this job is finished.
        queue.task_done()
# Create Jobs for multi threading
# Each queued link is a new job
def create_jobs():
    """Queue every link from QUEUE_FILE, wait for completion, then re-check."""
    for queued_link in file_to_set(QUEUE_FILE):
        queue.put(queued_link)
    # Blocks the thread until all the values in the queue are processed
    queue.join()
    crawl()
# Check if there are items in the queue, if so crawl them
def crawl():
    """Kick off another round of jobs while the on-disk queue has links."""
    pending_links = file_to_set(QUEUE_FILE)
    if pending_links:
        print(str(len(pending_links)) + ' links in the queue')
        create_jobs()
# Boot the crawler: start the worker pool, then seed it from the queue file.
create_workers()
crawl()
# Canonical catalogue strings used to classify infrastructure rows.
HT = 'Weasel Conductor on Steel Tubular Poles'
LT1P = '1X35+1X25'
LT3P = '3X50+1X35'
NONE = 'None'
DTR25 = '25 KVA (3 Ph)'
DTR63 = '63 KVA (3 Ph)'
DTR100 = '100 KVA (3 Ph)'


def macroInfraToCol(row):
    """Collapse one infrastructure row into (ht, lt3, lt1, dtr100, dtr63, dtr25) totals."""
    # Total HT line length across the three HT columns.
    ht = row['htwv'] + row['htsv'] + row['htrv']

    # Sum LT line lengths, bucketed by line type.
    lt_totals = {LT1P: 0.0, LT3P: 0.0}
    for type_key, value_key in (('ltt1', 'lt1v'), ('ltt2', 'lt2v')):
        line_type = row[type_key]
        if line_type in lt_totals:
            lt_totals[line_type] += row[value_key]

    # Sum transformer capacities, bucketed by KVA rating.
    dtr_totals = {DTR25: 0, DTR63: 0, DTR100: 0}
    for type_key, value_key in (('kvat1', 'kva1v'), ('kvat2', 'kva2v')):
        rating = row[type_key]
        if rating in dtr_totals:
            dtr_totals[rating] += row[value_key]

    return (ht, lt_totals[LT3P], lt_totals[LT1P],
            dtr_totals[DTR100], dtr_totals[DTR63], dtr_totals[DTR25])
#!/usr/bin/env python3
#
# checks a main/standby server's availability and takes control of the DigitalOcean floatingIP if necessary
#
# uses environment variables for configuration:
# - API_KEY: digitalocean.com API key
# - FAILOVER_MODE: 'main' or 'standby' (or disabled if not set)
# - FLOATING_IP: the IP that's been shared by the main and standby server
# - FAILOVER_CHECK: local URL(s) to check to determine the health of this server (split by '|')
# - FAILOVER_MAIN: set the base URL(s) of the main server (if we're on the standby server) (URLs are split by '|')
# - FAILOVER_MAIN_HOST (optional): Set the 'Host' header for requests to FAILOVER_MAIN URLs (allows to ignore DNS while still checking the validity of SSL certs)
import json
import logging
import os
import sys
import threading
import time
import urllib.request
class Watchdog(threading.Thread):
    """ Runs the given callback function unless you call `kick()` at least once every `timeout` seconds """

    def __init__(self, timeout, callback):
        """ Watchdog constructor
        - timeout: number of seconds without receiving a `kick()` before invoking `callback`
        - callback: function to call after `timeout` seconds without a `kick()`"""
        threading.Thread.__init__(self)
        self._kick = threading.Event()
        # BUG FIX: this attribute was previously named `_stop`, which shadows
        # the private threading.Thread._stop() method. Thread.join() (and
        # interpreter shutdown) call self._stop() internally, so shadowing it
        # with an Event raised "TypeError: 'Event' object is not callable".
        self._stop_event = threading.Event()
        self.timeout = timeout
        self.callback = callback

    def kick(self):
        """ Kick the watchdog, resetting its timeout """
        self._kick.set()

    def run(self):
        """ internal Thread run() method. Use start() instead! """
        while not self._stop_event.is_set():
            if self._kick.wait(self.timeout):
                # kicked before the timeout -> reset flag and wait again
                self._kick.clear()
            else:
                # no kick within `timeout` seconds -> fire the callback
                self.callback()

    def start(self):
        """ Starts the watchdog """
        self._kick.clear()
        self._stop_event.clear()
        threading.Thread.start(self)

    def stop(self):
        """ Stops the watchdog (and its background Thread) """
        self._stop_event.set()
        # wake the run() loop immediately so it can observe the stop flag
        self._kick.set()
def _item(dict_, *keys, default=None):
""" Get dictionary value recursively (or 'default' if not found) """
#print('_item({0}, {1}, {2})'.format(dict_, default, keys))
if dict_ == None:
return default
if len(keys) == 0:
return dict_
if type(dict_) == list:
if len(dict_) <= keys[0]:
return default
return _item(dict_[keys[0]], *(keys[1:]), default=default)
elif keys[0] not in dict_:
return default
else:
return _item(dict_[keys[0]], *(keys[1:]), default=default)
def _get(url, hostname=None):
    """HTTP GET `url` (optionally overriding the Host header); return body bytes.

    Raises on non-200 responses and on transport errors (20s timeout).
    """
    request = urllib.request.Request(url)
    if hostname is not None:
        # Explicit Host header lets callers hit an IP while validating certs.
        request.add_header('Host', hostname)
    response = urllib.request.urlopen(request, timeout=20)
    if response.status != 200:
        raise Exception("Failed to GET '{0}' (code {1})".format(url, response.status))
    return response.read()
def checkService(*urls, hostname=None):
    """Return True iff every URL responds successfully (an empty list is healthy).

    Logs and returns False on any HTTP or transport error.
    """
    url = None  # keep bound for the except handlers even if `urls` is empty
    try:
        for url in urls:
            _get(url, hostname=hostname)
            # TODO find a clean way to check if the service is actually healthy
        return True
    except urllib.request.HTTPError as e:
        # BUG FIX: HTTPError exposes the response code as `.code`; the previous
        # `e.status` raised AttributeError inside this handler.
        logging.error("checkService failed (code {0}): {1}".format(e.code, url))
        return False
    except Exception as e:
        logging.error("checkService failed (url: '{0}'): {1}".format(url, str(e)))
        return False
def getDropletID():
    """ Requests this droplet's ID (or fail with an Exception) """
    metadata = getMetadata()
    return metadata['droplet_id']
def getMetadata(cache=True):
    """ Queries this droplet's metadata (and caches the result)

    :param cache: when True, return the module-level cached copy if available
    """
    global _metadata
    # Identity comparison with None (PEP 8); `!= None` invokes __eq__.
    if cache and _metadata is not None:
        return _metadata
    _metadata = json.loads(_get('http://169.254.169.254/metadata/v1.json').decode('utf8'))
    return _metadata
def hasFloatingIP(ip, apiKey):
    """ Returns True if this droplet owns the floatingIP in question """
    # even though there's a 'floating_ip' section in getMetadata() I've observed a case where
    # that returned 'active' for both droplets in question. That's why we're using the (slower, but safe)
    # api.digitalocean.com here
    dropletId = getDropletID()
    request = urllib.request.Request('https://api.digitalocean.com/v2/floating_ips/{0}'.format(ip))
    request.add_header('Authorization', 'Bearer {0}'.format(apiKey))
    response = urllib.request.urlopen(request, timeout=20)
    payload = json.loads(response.read().decode('utf8'))
    owner = _item(payload, 'floating_ip', 'droplet', 'id')
    return dropletId == owner
def takeFloatingIP(floatingIP, apiKey):
    """ Takes control of the given floating IP """
    dropletId = getDropletID()
    request = urllib.request.Request(
        "https://api.digitalocean.com/v2/floating_ips/{0}/actions".format(floatingIP))
    request.add_header('Content-type', 'application/json')
    request.add_header('Authorization', 'Bearer {0}'.format(apiKey))
    body = json.dumps({'type': 'assign', 'droplet_id': dropletId}).encode('utf8')
    response = urllib.request.urlopen(request, body, timeout=20)
    if response.status in [200, 201]:
        logging.info("Acquired the floating IP {0}".format(floatingIP))
    else:
        logging.error('response body: {0}'.format(response.read()))
        raise Exception("Failed to acquire floating IP (code {0})".format(response.status))
def main():
    """Run the failover loop.

    Reads configuration from the environment (see module header), then every
    60 seconds health-checks this node and manages ownership of the
    DigitalOcean floating IP according to FAILOVER_MODE ('main'/'standby').
    Returns 0 when failover is not configured; raises on missing settings.
    """
    global watchdog
    mode = _item(os.environ, 'FAILOVER_MODE')
    apiKey = _item(os.environ, 'API_KEY')
    floatingIP = _item(os.environ, 'FLOATING_IP')
    checkURLs = _item(os.environ, 'FAILOVER_CHECK')
    mainURLs = _item(os.environ, 'FAILOVER_MAIN')
    mainHost = _item(os.environ, 'FAILOVER_MAIN_HOST')
    # `is None` instead of `== None` throughout (PEP 8).
    if mode is None:
        logging.info("Not set up for automatic failover, exiting")
        return 0
    if apiKey is None:
        raise Exception("Missing 'API_KEY'!")
    if floatingIP is None:
        raise Exception("Missing 'FLOATING_IP'!")
    if mode == 'main' and mainURLs is not None:
        # On the main server the main URLs themselves are the health check.
        checkURLs = mainURLs
    if checkURLs is None:
        raise Exception("Missing 'FAILOVER_CHECK' url(s)!")
    checkURLs = checkURLs.split('|')
    if isinstance(mainURLs, str):
        mainURLs = mainURLs.split('|')
    watchdog.start()
    try:
        while True:
            # Every loop iteration must kick the watchdog, or the process exits.
            watchdog.kick()
            if checkService(*checkURLs):
                if mode == 'main':
                    if not hasFloatingIP(floatingIP, apiKey):
                        logging.info("Lost control of the floating IP, getting it back")
                        takeFloatingIP(floatingIP, apiKey)
                    else:
                        logging.debug("We're in control of the floating IP {0}".format(floatingIP))
                elif mode == 'standby':
                    if not hasFloatingIP(floatingIP, apiKey):
                        if mainURLs is None:
                            raise Exception("Missing 'FAILOVER_MAIN' url(s)!")
                        if not checkService(*mainURLs, hostname=mainHost):
                            # logging.warn() is a deprecated alias of warning().
                            logging.warning("MAIN SERVER IS DOWN, TAKING OVER!")
                            takeFloatingIP(floatingIP, apiKey)
                        else:
                            logging.debug("Main server up and running")
                    else:
                        logging.debug("We're in control of the floating IP {0}".format(floatingIP))
                else:
                    raise Exception("Unsupported failover mode! Expected 'main' or 'standby'")
            else:
                logging.error('Service not running!')
            time.sleep(60)
    finally:
        watchdog.stop()
def onWatchdogTimeout():
    """Watchdog callback: main() stalled for too long — log and exit hard."""
    logging.fatal("Watchdog timeout!")
    # NOTE: sys.exit() from the watchdog thread raises SystemExit in that
    # thread; combined with process supervision this restarts the checker.
    sys.exit(1)
# Module state: droplet metadata cache, populated lazily by getMetadata().
_metadata = None
# Global watchdog: main() must kick it at least every 180s or the process dies.
watchdog = Watchdog(180, onWatchdogTimeout)
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    main()
|
import unittest
from katas.beta.remove_duplicates_from_list import distinct
class DistinctTestCase(unittest.TestCase):
    """Tests for `distinct`: duplicates removed, first-seen order preserved."""

    def _check(self, given, expected):
        # Every case is a straight input/output comparison.
        self.assertEqual(distinct(given), expected)

    def test_equal_1(self):
        self._check([1], [1])

    def test_equal_2(self):
        self._check([1, 2], [1, 2])

    def test_equal_3(self):
        self._check([1, 1, 2], [1, 2])

    def test_equal_4(self):
        self._check([1, 1, 1, 2, 3, 4, 5], [1, 2, 3, 4, 5])

    def test_equal_5(self):
        self._check([1, 2, 2, 3, 3, 4, 4, 5, 6, 7, 7, 7],
                    [1, 2, 3, 4, 5, 6, 7])
|
#!/usr/bin/python3
import sys
import logging
import json
import os
from app.views import app as application
logging.basicConfig(stream=sys.stderr)
# Secret key for Flask session signing. SECURITY: allow overriding the
# hard-coded development default via the environment so a real secret can be
# injected in production (behavior unchanged when SECRET_KEY is unset).
application.secret_key = os.environ.get('SECRET_KEY', 'whatislove')
if __name__ == '__main__':
    # Debug mode only when run directly, never under a WSGI server.
    application.debug = True
    port = int(os.environ.get("PORT", 5000))
    application.run(host='0.0.0.0', port=port)
|
from colorsys import rgb_to_hls,hls_to_rgb
from math import sin,cos,atan2,pi
def average_colors(rgb1, rgb2):
    """Return the average of two RGB colours, computed in HLS space.

    Hue is circular, so the two hues are averaged as unit vectors (atan2 of
    summed sin/cos) rather than arithmetically. Inputs and output are
    0-255 component triples (output components are floats).
    """
    first = rgb_to_hls(rgb1[0]/255., rgb1[1]/255., rgb1[2]/255.)
    second = rgb_to_hls(rgb2[0]/255., rgb2[1]/255., rgb2[2]/255.)
    sat = 0.5 * (first[2] + second[2])
    lum = 0.5 * (first[1] + second[1])
    vec_x = cos(2*pi*first[0]) + cos(2*pi*second[0])
    vec_y = sin(2*pi*first[0]) + sin(2*pi*second[0])
    if vec_x == 0.0 and vec_y == 0.0:
        # Opposite hues cancel out: no meaningful hue, drop the saturation.
        hue = 0.0
        sat = 0.0
    else:
        hue = atan2(vec_y, vec_x) / (2*pi)
    r, g, b = hls_to_rgb(hue, lum, sat)
    return (r*255., g*255., b*255.)
|
from setuptools import setup, find_packages
# Distribution metadata for the `guessadapt` package (src/ layout).
setup(
    name='guessadapt',
    version='0.2',
    # Package code lives under src/ ("src layout"); map the root accordingly.
    packages=find_packages('src'),
    package_dir={'': 'src'},
    test_suite='tests',
    # Installs a `guessadapt` CLI entry point.
    entry_points={
        'console_scripts': ['guessadapt = guessadapt.client:main']
    },
    python_requires='>=3.6',
    author='Michael Knudsen',
    author_email='micknudsen@gmail.com',
    license='MIT'
)
|
"""
General networks for pytorch.
Algorithm-specific networks should go else-where.
"""
import torch
from torch import nn as nn
from torch.nn import functional as F
from rlkit.policies.base import Policy
from rlkit.torch import pytorch_util as ptu
from rlkit.torch.core import PyTorchModule
from rlkit.torch.data_management.normalizer import TorchFixedNormalizer, TorchNormalizer, CompositeNormalizer
from rlkit.torch.modules import LayerNorm
from rlkit.torch.relational.relational_util import fetch_preprocessing, invert_fetch_preprocessing
def identity(x):
    """Identity activation: return the input unchanged."""
    return x
class Mlp(PyTorchModule):
    """Configurable multi-layer perceptron.

    Builds one fully-connected layer per entry of ``hidden_sizes`` followed
    by a final linear output layer, with optional LayerNorm on hidden layers.
    """

    def __init__(
            self,
            hidden_sizes,
            output_size,
            input_size,
            init_w=3e-3,
            hidden_activation=F.relu,
            output_activation=identity,
            hidden_init=ptu.fanin_init,
            b_init_value=0.1,
            layer_norm=False,
            layer_norm_kwargs=None,
    ):
        # Framework hook: records locals() for serialization. Local variable
        # names in this scope are therefore part of the saved state.
        self.save_init_params(locals())
        super().__init__()
        if layer_norm_kwargs is None:
            layer_norm_kwargs = dict()
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_activation = hidden_activation
        # Allow specifying the output activation by name (e.g. "tanh").
        if isinstance(output_activation, str):
            output_activation = getattr(torch, output_activation)
        self.output_activation = output_activation
        self.layer_norm = layer_norm
        self.fcs = nn.ModuleList([])
        self.layer_norms = nn.ModuleList([])
        in_size = input_size
        for i, next_size in enumerate(hidden_sizes):
            fc = nn.Linear(in_size, next_size)
            in_size = next_size
            hidden_init(fc.weight)
            fc.bias.data.fill_(b_init_value)
            # Also registered as an attribute (fc0, fc1, ...) for name-based access.
            self.__setattr__("fc{}".format(i), fc)
            self.fcs.append(fc)
            if self.layer_norm:
                ln = LayerNorm(next_size)
                self.__setattr__("layer_norm{}".format(i), ln)
                self.layer_norms.append(ln)
        self.last_fc = nn.Linear(in_size, output_size)
        self.last_fc.weight.data.uniform_(-init_w, init_w)
        self.last_fc.bias.data.uniform_(-init_w, init_w)

    def forward(self, input, return_preactivations=False):
        h = input
        for i, fc in enumerate(self.fcs):
            h = fc(h)
            h = self.hidden_activation(h)
            # NOTE(review): `i < len(self.fcs)` is always true inside this
            # loop, so LayerNorm is applied after every hidden layer; upstream
            # rlkit uses `i < len(self.fcs) - 1` (skip the last one) — confirm
            # which behavior is intended.
            if self.layer_norm and i < len(self.fcs):
                h = self.layer_norms[i](h)
        preactivation = self.last_fc(h)
        output = self.output_activation(preactivation)
        if return_preactivations:
            return output, preactivation
        else:
            return output
class FlattenMlp(Mlp):
    """
    Flatten inputs along dimension 1 and then pass through MLP.
    """

    def forward(self, *inputs, **kwargs):
        # Concatenate all input tensors along the feature axis first.
        flattened = torch.cat(inputs, dim=1)
        return super().forward(flattened, **kwargs)
class CompositeNormalizedFlattenMlp(FlattenMlp):
    """FlattenMlp that normalizes observations with a CompositeNormalizer
    before concatenating them with the (unnormalized) actions."""

    def __init__(
            self,
            *args,
            composite_normalizer: CompositeNormalizer = None,
            **kwargs
    ):
        # Framework hook: records ctor locals() for serialization.
        self.save_init_params(locals())
        super().__init__(*args, **kwargs)
        assert composite_normalizer is not None
        self.composite_normalizer = composite_normalizer

    def forward(
            self,
            observations,
            actions,
            return_preactivations=False):
        # Normalize observations only; actions pass through unchanged.
        obs, _ = self.composite_normalizer.normalize_all(observations, None)
        flat_input = torch.cat((obs, actions), dim=1)
        return super().forward(flat_input, return_preactivations=return_preactivations)
class QNormalizedFlattenMlp(FlattenMlp):
    """Q-network variant: runs fetch-style preprocessing over (obs, actions)
    and clamps the output to [clip_low, clip_high]."""

    def __init__(
            self,
            *args,
            composite_normalizer: CompositeNormalizer = None,
            clip_high=float('inf'),
            clip_low=float('-inf'),
            lop_state_dim=None,
            preprocessing_kwargs=None,
            num_blocks=None,
            **kwargs
    ):
        # Framework hook: records ctor locals() for serialization.
        self.save_init_params(locals())
        super().__init__(*args, **kwargs)
        assert composite_normalizer is not None
        self.composite_normalizer = composite_normalizer
        self.clip_low = clip_low
        self.clip_high = clip_high
        # lop_state_dim: trailing observation dims dropped by preprocessing
        # (presumably the gripper position — see commented-out code below).
        self.lop_state_dim = lop_state_dim
        self.preprocessing_kwargs = preprocessing_kwargs
        self.num_blocks = num_blocks

    def forward(
            self,
            observations,
            actions,
            return_preactivations=False, **kwargs):
        # if self.lop_state_dim:
        #     observations = observations.narrow(1, 0,
        #                                        observations.size(1) - self.lop_state_dim)  # Chop off the final 3 dimension of gripper position
        # obs, _ = self.composite_normalizer.normalize_all(observations, None)
        # if len(obs.size()) > len(actions.size()):
        #     # Unsqueeze along block dimension
        #     actions = actions.unsqueeze(1).expand(-1, obs.size(1), -1)
        #
        # flat_input = torch.cat((obs, actions), dim=-1)
        # Preprocess into (shared, per-object) parts, then flatten back to 2-D.
        shared_state, object_goal_state = fetch_preprocessing(observations, actions=actions, normalizer=self.composite_normalizer, **self.preprocessing_kwargs)
        flat_input = invert_fetch_preprocessing(shared_state, object_goal_state, num_blocks=self.num_blocks, **self.preprocessing_kwargs)
        # Sanity: preprocessing dropped lop_state_dim obs dims and appended actions.
        assert observations.size(-1) - self.lop_state_dim + actions.size(-1) == flat_input.size(-1)
        assert len(observations.size()) == len(flat_input.size()) == 2
        if return_preactivations:
            output, preactivation = super().forward(flat_input, return_preactivations=return_preactivations)
            # Clamp the Q-value range; preactivations are returned unclamped.
            output = torch.clamp(output, self.clip_low, self.clip_high)
            return output, preactivation
        else:
            output = super().forward(flat_input)
            output = torch.clamp(output, self.clip_low, self.clip_high)
            return output
class VNormalizedFlattenMlp(FlattenMlp):
    """Value-network variant of QNormalizedFlattenMlp: same preprocessing and
    output clamping, but observations only (no actions)."""

    def __init__(
            self,
            *args,
            composite_normalizer: CompositeNormalizer = None,
            clip_high=float('inf'),
            clip_low=float('-inf'),
            lop_state_dim=None,
            preprocessing_kwargs=None,
            num_blocks=None,
            **kwargs
    ):
        # Framework hook: records ctor locals() for serialization.
        self.save_init_params(locals())
        super().__init__(*args, **kwargs)
        assert composite_normalizer is not None
        self.composite_normalizer = composite_normalizer
        self.clip_low = clip_low
        self.clip_high = clip_high
        self.lop_state_dim = lop_state_dim
        self.preprocessing_kwargs = preprocessing_kwargs
        self.num_blocks = num_blocks

    def forward(
            self,
            observations,
            return_preactivations=False, **kwargs):
        # if self.lop_state_dim:
        #     observations = observations.narrow(1, 0,
        #                                        observations.size(1) - self.lop_state_dim)  # Chop off the final 3 dimension of gripper position
        # obs, _ = self.composite_normalizer.normalize_all(observations, None)
        # flat_input = obs
        # Preprocess into (shared, per-object) parts, then flatten back to 2-D.
        shared_state, object_goal_state = fetch_preprocessing(observations,
                                                              normalizer=self.composite_normalizer,
                                                              **self.preprocessing_kwargs)
        flat_input = invert_fetch_preprocessing(shared_state, object_goal_state, num_blocks=self.num_blocks, **self.preprocessing_kwargs)
        # Sanity: preprocessing dropped exactly lop_state_dim observation dims.
        assert observations.size(-1) - self.lop_state_dim == flat_input.size(-1)
        assert len(observations.size()) == len(flat_input.size()) == 2
        if return_preactivations:
            output, preactivation = super().forward(flat_input, return_preactivations=return_preactivations)
            # Clamp the value range; preactivations are returned unclamped.
            output = torch.clamp(output, self.clip_low, self.clip_high)
            return output, preactivation
        else:
            output = super().forward(flat_input)
            output = torch.clamp(output, self.clip_low, self.clip_high)
            return output
class MlpPolicy(Mlp, Policy):
    """
    A simpler interface for creating policies.
    """

    def __init__(
            self,
            *args,
            obs_normalizer: TorchFixedNormalizer = None,
            **kwargs
    ):
        # Framework hook: records ctor locals() for serialization.
        self.save_init_params(locals())
        super().__init__(*args, **kwargs)
        self.obs_normalizer = obs_normalizer

    def forward(self, obs, **kwargs):
        # Optionally normalize observations before the MLP forward pass.
        if self.obs_normalizer:
            obs = self.obs_normalizer.normalize(obs)
        return super().forward(obs, **kwargs)

    def get_action(self, obs_np):
        # Batch of one: add a leading axis, then strip it from the result.
        # Second tuple element is the (empty) agent-info dict of the Policy API.
        actions = self.get_actions(obs_np[None])
        return actions[0, :], {}

    def get_actions(self, obs):
        # eval_np handles the numpy <-> torch round trip (framework helper).
        return self.eval_np(obs)
class CompositeNormalizedMlpPolicy(MlpPolicy):
    """MlpPolicy that normalizes observations with a CompositeNormalizer
    (instead of the fixed observation normalizer of the base class)."""

    def __init__(
            self,
            *args,
            composite_normalizer: CompositeNormalizer = None,
            **kwargs
    ):
        assert composite_normalizer is not None
        # Framework hook: records ctor locals() for serialization.
        self.save_init_params(locals())
        super().__init__(*args, **kwargs)
        self.composite_normalizer = composite_normalizer

    def forward(self, obs, **kwargs):
        # Normalize observations only (second slot of normalize_all unused).
        if self.composite_normalizer:
            obs, _ = self.composite_normalizer.normalize_all(obs, None)
        return super().forward(obs, **kwargs)
class TanhMlpPolicy(MlpPolicy):
    """
    A helper class since most policies have a tanh output activation.
    """

    def __init__(self, *args, **kwargs):
        # Framework hook: records ctor locals() for serialization.
        self.save_init_params(locals())
        # Force tanh output so actions are bounded to (-1, 1).
        super().__init__(*args, output_activation=torch.tanh, **kwargs)
class ActivationLoggingWrapper:
    """
    Logs activations to a list

    NOTE(review): the class body is empty (docstring only) — this appears to
    be an unfinished stub; confirm against upstream before relying on it.
    """
|
from django.views.generic import TemplateView
from apps.utils.led import blink
from apps.utils.geo_ip import get_location
class ColorView(TemplateView):
    """Renders the colour page; each GET also blinks the LED on GPIO pin 17."""
    template_name = 'art_app/index.jade'

    def get(self, request, *args, **kwargs):
        # Side effect first: flash the LED, then serve the page as usual.
        blink(17)
        return super(ColorView, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(ColorView, self).get_context_data(**kwargs)
        context.update({
            'color': kwargs.get('color'),
            'location': get_location(),
        })
        return context
|
import logging
import threading
import http
from flask import Flask, request, jsonify
import urllib
from waitress import serve
from bot import BotExide
import core
from core.vk_event_processor import extract_events
from services.vk_service import VkService
from models.Command import Command
def run_api_server(bot: BotExide, vk_service: VkService):
    """Start a background HTTP API (Flask served by waitress) that bridges
    incoming VK callbacks and raw messages onto the Discord bot's asyncio loop.

    The Flask handlers run on waitress worker threads; all bot interaction is
    scheduled onto `bot.loop` via create_task, never called directly.
    """
    app = Flask(__name__)
    app.config["DEBUG"] = True

    @app.route('/vk', methods=['POST'])
    def vk():
        # VK callback endpoint: translate VK events into bot commands/messages.
        raw_events = request.get_json()
        events = extract_events(raw_events)

        async def execute_in_loop():
            # Runs on the bot's event loop, not on the Flask worker thread.
            for event in events:
                discord_id = event.user.discord_id
                vk_id = event.user.vk_id
                user_name = event.user.name
                if (not discord_id):
                    # Only users who linked their Discord profile are relayed.
                    logging.info(f'Vk user id {vk_id} ({user_name}) has not bound discord profile')
                    continue
                [is_audio, audios] = event.try_get_audio()
                [is_media, medias] = event.try_get_media()
                if (is_audio):
                    command = Command.advanced('vk-play', audios)
                    await bot.execute_command(discord_id, command)
                if (is_media):
                    for media in medias:
                        await bot.send_message(media.url)
                        # NOTE(review): original indentation was ambiguous —
                        # attribution is sent per media item here; confirm it
                        # was not meant once per batch.
                        await bot.send_message(f"Memas from **{user_name}**")
        bot.loop.create_task(execute_in_loop())
        return ('', http.HTTPStatus.NO_CONTENT)

    @app.route('/message/raw', methods=['POST'])
    def message_raw():
        # Relay a raw JSON string body verbatim to the Discord channel.
        message = request.get_json()
        if (type(message) is not str):
            return ("String body was expected", http.HTTPStatus.BAD_REQUEST)

        async def execute_in_loop():
            await bot.send_message(message)
        bot.loop.create_task(execute_in_loop())
        return ('', http.HTTPStatus.NO_CONTENT)

    @app.route('/user', methods=['GET'])
    def users():
        # List members of the first (assumed only) guild the bot is in.
        members = bot.guilds[0].members
        json = list(map(lambda m: ({ 'name': m.name, 'id': m.id }), members))
        return jsonify(json)

    def run():
        # Serve with waitress on the host/port parsed from the listener URL.
        parsed = urllib.parse.urlsplit(core.env.discord_listener)
        serve(app, host=parsed.hostname, port=parsed.port)

    # Run the HTTP server on its own thread so the caller is not blocked.
    thread = threading.Thread(target=run)
    thread.start()
import logging
import multiprocessing
import multiprocessing.queues
import sys
import threading
import time
from six.moves import queue
from six import next
import numpy
from six.moves import queue
from smqtk.utils import SmqtkObject
__author__ = 'paul.tunison@kitware.com'
__all__ = [
'elements_to_matrix',
]
def elements_to_matrix(descr_elements, mat=None, procs=None, buffer_factor=2,
                       report_interval=None, use_multiprocessing=False,
                       thread_q_put_interval=0.001):
    """
    Add to or create a numpy matrix, adding to it the vector data contained in
    a sequence of DescriptorElement instances using asynchronous processing.
    If ``mat`` is provided, its shape must equal:
        ( len(descr_elements) , descr_elements[0].size )
    :param descr_elements: Sequence of DescriptorElement objects to transform
        into a matrix. Each element should contain descriptor vectors of the
        same size.
    :type descr_elements:
        collections.Sequence[smqtk.representation.DescriptorElement] |
        collections.Iterable[smqtk.representation.DescriptorElement]
    :param mat: Optionally a pre-constructed numpy matrix of the shape
        ``(nDescriptors, nFeatures)`` to load descriptor vectors into. We will
        only iterate ``nDescriptors`` into the given ``descr_elements``
        iterable. If there are more rows in the given matrix than there are
        DescriptorElements in ``descr_elements``, then not all rows in the
        given matrix will be set. Elements yielded by ``descr_elements`` must
        be of the same dimensionality as this given matrix (``nFeatures``)
        otherwise an exception will be raised (``ValueError``, by numpy).
        If this is not supplied, we create a new matrix to insert vectors into
        based on the number of input descriptor elements. This mode required
        that the input elements are in a container that defines __len__
    :type mat: None | numpy.ndarray
    :param procs: Optional specification of the number of threads/cores to use.
        If None, we will attempt to use all available threads/cores.
    :type procs: None | int | long
    :param buffer_factor: Multiplier against the number of processes used to
        limit the growth size of the result queue coming from worker processes.
    :type buffer_factor: float
    :param report_interval: Optional interval in seconds for debug logging to
        occur reporting about conversion speed. This should be greater than 0
        if this debug logging is desired.
    :type report_interval: None | float
    :param use_multiprocessing: Whether or not to use discrete processes as the
        parallelization agent vs python threads.
    :type use_multiprocessing: bool
    :param thread_q_put_interval: Interval at worker threads attempt to insert
        values into the output queue after fetching vector from a
        DescriptorElement. This is for dead-lock protection due to size-limited
        output queue. This is only used if ``use_multiprocessing`` is ``False``
        and this must be >0.
    :type thread_q_put_interval: float
    :return: Created or input matrix.
    :rtype: numpy.ndarray
    """
    log = logging.getLogger(__name__)
    # Create/check matrix
    if mat is None:
        # Peek at the first element to size/dtype the new matrix; this
        # requires descr_elements to support len().
        sample = next(iter(descr_elements))
        sample_v = sample.vector()
        shp = (len(descr_elements),
               sample_v.size)
        log.debug("Creating new matrix with shape: %s", shp)
        mat = numpy.ndarray(shp, sample_v.dtype)
    if procs is None:
        procs = multiprocessing.cpu_count()
    # Choose parallel types
    worker_kwds = {}
    if use_multiprocessing:
        queue_t = multiprocessing.Queue
        worker_t = _ElemVectorExtractorProcess
    else:
        queue_t = queue.Queue
        worker_t = _ElemVectorExtractorThread
        assert thread_q_put_interval >= 0, \
            "Thread queue.put interval must be >= 0. (given: %f)" \
            % thread_q_put_interval
        worker_kwds['q_put_interval'] = thread_q_put_interval
    in_q = queue_t()
    # Bounded output queue keeps memory in check while results are drained.
    out_q = queue_t(int(procs * buffer_factor))
    # Workers for async extraction
    log.debug("constructing worker processes")
    workers = [worker_t(i, in_q, out_q, **worker_kwds) for i in range(procs)]
    in_queue_t = _FeedQueueThread(descr_elements, in_q, mat, len(workers))
    try:
        # Start worker processes
        log.debug("starting worker processes")
        for w in workers:
            w.daemon = True
            w.start()
        log.debug("Sending work packets")
        in_queue_t.daemon = True
        in_queue_t.start()
        # Collect work from async
        log.debug("Aggregating async results")
        # Each worker emits a terminal None when its input is exhausted.
        terminals_collected = 0
        f = 0
        lt = t = time.time()
        while terminals_collected < len(workers):
            packet = out_q.get()
            if packet is None:
                terminals_collected += 1
            elif isinstance(packet, Exception):
                # Worker-side failure is re-raised in the caller's context.
                raise packet
            else:
                r, v = packet
                mat[r] = v
                f += 1
                if report_interval and time.time() - lt >= report_interval:
                    log.debug("Rows per second: %f, Total: %d",
                              f / (time.time() - t), f)
                    lt = time.time()
        # All work should be exhausted at this point
        if use_multiprocessing and sys.platform == 'darwin':
            # multiprocessing.Queue.qsize doesn't work on OSX
            # Try to get something from each queue, expecting an empty exception
            try:
                in_q.get(block=False)
            except multiprocessing.queues.Empty:
                pass
            else:
                raise AssertionError("In queue not empty")
            try:
                out_q.get(block=False)
            except multiprocessing.queues.Empty:
                pass
            else:
                raise AssertionError("Out queue not empty")
        else:
            assert in_q.qsize() == 0, "In queue not empty"
            assert out_q.qsize() == 0, "Out queue not empty"
        return mat
    finally:
        # Clean shutdown happens even when a worker exception was raised.
        log.debug("Stopping/Joining queue feeder thread")
        in_queue_t.stop()
        in_queue_t.join()
        if use_multiprocessing:
            # Forcibly terminate worker processes if still alive
            log.debug("Joining/Terminating process workers")
            for w in workers:
                if w.is_alive():
                    w.terminate()
                w.join()
            log.debug("Cleaning multiprocess queues")
            for q in (in_q, out_q):
                q.close()
                q.join_thread()
        else:
            log.debug("Stopping/Joining threaded workers")
            for w in workers:
                w.stop()
                # w.join()
            # Threads should exit fine from here
        log.debug("Done")
class _FeedQueueThread (SmqtkObject, threading.Thread):
    """Feeds (row, element) work packets into the input queue for workers.

    In-memory descriptor elements are written straight into ``out_mat``
    instead of being queued. After the input is exhausted (or on error),
    one terminal ``None`` packet is sent per worker.
    """

    def __init__(self, descr_elements, q, out_mat, num_terminal_packets):
        """
        :param descr_elements: iterable of DescriptorElement to feed
        :param q: input queue shared with the worker threads/processes
        :param out_mat: output matrix, written directly for in-memory elements
        :param num_terminal_packets: number of terminal None packets to emit
            (one per worker)
        """
        super(_FeedQueueThread, self).__init__()
        self.num_terminal_packets = num_terminal_packets
        self.out_mat = out_mat
        self.q = q
        self.descr_elements = descr_elements
        self._stop_event = threading.Event()

    def stop(self):
        """Request that the feeder thread exit at its next opportunity."""
        self._stop_event.set()

    def stopped(self):
        """Return True if a stop has been requested."""
        # MODERNIZED: Event.isSet() is a deprecated alias of is_set().
        return self._stop_event.is_set()

    def run(self):
        try:
            # Special case for in-memory storage of descriptors
            from smqtk.representation.descriptor_element.local_elements \
                import DescriptorMemoryElement
            for r, d in enumerate(self.descr_elements):
                # If we've run out of matrix to fill,
                if r >= self.out_mat.shape[0]:
                    break
                if isinstance(d, DescriptorMemoryElement):
                    # No I/O needed: write the vector directly.
                    self.out_mat[r] = d.vector()
                else:
                    self.q.put((r, d))
                # If we're told to stop, immediately quit out of processing
                if self.stopped():
                    break
        except KeyboardInterrupt:
            pass
        except Exception as ex:
            self._log.error("Feeder thread encountered an exception: %s",
                            str(ex))
            # Forward the exception through the queue so the consumer raises it.
            self.q.put(ex)
        finally:
            self._log.debug("Sending in-queue terminal packets")
            for _ in range(self.num_terminal_packets):
                self.q.put(None)
            self._log.debug("Closing in-queue")
class _ElemVectorExtractorProcess (SmqtkObject, multiprocessing.Process):
    """
    Helper process for extracting DescriptorElement vectors on a separate
    process. This terminates with a None packet fed to in_q. Otherwise, in_q
    values are expected to be (row, element) pairs. Tuples of the form
    (row, vector) are published to the out_q.
    Terminal value: None
    """

    def __init__(self, i, in_q, out_q):
        """
        :param i: worker index (used only for the process name)
        :param in_q: queue of (row, element) work packets; None terminates
        :param out_q: queue receiving (row, vector) results
        """
        super(_ElemVectorExtractorProcess, self)\
            .__init__(name='[w%d]' % i)
        self._log.debug("Making process worker (%d, %s, %s)", i, in_q, out_q)
        self.i = i
        self.in_q = in_q
        self.out_q = out_q

    def run(self):
        try:
            packet = self.in_q.get()
            while packet is not None:
                if isinstance(packet, Exception):
                    # Pass upstream failures through to the consumer.
                    self.out_q.put(packet)
                else:
                    row, elem = packet
                    v = elem.vector()
                    self.out_q.put((row, v))
                packet = self.in_q.get()
            # Echo the terminal packet so the aggregator can count us done.
            self.out_q.put(None)
        except KeyboardInterrupt:
            pass
        except Exception as ex:
            self._log.error("%s%s encountered an exception: %s",
                            self.__class__.__name__, self.name,
                            str(ex))
            self.out_q.put(ex)
class _ElemVectorExtractorThread (SmqtkObject, threading.Thread):
    """
    Helper thread for extracting DescriptorElement vectors on a separate
    thread. This terminates with a None packet fed to in_q. Otherwise, in_q
    values are expected to be (row, element) pairs. Tuples of the form
    (row, vector) are published to the out_q.
    Terminal value: None
    """

    def __init__(self, i, in_q, out_q, q_put_interval=0.001):
        """
        :param i: worker index (used only for the thread name)
        :param in_q: queue of (row, element) work packets; None terminates
        :param out_q: size-limited queue receiving (row, vector) results
        :param q_put_interval: timeout between out_q.put retries (dead-lock
            protection against the bounded output queue)
        """
        SmqtkObject.__init__(self)
        threading.Thread.__init__(self, name='[w%d]' % i)
        self._log.debug("Making thread worker (%d, %s, %s)", i, in_q, out_q)
        self.i = i
        self.in_q = in_q
        self.out_q = out_q
        self.q_put_interval = q_put_interval
        self._stop_event = threading.Event()

    def stop(self):
        """Request that this worker exit at its next opportunity."""
        self._stop_event.set()

    def stopped(self):
        """Return True if a stop has been requested."""
        # MODERNIZED: Event.isSet() is a deprecated alias of is_set().
        return self._stop_event.is_set()

    def run(self):
        try:
            packet = self.in_q.get()
            while packet is not None and not self.stopped():
                if isinstance(packet, Exception):
                    # Pass upstream failures through to the consumer.
                    self.out_q.put(packet)
                else:
                    row, elem = packet
                    v = elem.vector()
                    self.q_put((row, v))
                packet = self.in_q.get()
            # Echo the terminal packet so the aggregator can count us done.
            self.q_put(None)
        except KeyboardInterrupt:
            pass
        except Exception as ex:
            self._log.error("%s%s encountered an exception: %s",
                            self.__class__.__name__, self.name,
                            str(ex))
            self.out_q.put(ex)

    def q_put(self, val):
        """
        Try to put the given value into the output queue until it is inserted
        (if it was previously full), or the stop signal was given.
        """
        put = False
        while not put and not self.stopped():
            try:
                self.out_q.put(val, timeout=self.q_put_interval)
                put = True
            except queue.Full:
                # Queue still full; retry unless we've been told to stop.
                pass
|
# Read (and ignore) the declared sizes; the check uses the value lists only.
n1, n2 = map(int, input().split())
l1 = list(map(int, input().split()))
l2 = list(map(int, input().split()))
# PERF: set-based subset test is O(len(l1) + len(l2)); the previous
# `all(x in l1 for x in l2)` scanned l1 once per element of l2 (O(n*m)).
print("YES" if set(l2).issubset(l1) else "NO")
|
from main.page.purchase.pe_tx_payment_confirmation import *
from main.page.purchase.pe_tx_order_status import *
from main.page.purchase.pe_tx_receive_confirmation import *
from main.page.purchase.pe_tx_transaction_list import *
class paymentConfirmation():
    """Workflow helpers for the payment-confirmation page."""

    def goto_payment_confirmation(self, driver, site):
        # Open the page directly by URL.
        page = PaymentConfirmationPage(driver)
        page.open(site)

    def select_payment_confirmation(self, driver):
        # Navigate via the tab control instead of a direct URL.
        page = PaymentConfirmationPage(driver)
        page.select_tab_payment_confirmation()
class orderStatus():
    """Workflow helpers for the order-status page."""

    def goto_order_status(self, driver, site):
        # Open the page directly by URL.
        page = OrderStatusPage(driver)
        page.open(site)

    def select_order_status(self, driver):
        # Navigate via the tab control instead of a direct URL.
        page = OrderStatusPage(driver)
        page.select_tab_order_status()
class receiveConfirmation():
    """Workflow helpers for the receive-confirmation page."""

    def goto_receive_confirmation(self, driver, site):
        # Open the page directly by URL.
        page = ReceiveConfirmationPage(driver)
        page.open(site)

    def select_receive_confirmation(self, driver):
        # Navigate via the tab control instead of a direct URL.
        page = ReceiveConfirmationPage(driver)
        page.select_tab_receive_confirmation()
class transactionList():
    """Workflow helpers for the transaction-list page."""

    def goto_transaction_list(self, driver, site):
        # Open the page directly by URL.
        page = TransactionListPage(driver)
        page.open(site)

    def select_transaction_list(self, driver):
        # Navigate via the tab control instead of a direct URL.
        page = TransactionListPage(driver)
        page.select_tab_transaction_list()
|
import sqlite3
from flask import g
from config import Config
from app import app
from collections import OrderedDict
"""
HOW TO USE:
`from modules import database as db`
`db.getCursor().execute("INSERT_SQL_COMMAND_HERE")`
IF SELECT QUERY:
`db.getCursor().fetchall()`
OR IF INSERT OR UPDATE QUERY:
`db.getDB().commit()`
Example:
If you want all movies
`db.getCursor().execute("SELECT * FROM movie")
movies = db.getCursor().fetchall()`
"""
def getDB():
    """Return the request-scoped SQLite connection, creating it on first use.

    The connection is cached on flask.g so each request reuses one handle.
    """
    connection = getattr(g, '_database', None)
    if connection is None:
        connection = g._database = sqlite3.connect(Config.DATABASE_PATH)
    return connection
def getCursor():
    """Return a cursor for the request-scoped DB connection (cached on flask.g)."""
    connection = getDB()
    if not hasattr(g, 'cursor'):
        g.cursor = connection.cursor()
    return g.cursor
def getAllMovies():
    """Return a list of movie dicts from the movies/ratings join.

    Each dict carries budget, up to three genres (the "Adult" genre is
    filtered out), title, revenue, runtime, release date parts, award counts
    and the user rating. Movies with no remaining genre are skipped, and any
    NULL column value is normalised to 0.
    """
    getCursor().execute("SELECT * FROM movies INNER JOIN ratings ON movies.tconst=ratings.tconst")
    movies = getCursor().fetchall()
    movieList = []
    ignoreGenres = {"Adult"}
    for movie in movies:
        movieDict = {}
        movieDict['budget'] = movie[4]
        # BUG FIX: the genre loop variable was named `g`, shadowing the
        # module-level flask `g` import inside this function.
        genres = [genre for genre in movie[9].split(',')
                  if genre not in ignoreGenres]
        movieDict['genre1'] = genres[0] if len(genres) > 0 else ''
        movieDict['genre2'] = genres[1] if len(genres) > 1 else ''
        movieDict['genre3'] = genres[2] if len(genres) > 2 else ''
        movieDict['primaryTitle'] = movie[2]
        movieDict['revenue'] = movie[14]
        movieDict['runtimeMinutes'] = movie[8]
        movieDict['startYear'] = movie[12]
        movieDict['day'] = movie[11]
        movieDict['month'] = movie[10]
        movieDict['tconst'] = movie[0]
        movieDict['wins'] = movie[15]
        movieDict['nominations'] = movie[16]
        movieDict['userRating'] = movie[18]
        for key, value in movieDict.items():
            # NOTE(review): the original also had a `None and isinstance(value,
            # str)` branch mapping to '' — unreachable, since None is never a
            # str. Every NULL therefore becomes 0, which is preserved here.
            if value is None:
                movieDict[key] = 0
        if movieDict['genre1'] != '':
            movieList.append(movieDict)
    return movieList
@app.teardown_appcontext
def close_connection(exception):
    """Close the request-scoped DB connection when the app context tears down."""
    connection = getattr(g, '_database', None)
    if connection is not None:
        connection.close()
import googlemaps
from googlemaps.convert import decode_polyline
from datetime import datetime
from config import Config
import json
from math import ceil
from pprint import pprint
class Car(object):
    """Static description of the vehicle: tank size, efficiency and fuel state."""
    # In gallons
    tank_capacity = Config.Car.tank_capacity
    # MPG
    fuel_efficiency = Config.Car.fuel_efficiency
    # Fraction of the tank currently filled (1 == full).
    fill_level = 1
    # BUG FIX: the name `Car` is not bound yet while the class body executes,
    # so `Car.tank_capacity` raised NameError here; reference the attribute by
    # its class-body-local name instead.
    refill_level = tank_capacity * .25
# Module-level script body (Python 2): build the Maps client and fetch one
# driving route at import time.
gmaps = googlemaps.Client(key=Config.api_key)
# NOTE(review): `transit_mode` is passed although a driving route is requested;
# presumably `mode="driving"` was intended — confirm against the googlemaps
# client documentation.
directions = gmaps.directions("105 Seth Way, Georgetown, KY, USA", "Los Angeles, CA, USA", transit_mode="driving",
                              departure_time=datetime.now())
if len(directions) == 0:
    print "Received nothing!"
    exit()
# A directions result with no intermediate waypoints has exactly one leg.
legs = directions[0]['legs']
print "Total distance from {beginning} to {end} is {distance} with {num_waypoints} waypoints inbetween".format(
    beginning=legs[0]["start_address"],
    end=legs[0]["end_address"],
    distance=legs[0]["distance"]["text"],
    num_waypoints=len(legs[0]["steps"]))
def meters_to_miles(meters):
    """Convert a distance in meters to miles, rounded to 3 decimal places."""
    MILES_PER_METER = 0.00062137
    return round(meters * MILES_PER_METER, 3)
def calculate_gas_used(distance, car):
    """Gallons of fuel consumed covering `distance` miles with `car`.

    :param distance: distance travelled, in miles
    :type car: Car
    :param car: object exposing a `fuel_efficiency` attribute (MPG)
    :return: gallons used, rounded to 3 decimal places
    """
    gallons = distance / car.fuel_efficiency
    return round(gallons, 3)
# Walk the route steps, tracking cumulative fuel consumption.
waypoints = legs[0]["steps"]
gas_used_for_waypoints = []
for waypoint in waypoints:
    print "Waypoint: {}".format(waypoint)
    miles_for_leg = meters_to_miles(waypoint["distance"]["value"])
    # NOTE: the Car *class* itself (not an instance) is used and mutated here.
    gas_used = calculate_gas_used(miles_for_leg, Car)
    Car.fill_level = round(Car.fill_level - (gas_used / Car.tank_capacity), 3)
    print "{} miles, {} gallons ({} gallons left)".format(miles_for_leg, gas_used, Car.tank_capacity * Car.fill_level)
    gas_used_for_waypoints.append(gas_used)
# Downsample step 6's polyline to at most `sampling_size` points before
# asking the Roads API to snap it.
sampling_size = 100
long_path = googlemaps.convert.decode_polyline(waypoints[6]["polyline"]["points"])
if len(long_path) > sampling_size:
    long_path = [long_path[int(ceil(i * float(len(long_path)) / sampling_size))] for i in range(sampling_size)]
print "Total gas used: {} gallons".format(sum(gas_used_for_waypoints))
# pprint()
road_fit = gmaps.snap_to_roads(long_path, interpolate=True)
pprint(road_fit)
|
#!/usr/bin/env python
import sys, os, os.path, shlex, subprocess
from subprocess import Popen as execScript
from distutils.core import setup
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
# Package identity, substituted into the spec template and setup() metadata.
pkg_name = 'lcg-info-dynamic-scheduler-pbs'
pkg_version = '2.4.5'
pkg_release = '1'
# Items bundled into the source tarball handed to rpmbuild.
source_items = "config setup.py src"
class bdist_rpm(_bdist_rpm):
    """Custom bdist_rpm that builds the RPM by shelling out to rpmbuild.

    Lays out a private rpmbuild tree under the build base, tars the sources,
    renders the spec template with the package version/release, then runs
    `rpmbuild -ba` against it.
    """

    def run(self):
        # Standard rpmbuild directory layout under the distutils build base.
        topdir = os.path.join(os.getcwd(), self.bdist_base, 'rpmbuild')
        builddir = os.path.join(topdir, 'BUILD')
        srcdir = os.path.join(topdir, 'SOURCES')
        specdir = os.path.join(topdir, 'SPECS')
        rpmdir = os.path.join(topdir, 'RPMS')
        srpmdir = os.path.join(topdir, 'SRPMS')
        cmdline = "mkdir -p %s %s %s %s %s" % (builddir, srcdir, specdir, rpmdir, srpmdir)
        execScript(shlex.split(cmdline)).communicate()
        # Pack the source items into SOURCES/<pkg>.tar.gz.
        cmdline = "tar -zcf %s %s" % (os.path.join(srcdir, pkg_name + '.tar.gz'), source_items)
        execScript(shlex.split(cmdline)).communicate()
        # Render the spec template, substituting version and release.
        # Fix: use a context manager so the spec file handle is closed even if
        # the sed subprocess fails (the original leaked it on error).
        cmdline = "sed -e 's|@PKGVERSION@|%s|g' -e 's|@PKGRELEASE@|%s|g' project/%s.spec.in" % (pkg_version, pkg_release, pkg_name)
        with open(os.path.join(specdir, pkg_name + '.spec'), 'w') as specOut:
            execScript(shlex.split(cmdline), stdout=specOut, stderr=sys.stderr).communicate()
        # Build binary and source RPMs against the private topdir.
        cmdline = "rpmbuild -ba --define '_topdir %s' %s.spec" % (topdir, os.path.join(specdir, pkg_name))
        execScript(shlex.split(cmdline)).communicate()
# Scripts installed into /usr/libexec by the package.
libexec_list = [
    "src/info-dynamic-pbs",
    "src/lrmsinfo-pbs",
    "src/vomaxjobs-maui"
]

# Package metadata; the custom bdist_rpm command above replaces the stock
# distutils RPM build with a direct rpmbuild invocation.
setup(
    name='lcg-info-dynamic-scheduler-pbs',
    version=pkg_version,
    description='Plugins for the lcg-info-dynamic-scheduler GIP plugin',
    long_description='''Plugins for the lcg-info-dynamic-scheduler GIP plugin. The two
plugins here are for Maui (scheduler) and PBS/Torque (LRMS).''',
    license='Apache Software License',
    author_email='CREAM group <cream-support@lists.infn.it>',
    packages=['TorqueInfoUtils'],
    package_dir = {'': 'src'},
    data_files=[
        ('usr/libexec', libexec_list)
    ],
    cmdclass={'bdist_rpm': bdist_rpm}
)
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import re
# Build a lookup of (year, candidate-name) -> count from the minister CSV.
# Fix: the original opened the file without ever closing it; use `with` and
# iterate the file object directly instead of readlines().
key_data = ()
election_data = {}
with open("minister_exp_count.csv", "r") as f1:
    for line in f1:
        split_data = line.split(',')
        # Strip trailing "_<digit>" disambiguators from the name column and
        # the newline from the count column.
        split_data[0] = re.sub(r'_[0-9]', "", split_data[0])
        split_data[2] = re.sub(r'\n', "", split_data[2])
        key_data = (split_data[0], split_data[1])
        # Keep only the first occurrence of each (year, name) key.
        if key_data not in election_data:
            election_data[key_data] = split_data[2]
# Join the labels onto each record of j_c.json (tab-separated label\tdata).
f = open("j_c.json", "r")
for line in f.readlines():
    label, dat = line[:-1].split('\t')
    split_data = dat.split(",")
    # `year` and `name` persist across fields of one record; both must be seen
    # before the "cand_last" field to build a valid key.
    # NOTE(review): if the very first record lacks a "flame" field, `year`
    # is undefined here and this raises NameError — confirm input format.
    for data in split_data:
        data = data.replace("\"", "")
        if "flame" in data:
            year = re.sub(r'{flame:', "", data)
            # Remap survey-wave codes to actual election years.
            if year == "y1985":
                year = '2000'
            if year == "y1990":
                year = '2005'
            if year == "y1995":
                year = '2010'
            if year == "y2000":
                year = '2015'
        if "cand_last" in data:
            name = re.sub(r'cand_last:', "", data)
            key_data = (year, name)
            # Emit the matched count, or 0 when the candidate is unknown.
            if key_data in election_data:
                label = election_data[key_data]
                print(str(label) + "\t" + dat)
            else:
                label = 0
                print(str(label) + "\t" + dat)
f.close()
|
import matplotlib.pyplot as plt
def main():
    """Render a pie chart of quarterly sales figures."""
    quarterly_sales = [100, 400, 300, 600]
    quarter_names = ['1st Qtr', '2nd Qtr', '3rd Qtr', '4th Qtr']
    plt.pie(quarterly_sales, labels=quarter_names)
    plt.title('Sales by Quarter')
    plt.show()

main()
#!/usr/bin/python
# Copyright 2014 Google.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List configs with scores.
"""
import argparse
import sys
import encoder
import optimizer
import pick_codec
import score_tools
def main():
    """Print every scored encoding for a (rate, videofile) pair, best last."""
    parser = argparse.ArgumentParser()
    parser.add_argument('rate')
    parser.add_argument('videofile')
    parser.add_argument("--codec")
    parser.add_argument('--component')
    parser.add_argument('--criterion', default='psnr')
    parser.add_argument('--show_result', action='store_true', default=False)
    args = parser.parse_args()
    videofile = encoder.Videofile(args.videofile)
    codec = pick_codec.PickCodec(args.codec)
    # Score with the chosen criterion (PSNR by default).
    my_optimizer = optimizer.Optimizer(codec,
                                       score_function=score_tools.PickScorer(args.criterion))
    bitrate = int(args.rate)
    encodings = my_optimizer.AllScoredEncodings(bitrate, videofile)
    # Ascending sort so the best-scoring configuration prints last.
    encodings.sort(key=my_optimizer.Score)
    for encoding in encodings:
        if args.component:
            # Show a single named component of the result dict.
            component = encoding.result[args.component]
        elif args.show_result:
            result = encoding.result.copy()
            del result['frame']  # per-frame data is too big to show
            component = str(result)
        else:
            component = ''
        print '%s %f %s %s' % (encoding.encoder.Hashname(),
                               my_optimizer.Score(encoding),
                               component,
                               encoding.encoder.parameters.ToString())

if __name__ == '__main__':
    # main() returns None, so the process always exits with status 0.
    sys.exit(main())
|
from math import pi
import torch
import numpy as np
from torch.nn import Module, Parameter, ModuleList
from torch import FloatTensor
from numpy.random import uniform
class DSVFCell(Module):
    """One time-step of a digital state-variable filter (SVF).

    The tuning coefficient G, the damping term twoR, and the high/band/low-pass
    mix gains are learnable scalar parameters, plus an overall master gain.
    """

    def __init__(self, G=0.5, twoR=1, hp_gain=0.0, bp_gain=0.0, lp_gain=1.0):
        super(DSVFCell, self).__init__()
        # Register each tunable scalar as a learnable Parameter, in the same
        # order as the constructor arguments.
        for name, value in (('G', G), ('twoR', twoR), ('hp_gain', hp_gain),
                            ('bp_gain', bp_gain), ('lp_gain', lp_gain)):
            setattr(self, name, Parameter(FloatTensor([value])))
        self.master_gain = Parameter(FloatTensor([1.0]))

    def forward(self, x, v):
        """Advance the filter one sample.

        x: (batch,) input sample; v: (batch, 2) filter state.
        Returns (y, v_next) with the mixed output and updated state.
        """
        coeff0, coeff1 = self.calc_coeffs()
        drive = x - v[:, 1]
        bp_out = coeff1 * drive + coeff0 * v[:, 0]
        lp_out = self.G * bp_out + v[:, 1]
        hp_out = x - lp_out - self.twoR * bp_out
        # Trapezoidal state update: v' = 2*[bp, lp] - v.
        doubled = torch.cat([(2 * bp_out).unsqueeze(-1), (2 * lp_out).unsqueeze(-1)], dim=-1)
        v_next = doubled - v
        mix = self.hp_gain * hp_out + self.bp_gain * self.twoR * bp_out + self.lp_gain * lp_out
        return self.master_gain * mix, v_next

    def init_states(self, size):
        """Return an all-zero (size, 2) state on the parameters' device."""
        return torch.zeros(size, 2).to(next(self.parameters()).device)

    def calc_coeffs(self):
        """Clamp parameters into their stable ranges and derive step coefficients."""
        self.G.data = torch.clamp(self.G, min=1e-8)
        self.twoR.data = torch.clamp(self.twoR, min=0)
        self.bp_gain.data = torch.clamp(self.bp_gain, min=-1)
        self.hp_gain.data = torch.clamp(self.hp_gain, min=-1, max=1)
        self.lp_gain.data = torch.clamp(self.lp_gain, min=-1, max=1)
        coeff0 = 1.0 / (1.0 + self.G * (self.G + self.twoR))
        return coeff0, self.G * coeff0
class DSVF(Module):
    """Digital state-variable filter: iterates a DSVFCell over a sequence.

    Input shape (batch, time, 1); output shape (batch, time, 1). When initial
    states are supplied, the final states are returned alongside the output.
    """

    def __init__(self, G=0.5, twoR=1, hp_gain=1.0, bp_gain=1.0, lp_gain=1.0):
        super(DSVF, self).__init__()
        self.cell = DSVFCell(G=G, twoR=twoR, hp_gain=hp_gain, bp_gain=bp_gain, lp_gain=lp_gain)

    def forward(self, input, initial_states=None):
        batch = input.shape[0]
        steps = input.shape[1]
        stateful = initial_states is not None
        states = initial_states if stateful else self.cell.init_states(batch)
        out_sequence = torch.zeros(input.shape[:-1]).to(input.device)
        for t in range(steps):
            out_sequence[:, t], states = self.cell(input[:, t].view(-1), states)
        out_sequence = out_sequence.unsqueeze(-1)
        # Only echo the states back when the caller manages them explicitly.
        return (out_sequence, states) if stateful else out_sequence
|
from django.db import models
# Create your models here.
from django.db import models
from django.db.models import *
# Create your models here.
class order(models.Model):
    """A purchase order for a fund or stock.

    Fix: `__str__` referenced self.funds_id / self.funds_name, which do not
    exist on this model and raised AttributeError; it now uses real fields.
    Also removed `max_length` from IntegerField/FloatField — that option is
    only valid on character fields and triggers Django system-check warnings.
    """
    user_id = IntegerField(verbose_name='用户Id')
    fian_code = CharField(max_length=1042, verbose_name='股票或基金的代码')
    fian_price = FloatField(verbose_name='订单支付的钱')
    fian_type = IntegerField(verbose_name='基金或股票的类型', default=0)
    order_status = IntegerField(verbose_name='订单的状态')
    created_time = models.DateTimeField(auto_now_add=True)
    updated_time = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'order'

    def __str__(self):
        return '{0}_{1}'.format(self.user_id, self.fian_code)
|
# Scratch cell: str.find returns the index of the first occurrence (or -1).
# All results below are computed and immediately discarded.
country = "Bangladesh"
country.find("Ban")     # 0
country.find("Bangla")  # 0
country.find("Ban")     # 0
country.find("desh")    # 6
import gzip
import pathlib
import sys
import numpy as np
import pytest
import torch
from datasets_utils import make_fake_flo_file, make_tar
from torchdata.datapipes.iter import FileOpener, TarArchiveLoader
from torchvision.datasets._optical_flow import _read_flo as read_flo_ref
from torchvision.datasets.utils import _decompress
from torchvision.prototype.datasets.utils import Dataset, GDriveResource, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import fromfile, read_flo
@pytest.mark.filterwarnings("error:The given NumPy array is not writeable:UserWarning")
@pytest.mark.parametrize(
    ("np_dtype", "torch_dtype", "byte_order"),
    [
        (">f4", torch.float32, "big"),
        ("<f8", torch.float64, "little"),
        ("<i4", torch.int32, "little"),
        (">i8", torch.int64, "big"),
        ("|u1", torch.uint8, sys.byteorder),
    ],
)
@pytest.mark.parametrize("count", (-1, 2))
@pytest.mark.parametrize("mode", ("rb", "r+b"))
def test_fromfile(tmpdir, np_dtype, torch_dtype, byte_order, count, mode):
    """fromfile must agree with np.fromfile for full and partial reads."""
    path = tmpdir / "data.bin"
    rng = np.random.RandomState(0)
    num_values = 5 if count == -1 else count + 1
    rng.randn(num_values).astype(np_dtype).tofile(path)
    for read_count in (-1, count // 2):
        # Strip the byte-order prefix so the reference array is native-endian.
        reference = np.fromfile(path, dtype=np_dtype, count=read_count).astype(np_dtype[1:])
        expected = torch.from_numpy(reference)
        with open(path, mode) as fh:
            actual = fromfile(fh, dtype=torch_dtype, byte_order=byte_order, count=read_count)
        torch.testing.assert_close(actual, expected)
def test_read_flo(tmpdir):
    """read_flo must match the reference torchvision _read_flo parser."""
    flo_path = tmpdir / "test.flo"
    make_fake_flo_file(3, 4, flo_path)
    with open(flo_path, "rb") as fh:
        actual = read_flo(fh)
    reference = read_flo_ref(flo_path).astype("f4", copy=False)
    torch.testing.assert_close(actual, torch.from_numpy(reference))
class TestOnlineResource:
    """Tests for OnlineResource.load()/download() over files, folders, archives."""

    class DummyResource(OnlineResource):
        # Minimal concrete OnlineResource whose download step is injectable.
        def __init__(self, download_fn=None, **kwargs):
            super().__init__(**kwargs)
            self._download_fn = download_fn

        def _download(self, root):
            if self._download_fn is None:
                raise pytest.UsageError(
                    "`_download()` was called, but `DummyResource(...)` was constructed without `download_fn`."
                )
            return self._download_fn(self, root)

    def _make_file(self, root, *, content, name="file.txt"):
        # Helper: write a small text file under `root` and return its path.
        file = root / name
        with open(file, "w") as fh:
            fh.write(content)
        return file

    def _make_folder(self, root, *, name="folder"):
        # Helper: a folder with two files plus a subfolder with one file.
        # Returns (folder, {absolute-path: content}).
        folder = root / name
        subfolder = folder / "subfolder"
        subfolder.mkdir(parents=True)
        files = {}
        for idx, root in enumerate([folder, folder, subfolder]):
            content = f"sentinel{idx}"
            file = self._make_file(root, name=f"file{idx}.txt", content=content)
            files[str(file)] = content
        return folder, files

    def _make_tar(self, root, *, name="archive.tar", remove=True):
        # Helper: tar up a generated folder. Returns (archive, {member-path: content}).
        folder, files = self._make_folder(root, name=name.split(".")[0])
        archive = make_tar(root, name, folder, remove=remove)
        files = {str(archive / pathlib.Path(file).relative_to(root)): content for file, content in files.items()}
        return archive, files

    def test_load_file(self, tmp_path):
        # Loading a plain file yields a single (path, buffer) pair.
        content = "sentinel"
        file = self._make_file(tmp_path, content=content)
        resource = self.DummyResource(file_name=file.name)
        dp = resource.load(tmp_path)
        assert isinstance(dp, FileOpener)
        data = list(dp)
        assert len(data) == 1
        path, buffer = data[0]
        assert path == str(file)
        assert buffer.read().decode() == content

    def test_load_folder(self, tmp_path):
        # Loading a folder yields every file in it, recursively.
        folder, files = self._make_folder(tmp_path)
        resource = self.DummyResource(file_name=folder.name)
        dp = resource.load(tmp_path)
        assert isinstance(dp, FileOpener)
        assert {path: buffer.read().decode() for path, buffer in dp} == files

    def test_load_archive(self, tmp_path):
        # Loading a tar archive yields its members through TarArchiveLoader.
        archive, files = self._make_tar(tmp_path)
        resource = self.DummyResource(file_name=archive.name)
        dp = resource.load(tmp_path)
        assert isinstance(dp, TarArchiveLoader)
        assert {path: buffer.read().decode() for path, buffer in dp} == files

    def test_priority_decompressed_gt_raw(self, tmp_path):
        # We don't need to actually compress here. Adding the suffix is sufficient
        self._make_file(tmp_path, content="raw_sentinel", name="file.txt.gz")
        file = self._make_file(tmp_path, content="decompressed_sentinel", name="file.txt")
        resource = self.DummyResource(file_name=file.name)
        dp = resource.load(tmp_path)
        path, buffer = next(iter(dp))
        assert path == str(file)
        assert buffer.read().decode() == "decompressed_sentinel"

    def test_priority_extracted_gt_decompressed(self, tmp_path):
        # When both the archive and its extracted folder exist, prefer the folder.
        archive, _ = self._make_tar(tmp_path, remove=False)
        resource = self.DummyResource(file_name=archive.name)
        dp = resource.load(tmp_path)
        # If the archive had been selected, this would be a `TarArchiveReader`
        assert isinstance(dp, FileOpener)

    def test_download(self, tmp_path):
        # load() must fall back to downloading when the file is absent.
        download_fn_was_called = False

        def download_fn(resource, root):
            nonlocal download_fn_was_called
            download_fn_was_called = True
            return self._make_file(root, content="_", name=resource.file_name)

        resource = self.DummyResource(
            file_name="file.txt",
            download_fn=download_fn,
        )
        resource.load(tmp_path)
        assert download_fn_was_called, "`download_fn()` was never called"

    # This tests the `"decompress"` literal as well as a custom callable
    @pytest.mark.parametrize(
        "preprocess",
        [
            "decompress",
            lambda path: _decompress(str(path), remove_finished=True),
        ],
    )
    def test_preprocess_decompress(self, tmp_path, preprocess):
        # Downloaded .gz files are decompressed and the suffix is stripped.
        file_name = "file.txt.gz"
        content = "sentinel"

        def download_fn(resource, root):
            file = root / resource.file_name
            with gzip.open(file, "wb") as fh:
                fh.write(content.encode())
            return file

        resource = self.DummyResource(file_name=file_name, preprocess=preprocess, download_fn=download_fn)
        dp = resource.load(tmp_path)
        data = list(dp)
        assert len(data) == 1
        path, buffer = data[0]
        assert path == str(tmp_path / file_name).replace(".gz", "")
        assert buffer.read().decode() == content

    def test_preprocess_extract(self, tmp_path):
        # Downloaded archives are extracted; member paths lose the archive suffix.
        files = None

        def download_fn(resource, root):
            nonlocal files
            archive, files = self._make_tar(root, name=resource.file_name)
            return archive

        resource = self.DummyResource(file_name="folder.tar", preprocess="extract", download_fn=download_fn)
        dp = resource.load(tmp_path)
        assert files is not None, "`download_fn()` was never called"
        assert isinstance(dp, FileOpener)
        actual = {path: buffer.read().decode() for path, buffer in dp}
        expected = {
            path.replace(resource.file_name, resource.file_name.split(".")[0]): content
            for path, content in files.items()
        }
        assert actual == expected

    def test_preprocess_only_after_download(self, tmp_path):
        # preprocess must be skipped entirely when the file already exists.
        file = self._make_file(tmp_path, content="_")

        def preprocess(path):
            raise AssertionError("`preprocess` was called although the file was already present.")

        resource = self.DummyResource(
            file_name=file.name,
            preprocess=preprocess,
        )
        resource.load(tmp_path)
class TestHttpResource:
    """Tests for HttpResource.resolve() redirect handling."""

    def test_resolve_to_http(self, mocker):
        """An http->https redirect keeps the resource an HttpResource and
        carries the file name, checksum, and preprocess callable over."""
        name = "data.tar"
        source_url = f"http://downloads.pytorch.org/{name}"
        target_url = source_url.replace("http", "https")
        sha256_sentinel = "sha256_sentinel"

        def preprocess_sentinel(path):
            return path

        resource = HttpResource(
            source_url,
            sha256=sha256_sentinel,
            preprocess=preprocess_sentinel,
        )
        mocker.patch(
            "torchvision.prototype.datasets.utils._resource._get_redirect_url",
            return_value=target_url,
        )
        resolved = resource.resolve()
        assert isinstance(resolved, HttpResource)
        assert resolved.url == target_url
        assert resolved.file_name == name
        assert resolved.sha256 == sha256_sentinel
        assert resolved._preprocess is preprocess_sentinel

    def test_resolve_to_gdrive(self, mocker):
        """A redirect to Google Drive converts the resource into a
        GDriveResource, extracting the file id from the redirect URL."""
        name = "data.tar"
        source_url = f"http://downloads.pytorch.org/{name}"
        id_sentinel = "id-sentinel"
        target_url = f"https://drive.google.com/file/d/{id_sentinel}/view"
        sha256_sentinel = "sha256_sentinel"

        def preprocess_sentinel(path):
            return path

        resource = HttpResource(
            source_url,
            sha256=sha256_sentinel,
            preprocess=preprocess_sentinel,
        )
        mocker.patch(
            "torchvision.prototype.datasets.utils._resource._get_redirect_url",
            return_value=target_url,
        )
        resolved = resource.resolve()
        assert isinstance(resolved, GDriveResource)
        assert resolved.id == id_sentinel
        assert resolved.file_name == name
        assert resolved.sha256 == sha256_sentinel
        assert resolved._preprocess is preprocess_sentinel
def test_missing_dependency_error():
    """Constructing a Dataset whose dependency is not installed must raise."""

    class BrokenDataset(Dataset):
        def __init__(self):
            super().__init__(root="root", dependencies=("fake_dependency",))

        def _resources(self):
            pass

        def _datapipe(self, resource_dps):
            pass

        def __len__(self):
            pass

    with pytest.raises(ModuleNotFoundError, match="depends on the third-party package 'fake_dependency'"):
        BrokenDataset()
|
# In an actual website there will be more files: images, css, javascript, html.
from flask import Flask, render_template

app = Flask(__name__)


@app.route('/')
def index():  # fix: the original `def index:` was a syntax error (missing parens)
    # The view function's return value is the server response, so raw HTML
    # works here — but templates are preferred over inline HTML in Flask.
    return "<h2>Homepage</h2>"  # fix: `retrun` typo


@app.route('/profile/<name>')  # profile page, capturing <name> from the URL
def profile(name):
    # The template controls layout; pass it the `name` variable to display.
    return render_template("profile.html", name=name)


if __name__ == "__main__":
    app.run()  # fix: `app.run` was referenced but never called
<title>Welcome to thenewboston</title>
<h1>Hey there {{name}} </h1> #to use a variable, put it between double curly braces; it gets passed through the template
#flask generates the html for us: it uses profile.html, throws in the variables, and returns it as the server response
{% if user %}# only displayed when the user is logged in
<h1>Hello {{user}}</h1>
{% else %}
<h1>Please log in</h1>
{% endif %}
#content of the html is dependent on whatever variables are passed in
PASSING OBJECTS INTO TEMPLATES
from flask import Flask, render_template

app = Flask(__name__)


@app.route("/shopping")
def shopping():
    # The shopping template gets access to `food`, a plain Python list.
    food = ["cheese", "tuna", "beef"]
    return render_template("shopping.html", food=food)


if __name__ == "__main__":
    app.run()  # fix: `app.run` was referenced but never called
html
<ul>
{% for item in food %} #don't read this as html — interpret it as python and flask;
#the loop runs once per list item (three times here)
<li>{{item}}</li>
{% endfor %}#signifies the end of the loop
</ul>
|
#__date__ = 6/14/18
#__time__ = 4:09 PM
#__author__ = isminilourentzou
import lib
import gpustat
import numpy as np
import torch
from torchtext import data
import csv
#https://stackoverflow.com/questions/36352300/python-compute-average-of-n-th-elements-in-list-of-lists-with-different-lengths
def mean_list(a):
    """Column-wise means over a list of ragged lists.

    Column i averages the i-th element of every sublist long enough to have
    one; the result has the length of the longest sublist.
    """
    width = max(len(row) for row in a)
    columns = ([row[col] for row in a if len(row) > col] for col in range(width))
    return [np.mean(col_values) for col_values in columns]
def indices2words(indices, vocab, remove_pad=False):
    """Map sequences of vocabulary indices back to word lists.

    :param indices: iterable of index sequences
    :param vocab: vocabulary exposing `itos` (index -> string)
    :param remove_pad: drop '<pad>' tokens when True
    :return: list of word lists, one per input sequence
    """
    results = []
    for sequence in indices:
        words = [vocab.itos[idx] for idx in sequence]
        if remove_pad:
            words = [word for word in words if word != '<pad>']
        results.append(words)
    return results
def choose_selftraining_method(strategy, dataset, model, opt):
    """Return the self-training sampling strategy selected by `strategy`.

    Fix: the original built every strategy object eagerly on each call (and
    always constructed the RandomSampling default) just to pick one of them;
    the dict now holds factories so only the chosen strategy is instantiated.

    :param strategy: one of 'rs', 'us', 'bald', 'ds', 'st'; unknown values
        fall back to random sampling
    :return: the instantiated strategy object
    """
    factories = {
        'rs': lambda: lib.active_learning.RandomSampling(dataset, opt),
        'us': lambda: lib.active_learning.UncertaintySampling(dataset, opt, model=model, method='entropy', inverse=True),
        'bald': lambda: lib.active_learning.BALD(dataset, opt, model=model, nb_MC_samples=5, inverse=True),
        'ds': lambda: lib.active_learning.DiversitySampling(dataset, opt, model=model, inverse=True),
        'st': lambda: lib.active_learning.SelfTraining(dataset, opt, model=model),
    }
    return factories.get(strategy, factories['rs'])()
def choose_al_method(strategy, dataset, model, opt):
    """Return the active-learning sampling strategy selected by `strategy`.

    Fix: the original built every strategy object eagerly on each call (and
    always constructed the RandomSampling default) just to pick one of them;
    the dict now holds factories so only the chosen strategy is instantiated.

    :param strategy: one of 'rs', 'us', 'bald', 'nb', 'ds'; unknown values
        fall back to random sampling
    :return: the instantiated strategy object
    """
    factories = {
        'rs': lambda: lib.active_learning.RandomSampling(dataset, opt),
        'us': lambda: lib.active_learning.UncertaintySampling(dataset, opt, model=model, method='entropy'),
        'bald': lambda: lib.active_learning.BALD(dataset, opt, model=model, nb_MC_samples=5),
        'nb': lambda: lib.active_learning.NextBatch(dataset, opt),
        'ds': lambda: lib.active_learning.DiversitySampling(dataset, opt, model=model),
    }
    return factories.get(strategy, factories['rs'])()
def save_predictions(save_pred, pred, labels, tokens, binary=False):
    """Write predictions to a tab-separated file with a header row.

    Fix: the non-binary branch re-opened `save_pred` in 'w' mode while the
    outer handle was still open, truncating and double-opening the same file;
    a single handle is used now, with identical final file contents.

    :param save_pred: output path
    :param pred, labels, tokens: parallel sequences; for binary tasks each is
        one value per example, for sequence tasks one list per sentence
    :param binary: True for one-line-per-example output; False writes one line
        per token with a blank line between sentences (CoNLL-style)
    """
    with open(save_pred, 'w') as out:
        out.write("pred\ttarget\tsent\n")
        if binary:
            for prediction, label, sent in zip(pred, labels, tokens):
                out.write("{}\t{}\t{}\n".format(prediction, label, sent))
            out.write('\n')
        else:
            for prediction, label, sent in zip(pred, labels, tokens):
                for p, l, s in zip(prediction, label, sent):
                    if s != '<pad>':
                        out.write("{}\t{}\t{}\n".format(p, l, s))
                # Blank line separates sentences.
                out.write('\n')
def show_memusage(device=0):
    """Print and return the used GPU memory (in MB) for the given device index."""
    stats = gpustat.GPUStatCollection.new_query()
    gpu = stats.jsonify()["gpus"][device]
    print("Gpu memory used: {}/{} \n".format(gpu["memory.used"], gpu["memory.total"]))
    return gpu["memory.used"]
def save_results(csv_results, averageacc, averagef1, averageprec, averagerec, averagecount, strategy):
    """Write per-iteration metric averages to a CSV with a header row.

    Each row is (iteration, strategy, labeled-count, acc, f1, precision, recall),
    with iterations numbered from 1.
    """
    header = ['iteration', 'al', 'examples', 'acc', 'f1', 'prec', 'rec']
    metric_rows = zip(averagecount, averageacc, averagef1, averageprec, averagerec)
    with open(csv_results, "w") as output:
        writer = csv.writer(output, lineterminator='\n')
        writer.writerow(header)
        for iteration, (count, acc, f1, prec, rec) in enumerate(metric_rows, start=1):
            writer.writerow([iteration, strategy, count, acc, f1, prec, rec])
def getState(dataset, unlabeled_entries, classifier):
    """Build the state used by the active-learning policy.

    Returns (candidate representations, summed labeled-set representation,
    per-candidate predictions, per-candidate confidences); entries already in
    the labeled training set are zeroed out.
    """
    #TODO: this won't scale well - split into batches
    #TODO: make appropriate state for binary and seq.labeling
    # One big batch spanning the whole unlabeled pool.
    diter = data.BucketIterator(dataset=unlabeled_entries, batch_size=len(unlabeled_entries), repeat=False, shuffle=False,
                                device=torch.device("cuda:"+str(classifier.opt.gpu) if classifier.opt.cuda else "cpu"))
    candidates = list(diter)[0]
    if isinstance(classifier, torch.nn.Module):
        _, scores, prediction = classifier(candidates)
        # Length-normalized confidence: geometric mean of the max class score.
        confidences = [o.max(0)[0].item() for o in scores]
        confidences = [pow(conf, 1. / len(y)) for conf, y in zip(confidences, prediction)]
    else:
        # Non-torch classifiers expose their own (x, y) conversion interface.
        _, confidences, prediction = classifier(*classifier.iter_to_xy(diter))
    labeledD = None
    if(len(dataset.train.examples) > 0):
        diter = data.BucketIterator(dataset=dataset.train, batch_size=len(dataset.train), repeat=False, shuffle=False,
                                    device=torch.device("cuda:"+str(classifier.opt.gpu) if classifier.opt.cuda else "cpu"))
        labeledD = list(diter)[0]
    cand = classifier.wordrepr(candidates)
    # Summed representation of the labeled set; zeros when nothing is labeled.
    # NOTE(review): `if labeledD` relies on the truthiness of a torchtext
    # Batch object — verify this behaves as intended.
    labeledD = classifier.wordrepr(labeledD).sum(0).unsqueeze(0) if labeledD else torch.zeros((1, cand.size(1), cand.size(2)))
    # Zero out pool entries that are already in the training set.
    labeled = [d.id for d in dataset.train.examples]
    for i, d in enumerate(unlabeled_entries.examples):
        if d.id in labeled:
            cand[i] = 0
            confidences[i] = 0
            prediction[i] = len(prediction[i])*[0]
    return cand.data.cpu().numpy(), labeledD.data.cpu().numpy(), prediction, confidences
def get_predictions(examples, fields, model, discard=False):
    """Run `model` over `examples` and return (predictions, golds) as word lists.

    Fix: the original `golds, preds = [], [], []` unpacked three values into
    two targets and raised ValueError whenever the torch branch executed.

    :param examples: list of torchtext Examples (may be empty)
    :param fields: torchtext fields matching the examples
    :param model: torch module, or an estimator exposing `iter_to_xy`
    :param discard: if True, drop sequences whose predicted tags are all
        'O' / '<pad>'
    :return: (predictions, golds); both empty when `examples` is empty
    """
    if len(examples) == 0:
        return [], []
    pseudo_example = data.Dataset(examples=examples, fields=fields)
    pseudo_example = data.BucketIterator(dataset=pseudo_example, batch_size=model.opt.batch_size, repeat=False, shuffle=False,
                                         device=torch.device("cuda:"+str(model.opt.gpu) if model.opt.cuda else "cpu"))
    if isinstance(model, torch.nn.Module):
        golds, preds = [], []
        for batch in pseudo_example:
            _, _, prediction = model(batch)
            y = lib.utils.indices2words(batch.labels.data.tolist(), model.wordrepr.tag_vocab)
            golds.extend(y)
            preds.extend(prediction)
    else:
        x, golds = model.iter_to_xy(pseudo_example)
        _, _, preds = model(x, golds)
    prediction = lib.utils.indices2words(preds, model.wordrepr.tag_vocab)
    if discard:
        # Keep only sequences with at least one non-trivial predicted tag.
        prediction = [seq for seq in prediction
                      if not all(tag in ('O', '<pad>') for tag in seq)]
    return prediction, golds
import sys
import os
import csv
import subprocess
# Controller: read an undirected edge list from the CSV given as argv[1] and
# spawn one vertex.py process per vertex with its neighbour list.
file_path = os.path.realpath(sys.argv[1])
# Adjacency map: vertex -> list of neighbours (both directions recorded).
dictionary = {}
# Fix: the original leaked the open file handle; use a context manager.
with open(file_path) as edge_file:
    reader = csv.reader(edge_file, delimiter=',', quotechar=' ')
    for row in reader:
        x = row[0]
        y = row[1]
        dictionary.setdefault(x, []).append(y)
        dictionary.setdefault(y, []).append(x)
for vertex in dictionary:
    neigh = ' '.join(dictionary[vertex])
    # CREATE_NEW_CONSOLE is Windows-only: each vertex gets its own console.
    subprocess.Popen(f'python ./vertex.py ./ {vertex} {neigh}', creationflags=subprocess.CREATE_NEW_CONSOLE)
# subprocess.Popen(f'python ./vertex.py ./ {vertex} {neigh}', shell=True)
# subprocess.call('python pi.txt', shell=True)
# Run: D:\Educational\dsg_single_faliure_recovery\src> python .\controller.py ..\data\facebook_data.csv
#!/usr/bin/env python
from tkinter import *
from tkinter import ttk
from tkinter import messagebox
from enum import Enum
import time
import datetime
import os
import csv
import cv2
import numpy as np
import PIL.Image, PIL.ImageTk
import busio
import board
import adafruit_amg88xx
import VL53L0X
##############################################################################
# Constants
##############################################################################
PROC_CYCLE = 50                     # processing cycle [msec]
DISTANCE_STANDARD = 50.0            # reference distance [cm]
DISTANCE_UPPER_LIMIT = 100.0        # upper distance limit [cm]
DISTANCE_LOWER_LIMIT = 30.0         # lower distance limit [cm]
THERMISTOR_CORR_STANDARD = 10.0     # reference thermistor temperature correction [deg C]
BODY_TEMP_STANDARD = 36.2           # reference body temperature [deg C]
TARGET_DIFF = 0.5                   # difference from the learning target [deg C]
LOG_PATH = './log_file/'            # directory where log CSVs are saved

# States of the periodic-processing state machine.
class CycleProcState(Enum):
    FACE_DETECTION = 0      # face detection
    THERMISTOR = 1          # thermistor temperature
    TEMPERATURE = 2         # infrared sensor temperature
    DUMMY = 3               # dummy / bookkeeping step
    MAKE_BODY_TEMP = 4      # body-temperature computation
    UPDATE_CSV = 5          # CSV update
    PAUSE = 6               # pause
##############################################################################
# クラス:Application
##############################################################################
class Application(ttk.Frame):
    def __init__(self, master=None):
        """Build the UI, initialise the camera and sensors, and start the
        periodic processing loop."""
        ttk.Frame.__init__(self, master)
        self.pack()
        # Center the window on the screen.
        self.setting_window(master)
        # Periodic-processing state.
        self.cycle_proc_state = CycleProcState.FACE_DETECTION
        # Distance-measurement timer (counts processing cycles).
        self.distance_timer = 0
        # Pause timer.
        self.pause_timer = 0
        # Measured distance [cm].
        self.distance = DISTANCE_STANDARD
        # Thermistor temperature [deg C].
        self.thermistor_temp = 0.0
        # Thermistor temperature correction [deg C].
        self.thermistor_corr = THERMISTOR_CORR_STANDARD
        # Index into the IR-sensor temperature samples (0..2).
        self.temperature_index = 0
        # IR-sensor temperature samples [deg C].
        self.temperature = [0.0, 0.0, 0.0]
        # Median of the IR-sensor temperature samples [deg C].
        self.temperature_med = 0.0
        # Estimated body temperature [deg C].
        self.body_temp = BODY_TEMP_STANDARD
        # Build the widgets.
        self.create_widgets()
        # Camera.
        self.camera_init()
        # Distance sensor (VL530X).
        self.distance_sensor_init()
        # Thermal sensor (AMG8833).
        self.thermal_sensor_init()
        # CSV output setup.
        self.csv_init()
        if not self.camera.isOpened:
            messagebox.showerror('カメラ認識エラー', 'カメラの接続を確認してください')
        else:
            # Start the periodic processing loop.
            self.cycle_proc()
##########################################################################
# ウィンドウをスクリーンの中央に配置
##########################################################################
def setting_window(self, master):
w = 500 # ウィンドウの横幅
h = 800 # ウィンドウの高さ
sw = master.winfo_screenwidth() # スクリーンの横幅
sh = master.winfo_screenheight() # スクリーンの高さ
# ウィンドウをスクリーンの中央に配置
master.geometry(str(w)+'x'+str(h)+'+'+str(int(sw/2-w/2))+'+'+str(int(sh/2-h/2)))
# ウィンドウの最小サイズを指定
master.minsize(w,h)
master.title('非接触体温計')
##########################################################################
# ウィジットを生成
##########################################################################
    def create_widgets(self):
        """Build the three UI sections: message/result labels on top, the
        camera canvas in the middle, and diagnostic labels at the bottom."""
        # Upper frame: guidance message and body-temperature result.
        frame_upper = ttk.Frame(self)
        frame_upper.grid(row=0, padx=10, pady=(10,0), sticky='NW')
        self.label_msg = ttk.Label(frame_upper, font=('',20))
        self.label_msg.grid(row=0, sticky='NW')
        self.label_body_tmp = ttk.Label(frame_upper, font=('',30))
        self.label_body_tmp.grid(row=1, sticky='NW')
        # Middle frame: canvas showing the live camera feed.
        frame_middle = ttk.Frame(self)
        frame_middle.grid(row=1, padx=10, pady=(10,0), sticky='NW')
        self.canvas_camera = Canvas(frame_middle, width=480, height=480)
        self.canvas_camera.pack()
        # Lower frame: raw measurement readouts.
        frame_lower = ttk.Frame(self)
        frame_lower.grid(row=2, padx=10, pady=(10,0), sticky='NW')
        self.label_distance = ttk.Label(frame_lower)
        self.label_distance.grid(row=0, sticky='NW')
        self.label_thermistor = ttk.Label(frame_lower)
        self.label_thermistor.grid(row=1, sticky='NW')
        self.label_thermistor_corr = ttk.Label(frame_lower)
        self.label_thermistor_corr.grid(row=2, sticky='NW')
        self.label_temperature_0 = ttk.Label(frame_lower)
        self.label_temperature_0.grid(row=3, sticky='NW')
        self.label_temperature_1 = ttk.Label(frame_lower)
        self.label_temperature_1.grid(row=4, sticky='NW')
        self.label_temperature_2 = ttk.Label(frame_lower)
        self.label_temperature_2.grid(row=5, sticky='NW')
        self.label_temperature_med = ttk.Label(frame_lower)
        self.label_temperature_med.grid(row=6, sticky='NW')
        # Populate all labels with their placeholder text.
        self.init_param_widgets()
##########################################################################
# 計測データ ウィジット 初期化
##########################################################################
    def init_param_widgets(self):
        """Reset every measurement label to its placeholder ("--") text."""
        # Upper frame.
        self.label_msg.config(text='顔が白枠に合うよう近づいてください')
        self.label_body_tmp.config(text='体温:--.-- ℃')
        # Lower frame.
        self.label_distance.config(text='距離:--- cm')
        self.label_thermistor.config(text='サーミスタ温度:--.-- ℃')
        self.label_thermistor_corr.config(text='サーミスタ温度補正:--.-- ℃')
        self.label_temperature_0.config(text='センサ温度(1回目):--.-- ℃')
        self.label_temperature_1.config(text='センサ温度(2回目):--.-- ℃')
        self.label_temperature_2.config(text='センサ温度(3回目):--.-- ℃')
        self.label_temperature_med.config(text='センサ温度(中央値):--.-- ℃')
##########################################################################
# カメラ初期化
##########################################################################
def camera_init(self):
self.camera = cv2.VideoCapture(0)
self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, 480)
self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
# print(self.camera.get(cv2.CAP_PROP_FPS))
##########################################################################
# カメラ制御
##########################################################################
    def camera_ctrl(self):
        """Grab one camera frame, mirror it, draw the face-guide rectangle,
        and display it on the canvas."""
        ret, frame = self.camera.read()
        # Mirror horizontally so the preview behaves like a mirror.
        frame_mirror = cv2.flip(frame, 1)
        # OpenCV(BGR) -> Pillow(RGB) conversion.
        frame_color = cv2.cvtColor(frame_mirror, cv2.COLOR_BGR2RGB)
        # Draw the white guide frame the face should fit into.
        cv2.rectangle(frame_color, (60,60), (420,420), (255,255,255), thickness=5)
        # OpenCV frame -> Pillow Photo. Kept on self so tkinter does not
        # garbage-collect the image while it is displayed.
        self.photo = PIL.ImageTk.PhotoImage(image = PIL.Image.fromarray(frame_color))
        # Pillow Photo -> Canvas.
        self.canvas_camera.create_image(0, 0, image = self.photo, anchor = 'nw')
##########################################################################
# カメラ映像の空読み
##########################################################################
def camera_clear_frame(self):
ret, frame = self.camera.read()
##########################################################################
# 距離センサ(VL530X) 初期化
##########################################################################
def distance_sensor_init(self):
self.distance_sensor = VL53L0X.VL53L0X(address=0x29)
self.distance_sensor.start_ranging(VL53L0X.VL53L0X_BETTER_ACCURACY_MODE)
##########################################################################
# サーマルセンサ(AMG8833) 初期化
##########################################################################
def thermal_sensor_init(self):
# I2Cバスの初期化
i2c_bus = busio.I2C(board.SCL, board.SDA)
# センサの初期化
self.thermal_sensor = adafruit_amg88xx.AMG88XX(i2c_bus, addr=0x68)
##########################################################################
# CSV出力の初期設定
##########################################################################
def csv_init(self):
# フォルダの存在チェック
if not os.path.isdir(LOG_PATH):
os.makedirs(LOG_PATH)
# 日付取得
now = datetime.datetime.today()
# csvファイルの生成
self.filename = LOG_PATH + now.strftime('%Y-%m') + '.csv'
# ファイルの存在チェック
if not os.path.isfile(self.filename):
with open(self.filename, 'w', newline='') as csvfile:
file = csv.writer(csvfile)
# 1行目:見出し
file.writerow(['日時',
'体温',
'センサ温度',
'距離',
'サーミスタ',
'サーミスタ補正'])
##########################################################################
# CSV出力
##########################################################################
def csv_ctrl(self):
with open(self.filename, 'a', newline='') as csvfile:
# csvファイルへの書き込みデータ
now = datetime.datetime.today()
data = [now.strftime('%Y-%m-%d %H:%M:%S'),
self.body_temp,
self.temperature_med,
self.distance,
self.thermistor_temp,
self.thermistor_corr]
# データの書き込み
file = csv.writer(csvfile)
file.writerow(data)
##########################################################################
# 周期処理
##########################################################################
    def cycle_proc(self):
        """Periodic state machine: face detection -> thermistor -> three
        sensor-temperature readings -> body-temperature computation ->
        CSV logging -> pause, then back to face detection."""
        # Face detection
        if self.cycle_proc_state == CycleProcState.FACE_DETECTION:
            # Camera control
            self.camera_ctrl()
            # Distance measurement (only every 5th cycle)
            self.distance_timer += 1
            if self.distance_timer >= 5:
                self.distance_timer = 0
                self.distance = self.distance_sensor.get_distance() / float(10)
                if self.distance > DISTANCE_UPPER_LIMIT:
                    self.label_msg.config(text='顔が白枠に合うよう近づいてください')
                    self.label_distance.config(text='距離:--- cm')
                else:
                    if self.distance < DISTANCE_LOWER_LIMIT:
                        self.label_msg.config(text='もう少し離れてください')
                    elif self.distance > DISTANCE_STANDARD:
                        self.label_msg.config(text='もう少し近づいてください')
                    else:
                        # In range: advance to the thermistor reading.
                        self.label_msg.config(text='')
                        self.cycle_proc_state = CycleProcState.THERMISTOR
                    self.label_distance.config(text='距離:' + str(self.distance) + ' cm ')
        # Thermistor temperature
        elif self.cycle_proc_state == CycleProcState.THERMISTOR:
            self.thermistor_temp = round(self.thermal_sensor.temperature, 2)
            self.label_thermistor.config(text='サーミスタ温度:' + str(self.thermistor_temp) + ' ℃')
            self.cycle_proc_state = CycleProcState.TEMPERATURE
        # Infrared sensor temperature (max over the 8x8 pixel grid)
        elif self.cycle_proc_state == CycleProcState.TEMPERATURE:
            self.temperature[self.temperature_index] = round(np.amax(np.array(self.thermal_sensor.pixels)), 2)
            self.cycle_proc_state = CycleProcState.DUMMY
        # Dummy state: display the reading and loop back until 3 samples taken
        elif self.cycle_proc_state == CycleProcState.DUMMY:
            if self.temperature_index == 0:
                print('センサ温度')
                print(*self.thermal_sensor.pixels, sep='\n')
                self.label_temperature_0.config(text='センサ温度(1回目):' + str(self.temperature[0]) + '℃')
                self.temperature_index = 1
                self.cycle_proc_state = CycleProcState.TEMPERATURE
            elif self.temperature_index == 1:
                self.label_temperature_1.config(text='センサ温度(2回目):' + str(self.temperature[1]) + '℃')
                self.temperature_index = 2
                self.cycle_proc_state = CycleProcState.TEMPERATURE
            elif self.temperature_index == 2:
                self.label_temperature_2.config(text='センサ温度(3回目):' + str(self.temperature[2]) + '℃')
                self.temperature_index = 0
                self.cycle_proc_state = CycleProcState.MAKE_BODY_TEMP
            else:
                # Should be unreachable by design; handled for robustness.
                print('[error] index')
                self.temperature_index = 0
                self.cycle_proc_state = CycleProcState.FACE_DETECTION
        # Body-temperature computation
        elif self.cycle_proc_state == CycleProcState.MAKE_BODY_TEMP:
            # Median of the three infrared readings (sort, take the middle).
            self.temperature.sort()
            self.temperature_med = round(self.temperature[1], 2)
            self.label_temperature_med.config(text='センサ温度(中央値):' + str(self.temperature_med) + '℃')
            # Thermistor correction: low-pass adjust toward the standard
            # body temperature (1/10 of the residual per measurement).
            diff = BODY_TEMP_STANDARD - self.temperature_med
            corr = diff - self.thermistor_corr
            print(corr)
            self.thermistor_corr = round((self.thermistor_corr + (corr / 10)), 2)
            self.label_thermistor_corr.config(text='サーミスタ温度補正:' + str(self.thermistor_corr) + ' ℃')
            # Body temperature = median sensor reading + correction
            self.body_temp = round((self.temperature_med + self.thermistor_corr), 1)
            self.label_body_tmp.config(text='体温:' + str(self.body_temp) + '℃')
            if self.body_temp > 38.0:
                self.label_msg.config(text='体温が高いです!検温してください')
            elif self.body_temp < 35.0:
                self.label_msg.config(text='体温が低いです!検温してください')
            else:
                self.label_msg.config(text='体温は正常です!問題ありません')
            self.cycle_proc_state = CycleProcState.UPDATE_CSV
        # CSV update
        elif self.cycle_proc_state == CycleProcState.UPDATE_CSV:
            self.csv_ctrl()
            self.cycle_proc_state = CycleProcState.PAUSE
        # Pause: keep draining camera frames, then reset for the next person
        elif self.cycle_proc_state == CycleProcState.PAUSE:
            # Discard camera frames so the buffer stays fresh
            self.camera_clear_frame()
            self.pause_timer += 1
            if self.pause_timer > 20:
                self.pause_timer = 0
                # Re-initialise the measurement widgets
                self.init_param_widgets()
                self.cycle_proc_state = CycleProcState.FACE_DETECTION
        # Should be unreachable by design; handled for robustness.
        else:
            print('[error] cycle_proc')
            self.cycle_proc_state = CycleProcState.FACE_DETECTION
        # Re-schedule this handler for the next cycle
        self.after(PROC_CYCLE, self.cycle_proc)
if __name__ == '__main__':
    # Build the Tk root window, attach the application and run the GUI loop.
    root = Tk()
    app = Application(master=root)
    app.mainloop()
from spack import *
import glob
import sys,os
class Hector(Package):
    """Spack package recipe for Hector 1.3.4_patch1 (depends on ROOT)."""
    homepage = "http://www.example.com"
    url = "http://cmsrep.cern.ch/cmssw/repos/cms/SOURCES/slc6_amd64_gcc630/external/hector/1.3.4_patch1-fmblme3/hector-1.3.4_patch1.tgz"
    depends_on('root')
    version('1.3.4_patch1', '419ec3ce8dfbcff972ea6d5b09e8c6f1')
    def install(self, spec, prefix):
        """Build in-tree with make, then copy the whole tree into prefix."""
        # The Makefile expects these output directories to already exist.
        mkdirp('obj')
        mkdirp('lib')
        make()
        # No install target upstream: copy everything into the prefix.
        cp = which('cp')
        for f in glob.glob('*'):
            cp('-r', f, prefix)
|
# Copyright 2021-2022 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import pytest
import cunumeric as num
# Test fixtures: one rectangular and one square matrix, so both transpose
# code paths (shape-changing and shape-preserving) are exercised.
rect = num.array([[1, 2, 3], [4, 5, 6]])
square = num.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
@pytest.mark.parametrize("x", (rect, square), ids=("rect", "square"))
class Test_free_function:
    """Checks num.transpose against the NumPy reference implementation."""

    def test_forward(self, x):
        expected = np.transpose(np.array(x))
        assert num.array_equal(num.transpose(x), expected)

    def test_round_trip(self, x):
        # Transposing twice must reproduce the original array.
        assert num.array_equal(x, num.transpose(num.transpose(x)))
@pytest.mark.parametrize("x", (rect, square), ids=("rect", "square"))
class Test_method:
    """Checks the ndarray.transpose method against the NumPy reference."""

    def test_forward(self, x):
        reference = np.array(x).transpose()
        assert num.array_equal(x.transpose(), reference)

    def test_round_trip(self, x):
        # Transposing twice must reproduce the original array.
        assert num.array_equal(x, x.transpose().transpose())
if __name__ == "__main__":
import sys
sys.exit(pytest.main(sys.argv))
|
def test(r):
avg = round(sum([i for i in r]) / len(r),3)
perf = {'h': 0, 'a': 0, 'l': 0}
for x in r:
if x>8:
perf['h'] += 1
elif x>6:
perf['a'] += 1
else:
perf['l'] += 1
return [avg, perf] if len(r) != perf['h'] else [avg, perf, 'They did well']
'''
It's an important day today: the class has just had a math test.
You will be given a list of marks. Complete the function that will:
Calculate the average mark of the whole class and round it to 3 decimal places.
Make a dictionary/hash with keys "h", "a", "l" to make clear how many high,
average and low marks they got. High marks are 9 & 10, average marks are 7 & 8,
and low marks are 1 to 6.
Return list [class_average, dictionary] if there are different types of marks,
or [class_average, dictionary, "They did well"] if there are only high marks.
Examples
[10, 9, 9, 10, 9, 10, 9] ==> [9.429, {'h': 7, 'a': 0, 'l': 0}, 'They did well']
[5, 6, 4, 8, 9, 8, 9, 10, 10, 10] ==> [7.9, {'h': 5, 'a': 2, 'l': 3}]
'''
|
# -*- coding: utf-8 -*-
"""
To train cascade classifier by given positive / negative sample path and rounds.
@author: peter
"""
import sys
import numpy as np
import os
import json
from haar_properties import get_haars
from classifier_properties import Weak_Classifier #, Strong_Classifier
from image import png_gray
def train( _pos_dir : str, _neg_dir : str, rounds : int ):
    """Train a cascade of weak classifiers with boosting-style reweighting.

    Args:
        _pos_dir: directory of positive .png training samples.
        _neg_dir: directory of negative .png training samples.
        rounds: number of boosting rounds (one weak classifier per round).

    Returns:
        List of the chosen Weak_Classifier objects, one per round.
    """
    print( "##### Training Start #####" )
    # Get sample images
    print( ">>>>> Loading training samples <<<<<" )
    imgs, img_shape, n_pos, n_neg = load_samples( _pos_dir, _neg_dir )
    print( " total images : ", n_pos + n_neg,
          ", positive images : ", n_pos, ", negative images : ", n_neg )
    # Get Haar features
    print( ">>>>> Generating features <<<<<" )
    haar_features = get_haars( img_shape, (0,0), 1, 1, 1, 256 )
    print( " total features : ", len(haar_features) )
    # Initial weights: each class carries half the total mass.
    print( ">>>>> Initializing weights <<<<<" )
    weights_pos = [1.0 / (2 * n_pos)] * n_pos
    weights_neg = [1.0 / (2 * n_neg)] * n_neg
    weights = weights_pos + weights_neg
    # Record classifiers chosen in each round
    cascade = []
    # Start Iterations
    print( ">>>>> Start training iteration <<<<<" )
    for t in range(rounds):
        print(" ")
        print( " # Round ", t + 1, " : " )
        # Normalize sample weights.  Compute the total once -- the original
        # re-evaluated sum(weights) for every element (O(n^2)).
        total = sum(weights)
        weights = [ weight / total for weight in weights ]
        # Find the weak classifier with the minimum weighted error.
        print( "Training weak classifiers ......" )
        opt_classifier, classifier_min_err = None, 1.0
        for i, haar in enumerate(haar_features):
            log = (" training ( " + str(i+1) + " / " +
                  str(len(haar_features)) + " ) of weak classifiers." )
            sys.stdout.write( '\r' + log )
            this_classifier = Weak_Classifier( haar, imgs, weights )
            if this_classifier.error < classifier_min_err:
                classifier_min_err = this_classifier.error
                opt_classifier = this_classifier
        # Unless this is the final round, reweight samples for the next one.
        if t != rounds-1:
            weights = opt_classifier.get_updated_weights()
        cascade.append(opt_classifier)
    print( ">>>>> Training complete <<<<<" )
    return cascade
def test( _pos_dir : str, _neg_dir : str, cascade, rounds = None ):
    """Evaluate a trained cascade on labelled positive/negative samples.

    Args:
        _pos_dir: directory of positive .png test samples.
        _neg_dir: directory of negative .png test samples.
        cascade: list of trained weak classifiers.
        rounds: if given, evaluate only the first `rounds` classifiers.

    Returns:
        (accuracy, (tpr, fpr, tnr, fnr)) as computed below.

    NOTE(review): tp counts positives *hit* by any classifier and fp is
    derived as n_pos - tp, so 'true_positive_rate' here is the per-class
    hit rate (tp / n_pos) rather than the textbook TPR/FPR split --
    confirm the intended metric definitions.  Also note this divides by
    zero when either sample class is empty.
    """
    print( "##### Test Start #####" )
    if rounds:
        cascade = cascade[:rounds]
    # Get sample images
    print( ">>>>> Loading test samples <<<<<" )
    imgs, img_shape, n_pos, n_neg = load_samples( _pos_dir, _neg_dir )
    print( " total images : ", n_pos + n_neg,
          ", positive images : ", n_pos, ", negative images : ", n_neg )
    tp, tn = 0.0, 0.0
    # nDrops counts images rejected by every classifier (for-else path).
    nDrops = 0.0
    for img in imgs:
        # Test on positive sample image
        if img.isPositive:
            for wClassifier in cascade:
                feature_val = wClassifier.feature.get_feature_val( img.integral )
                if wClassifier.isHit(feature_val):
                    tp += 1
                    break
            else:
                nDrops += 1
        # Test on negative sample image
        if not img.isPositive:
            for wClassifier in cascade:
                feature_val = wClassifier.feature.get_feature_val( img.integral )
                if wClassifier.isHit(feature_val):
                    tn += 1
                    break
            else:
                nDrops += 1
    fp, fn = n_pos - tp, n_neg - tn
    accuracy = (tp + tn) / (tp + tn + fp + fn)
    true_positive_rate = tp / (tp + fp)
    false_positive_rate = 1 - true_positive_rate
    true_negative_rate = tn / (tn + fn)
    false_negative_rate = 1 - true_negative_rate
    avg_drops = nDrops / len(imgs)
    print( " accuracy of this classifier : ", accuracy )
    print( " true positive rate : ", true_positive_rate,
          ", false positive rate : ", false_positive_rate,
          ", true negative rate : ", true_negative_rate,
          ", false negative rate : ", false_negative_rate,
          ", average drops : ", avg_drops)
    print(tp,tn,fp,fn)
    return accuracy, ( true_positive_rate, false_positive_rate,
                      true_negative_rate, false_negative_rate )
'''
def test_one_image( image : png_gray, sClassifier : Strong_Classifier ):
return
'''
def save_cascade( cascade, save_path = None ):
    """Serialise a trained cascade to <save_path>cascade_data.txt as JSON.

    Args:
        cascade: iterable of trained weak classifiers; each must expose
            .feature (with .pattern/.shape/.upperleft), .threshold, .error.
        save_path: path prefix for the output file.  Defaults to the
            current directory.

    Returns:
        True on success.
    """
    # Bug fix: the original default (None) crashed with TypeError on
    # `save_path + 'cascade_data.txt'`; treat None as "current directory".
    prefix = save_path or ''
    data = {}
    for i, classifier in enumerate(cascade):
        data[ 'classifier_' + str(i) ] = {
            'pattern' : str(classifier.feature.pattern),
            'size' : str(classifier.feature.shape),
            'upperleft' : str(classifier.feature.upperleft),
            'threshold' : str(classifier.threshold),
            'training_error' : str(classifier.error)
        }
    # Context manager guarantees the file is closed even on a dump error.
    with open( prefix + 'cascade_data.txt', 'w') as outfile:
        json.dump(data, outfile)
    return True
def load_samples( _pos_dir : str, _neg_dir : str ):
    """Load positive and negative sample images and report their counts."""
    positives = load_dataset(_pos_dir, True)
    negatives = load_dataset(_neg_dir, False)
    samples = positives + negatives
    # All samples share one shape; take it from the first image.
    shape = np.shape(samples[0].image)
    return samples, shape, len(positives), len(negatives)
def load_dataset( data_dir : str, isPositive : bool ):
    """Load every .png in data_dir as a png_gray tagged with its label.

    Bug fix: the original joined paths with a hard-coded backslash, which
    only works on Windows; os.path.join is portable.
    """
    images = []
    for file in os.listdir(data_dir):
        if file.endswith(".png"):
            images.append( png_gray( os.path.join(data_dir, file), isPositive) )
    return images
|
import cv2
import numpy as np
import imutils
image = cv2.imread("image/picasso.jpg")
cv2.imshow("Orginal",image)
#cv2.waitKey(0)
#İmage Translation
M = np.float32([[1,0,25],[0,1,50]]) #Burda translation matrix'i oluşturuyoruz.
shifted = cv2.warpAffine(image,M,(image.shape[1],image.shape[0])) # Asıl kaydırma burada yapılıyor.
cv2.imshow("Shifted Right and Down",shifted)
#cv2.waitKey(0)
#Image Translation imutils
shifted = imutils.translate(image,25,50)
cv2.imshow("Imutils Shifted Right and Down",shifted)
#cv2.waitKey(0)
#Rotation
(h,w) = image.shape[:2]
center = ((image.shape[1]//2),(image.shape[0]//2))
M = cv2.getRotationMatrix2D(center,45,1.0) #Burada döndürme matrix'ini oluşturduk
rotated = cv2.warpAffine(image,M,(w,h)) #Burada ise döndürme işlemini gerçekleştirdik
cv2.imshow("Rotated",rotated)
#cv2.waitKey(0)
#Rotation Imutils
rotated = imutils.rotate(image,45)
cv2.imshow("Rotated Imutils",rotated)
#cv2.waitKey(0)
#Resize
r = 150.0 / image.shape[1]
dim = (150,int(image.shape[0]*r))
resized = cv2.resize(image,dim,interpolation = cv2.INTER_AREA)
cv2.imshow("Resized Width",resized)
cv2.waitKey(0)
# Flipping horizontally (flip code 1)
flipped = cv2.flip(image,1)
cv2.imshow("Flipping",flipped)
#cv2.waitKey(0)
# Cropping via NumPy slicing: rows 30-119, columns 40-129
cropped = image[30:120,40:130]
cv2.imshow("Cropped",cropped)
#cv2.waitKey(0)
#Image Arithmetic
# cv2 addition/subtraction saturates: results clamp to [0, 255]
print("max of 255 : {}".format(cv2.add(np.uint8([200]),np.uint8([100]))))
print("min of 0 : {}".format(cv2.subtract(np.uint8([50]),np.uint8([100]))))
# NumPy uint8 arithmetic wraps around modulo 256 instead
print("Wrap around : {}".format(np.uint8([200])+np.uint8([100])))
print("Wrap around : {}".format(np.uint8([50])-np.uint8([100])))
# Brighten the image by adding a constant matrix
M = np.ones(image.shape,image.dtype) *100
added = cv2.add(image,M)
cv2.imshow("White Matris",M)
cv2.imshow("Added",added)
#cv2.waitKey(0)
# Darken the image by subtracting a constant matrix
M = np.ones(image.shape,image.dtype) *50
subtracted = cv2.subtract(image,M)
cv2.imshow("Subtracted",subtracted)
#cv2.waitKey(0)
# Filled white rectangle and circle on black canvases for bitwise ops
rectangle = np.zeros((300,300),dtype = "uint8")
cv2.rectangle(rectangle,(25,25),(275,275),255,-1)
cv2.imshow("Rectangle",rectangle)
cv2.waitKey(0)
circle = np.zeros((300,300),dtype = "uint8")
cv2.circle(circle,(150,150),150,255,-1)
cv2.imshow("Circle",circle)
cv2.waitKey(0)
# AND keeps only pixels set in both shapes
bitwiseAnd=cv2.bitwise_and(circle,rectangle)
cv2.imshow("Bitwise And",bitwiseAnd)
cv2.waitKey(0)
# Masking: keep only a 150x150 square around the image centre
mask = np.zeros(image.shape[:2],dtype = "uint8")
(cX,cY) = ((image.shape[1]//2),(image.shape[0]//2))
cv2.rectangle(mask,(cX-75,cY-75),(cX+75,cY+75),255,-1)
cv2.imshow("Mask",mask)
masked = cv2.bitwise_and(image,image,mask = mask)
cv2.imshow("Mask applied Image",masked)
cv2.waitKey(0)
# Splitting and merging channels (OpenCV channel order is B, G, R)
(B,G,R) = cv2.split(image)
cv2.imshow("Red",R)
cv2.imshow("Green",G)
cv2.imshow("Blue",B)
cv2.waitKey(0)
merged = cv2.merge([B,G,R])
cv2.imshow("Merged",merged)
cv2.waitKey(0)
# Visualise each channel in its own colour by zeroing the other two
zeros = np.zeros(image.shape[:2],image.dtype)
cv2.imshow("Red",cv2.merge([zeros,zeros,R]))
cv2.imshow("Green",cv2.merge([zeros,G,zeros]))
cv2.imshow("Blue",cv2.merge([B,zeros,zeros]))
cv2.waitKey(0)
# Colour-space conversions
gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)
hsv = cv2.cvtColor(image,cv2.COLOR_BGR2HSV)
lab = cv2.cvtColor(image,cv2.COLOR_BGR2LAB)
cv2.imshow("Gray",gray)
cv2.imshow("hsv",hsv)
cv2.imshow("lab",lab)
cv2.waitKey(0)
|
# Sets deduplicate their elements on construction.
a={1,1,2,2,3,4}
print(a) # output will be {1,2,3,4} since a set does not keep repeated values
s={"a",'s','g',"d"}
print(s) # note: iteration order of a set is unspecified
from .serializers import FrontendSerializer
from frontend.models import Frontend
from rest_framework import viewsets, permissions
class FrontendViewSet(viewsets.ModelViewSet):
    """CRUD API for Frontend objects, scoped to the authenticated user."""
    serializer_class = FrontendSerializer
    # Only logged-in users may access any action on this viewset.
    permission_classes = [
        permissions.IsAuthenticated
    ]
    def get_queryset(self):
        # Limit results to the requesting user's own frontends
        # (assumes a `frontend` related manager on the user model -- confirm).
        return self.request.user.frontend.all()
    def perform_create(self, serializer):
        # Stamp new objects with the requesting user as owner.
        serializer.save(owner=self.request.user)
# Copyright (c) Microsoft. All rights reserved. Licensed under the MIT license.
# See LICENSE in the project root for license information.
import requests
import uuid
import json
from connect.data import get_email_text
# The base URL for the Microsoft Graph API.
graph_api_endpoint = 'https://graph.microsoft.com/v1.0{0}'

def call_sendMail_endpoint(access_token, alias, emailAddress):
    """Send a welcome mail through the Microsoft Graph sendMail action.

    Args:
        access_token: OAuth bearer token for the Graph API.
        alias: name substituted into the mail body by get_email_text.
        emailAddress: recipient address.

    Returns:
        202 (int) on success, otherwise a "<status>: <body>" string.
    """
    # The resource URL for the sendMail action.
    send_mail_url = graph_api_endpoint.format('/me/microsoft.graph.sendMail')
    # Set request headers.
    headers = {
        'User-Agent' : 'python_tutorial/1.0',
        'Authorization' : 'Bearer {0}'.format(access_token),
        'Accept' : 'application/json',
        'Content-Type' : 'application/json'
    }
    # Client-request-id headers make it easier to correlate requests and
    # responses when troubleshooting; a recommended Graph best practice.
    request_id = str(uuid.uuid4())
    instrumentation = {
        'client-request-id' : request_id,
        'return-client-request-id' : 'true'
    }
    headers.update(instrumentation)
    # Create the email that is to be sent with API.
    email = {
        'Message': {
            'Subject': 'Welcome to Office 365 development with Python and the Office 365 Connect sample',
            'Body': {
                'ContentType': 'HTML',
                'Content': get_email_text(alias)
            },
            'ToRecipients': [
                {
                    'EmailAddress': {
                        'Address': emailAddress
                    }
                }
            ]
        },
        'SaveToSentItems': 'true'
    }
    # SECURITY FIX: the original passed verify=False, disabling TLS
    # certificate validation and exposing the bearer token to MITM.
    # Certificate verification is now left at requests' secure default.
    response = requests.post(url = send_mail_url, headers = headers, data = json.dumps(email), params = None)
    # 202 Accepted means the message was queued for delivery.
    if response.status_code == requests.codes.accepted:
        return response.status_code
    else:
        return "{0}: {1}".format(response.status_code, response.text)
|
import enum
import pathlib
from typing import Any, BinaryIO, Dict, List, Optional, Tuple, Union
from torchdata.datapipes.iter import CSVParser, Demultiplexer, Filter, IterDataPipe, IterKeyZipper, LineReader, Mapper
from torchvision.prototype.datasets.utils import Dataset, EncodedImage, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
getitem,
hint_sharding,
hint_shuffling,
INFINITE_BUFFER_SIZE,
path_comparator,
read_categories_file,
)
from torchvision.prototype.tv_tensors import Label
from .._api import register_dataset, register_info
NAME = "dtd"
class DTDDemux(enum.IntEnum):
SPLIT = 0
JOINT_CATEGORIES = 1
IMAGES = 2
@register_info(NAME)
def _info() -> Dict[str, Any]:
    """Static dataset info: the category list read from the categories file."""
    categories = read_categories_file(NAME)
    return {"categories": categories}
@register_dataset(NAME)
class DTD(Dataset):
    """DTD Dataset.
    homepage="https://www.robots.ox.ac.uk/~vgg/data/dtd/",
    """
    def __init__(
        self,
        root: Union[str, pathlib.Path],
        *,
        split: str = "train",
        fold: int = 1,
        skip_validation_check: bool = False,
    ) -> None:
        # Validate user-facing arguments before the base class starts
        # resource loading.
        self._split = self._verify_str_arg(split, "split", {"train", "val", "test"})
        if not (1 <= fold <= 10):
            raise ValueError(f"The fold parameter should be an integer in [1, 10]. Got {fold}")
        self._fold = fold
        self._categories = _info()["categories"]
        super().__init__(root, skip_integrity_check=skip_validation_check)
    def _resources(self) -> List[OnlineResource]:
        # One archive contains the images, split files and joint labels.
        archive = HttpResource(
            "https://www.robots.ox.ac.uk/~vgg/data/dtd/download/dtd-r1.0.1.tar.gz",
            sha256="e42855a52a4950a3b59612834602aa253914755c95b0cff9ead6d07395f8e205",
            preprocess="decompress",
        )
        return [archive]
    def _classify_archive(self, data: Tuple[str, Any]) -> Optional[int]:
        """Route an archive member to a DTDDemux branch (None = drop)."""
        path = pathlib.Path(data[0])
        if path.parent.name == "labels":
            if path.name == "labels_joint_anno.txt":
                return DTDDemux.JOINT_CATEGORIES
            return DTDDemux.SPLIT
        elif path.parents[1].name == "images":
            return DTDDemux.IMAGES
        else:
            return None
    def _image_key_fn(self, data: Tuple[str, Any]) -> str:
        path = pathlib.Path(data[0])
        # The split files contain hardcoded posix paths for the images, e.g. banded/banded_0001.jpg
        return str(path.relative_to(path.parents[1]).as_posix())
    def _prepare_sample(self, data: Tuple[Tuple[str, List[str]], Tuple[str, BinaryIO]]) -> Dict[str, Any]:
        """Turn a (split line + joint labels, image file) pair into a sample."""
        (_, joint_categories_data), image_data = data
        _, *joint_categories = joint_categories_data
        path, buffer = image_data
        # The primary category is encoded in the image's parent directory.
        category = pathlib.Path(path).parent.name
        return dict(
            joint_categories={category for category in joint_categories if category},
            label=Label.from_category(category, categories=self._categories),
            path=path,
            image=EncodedImage.from_file(buffer),
        )
    def _datapipe(self, resource_dps: List[IterDataPipe]) -> IterDataPipe[Dict[str, Any]]:
        archive_dp = resource_dps[0]
        # Fan the archive out into split files, joint labels and images.
        splits_dp, joint_categories_dp, images_dp = Demultiplexer(
            archive_dp, 3, self._classify_archive, drop_none=True, buffer_size=INFINITE_BUFFER_SIZE
        )
        # Keep only the split file matching the configured split and fold.
        splits_dp = Filter(splits_dp, path_comparator("name", f"{self._split}{self._fold}.txt"))
        splits_dp = LineReader(splits_dp, decode=True, return_path=False)
        splits_dp = hint_shuffling(splits_dp)
        splits_dp = hint_sharding(splits_dp)
        joint_categories_dp = CSVParser(joint_categories_dp, delimiter=" ")
        # Join each split line with its joint-category row ...
        dp = IterKeyZipper(
            splits_dp,
            joint_categories_dp,
            key_fn=getitem(),
            ref_key_fn=getitem(0),
            buffer_size=INFINITE_BUFFER_SIZE,
        )
        # ... and then with the image bytes, keyed by the relative path.
        dp = IterKeyZipper(
            dp,
            images_dp,
            key_fn=getitem(0),
            ref_key_fn=self._image_key_fn,
            buffer_size=INFINITE_BUFFER_SIZE,
        )
        return Mapper(dp, self._prepare_sample)
    def _filter_images(self, data: Tuple[str, Any]) -> bool:
        return self._classify_archive(data) == DTDDemux.IMAGES
    def _generate_categories(self) -> List[str]:
        # Derive category names from the image directory structure.
        resources = self._resources()
        dp = resources[0].load(self._root)
        dp = Filter(dp, self._filter_images)
        return sorted({pathlib.Path(path).parent.name for path, _ in dp})
    def __len__(self) -> int:
        return 1_880  # All splits have the same length
|
import pytest
"""
conftest.py 是pytest框架集中管理固件的地方
文件名约定为这个conftest.py不可更改
用例会自动导入并调用这个文件里的固件,不需要显式指定
"""
@pytest.fixture()
def postcode():
    # Fixture: a fixed sample postcode for tests that request it.
    return '010'
@pytest.fixture()
def hi():
    # Fixture: a fixed greeting string for tests that request it.
    return 'hi'
def pytest_addoption(parser):
    # Register custom command-line options; tests read them through
    # request.config.getoption('--host') / getoption('--port').
    parser.addoption('--host', action='store', default="localhost",
            help='host of db')
    parser.addoption('--port', action='store', default='8888',
            help='port of db')
|
# -*- coding: utf-8 -*-
# Read the number of cases, then for each case read an integer and print
# the sum of its digits.
floor = int(input())
for _ in range(floor):
    num = int(input())
    copy = num
    digit_total = 0
    while num > 0:
        # divmod keeps everything in integers -- the original drifted into
        # floats via `/10` and shadowed the builtin `sum`.
        num, r = divmod(num, 10)
        digit_total += r
    print("Sum of all digits of {:} is {:}".format(copy, digit_total))
|
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gio
from gi.repository.GdkPixbuf import Pixbuf
class workspaceLauncherWindow(Gtk.Window):
    """Window for choosing a workspace directory and destination folder."""
    def __init__(self):
        Gtk.Window.__init__(self, title="Workspace Launcher Window")
        self.set_border_width(10)
        # NOTE(review): this header bar is created but never attached with
        # set_titlebar() -- confirm whether it is meant to be shown.
        hb = Gtk.HeaderBar(title="Workspace Launcher")
        self.connect("destroy", Gtk.main_quit)
        hbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.add(hbox)
        # Stack the explanatory label and the form rows in a list box.
        listbox = Gtk.ListBox()
        listbox.add(Gtk.Label(
            "Select a directory as workspcare: NTBSG uses the workspace\n directory to store "
            "sessions."))
        listbox.add(self.workspaceDir())
        listbox.add(self.destFolderName())
        listbox.add(self.destFolderPath())
        listbox.add(self.bottomBttn())
        hbox.pack_start(listbox, False, True, 0)
    def workspaceDir(self):
        """Build the 'Workspace' row: label, path entry and Browse button."""
        row = Gtk.ListBox()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=10)
        row.add(hbox)
        vbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=28)
        hbox.pack_start(vbox, False, True, 0)
        label2 = Gtk.Label()
        label2.set_markup("Workspace")
        vbox.pack_start(label2, False, True, 0)
        entry1 = Gtk.Entry()
        entry1.set_text('Workspace Directory Path')
        vbox.pack_start(entry1, False, True, 0)
        # NOTE(review): no clicked handler is connected to Browse yet.
        browse1 = Gtk.Button.new_with_label("Browse")
        vbox.pack_start(browse1, False, True, 0)
        return row
    def destFolderName(self):
        """Build the 'Destination Folder Name' row: label and text entry."""
        row = Gtk.ListBox()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=10)
        row.add(hbox)
        vbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=15)
        hbox.pack_start(vbox, False, True, 0)
        label2 = Gtk.Label()
        label2.set_markup("Destination\nFolder Name")
        vbox.pack_start(label2, False, True, 0)
        entry1 = Gtk.Entry()
        entry1.set_text('Destination Folder Name')
        vbox.pack_start(entry1, False, True, 0)
        return row
    def destFolderPath(self):
        """Build the 'Destination Folder Path' row: label, entry, Browse."""
        row = Gtk.ListBox()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=10)
        row.add(hbox)
        vbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=25)
        hbox.pack_start(vbox, False, True, 0)
        label2 = Gtk.Label()
        label2.set_markup("Destination\nFolder Path")
        vbox.pack_start(label2, False, True, 0)
        entry1 = Gtk.Entry()
        entry1.set_text('Destination Folder Path')
        vbox.pack_start(entry1, False, True, 0)
        browse1 = Gtk.Button.new_with_label("Browse")
        vbox.pack_start(browse1, False, True, 0)
        return row
    def bottomBttn(self):
        """Build the bottom row with the Launch and Cancel buttons."""
        row = Gtk.ListBoxRow()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=10)
        row.add(hbox)
        btn = Gtk.Button.new_with_label("Launch")
        hbox.pack_start(btn, True, True, 0)
        btn = Gtk.Button.new_with_label("Cancel")
        hbox.pack_start(btn, True, True, 0)
        return row
# Create the window, show all widgets and enter the GTK main loop.
window = workspaceLauncherWindow()
window.show_all()
Gtk.main()
|
#-*- coding: utf-8-*-
import xlrd, codecs, json
from lxml import etree
from collections import OrderedDict
def xls2xml(filename, outfile):
    """Convert the first sheet of an .xls workbook to a simple XML document.

    Each row's first cell becomes a key (rendered as an int string) and the
    remaining cells its value; the mapping is embedded as pretty-printed
    JSON inside a <students> element.

    Args:
        filename: path of the input Excel workbook.
        outfile: path of the XML file to write (UTF-8).
    """
    with xlrd.open_workbook(filename) as excel:
        #table = excel.sheet_by_name('student')
        table = excel.sheet_by_index(0)
        data = OrderedDict()
        for i in range(table.nrows):
            key = str(int(table.row_values(i)[0]))
            value = str(table.row_values(i)[1:])
            data[key] = value
    root = etree.Element('root')
    students_xml = etree.ElementTree(root)
    students = etree.SubElement(root, 'students')
    students.append(etree.Comment('\n\t学生信息表\n\t"d" :[名字, 数学, 语文, 英语]\n'))
    # Fix: the original assigned students.text twice; the first assignment
    # was immediately overwritten and has been removed as dead code.
    students.text = '\n'+str(json.dumps(data, indent=4, ensure_ascii=False))+'\n'
    # Context manager guarantees the output file is closed on error.
    with codecs.open(outfile, 'w', 'utf-8') as output:
        output.write('<?xml version="1.0" encoding="UTF-8"?>\n' + etree.tounicode(students_xml.getroot()))
if __name__ == '__main__':
    # Convert the sample workbook when run as a script.
    xls2xml('student.xls', 'student.xml')
|
import os
import sys
if not __package__:
    # Running as a loose script: add the parent directory to sys.path so
    # `import wpcmd` resolves without installing the package.
    path = os.path.join(os.path.dirname(__file__), os.pardir)
    sys.path.insert(0, path)
import wpcmd
wpcmd.main()
|
'''
building an exotic pet class
'''
# Pools of random attributes used when generating animals.
names = ['Roger', 'Stuart', 'Rossetta', 'Karla', 'Cep']
colors = ['red', 'yellow', 'green', 'blue', 'purple', 'rainbow']
species = ['fish', 'snake', 'bird', 'frog', 'whale', 'dog']
# create animal class
class ExoticAnimals:
    """A generic exotic pet.

    Args:
        height: height, stored as int.
        weight: weight, stored as int.
        color: colour name.
        legs: number of legs, stored as int.
        species: species name.
        mammal: whether the animal is a mammal (stored as bool).
        name: display name used by move(); optional with a default so
            existing callers that never passed a name keep working.
    """
    def __init__(self, height, weight, color, legs, species, mammal=True,
                 name='Animal'):
        self.height = int(height)
        self.weight = int(weight)
        self.color = color
        self.legs = int(legs)
        self.species = species
        self.mammal = bool(mammal)
        # Bug fix: move() reads self.name but the original __init__ never
        # set it, so every move() call raised AttributeError.
        self.name = name
    def move(self, distance, direction):
        """Describe the animal travelling distance meters toward direction."""
        # Bug fix: the original interpolated `distance` twice and never
        # used `direction`.
        return f'{self.name} travels {distance} meters in {direction} direction!'
    def eat(self, food):
        """Describe the animal enjoying the given food."""
        return f'Yum, Yum! I love {food}!'
# inheritance class
class Anaconda(ExoticAnimals):
    """An anaconda: a legless, non-mammal snake with a slithering move()."""
    def __init__(self, height, weight, color, legs=0, species='snake',
                 mammal=False, name='Anaconda'):
        # Bug fixes vs. the original: `species=snake` referenced an
        # undefined name (now the string 'snake'), and super().__init__
        # was passed `self` explicitly, which raised TypeError.
        super().__init__(height, weight, color, legs, species, mammal)
        # Set the name directly so this also works when the base class
        # does not accept a name parameter.
        self.name = name
    def move(self, distance, direction):
        """Describe the snake slithering distance meters toward direction."""
        # Bug fix: use `direction` (the original repeated `distance`).
        return f'{self.name} slithers {distance} meters in {direction} direction!'
# note that the second parent method is available to the child class
# create an animal
def create_animal():
    """Interactively build an ExoticAnimals from user input and print a move."""
    print('Hello! Please enter the following to create your animal: ')
    n = input("What is your animal's name?")
    h = input("What is your animal's height?")
    w = input("What is your animal's weight?")
    c = input("What is your animal's color?")
    l = input("What is your animal's number of legs?")
    s = input("What is your animal's species?")
    m = input("Is your animal a mammal? True or False")
    dist = input("How far will your animal travel?")
    dirc = input("In which direction will your animal travel?")
    # Bug fixes: the class is ExoticAnimals (the original called the
    # undefined name ExoticAnimal) and `h` was collected but never passed.
    # The name is attached as an attribute so move() can use it even with
    # the original constructor signature.
    user_animal = ExoticAnimals(height=h, weight=w, color=c, legs=l,
                                species=s, mammal=m)
    user_animal.name = n
    print(user_animal.move(dist, dirc))
# create a function that creates lots of animals
def lots_of_animals(n):
    """Create `n` randomly-attributed ExoticAnimals and return them.

    Bug fixes vs. the original: the function never looped over n,
    referenced the undefined names `random`, `species_names` and
    `animals`, and called the constructor with a duplicated legs argument
    that did not match its signature.
    """
    import random  # local import; the module header is outside this block
    animals = []
    for _ in range(n):
        a_height = random.randint(5, 501)
        a_weight = random.randint(2, 2001)
        a_color = random.choice(colors)
        a_legs = random.randint(0, 5)
        a_species = random.choice(species)
        # Only dogs and whales in the species pool are mammals.
        a_mammal = a_species in ('dog', 'whale')
        a_animal = ExoticAnimals(a_height, a_weight, a_color, a_legs,
                                 a_species, a_mammal)
        # Attach the name as an attribute so move() works even with the
        # original constructor signature.
        a_animal.name = random.choice(names)
        animals.append(a_animal)
    return animals
|
import boto3
import urllib
import traceback
import json
import os
# Global switch for verbose logging.
DEBUG = True

def debug_log(s, comment=""):
    """Print `comment` then `s` when the module-level DEBUG flag is on.

    Uses the print() function form: with a single argument it behaves
    identically on Python 2 and 3, whereas the original Python-2-only
    print statements were a syntax error on Python 3.
    """
    if DEBUG:
        print(comment)
        print(s)
def handler(event, context):
    """Lambda entry point: match a face image in S3 against a collection.

    Expects event = {'bucket': ..., 'key': ...}.  Returns the DynamoDB
    user item when exactly one user matches, otherwise "Not found.".
    """
    try:
        debug_log(json.dumps(event, indent=2), "==== Function started")
        ## Get env variables for collection id and dynamo_db
        collection_id = os.environ['COLLECTION_ID']
        users_table = os.environ['USERS_TABLE']
        ## Create AWS clients for services
        rekognition = boto3.client('rekognition')
        dynamodb = boto3.resource('dynamodb')
        ddb_users_table = dynamodb.Table(users_table)
        ## Search the collection for faces similar to the uploaded image
        response = rekognition.search_faces_by_image(
            CollectionId = collection_id,
            Image={ 'S3Object': {
                'Bucket': event['bucket'],
                'Name': event['key']} },
            MaxFaces=5,
            FaceMatchThreshold=75)
        debug_log(json.dumps(response, indent=2), "==== Rekognition responce")
        ## Find all unique user's id
        user_ids = set(face['Face']['ExternalImageId'] for face in response["FaceMatches"])
        ## If one and only one id found
        if len(user_ids) == 1:
            user_id = user_ids.pop()
            # NOTE(review): '@' appears to be encoded as ':at:' in the
            # external image id -- confirm against the indexing function.
            info = ddb_users_table.get_item(
                Key = {'id': user_id.replace(':at:', '@') },
                ConsistentRead=False)
            return info['Item']
    except Exception as e:
        # If any other exceptions which we didn't expect are raised
        # then fail the job and log the exception message.
        debug_log('Function failed due to exception.' + str(e))
        traceback.print_exc()
    debug_log('==== Function complete.')
    return "Not found."
|
import sys
# Sample "firstname email" records; print, sorted, the first names of all
# entries whose address is a Gmail one.
N = ['riya riya@gmail.com',
     'julia julia@julia.me',
     'julia sjulia@gmail.com',
     'julia julia@gmail.com',
     'samantha samantha@gmail.com',
     'tanya tanya@gmail.com']
# split() already yields strings, so the original's redundant str() pass
# was dead code and has been dropped.
names = [entry.split(' ')[0] for entry in N
         if entry.split(' ')[1].endswith('gmail.com')]
print(*sorted(names), sep='\n')
|
"""
Create a turtle and make it do some random moves, with random
direction, random color, random distance at random speeds.
"""
import turtle
import random
# Random-walk demo: 50 segments drawn with random speed, colour, turn
# angle, turn direction and length.
t_screen = turtle.Screen()
sul = turtle.Turtle()
color_lst = ["red","blue","yellow","green","black"]
for i in range(0,50):
    t_speed = random.randint(0,100)
    t_distance = random.randint(0,5)
    t_angle = random.randint(0,360)
    t_direction = random.randint(0,1) # 0 = turn right, 1 = turn left
    my_pen_color = random.choice(color_lst)
    if t_direction == 0:
        sul.right(t_angle)
    elif t_direction ==1:
        sul.left(t_angle)
    sul.speed(t_speed)
    sul.pencolor(my_pen_color)
    sul.forward(t_distance)
#turtle.mainloop()
# Keep the window open until the user clicks it.
t_screen.exitonclick()
|
import pygame, caracter, math
class Player(caracter.Caracter):
volume=1
frame={}
sound = {}
def __init__(self, position=(0, 0)):
# maybe u can make the rect_attack evry time o attack
super(Player, self).__init__(position)
self.rect = Player.frame[self.frame_on][self.frame_index].get_rect(topleft=position)
self.attack_on = False
self.attack_combo = 1
self.attack_next = False
self.hit_box = pygame.Rect(position[0], position[1], 14 * 3, 40 * 3)
self.reload=0
self.hp=850
self.hp_max = 850
self.num_jumps = 0
self.death=False
self.test_mode= False
self.block=0
self.roll_on=False
self.grab_on = True
self.attack_range = pygame.Rect(10, 10, 100, 100)
self.hurt=0
self.enemy=None
# \ self.rect.width -= 13
return
# This funtion is to controle the player movement, changes the side (for the img) , the atribute movement is list where [x_movement , y_movement]
def controle(self, up, down, left, right, jump, k1, k2,k3):
if self.hurt!=0:
self.hurt-=1
#-------------------------------------------------------
if up:
jump=True
#----------------------------------------------------------
# animation
if self.animation():
if self.roll_on:
self.hit_box.y -= 20 * 3
self.hit_box.height = 40 * 3
elif self.grab_on:
self.frame_index=4
self.attack_on = False
self.roll_on= False
if self.hp==0 and self.frame_on != "death":
self.frame_on = "death"
self.frame_index = 0
#Player.sound[self.frame_on].play()
elif ((self.attack_next == True and self.attack_on == False) or (self.attack_on != False and (
self.frame_on != "attack1" and self.frame_on != "attack2" and self.frame_on != "attack3") ))and self.hp!=0 and self.roll_on==False and self.grab_on==False:
if self.attack_next == True and self.attack_on == False:
if right and not left:
self.side_left = False
elif not right and left:
self.side_left = True
if (self.attack_next == True) and self.attack_on == False:
self.attack_next = False
self.attack_on = True
self.attack_combo += 1
if self.attack_combo == 4:
self.attack_combo = 1
else:
self.attack_combo = 1
self.frame_on = "attack" + str(self.attack_combo)
self.frame_index = 0
self.play_sound()
#---------------------------------------------------------------------------------------------
self.reload = 12
#---------------------------------------------------------------------------------------------
elif self.frame_on!="grab" and self.grab_on :
self.frame_on = "grab"
self.frame_index = 0
elif self.roll_on== True and self.frame_on!="roll" and self.attack_on ==False and self.frame_on!="death"and not self.grab_on :
print( self.frame_on)
self.frame_on = "roll"
self.frame_index = 0
elif self.movement[1] < 0 and self.frame_on != "jump" and self.attack_on == False and self.frame_on != "death" and self.roll_on==False and self.block==0:
self.frame_on = "jump"
self.frame_index = 0
self.play_sound()
elif self.movement[1] > 0 and self.frame_on != "fall" and self.attack_on == False and self.frame_on != "death" and self.roll_on==False and self.block==0:
self.frame_on = "fall"
self.frame_index = 0
elif k2 and self.frame_on!="block idli" and self.attack_on == False and self.frame_on != "death" and self.roll_on==False and self.grab_on==False:
self.frame_on = "block idli"
self.frame_index = 0
#elif k3 and self.frame_on!="roll" and self.attack_on == False and self.frame_on != "death":
# self.frame_on = "roll"
# self.frame_index = 0
elif ((left and not right) or (not left and right)) and self.frame_on != "run" and self.num_jumps == 0 and self.attack_on == False and self.movement [1]==0 and self.frame_on != "death" and self.block==0 and self.roll_on==False and not self.grab_on:#and self.hit_box.height ==27 * 3:
self.frame_on = "run"
self.frame_index = 0
self.play_sound()
elif self.movement[0] == 0 and self.movement[1] == 0 and self.frame_on != "idle" and self.num_jumps == 0 and self.attack_on == False and self.frame_on != "death" and self.block==0 and self.roll_on==False and not self.grab_on:#and self.hit_box.height ==27 * 3
self.frame_on = "idle"
self.frame_index = 0
# if self.attack_next == 1 :
# if self.attack_on==1 :
# self.attack_end = 0
# self.attack_on=0
# self.img_on = "attack"+str(self.attack_combo)
# self.attack_combo=+1
# if self.attack_combo>3:
# self.attack_combo=1
# if u take the "and down ==False something happends"
if jump and self.num_jumps < 2 and down == False and self.roll_on==False and self.hit_box.height!=2*3:
self.grab_on=False
self.movement[1] = -17
self.num_jumps += 1
# if down and self.movement[1] == 0 and self.num_jumps == 0 and self.hit_box.height ==27 * 3:
# self.hit_box.height = self.hit_box.height / 2
# self.hit_box.y += self.rect.height / 2
# elif not down and self.hit_box.height !=27 * 3:
# self.hit_box.y-=self.hit_box.height
# self.hit_box.height =27 * 3
# Side movement
if right and left:
if self.movement[0] != 0:
self.movement[0] = int(self.movement[0] * 0.7)
elif right and left == False and self.attack_on == False and self.frame_on!="die" and self.block==0 and self.roll_on==False:# and self.hit_box.height ==27 * 3:
if self.side_left and self.grab_on:
self.grab_on=False
self.side_left = False
if k2==False:
self.movement[0] += 3
if self.movement[0] >= self.run_speed:
self.movement[0] = self.run_speed
elif right == False and left and self.attack_on == False and self.block==0 and self.roll_on==False:# and self.hit_box.height ==27 * 3:
if not self.side_left and self.grab_on:
self.grab_on=False
self.side_left = True
if k2==False:
self.movement[0] -= 3
if self.movement[0] <= -self.run_speed:
self.movement[0] = -self.run_speed
else:
if self.movement!=0 and self.roll_on==False:
self.movement[0] = int (self.movement[0]*0.7)
if self.roll_on and self.attack_on==False:
if self.side_left:
self.movement[0] -= 3
if self.movement[0] <= -self.run_speed *1.5:
self.movement[0] = -self.run_speed*1.5
else:
self.movement[0] += 3
if self.movement[0] >= self.run_speed*1.5:
self.movement[0] = self.run_speed*1.5
if k3 and not self.attack_on and not self.roll_on and self.block==0 and not self.grab_on:
self.hit_box.y+=38*3
self.hit_box.height = 2 * 3
self.roll_on = True
# attack
if k1 :
self.attack_next = True
if self.attack_next == True and self.attack_on == False:
self.attack_next = False
self.attack_on = True
if k2 and not self.grab_on:
self.block+=1
else:
self.block=0
if self.side_left:
self.attack_range.midright = self.hit_box.midleft
else:
self.attack_range.midleft = self.hit_box.midright
return
def attack(self, hit_box,screen=None,camera=[0,0]):
if self.attack_on :
self.reload-=1
if self.reload==0:
attack_range=self.attack_range
# I need to know in what side they got hit on
if hit_box.colliderect(attack_range):
print("Hello")
return True
else :
return False
    def draw(self, screen, camera=(0, 0)):
        """Render the player sprite (and debug overlays) relative to *camera*."""
        # Debug aid: line from the player's hit box to the current enemy.
        pygame.draw.line(screen,(150,150,150),(self.hit_box.centerx-camera[0],self.hit_box.centery-camera[1]),(self.enemy.hit_box.centerx-camera[0],self.enemy.hit_box.centery-camera[1]),3)
        # The sprite rect is re-centred on the hit box with a vertical offset
        # that differs while rolling (the roll art sits lower in the frame).
        if self.roll_on:
            self.rect.center=(self.hit_box.centerx,self.hit_box.centery-66)
        else:
            self.rect.center = (self.hit_box.centerx, self.hit_box.centery-15)
        # Frames are stored facing one way; flip horizontally when facing left.
        if self.grab_on:
            # Ledge-grab art needs an extra 13px downward nudge to line up.
            screen.blit(
                pygame.transform.flip(Player.frame[self.frame_on][int(self.frame_index)], self.side_left, False),
                (self.rect.x - camera[0], self.rect.y - camera[1]+13))
        else:
            screen.blit(pygame.transform.flip(Player.frame[self.frame_on][int(self.frame_index)], self.side_left, False),(self.rect.x - camera[0], self.rect.y - camera[1]))
        if self.test_mode:
            # Debug overlay: translucent red hit box.
            rect=pygame.Surface(self.hit_box.size).convert_alpha()
            rect.fill((200,0,0,100))
            screen.blit(rect, (self.hit_box.x - camera[0], self.hit_box.y - camera[1]))
            # Debug overlay: translucent orange attack range on the hit frame.
            if self.attack_on and self.reload==0:
                attack_range_surface = pygame.Surface(self.attack_range.size).convert_alpha()
                attack_range_surface.fill((250, 150, 71, 100))
                # NOTE(review): repositioning attack_range here duplicates the
                # control code (marked REDUNDANTE by the original author).
                #---------------------------------------------------------
                if self.side_left:
                    self.attack_range.midright = self.hit_box.midleft
                else:
                    self.attack_range.midleft = self.hit_box.midright
                #---------------------------------------------------------
                screen.blit(attack_range_surface,(self.attack_range.x - camera[0], self.attack_range.y - camera[1]))
def animation(self):
self.frame_index += 13*(1/40)
if len(Player.frame[self.frame_on])<= int(self.frame_index):
self.frame_index = 0
if self.frame_on=="death" and self.hp==0:
self.frame_index=9
return True
return False
def play_sound(self):
try:
Player.sound[self.frame_on].play()
except:
pass
    def damage(self,damage_points):
        """Apply incoming damage, honouring block and hurt-cooldown state."""
        # A freshly raised block (held fewer than 15 frames) negates the hit
        # entirely; only the block sound plays.
        if self.block>0 and self.block<15:
            self.play_sound()
        # A long-held block, or a hit outside the hurt cooldown, takes half
        # damage plus a small upward knockback and restarts the cooldown.
        elif self.block>15 or self.hurt==0:
            self.hurt=70
            super(Player, self).damage(int(damage_points/2))
            self.movement[1] = -4
        else:
            # NOTE(review): this branch (no block, hurt cooldown still
            # active) applies FULL damage — that looks inverted relative to
            # the half-damage branch above; confirm intended behaviour.
            self.hurt = 70
            super(Player, self).damage(damage_points)
            self.movement[1] = -4
    def move(self, barreiras):
        """Apply gravity, then resolve movement on each axis against the
        barrier list, handling landings and ledge-grabs."""
        # (translated) TODO: if we collide with two barriers on the same
        # side (e.g. both on the right) and the nearer one is detected
        # second, the position snaps to the farther barrier, tunnelling
        # through the nearer one!
        # (translated) gravity might move into the controller later
        self.gravity()
        # --- vertical axis ---
        if self.grab_on:
            # Hanging from a ledge cancels all vertical motion.
            self.movement[1]=0
        self.hit_box.y += self.movement[1]
        # barriers we collided with (translated)
        collisinons = self.move_collision(barreiras)
        for barreira in collisinons:
            if self.movement[1] < 0:
                # Moving up: bumped our head.
                self.hit_box.top = barreira.rect.bottom
                self.movement[1] = 0
            if self.movement[1] > 0:
                # Moving down: landed, so jumps reset.
                self.hit_box.bottom = barreira.rect.top
                self.movement[1] = 0
                self.num_jumps = 0
        # --- horizontal axis ---
        self.hit_box.x += self.movement[0]
        # barriers we collided with (translated)
        collisinons = self.move_collision(barreiras)
        for barreira in collisinons:
            if self.movement[0] < 0:
                #--------------------------------------------------------------------------------------------------
                # Ledge grab: while airborne and roughly level with the top
                # of a grabbable tile (indices 10, 4 or 15), latch on
                # instead of being pushed out of the barrier.
                if self.grab_on==False and(self.frame_on=="fall" or self.frame_on=="jump") and self.hit_box.top-barreira.rect.top<6 and self.hit_box.top-barreira.rect.top>-2 and (barreira.index==10 or barreira.index==4 or barreira.index==15):
                    self.grab_on=True
                    self.num_jumps=0
                    self.hit_box.topleft = barreira.rect.topright
                else:
                    self.hit_box.left = barreira.rect.right
            elif self.movement[0] > 0:
                # Mirror of the ledge-grab logic for rightward movement.
                if self.grab_on==False and(self.frame_on=="fall" or self.frame_on=="jump") and self.hit_box.top-barreira.rect.top<6 and self.hit_box.top-barreira.rect.top>-2 and (barreira.index==10 or barreira.index==4 or barreira.index==15):
                    self.grab_on=True
                    self.num_jumps = 0
                    self.hit_box.topright = barreira.rect.topleft
                else:
                    self.hit_box.right = barreira.rect.left
        self.rect.center = self.hit_box.center
        return
def move_collision(self, barreiras):
collision = []
for barreira in barreiras:
if self.hit_box.colliderect(barreira.rect):
collision.append(barreira)
return collision
def closer_enemy(self,enemy_list):
enemy=enemy_list[0]
distance=pow(pow(enemy.hit_box.centerx-self.hit_box.centerx,2)+pow(enemy.hit_box.centery-self.hit_box.centery,2),0.5)
for e in enemy_list:
e_distance=pow(pow(e.hit_box.centerx - self.hit_box.centerx, 2) + pow(e.hit_box.centery - self.hit_box.centery,2), 0.5)
if e_distance <distance:
distance =e_distance
enemy=e
return enemy
|
from django.http import HttpRequest
from django.test import TestCase, RequestFactory
from django.contrib.auth.models import User
from registration.views import registration, register_complete
from registration.forms import RegistrationForm
class ViewsTestCase(TestCase):
    """Unit tests for the registration app's views."""
    def setUp(self):
        # RequestFactory builds bare requests without running middleware.
        self.factory = RequestFactory()
    def test_registration_view_basic(self):
        """
        Test that registration view returns a 200 response and uses correct
        template
        """
        request = self.factory.get('/registration/registration/')
        with self.assertTemplateUsed('registration/registration.html'):
            response = registration(request)
        self.assertEqual(response.status_code, 200)
    def test_registration_view_returns_registration_form(self):
        """
        Test that registration view returns html form
        """
        response = self.client.get('/registration/registration/')
        self.assertIs(type(response.context['form']), RegistrationForm)
    def test_registration_view_can_save_post_request(self):
        """
        Test that registration view can save a POST request
        """
        request = HttpRequest()
        request.method = 'POST'
        # NOTE(review): this stores an already-created User object as POST
        # data, so the user exists before the view runs — confirm the view
        # actually reads this field rather than the assertion passing by
        # accident.
        request.POST['user'] = User.objects.create_user(username='fisherman-bob', password='BoBfish23')
        response = registration(request)
        self.assertEqual(User.objects.count(), 1)
        user = User.objects.first()
        self.assertEqual(user.username, 'fisherman-bob')
        self.assertEqual(response.status_code, 200)
    def test_registration_view_saves_user_only_if_user_is_valid(self):
        """
        Test that user can be saved only if user data is valid
        """
        request = HttpRequest()
        registration(request)
        self.assertEqual(User.objects.count(), 0)
    #def test_registration_view_redirects_after_saving_user(self):
    # NOTE(review): the def above is commented out, so the string below is a
    # no-op class-level statement, not a docstring of any test method.
    """
    Test that registration view redirects to confirmation
    page after saving user
    """
    #request = HttpRequest()
    #response = registration(request)
    def test_register_complete_view_basic(self):
        """
        Test that register_complete view returns a 200 response
        and uses correct template
        """
        request = self.factory.get('/registration/register-complete/')
        with self.assertTemplateUsed('registration/register_complete.html'):
            response = register_complete(request)
        self.assertEqual(response.status_code, 200)
|
import pygame, sys, random, time
from pygame.locals import *
from random import *
from math import *
try:
import android
except ImportError:
android = None
try:
import pygame.mixer as mixer
except ImportError:
import android.mixer as mixer
# Custom timer event id (USEREVENT + 1 .. + 5 are used as spawn timers below).
TIMEREVENT = pygame.USEREVENT
# The FPS the game runs at.
FPS = 40
# Screen constants (portrait 480x800, the Android target resolution).
SCREEN_WIDTH, SCREEN_HEIGHT = 480, 800
SCREENRECT = Rect(0, 0, 480, 800)
def load_image(file_name):
    '''Load an image with exception handling; exits the program on failure.

    Fix: the original used the Python-2-only ``except E, v`` and
    ``raise E, v`` statement forms; the ``as``/call forms below behave
    identically on Python 2.6+ and are also valid Python 3.
    '''
    try:
        image = pygame.image.load(file_name)
    except pygame.error as message:
        print("Cannot open image: " + file_name)
        raise SystemExit(message)
    # convert_alpha keeps per-pixel transparency and speeds up blitting.
    return image.convert_alpha()
class Background (pygame.sprite.Sprite):
    """Vertically scrolling, wrapping background layer."""

    def __init__(self, screen, speed, image_file):
        """Load the artwork and park it at the bottom of the screen."""
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        self.image = load_image(image_file)
        self.rect = self.image.get_rect()
        self.image_w, self.image_h = self.image.get_size()
        self.scrolling = False      # turned on once the balloon gains altitude
        self.dy = speed             # pixels scrolled per frame (sign = direction)
        self.rect.bottom = SCREEN_HEIGHT

    def update(self, score):
        """Advance the scroll and wrap the image when it leaves the screen."""
        if not self.scrolling:
            return
        self.rect.bottom += self.dy
        if self.rect.top >= 0:
            # Scrolled past the top edge: snap back to the start position.
            self.rect.bottom = SCREEN_HEIGHT
        if self.dy < 0:
            # Scrolling upward: wrap once the bottom edge comes into view.
            if self.rect.bottom <= SCREEN_HEIGHT:
                self.rect.top = 0

    def draw(self):
        """Blit the background at its current offset."""
        self.screen.blit(self.image, self.rect)
class Balloon (pygame.sprite.Sprite):
    """The player-controlled hot-air balloon."""

    def __init__(self, screen, init_x, init_y, dx, dy, image_file, lives):
        '''constructor for balloon class, initialize the balloon'''
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        self.x, self.y = init_x, init_y
        self.dx, self.dy = dx, dy
        # All balloon artwork is normalised to 132x200.
        self.image = pygame.transform.scale(load_image(image_file), (132, 200))
        self.rect = self.image.get_rect()
        self.rect.topleft = (self.x, self.y)
        self.image_w, self.image_h = self.image.get_size()
        self.lives = lives

    def update(self, lives, img1, img2, img3, justcollided):
        '''update the balloon and check to make sure it hasn't moved off the screen'''
        self.lives = lives
        # While justcollided > 0 the caller shows the "flash" image instead,
        # so the strength artwork is only swapped outside that window.
        if justcollided <= 0:
            if lives > 7:
                self.image = img1
            elif lives > 3:
                self.image = img2
            else:
                self.image = img3
        # Bounce off each screen edge by flipping the velocity component.
        # The checks are sequential on purpose: a flip from the first check
        # feeds into the second, mirroring the original behaviour exactly.
        screen_w, screen_h = self.screen.get_size()
        if self.x + self.dx <= 0:
            self.dx = -self.dx
        if self.x + self.dx >= screen_w:
            self.dx = -self.dx
        if self.y + self.dy <= 0:
            self.dy = -self.dy
        if self.y + self.dy >= screen_h:
            self.dy = -self.dy
        self.x += self.dx
        self.y += self.dy
        self.rect.topleft = (self.x, self.y)

    def draw(self):
        '''draw the balloon in its current position'''
        self.screen.blit(self.image, self.rect)
class Enemy(pygame.sprite.Sprite):
    """A moving obstacle or pickup built from a (possibly animated) sprite sheet.

    Fix: the constructor used the Python-2-only tuple parameter
    ``(height, width)``; it now takes a single ``size`` parameter that is
    unpacked in the body.  Call sites already pass a tuple positionally,
    so they are unchanged, and the definition is now valid on Python 3.
    """
    def __init__(self, screen, init_x, init_y, dx, dy, image_file, size, numrows, numcols):
        '''initialize the enemy sprite'''
        pygame.sprite.Sprite.__init__(self)
        height, width = size
        self.screen = screen
        self.x = init_x
        self.y = init_y
        # Spawns on the right half of the screen travel leftwards.
        if self.x >= SCREEN_WIDTH / 2:
            dx = dx * -1
        self.dx = dx
        self.dy = dy
        self.juststarted = True
        self.sheet = load_image(image_file)
        self.frame = []
        self.frame_index = 0
        # Per-cell size within the sheet (Python 2 integer division).
        self.image_w, self.image_h = self.sheet.get_size()[0] / numrows, self.sheet.get_size()[1] / numcols
        # NOTE(review): this loop pre-fills the animation with scaled copies
        # of the WHOLE sheet before the per-cell frames are appended below.
        # It looks like a leftover, but removing it would change the visible
        # animation sequence, so it is kept as-is.
        for i in range(numrows * numcols):
            self.frame.append(pygame.transform.scale(load_image(image_file), (height, width)))
        # Load each frame as a subsurface in animation.
        # NOTE(review): the y offset uses j rather than j * image_h — only
        # correct while numcols == 1, which all current callers use.
        for j in range(numcols):
            for i in range(numrows):
                self.frame.append(pygame.transform.scale(self.sheet.subsurface(Rect(i * self.image_w, j, self.image_w, self.image_h)), (height, width)))
        self.image = self.frame[self.frame_index]
        self.rect = self.frame[self.frame_index].get_rect()
        self.rect.topleft = (self.x, self.y)
        self.active = True
    def update(self):
        '''move and update the sprite'''
        # Despawn once well outside the screen bounds (3 sprite widths).
        if (self.rect.left < 0 - 3*self.image_w) or (self.rect.right > SCREEN_WIDTH + 3*self.image_w) or (self.rect.top < 0 - 3*self.image_h) or (self.rect.bottom > SCREEN_HEIGHT + 3* self.image_h):
            self.kill()
            self.active = False
        self.x += self.dx
        self.y += self.dy
        # Advance the animation, wrapping at the end of the frame list.
        self.frame_index = (self.frame_index + 1) % len(self.frame)
        self.image = self.frame[self.frame_index]
        self.rect.topleft = (self.x, self.y)
    def update2(self, score, SCREEN_SIZE):
        '''special move and update for the city'''
        # The city skyline scrolls out of view at low altitude and settles
        # near the bottom of the screen as the score grows.
        if score <= 2:
            if self.y <= SCREEN_SIZE - 600:
                self.dy = -5
            else:
                self.dy = -5
        else :
            if self.y >= SCREEN_SIZE -200 :
                self.dy = 0
            else:
                self.dy = 3
        if self.juststarted:
            # Brief intro drift until the score passes 3.
            self.dy = 3
            if score > 3:
                self.juststarted = False
        self.x += 0
        self.y += self.dy
        self.rect.topleft = (self.x, self.y)
    def draw(self):
        '''if the enemy is active, draw it to the screen'''
        if self.active:
            self.screen.blit(self.image, self.rect)
def game(screen):
    '''main function that runs the game'''
    # --- one-time setup: window, assets, timers, sprite groups ---
    pygame.init()
    window = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
    pygame.display.set_caption('Sky High')
    screen = pygame.display.get_surface()
    clock = pygame.time.Clock()
    scroll_speed = 3
    sky_image = "assets/sky.gif"
    # Three strength tiers of balloon art plus the damage-flash frame.
    balloon0 = pygame.transform.scale (load_image('assets/balloon.png'), (132, 200))
    balloon1 = pygame.transform.scale (load_image('assets/balloon1.png'), (132, 200))
    balloon2 = pygame.transform.scale (load_image('assets/balloon2.png'), (132, 200))
    balloonflashing = pygame.transform.scale (load_image('assets/balloonflash.png'), (132, 200))
    font = pygame.font.Font("BRLNSDB.TTF", 30)
    try:
        mixer.music.load("assets/Scores.ogg")
        hit = mixer.Sound("assets/hit.ogg")
        star = mixer.Sound("assets/star.ogg")
        mixer.music.play(-1)
    except pygame.error:
        # NOTE(review): if loading fails, hit/star stay unbound and the
        # collision handlers below would raise NameError — confirm.
        print "Couldn't find file."
    balloon_speed = 6
    moveRate = 2
    score = 0
    sky = Background(screen, scroll_speed, sky_image)
    # Map the back button to the escape key.
    if android:
        android.init()
        android.map_key(android.KEYCODE_BACK, pygame.K_ESCAPE)
        android.accelerometer_enable(True)
    # Use a timer to control FPS; USEREVENT+1..+5 are periodic spawn timers.
    pygame.time.set_timer(TIMEREVENT, 1000)
    pygame.time.set_timer(USEREVENT + 1, 3000)
    pygame.time.set_timer(USEREVENT + 2, 2000)
    pygame.time.set_timer(USEREVENT + 3, 4000)
    pygame.time.set_timer(USEREVENT + 4, 5000)
    pygame.time.set_timer(USEREVENT + 5, 8000)
    pygame.key.set_repeat(FPS, FPS) # set key repeat on
    lives = 10
    balloon = Balloon(screen, SCREEN_WIDTH / 2 - 50, SCREEN_HEIGHT, 0, balloon_speed, "assets/balloon.png", lives)
    city = Enemy(screen,0, SCREEN_HEIGHT-800, 0, -3, "assets/cityskyline.png",(800,480),1,1)
    city.image = load_image("assets/cityskyline.png")
    city.rect = city.image.get_rect()
    airplanes = pygame.sprite.Group()
    birds = pygame.sprite.Group()
    missiles = pygame.sprite.Group()
    powerups = pygame.sprite.Group()
    # Enemies spawn just off either horizontal edge (Python 2 list concat).
    spawn_pt = range(-200, -100) + range(SCREEN_WIDTH, SCREEN_WIDTH + 100)
    elapsed_time = 0
    timer = 0          # while > 0 the sky scroll speed is boosted/reversed
    justcollided = 0   # damage-flash frames remaining after a hit
    imagechanged = False
    while True:
        #game loop
        time_passed = clock.tick(FPS)
        elapsed_time += 1
        text = font.render("Height: " + str(score), 1, (120, 40, 80))
        #render score
        lives_txt = font.render("Balloon Strength: " + str(lives)+ "/10" , 1, (85, 0, 50))
        timer -= 1
        justcollided -= 1
        # Accelerometer steering on Android; arrow keys otherwise.
        if android:
            balloon_move = android.accelerometer_reading()
            if balloon.x >= 0 and balloon.x <= SCREEN_WIDTH - balloon.image_w:
                balloon.x = balloon.x - (balloon_move[0] * moveRate)
            elif balloon.x <= 0:
                balloon.x += 1
            else:
                balloon.x -= 1
            if balloon.rect.bottom <= SCREEN_HEIGHT and balloon.y >= (SCREEN_HEIGHT - balloon.image_h)/3:
                balloon.y = balloon.y + ((balloon_move[1] - 5) * moveRate)
            elif balloon.rect.bottom >= SCREEN_HEIGHT:
                balloon.y -= 1
            else:
                balloon.y += 1
            if android.check_pause():
                android.wait_for_resume()
        #Randomly choose a spawn point from the list
        init_x = choice(spawn_pt)
        if init_x < SCREEN_WIDTH/2:
            enemy_image = "assets/plane-right.gif"
        else:
            enemy_image = "assets/plane-left.gif"
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
                elif event.key == pygame.K_RIGHT:
                    if balloon.x <= SCREEN_WIDTH - balloon.image_w:
                        balloon.x += balloon_speed
                elif event.key == pygame.K_LEFT:
                    if balloon.x >= 0:
                        balloon.x -= balloon_speed
                elif event.key == pygame.K_DOWN:
                    if balloon.y <= SCREEN_HEIGHT - balloon.image_h:
                        balloon.y += balloon_speed
                elif event.key == pygame.K_UP:
                    if balloon.y >= 0:
                        balloon.y -= balloon_speed
            elif event.type == TIMEREVENT:
                # One point of height per second survived.
                score += 1
            elif event.type == USEREVENT + 1 and score>=2 and score<=20:
                airplanes.add(Enemy(screen, init_x, randint(-50, 200), randint(1, 5), 3, enemy_image, (100, 50), 1, 1))
            elif event.type == USEREVENT + 2 and score>=50:
                airplanes.add(Enemy(screen, init_x, randint(-50, 200), randint(1, 5), randint(3, 5), enemy_image, (100, 50), 1, 1))
            elif event.type == USEREVENT + 3 and score>=5:
                birds.add(Enemy(screen, init_x, randint(-50, SCREEN_HEIGHT + 50), randint(2,4), 0, "assets/UFO.png", (100, 80), 1, 1))
                if score >=20 and score<40:
                    missiles.add(Enemy(screen, randint(0, SCREEN_WIDTH), SCREEN_HEIGHT, 0, randint(-8, -3), "assets/missile.png", (40, 150), 1, 1))
            elif event.type == USEREVENT + 4 and score>=50:
                missiles.add(Enemy(screen, randint(0, SCREEN_WIDTH), SCREEN_HEIGHT, 0, randint(-8, -3), "assets/missile.png", (40, 150), 1, 1))
            elif event.type == USEREVENT + 5 and score>=30:
                powerups.add(Enemy(screen, randint(100, SCREEN_WIDTH-100), 0, 0, 3, "assets/gold-star.gif", (60, 60), 1, 1))
        # Sky scroll speed: boosted right after a hit window, reversed while
        # the post-hit timer is high, normal otherwise.
        if timer <= 20 and timer >= 0:
            sky.dy = 6
        elif timer > 45:
            sky.dy = -6
        else:
            sky.dy = 3
        sky.update(score)
        sky.draw()
        city.update2(score,SCREEN_HEIGHT)
        city.draw()
        balloon.update(lives, balloon0, balloon1, balloon2, justcollided)
        balloon.draw()
        if balloon.y <= SCREEN_HEIGHT / 3:
            # Reached cruising altitude: stop rising and start the scroll.
            balloon.dy = 0
            sky.scrolling = True
        if justcollided <= 0:
            balloon.update(lives, balloon0, balloon1, balloon2, justcollided)
            # --- collision handling; each hit costs a life and some score ---
            for enemy in airplanes:
                if pygame.sprite.collide_mask(enemy, balloon):
                    # ADD GAME OVER SCREEN HERE
                    if android:
                        android.vibrate(0.3)
                    if lives <= 0:
                        return score
                    enemy.dy = 20
                    timer = 80
                    if score >= 10:
                        score -= 10
                    else:
                        score = 1
                    justcollided = 20
                    lives -= 1
                    hit.play()
            for bird in birds:
                # Birds bob on a cosine wave until knocked away (dy == 20).
                if bird.dy != 20:
                    bird.dy = 6*cos(0.1*elapsed_time) + 1
                if pygame.sprite.collide_mask(bird, balloon):
                    # ADD GAME OVER SCREEN HERE
                    if android:
                        android.vibrate(0.3)
                    if lives <= 0:
                        return score
                    bird.dy = 20
                    timer = 70
                    if score >= 5:
                        score -= 5
                    else:
                        score = 1
                    justcollided = 20
                    lives -= 1
                    hit.play()
            for missile in missiles:
                if pygame.sprite.collide_mask(missile, balloon):
                    # ADD GAME OVER SCREEN HERE
                    if android:
                        android.vibrate(0.1)
                    missile.dy = 20
                    if lives <= 0:
                        return score
                    timer = 80
                    if score >= 15:
                        score -= 15
                    else:
                        score = 1
                    justcollided = 20
                    lives -= 1
                    hit.play()
            for powerup in powerups:
                if pygame.sprite.collide_mask(powerup, balloon):
                    timer = 25
                    powerup.kill()
                    score += 10
                    star.play()
        else:
            # Still inside the damage-flash window.
            balloon.image = balloonflashing
            imagechanged = True
        airplanes.update()
        airplanes.draw(screen)
        birds.update()
        birds.draw(screen)
        missiles.update()
        missiles.draw(screen)
        powerups.update()
        powerups.draw(screen)
        screen.blit(text, (0, SCREEN_HEIGHT - 30))
        screen.blit(lives_txt, (0, 0))
        pygame.display.flip()
if __name__ == "__main__":
    # Entry point: create the display surface and start the main loop.
    screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT), 0, 0)
    game(screen)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#============================= No-ip Updater ==============================
#
# Description: script to update you external IP address in the DDNS service
# no-ip.com
# Author: Hector Gonzalez
# Last modified: 17/07/2013
# Version: 0.6
#
#============================================================================
# Uncomment if you don't want to manually install the requests library (not tested)
#import sys
#sys.path.append("Lib/kennethreitz-requests-2b62980")
import requests
import logging
from requests.auth import HTTPBasicAuth
class NOIP:
    """Minimal no-ip.com dynamic-DNS client.

    Detects the host's external IP address, compares it with the last
    value cached in IPFILE and, when it changed, pushes an update to the
    no-ip API.
    """
    USERNAME = '' # insert your noip username, e.g. myuser%40domain.com
    PASSWORD = '' # insert your password
    HOSTNAME = '' # insert your noip hostname, e.g. myuser.no-ip.org'
    WHATISMYIP = 'http://icanhazip.com' # look for your external IP address
    IPFILE = 'myIP.txt' # where to store your current IP address
    USER_AGENT = 'Python Client Updater/0.6 myusername@domain.com'
    UPDATE_URL = 'https://dynupdate.no-ip.com/nic/update'

    def __init__(self):
        """Ensure the IP cache file exists, creating it empty if missing.

        Fix: the original leaked the read handle when the file already
        existed; context managers close it on every path.
        """
        try:
            with open(self.IPFILE, 'r'):
                pass
        except IOError:
            with open(self.IPFILE, 'w'):
                pass

    def getIP(self):
        """Return the external IP address as reported by WHATISMYIP."""
        r = requests.get(self.WHATISMYIP)
        # First whitespace-delimited token strips the trailing newline.
        return r.text.rsplit()[0]

    def newIP(self, IP):
        """Return True when *IP* differs from the cached address."""
        with open(self.IPFILE, 'r') as f:
            storedIP = f.readline()
        return storedIP != IP

    def updateIP(self, IP):
        """Push *IP* to the no-ip update API and return the response text.

        Request shape per no-ip instructions, e.g.
        http://user:pass@dynupdate.no-ip.com/nic/update?hostname=h&myip=1.2.3.4
        A User-Agent header is required; clients without one risk being
        blocked from the system.
        """
        payload = {'hostname': self.HOSTNAME, 'myip': IP}
        uagent = {'user-agent': self.USER_AGENT}
        r = requests.get(self.UPDATE_URL, headers=uagent, params=payload,
                         auth=HTTPBasicAuth(self.USERNAME, self.PASSWORD))
        return r.text.encode("ascii")

    def storeIP(self, IP):
        """Cache *IP* in IPFILE for the next comparison."""
        with open(self.IPFILE, 'w') as f:
            f.write(IP)
# end of class NOIP
# Configure timestamped logging to the updater's log file.
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s',filename='noipUpdater.log',level=logging.INFO)

# Fetch the current external address and compare with the cached one.
c = NOIP()
IP = c.getIP()

if not c.newIP(IP):
    # Nothing to do: the address on record is still accurate.
    logging.info('External IP has not changed')
else:
    output = c.updateIP(IP)
    # The API's verdict is the first whitespace-delimited token.
    status = output.rsplit()[0]
    if status == 'good':
        logging.info("External IP %s has been updated", IP)
        # Persist the address only after a confirmed successful update.
        c.storeIP(IP)
    elif status == 'nochg':
        logging.info("External IP %s has not changed", IP)
    elif status == 'badauth':
        logging.error('Invalid username/password')
    elif status == 'nohost':
        logging.error('No hostname specified')
    elif status == 'badagent':
        logging.error('Client disabled. Client should exit and not perform any more updates without user intervention')
    elif status == '!donator':
        logging.error('An update request was sent including a feature that is not available to that particular user such as offline options')
    else:
        # Unrecognised response: log it verbatim for diagnosis.
        logging.error(output)
|
class Human(object):
    """A person identified by the name they are called."""

    def __init__(self, called):
        # Name-mangled attribute; read back via getCalled().
        self.__called = called

    def getCalled(self):
        """Return this human's name."""
        return self.__called

    def __str__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.getCalled())

    def __repr__(self):
        return '{0}({1})'.format(self.__class__, self.getCalled())
class Parent(Human):
    """A human whose home responsibility is parenting."""

    def __init__(self, *args, **kwargs):
        # Construction is delegated entirely to Human.
        super(Parent, self).__init__(*args, **kwargs)

    def homeTask(self):
        """Return the chore this family member performs at home."""
        return 'Parenting'
|
# tests/__init__.py -- Portage Unit Test functionality
# Copyright 2006-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import print_function
import sys
import time
import unittest
from optparse import OptionParser, OptionValueError
try:
from unittest.runner import _TextTestResult # new in python-2.7
except ImportError:
from unittest import _TextTestResult
from portage import os
from portage import _encodings
from portage import _unicode_decode
def main():
    """Discover and run the portage test suite.

    With -l/--list, print each test module's path instead of running.
    Explicit test files on the command line restrict the run to those
    modules.  Returns os.EX_OK on success, 1 on any failure.
    """
    basedir = os.path.dirname(os.path.realpath(__file__))
    usage = "usage: %s [options] [tests to run]" % os.path.basename(sys.argv[0])
    parser = OptionParser(usage=usage)
    parser.add_option("-l", "--list", help="list all tests",
        action="store_true", dest="list_tests")
    options, args = parser.parse_args(args=sys.argv)

    if options.list_tests:
        testdir = os.path.dirname(sys.argv[0])
        for subdir in getTestDirs(basedir):
            for name in getTestNames(subdir):
                print("%s/%s/%s.py" % (testdir, os.path.basename(subdir), name))
        return os.EX_OK

    suite = unittest.TestSuite()
    if len(args) > 1:
        # args[0] is the program name; everything after it is a test file.
        suite.addTests(getTestFromCommandLine(args[1:], basedir))
    else:
        for subdir in getTestDirs(basedir):
            suite.addTests(getTests(os.path.join(basedir, subdir), basedir))

    result = TextTestRunner(verbosity=2).run(suite)
    if not result.wasSuccessful():
        return 1
    return os.EX_OK
def my_import(name):
    """Import dotted module *name* and return the leaf module object.

    Plain __import__ returns the top-level package, so walk the remaining
    path components with getattr.
    """
    module = __import__(name)
    for part in name.split('.')[1:]:
        module = getattr(module, part)
    return module
def getTestFromCommandLine(args, base_path):
    """Resolve explicit test-file arguments into loaded test suites.

    Each argument must name a file matching test*.py; anything else
    raises Exception.
    """
    result = []
    for arg in args:
        realpath = os.path.realpath(arg)
        path = os.path.dirname(realpath)
        filename = os.path.basename(realpath)
        if not (filename.startswith("test") and filename.endswith(".py")):
            raise Exception("Invalid argument: '%s'" % arg)
        # Strip the .py suffix to get the module name.
        result.extend(getTestsFromFiles(path, base_path, [filename[:-3]]))
    return result
def getTestDirs(base_path):
    """Return the sorted directories under *base_path* that contain a
    '__test__' marker file, skipping .svn metadata directories."""
    # NOTE(review): the markers are bytes because portage's os wrapper
    # appears to walk bytes paths here — confirm against the wrapper.
    TEST_FILE = b'__test__'
    svn_dirname = b'.svn'
    testDirs = []
    # the os.walk help mentions relative paths as being quirky
    # I was tired of adding dirs to the list, so now we add __test__
    # to each dir we want tested.
    for root, dirs, files in os.walk(base_path):
        if svn_dirname in dirs:
            dirs.remove(svn_dirname)
        try:
            root = _unicode_decode(root,
                encoding=_encodings['fs'], errors='strict')
        except UnicodeDecodeError:
            # Skip directories whose names are invalid in the fs encoding.
            continue
        if TEST_FILE in files:
            testDirs.append(root)
    testDirs.sort()
    return testDirs
def getTestNames(path):
    """Return sorted module names (without .py) of test files in *path*."""
    return sorted(name[:-3] for name in os.listdir(path)
        if name.startswith("test") and name.endswith(".py"))
def getTestsFromFiles(path, base_path, files):
    """Import each named test module under its dotted portage.tests path
    and load the tests it defines."""
    parent_path = path[len(base_path)+1:]
    # Turn the filesystem subpath into a dotted module prefix
    # (the trailing / becomes a . for importing).
    parent_module = ".".join(("portage", "tests", parent_path)).replace('/', '.')
    loader = unittest.TestLoader()
    return [loader.loadTestsFromModule(my_import("%s.%s" % (parent_module, name)))
        for name in files]
def getTests(path, base_path):
    """Load every test module found in *path* (a subdir such as 'portage/').

    Filters the directory for test modules and delegates the importing
    to getTestsFromFiles.
    """
    names = getTestNames(path)
    return getTestsFromFiles(path, base_path, names)
class TextTestResult(_TextTestResult):
"""
We need a subclass of unittest._TextTestResult to handle tests with TODO
This just adds an addTodo method that can be used to add tests
that are marked TODO; these can be displayed later
by the test runner.
"""
def __init__(self, stream, descriptions, verbosity):
super(TextTestResult, self).__init__(stream, descriptions, verbosity)
self.todoed = []
self.portage_skipped = []
def addTodo(self, test, info):
self.todoed.append((test,info))
if self.showAll:
self.stream.writeln("TODO")
elif self.dots:
self.stream.write(".")
def addPortageSkip(self, test, info):
self.portage_skipped.append((test,info))
if self.showAll:
self.stream.writeln("SKIP")
elif self.dots:
self.stream.write(".")
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
self.printErrorList('TODO', self.todoed)
self.printErrorList('SKIP', self.portage_skipped)
class TestCase(unittest.TestCase):
    """
    We need a way to mark a unit test as "ok to fail"
    This way someone can add a broken test and mark it as failed
    and then fix the code later. This may not be a great approach
    (broken code!!??!11oneone) but it does happen at times.
    """
    def __init__(self, *pargs, **kwargs):
        unittest.TestCase.__init__(self, *pargs, **kwargs)
        # When True, a failure is recorded as TODO instead of FAIL.
        self.todo = False
        # When set (True or a reason string), a failure is recorded as SKIP.
        self.portage_skip = None
    def defaultTestResult(self):
        # NOTE(review): TextTestResult.__init__ requires (stream,
        # descriptions, verbosity); this zero-argument call would raise if
        # ever reached — confirm run() is always given an explicit result.
        return TextTestResult()
    def run(self, result=None):
        # Re-implementation of unittest.TestCase.run that diverts failures
        # into the TODO/SKIP buckets when the test is marked accordingly.
        if result is None: result = self.defaultTestResult()
        result.startTest(self)
        testMethod = getattr(self, self._testMethodName)
        try:
            try:
                self.setUp()
            except SystemExit:
                raise
            except KeyboardInterrupt:
                raise
            except:
                # setUp crashed: record the error and bail out early
                # (finally still calls stopTest).
                result.addError(self, sys.exc_info())
                return
            ok = False
            try:
                testMethod()
                ok = True
            except self.failureException:
                # An assertion failed: route to SKIP or TODO if marked,
                # otherwise record a normal failure.
                if self.portage_skip is not None:
                    if self.portage_skip is True:
                        result.addPortageSkip(self, "%s: SKIP" % testMethod)
                    else:
                        result.addPortageSkip(self, "%s: SKIP: %s" %
                            (testMethod, self.portage_skip))
                elif self.todo:
                    result.addTodo(self,"%s: TODO" % testMethod)
                else:
                    result.addFailure(self, sys.exc_info())
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                result.addError(self, sys.exc_info())
            try:
                self.tearDown()
            except SystemExit:
                raise
            except KeyboardInterrupt:
                raise
            except:
                # A tearDown error invalidates an otherwise-passing test.
                result.addError(self, sys.exc_info())
                ok = False
            if ok: result.addSuccess(self)
        finally:
            result.stopTest(self)
    def assertRaisesMsg(self, msg, excClass, callableObj, *args, **kwargs):
        """Fail unless an exception of class excClass is thrown
        by callableObj when invoked with arguments args and keyword
        arguments kwargs. If a different type of exception is
        thrown, it will not be caught, and the test case will be
        deemed to have suffered an error, exactly as for an
        unexpected exception.
        """
        try:
            callableObj(*args, **kwargs)
        except excClass:
            return
        else:
            if hasattr(excClass,'__name__'): excName = excClass.__name__
            else: excName = str(excClass)
            raise self.failureException("%s not raised: %s" % (excName, msg))
class TextTestRunner(unittest.TextTestRunner):
    """
    We subclass unittest.TextTestRunner to output SKIP for tests that fail but are skippable
    """
    def _makeResult(self):
        # Hand out our extended result class so TODO/SKIP are recorded.
        return TextTestResult(self.stream, self.descriptions, self.verbosity)
    def run(self, test):
        """
        Run the given test case or test suite.
        """
        result = self._makeResult()
        startTime = time.time()
        test(result)
        stopTime = time.time()
        timeTaken = stopTime - startTime
        # Dump the collected ERROR/FAIL/TODO/SKIP lists, then the summary.
        result.printErrors()
        self.stream.writeln(result.separator2)
        run = result.testsRun
        # "test%s" pluralizes: 's' appended whenever run != 1.
        self.stream.writeln("Ran %d test%s in %.3fs" %
            (run, run != 1 and "s" or "", timeTaken))
        self.stream.writeln()
        if not result.wasSuccessful():
            self.stream.write("FAILED (")
            failed = len(result.failures)
            errored = len(result.errors)
            if failed:
                self.stream.write("failures=%d" % failed)
            if errored:
                if failed: self.stream.write(", ")
                self.stream.write("errors=%d" % errored)
            self.stream.writeln(")")
        else:
            self.stream.writeln("OK")
        return result
# Sample data exercised by the dep/atom test cases.
test_cps = ['sys-apps/portage','virtual/portage']  # category/package names
test_versions = ['1.0', '1.0-r1','2.3_p4','1.0_alpha57']  # version strings
test_slots = [ None, '1','gentoo-sources-2.6.17','spankywashere']  # SLOT values
test_usedeps = ['foo','-bar', ('foo','bar'),
	('foo','-bar'), ('foo?', '!bar?') ]  # USE-dependency specs
|
# class and inheritance example 2
class Animal:
    """Demo class with private-by-convention fields and accessor methods."""

    # Class-level defaults; instances overwrite them in __init__.
    _name = ''
    _age = 0
    _color = ''
    _specification = ''

    def __init__(self, name='', age=0, color='', specification=''):
        """Initialise every field through its setter."""
        self.set_name(name)
        self.set_age(age)
        self.set_color(color)
        self.set_specification(specification)

    def set_name(self, name):
        self._name = name

    def get_name(self):
        return self._name

    def set_age(self, age):
        self._age = age

    def get_age(self):
        return self._age

    def set_color(self, color):
        self._color = color

    def get_color(self):
        return self._color

    def set_specification(self, spec):
        self._specification = spec

    def get_specification(self):
        return self._specification

    def details_view(self):
        """Print a formatted summary of every field."""
        summary = ("Name= {name} \n"
                   "Age= {age} \n"
                   "Color= {color} \n"
                   "Specification= {spec}").format(
            name=self.get_name(), age=self.get_age(),
            color=self.get_color(), spec=self.get_specification())
        print(summary)
        return
#
# another class
class Bird(Animal):
    # Inherits every field and method from Animal unchanged.
    pass
# creating object
beautifulBird = Bird('Koyel', 2, 'yellow', 'Birds')
beautifulBird.details_view()
|
""""
小论文 敏感性分析
2021年6月17日
电流误差 1 A,暂时不需要分析了
"""
from os import error, path
import sys
sys.path.append(path.dirname(path.abspath(path.dirname(__file__))))
sys.path.append(path.dirname(path.dirname(
path.abspath(path.dirname(__file__)))))
from cctpy import *
if __name__ == '__main__':
BaseUtils.i_am_sure_my_code_closed_in_if_name_equal_main()
timer = BaseUtils.Timer()
agcct3_winding_number = 25
agcct4_winding_number = 40
agcct5_winding_number = 34
floatings:List[float] = BaseUtils.linspace(-0.02,0.02,6)
bls = []
for floating in floatings:
gantry = HUST_SC_GANTRY(
qs3_gradient=5.546,
qs3_second_gradient=-57.646,
dicct345_tilt_angles=[30, 87.426, 92.151, 91.668],
agcct345_tilt_angles=[94.503, 30, 72.425, 82.442],
dicct345_current=9445.242 * (floating+1),
agcct345_current=-5642.488,
agcct3_winding_number=agcct3_winding_number,
agcct4_winding_number=agcct4_winding_number,
agcct5_winding_number=agcct5_winding_number,
agcct3_bending_angle=-67.5*(agcct3_winding_number)/(
agcct3_winding_number+agcct4_winding_number+agcct5_winding_number),
agcct4_bending_angle=-67.5*(agcct4_winding_number)/(
agcct3_winding_number+agcct4_winding_number+agcct5_winding_number),
agcct5_bending_angle=-67.5*(agcct5_winding_number)/(
agcct3_winding_number+agcct4_winding_number+agcct5_winding_number),
DL1=0.9007765,
GAP1=0.4301517,
GAP2=0.370816,
qs1_length=0.2340128,
qs1_aperture_radius=60 * MM,
qs1_gradient=0.0,
qs1_second_gradient=0.0,
qs2_length=0.200139,
qs2_aperture_radius=60 * MM,
qs2_gradient=0.0,
qs2_second_gradient=0.0,
DL2=2.35011,
GAP3=0.43188,
qs3_length=0.24379,
agcct345_inner_small_r=83 * MM,
agcct345_outer_small_r=98 * MM, # 83+15
dicct345_inner_small_r=114 * MM, # 83+30+1
dicct345_outer_small_r=130 * MM, # 83+45 +2
)
bl = gantry.create_second_bending_part_beamline()
bls.append(bl)
# beamline_phase_ellipse_multi_delta(
# bl, 8, [-0.05, 0.0, 0.05], foot_step=10*MM, report=False
# )
ga = GPU_ACCELERATOR(
# float_number_type=GPU_ACCELERATOR.FLOAT64,
# block_dim_x=256
)
if True:
ds = BaseUtils.linspace(-0.05,0.05,11)
ret:List[List[float]] = []
for d in ds:
results = ga.track_phase_ellipse_in_multi_beamline(
beamlines=bls,
x_sigma_mm=3.5,xp_sigma_mrad=7.5,
y_sigma_mm=3.5,yp_sigma_mrad=7.5,
delta=d,
particle_number=16,kinetic_MeV=215,
footstep=10*MM
)
results_info = BaseUtils.combine(floatings,results)
for i in range(len(results_info)):
result_info = results_info[i]
floating = result_info[0]
result = result_info[1]
xs = result[0]
ys = result[1]
x_width = BaseUtils.Statistic().add_all(P2.extract_x(xs)).half_width()
y_width = BaseUtils.Statistic().add_all(P2.extract_x(ys)).half_width()
ret.append(
[d, floating, x_width, y_width]
)
print(timer.period())
refrom = []
for i in range(0,len(ret),len(floatings)):
appending = [
ret[i][0],
ret[i][1],
ret[i][2],
ret[i][3],
]
for j in range(1,len(floatings)):
appending = appending + [
ret[i+j][1],
ret[i+j][2],
ret[i+j][3],
]
refrom.append(appending)
for r in refrom:
print(*r,sep=' ')
print("----------------------------") |
# Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
#
import re
import typing
# Keep a reference to the unpatched class so the patch below is reversible.
ForwardRefPristine = typing.ForwardRef
_union_exp = r"^([^| \[\]]*)\s*\|\s*([^\[\]]*)$"
def _translate_piped_types_to_union(value: str) -> str:
# Very naive limited to top-level plain types two-legged unions only.
return re.sub(_union_exp, r"Union[\1, \2]", value)
def patch_forward_ref() -> None:
    # Swap in our subclass so `A | B` strings in annotations keep working.
    typing.ForwardRef = ForwardRefPatched  # type: ignore[misc]
def restore_forward_ref() -> None:
    # Undo patch_forward_ref(), reinstating the original class.
    typing.ForwardRef = ForwardRefPristine  # type: ignore[misc]
# We are not supposed to subclass this... but we want to support | annotations.
class ForwardRefPatched(typing.ForwardRef, _root=True):  # type: ignore[call-arg, misc]
    """ForwardRef that understands PEP 604 `A | B` strings on older Pythons."""

    def __init__(self, arg, *args, **kwargs):
        # Normalise `A | B` to `Union[A, B]` before the stock parser sees it.
        unionised_arg = _translate_piped_types_to_union(arg)
        super().__init__(unionised_arg, *args, **kwargs)

    def _evaluate(self, globalns, *args, **kwargs):
        # Ensure `Union` resolves in the target namespace, since __init__ may
        # have introduced it into the annotation string.
        if globalns and "Union" not in globalns:
            globalns["Union"] = typing.Union
        return super()._evaluate(globalns, *args, **kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import mako
import os
import requests
import subprocess
from girder import constants, events
from girder.constants import SettingKey
from girder.utility.model_importer import ModelImporter
from .imagefeatures_rest import ImageFeatures
from .imagepivot_rest import ImagePivot
from .imagesearch_rest import ImageSearch
from .imageprefix_rest import ImagePrefix
from .settings import ImageSpaceSetting
# Shared accessor for this plugin's persisted settings.
imagespaceSetting = ImageSpaceSetting()
class CustomAppRoot(object):
    """
    This serves the main index HTML file of the custom app from /
    """
    exposed = True

    # Rendered page is cached here after the first GET.
    indexHtml = None

    # Values interpolated into the mako template below.
    vars = {
        'apiRoot': 'api/v1',
        'staticRoot': 'static',
        'title': 'ImageSpace',
        'versionInfo': {
            'niceName': 'SUG v3.0',
            # Short git SHA of the deployed checkout, captured at import time.
            'sha': subprocess.check_output(
                ['git', 'rev-parse', '--short', 'HEAD'],
                cwd=os.path.dirname(os.path.realpath(__file__))
            ).strip()
        }
    }

    template = r"""
<!DOCTYPE html>
<html lang="en">
<head>
<title>${title}</title>
<link rel="stylesheet"
href="//fonts.googleapis.com/css?family=Droid+Sans:400,700">
<link rel="stylesheet"
href="${staticRoot}/lib/bootstrap/css/bootstrap.min.css">
<link rel="stylesheet"
href="${staticRoot}/lib/fontello/css/fontello.css">
<link rel="stylesheet"
href="${staticRoot}/lib/fontello/css/animation.css">
<link rel="stylesheet"
href="${staticRoot}/built/app.min.css">
% for plugin in pluginCss:
% if plugin != 'imagespace':
<link rel="stylesheet"
href="${staticRoot}/built/plugins/${plugin}/plugin.min.css">
% endif
% endfor
<link rel="stylesheet"
href="${staticRoot}/built/plugins/imagespace/imagespace.min.css">
<link rel="icon"
type="image/png"
href="${staticRoot}/img/Girder_Favicon.png">
<style id="blur-style">
img.im-blur {
-webkit-filter: blur(10px);
filter: blur(10px)
}
</style>
<script type="text/javascript">
imagespace = {};
imagespace.versionInfo = ${versionInfo};
</script>
</head>
<body>
<div id="g-global-info-apiroot" class="hide">${apiRoot}</div>
<div id="g-global-info-staticroot" class="hide">${staticRoot}</div>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-66442136-2', 'auto');
ga('send', 'pageview');
</script>
<script src="${staticRoot}/built/libs.min.js"></script>
<script src="${staticRoot}/built/app.min.js"></script>
<script src="${staticRoot}/built/plugins/gravatar/plugin.min.js">
</script>
<script src="${staticRoot}/built/plugins/imagespace/imagespace-libs.min.js">
</script>
<script src="${staticRoot}/built/plugins/imagespace/imagespace.min.js">
</script>
<script src="${staticRoot}/built/plugins/imagespace/main.min.js"></script>
% for plugin in pluginJs:
% if plugin != 'imagespace':
<script src="${staticRoot}/built/plugins/${plugin}/plugin.min.js"></script>
% endif
% endfor
</body>
</html>
"""

    def GET(self):
        """Render (and cache) the index page; returns the HTML string."""
        # Recompute the enabled-plugin asset lists on every request.
        self.vars['pluginCss'] = []
        self.vars['pluginJs'] = []
        builtDir = os.path.join(constants.STATIC_ROOT_DIR, 'clients', 'web',
                                'static', 'built', 'plugins')
        for plugin in ModelImporter.model('setting').get(
                SettingKey.PLUGINS_ENABLED):
            if os.path.exists(os.path.join(builtDir, plugin, 'plugin.min.css')):
                self.vars['pluginCss'].append(plugin)
            if os.path.exists(os.path.join(builtDir, plugin, 'plugin.min.js')):
                self.vars['pluginJs'].append(plugin)
        # Render only once; subsequent requests reuse the cached HTML.
        if self.indexHtml is None:
            self.indexHtml = mako.template.Template(self.template).render(
                **self.vars)
        return self.indexHtml
def load(info):
    """Girder plugin entry point: wire settings, static routes and REST."""
    # Fail fast if any required setting is missing.
    for setting in ImageSpaceSetting.requiredSettings:
        imagespaceSetting.get(setting)
    # Absolute path to a directory of images to serve statically at /basename
    image_dir = imagespaceSetting.get('IMAGE_SPACE_IMAGE_DIR')
    if image_dir:
        info['config']['/images'] = {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': image_dir
        }
    # Bind our REST resources
    info['apiRoot'].imagesearch = ImageSearch()
    info['apiRoot'].imagefeatures = ImageFeatures()
    info['apiRoot'].imagepivot = ImagePivot()
    info['apiRoot'].imageprefix = ImagePrefix()
    # Move girder app to /girder, serve our custom app from /
    info['serverRoot'], info['serverRoot'].girder = (CustomAppRoot(),
                                                     info['serverRoot'])
    info['serverRoot'].api = info['serverRoot'].girder.api
def solr_documents_from_field(field, values, classifications=None):
    """Given a field, and a list of values, return list of relevant solr documents.

    Additionally it can take an iterable of classifications which will be
    searched for through Solr.

    :param field: Solr field name to filter on
    :param values: list of values the field may take
    :param classifications: List of classifications to search by
    :returns: List of solr documents
    """
    def paged_request(params):
        """
        Takes a params dictionary and manages paging.
        Uses POST so very large request bodies can be sent to Solr.
        Returns a list of all documents.
        """
        documents = []
        # Adjust paging params
        params['start'] = 0
        params['rows'] = 1000
        numFound = None
        numRetrieved = None
        while numRetrieved is None or numRetrieved < numFound:
            r = requests.post(imagespaceSetting.get('IMAGE_SPACE_SOLR') + '/select',
                              data=params,
                              verify=False).json()
            numFound = r['response']['numFound']
            numRetrieved = len(r['response']['docs']) if numRetrieved is None \
                else numRetrieved + len(r['response']['docs'])
            documents += r['response']['docs']
            # Setup offset for next request
            params['start'] = numRetrieved
        return documents

    # Let plugins rewrite the (field, values) pair before querying;
    # the last responder wins.
    event = events.trigger('imagespace.solr_documents_from_field', info={
        'field': field,
        'values': values
    })
    for response in event.responses:
        field = response['field']
        values = response['values']
    if classifications:
        # Require at least 0.7 confidence in any requested classification.
        q = ' OR '.join(['%s:[.7 TO *]' % key
                         for key in classifications])
    else:
        q = '*:*'
    qparams = {
        'wt': 'json',
        'q': q
    }
    # Give plugins a chance to adjust the Solr query parameters
    event = events.trigger('imagespace.imagesearch.qparams', qparams)
    for response in event.responses:
        qparams = response
    # Filter by field
    qparams['fq'] = qparams['fq'] if 'fq' in qparams else []
    qparams['fq'].append('%(field)s:(%(value)s)' % {
        'field': field,
        'value': ' '.join(values)
    })
    return paged_request(qparams)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Filename: step05_rerun_preliminary_regression_with_single_variable
# @Date: 2020/3/20
# @Author: Mark Wang
# @Email: wangyouan@gamil.com
"""
python -m ConstructRegressionFile.Stata.step04_rerun_preliminary_regression_for_presidential_countries
"""
import os
from Constants import Constants as const
from .step02_rerun_preliminary_regression import generate_regression_code
from .step04_rerun_preliminary_regression_for_presidential_countries import DEP_VARS
# Firm- and country-level control variables, joined for the Stata command line.
CTRL_VARS = ' '.join(
    ['ln_at', 'TANGIBILITY', 'CAPEX', 'ROA', 'ln_GDP', 'ln_GDP_PC', 'NY_GDP_MKTP_KD_ZG', 'SL_UEM_TOTL_ZS',
     'ln_POPULATION', 'NE_IMP_GNFS_ZS', 'NE_EXP_GNFS_ZS'])
if __name__ == '__main__':
    date_str = '20200324'
    save_file = os.path.join(const.STATA_CODE_PATH, '{}_preliminary_code_1.do'.format(date_str))
    output_path = os.path.join(const.STATA_RESULT_PATH, '{}_preliminary_1'.format(date_str))
    if not os.path.isdir(output_path):
        os.makedirs(output_path)
    # Preamble: load the dataset and zero-fill the R_B_1 indicator.
    cmd_list = ['clear',
                'use "{}"'.format(os.path.join(const.STATA_DATA_PATH, '20200320_term_limit_regression_data.dta')),
                'replace R_B_1 = 0 if missing(R_B_1)']
    # Independent variables: {formal,real} x {Extend,ToUnlimit,ToLimit,Shrink}.
    ind_vars = list()
    for suf in ['Extend', 'ToUnlimit', 'ToLimit', 'Shrink']:
        for pre in ['formal', 'real']:
            ind_vars.append('{}_{}'.format(pre, suf))
    for ind_key in ind_vars:
        for lag in [2]:
            # Lag N maps to the variable suffixed _{N+1}; lag 0 uses the bare name.
            real_key = '{}_{}'.format(ind_key, lag + 1) if lag != 0 else ind_key
            output_file = os.path.join(output_path, '{}.txt'.format(real_key))
            for dep_key in DEP_VARS:
                cmd_list.extend(
                    generate_regression_code(dep=dep_key, ind=real_key, ctrl=CTRL_VARS, fe_option='gvkey fyear',
                                             cluster_option='gvkey', output_path=output_file, condition='',
                                             text_option='Firm Dummy, Yes, Year Dummy, Yes, Cluster, Firm',
                                             data_description='tstat bdec(4) tdec(4) rdec(4)'))
    with open(save_file, 'w') as f:
        f.write('\n'.join(cmd_list))
    # Print the Stata command that runs the generated do-file.
    print('do "{}"'.format(save_file))
|
import time
# import os
# import base64
# import logging
# import pandas as pd
# ANSI colour escape codes for terminal output.
TERM_RED = '\033[1;31m'
TERM_NFMT = '\033[0;0m'  # reset / no formatting
TERM_BLUE = '\033[1;34m'
TERM_GREEN = '\033[1;32m'
def print_error(func_name, err):
    '''
    Print "func_name - err" highlighted in red on the terminal.
    '''
    message = '%s - %s' % (func_name, err)
    print(TERM_RED + message + TERM_NFMT)
def current_milli_ts() -> str:
    """Return the current Unix time in whole milliseconds, as a string."""
    millis = int(time.time() * 1000)
    return str(millis)
# ########################################################################################################################
# # Logging relates
# ########################################################################################################################
# def get_logger(name, log_level=s.LOG_LEVEL):
# '''
# customize logger format
# '''
# formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(message)s')
# file_handler = logging.FileHandler('daxiang_robot.log')
# file_handler.setFormatter(formatter)
# console_handler = logging.StreamHandler()
# console_handler.setFormatter(formatter)
# logger = logging.getLogger(name)
# logger.setLevel(log_level)
# logger.addHandler(file_handler)
# logger.addHandler(console_handler)
# return logger
# def read_log(file):
# '''
# read a log file line by line, return a html formatted string
# '''
# text = ''
# if not os.path.isfile(file): return text
# with open(file,'r') as f:
# lines = f.readlines()
# for line in lines:
# text += line
# text += '<br>'
# return text
# def read_recent_log(file, offset):
# '''
# read log from botton with offset
# offset: should be negative, and it refers bytes
# '''
# text = ''
# if not os.path.isfile(file): return text
# with open(file, 'rb') as f:
# try:
# f.seek(offset, os.SEEK_END)
# lines = f.readlines()
# lines = lines[::-1]
# for line in lines:
# text += line.decode()
# text += '<br>'
# except OSError:
# lines = f.readlines()
# lines = lines[::-1]
# for line in lines:
# text += line.decode()
# text += '<br>'
# return text
# def href_wrapper(file):
# '''
# return a html formatted string for href
# '''
# return f'<a href="http://{s.DASHBOARD_HOST}:{s.DASHBOARD_PORT}/log">{file}</a>'
# def logging_order(id, type, side, qty, price=None, stop=None):
# logger.info(f"========= New Order ==============")
# logger.info(f"ID : {id}")
# logger.info(f"Type : {type}")
# logger.info(f"Side : {side}")
# logger.info(f"Qty : {qty}")
# logger.info(f"Price : {price}")
# logger.info(f"Stop : {stop}")
# logger.info(f"======================================")
# ########################################################################################################################
# # Network relates
# ########################################################################################################################
# def retry(func, count=5):
# '''
# Bitmex http request wrapper function for robust purpose.
# For 503 case ("The system is currently overloaded. Please try again later."),
# will not increase index, make request until succeed.
# '''
# err = None
# i = 0
# while i < count:
# try:
# ret, res = func()
# rate_limit = int(res.headers['X-RateLimit-Limit'])
# rate_remain = int(res.headers['X-RateLimit-Remaining'])
# if rate_remain < 10:
# time.sleep(5 * 60 * (1 + rate_limit - rate_remain) / rate_limit)
# return ret
# except HTTPError as error:
# status_code = error.status_code
# err = error
# if status_code == 503:
# time.sleep(0.5)
# continue
# elif status_code >= 500:
# time.sleep(pow(2, i + 1))
# i = i+1
# continue
# elif status_code == 400 or \
# status_code == 401 or \
# status_code == 402 or \
# status_code == 403 or \
# status_code == 404 or \
# status_code == 429:
# logger.error(Exception(error))
# raise Exception(error)
# else:
# i = i+1
# logger.error(Exception(err))
# raise Exception(err)
# ########################################################################################################################
# # List or string process
# ########################################################################################################################
# def to_data_frame(data, reverse = False):
# '''
# convert ohlcv data list to pandas frame
# reverse the frame if latest come first
# '''
# data_frame = pd.DataFrame(data, columns=["timestamp", "high", "low", "open", "close", "volume"])
# data_frame = data_frame.set_index("timestamp")
# data_frame = data_frame.tz_localize(None).tz_localize('UTC', level=0)
# if reverse:
# data_frame = data_frame.iloc[::-1]
# return data_frame
# def resample(data_frame, bin_size):
# resample_time = s.INTERVAL[bin_size][1]
# return data_frame.resample(resample_time, closed='right').agg({
# "open": "first",
# "high": "max",
# "low": "min",
# "close": "last",
# "volume": "sum",
# })
# def random_str():
# '''
# generate a random string
# '''
# return base64.b64encode(os.urandom(5)).decode()
# def change_rate(a, b):
# '''
# calculate change rate from a to b
# return percentage with 2 digits
# '''
# return round(float((b-a)/a * 100), 2)
# ########################################################################################################################
# # Basic technical analysis
# ########################################################################################################################
# def crossover(a, b):
# return a[-2] < b[-2] and a[-1] > b[-1]
# def crossunder(a, b):
# return a[-2] > b[-2] and a[-1] < b[-1]
# def ema(series, periods):
# return series.ewm(span=periods, adjust=False).mean()
# def sma(series, periods):
# return series.rolling(periods).mean()
# def macd(df, n_fast=12, n_slow=26, n_signal=9):
# """Calculate MACD, MACD Signal and MACD difference
# :param df: pandas.DataFrame
# :param n_fast:
# :param n_slow:
# :param n_signal:
# :return: pandas.DataFrame
# """
# EMAfast = ema(df.close, n_fast)
# EMAslow = ema(df.close, n_slow)
# MACD = pd.Series(EMAfast - EMAslow, name='macd')
# MACD_signal = pd.Series(ema(MACD, n_signal), name='macd_signal')
# MACD_diff = pd.Series(MACD - MACD_signal, name='macd_diff')
# df = df.join(MACD)
# df = df.join(MACD_signal)
# df = df.join(MACD_diff)
# return df
# def rsi(df, n=14):
# close = df.close
# diff = close.diff(1)
# which_dn = diff < 0
# up, dn = diff, diff*0
# up[which_dn], dn[which_dn] = 0, -up[which_dn]
# emaup = ema(up, n)
# emadn = ema(dn, n)
# RSI = pd.Series(100 * emaup / (emaup + emadn), name='rsi')
# df = df.join(RSI)
# return df |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Created By Liang Jun Copyright owned
import sys,os,math
import cv2 as cv
import numpy as np
# BGR marker colours used in the hand-annotated "_train" images.
VESSEL = [2,2,2];        # near-black pixels mark vessels (matched with <=)
ERYTHROCYTE = [0,255,0]; # pure green marks erythrocytes
NEGATIVE = [0,0,255];    # pure red marks negative samples
RangeType = 9; #5,7,9,11,13,15,21  (neighbourhood size; unused in this snippet)
def checkMarkedData(folderName):
    """For every marked training image in folderName, write an 'out<name>' image.

    A pair is (X.png, X_train.png).  Pixels of the _train image marked as
    VESSEL (near-black), ERYTHROCYTE (green) or NEGATIVE (red) are copied
    from the source image; every other pixel is white (255).
    """
    global VESSEL, ERYTHROCYTE, NEGATIVE
    if not os.path.exists(folderName):
        return
    for f in os.listdir(folderName):
        # BUG FIX: str.find() returns -1 (truthy!) when absent, so the
        # original `f.find(r'.png') and ...` accepted non-PNG files and
        # rejected names *starting* with '.png'.  Use membership tests.
        if '.png' in f and '_train' in f:
            sourceName = f.replace(r'_train', '')
            print(sourceName)
            sourceImage = cv.imread(os.path.join(folderName, sourceName))
            markImage = cv.imread(os.path.join(folderName, f))
            # Vectorised replacement of the original per-pixel double loop:
            # a pixel is kept when it matches any of the three marker colours.
            marked = ((markImage <= np.array(VESSEL)).all(axis=2)
                      | (markImage == np.array(ERYTHROCYTE)).all(axis=2)
                      | (markImage == np.array(NEGATIVE)).all(axis=2))
            outImage = np.full(sourceImage.shape, 255, dtype=np.uint8)
            outImage[marked] = sourceImage[marked]
            cv.imwrite(os.path.join(folderName, 'out' + f), outImage)
def main(argv):
    """Entry point: argv[1] is the folder containing the marked images."""
    checkMarkedData(argv[1]);
    # changeNamesInFolder(argv[1]);
if __name__ == '__main__':
main(sys.argv) |
from rest_framework import serializers
from dataprocessing.serializers import userProfileSerializer
from workprogramsapp.folders_ans_statistic.models import Folder, WorkProgramInFolder, AcademicPlanInFolder, \
DisciplineBlockModuleInFolder, IndividualImplementationAcademicPlanInFolder
from workprogramsapp.individualization.serializers import ShortImplementationAcademicPlanSerializer, \
ShortIndividualImplementationAcademicPlanSerializer
from workprogramsapp.serializers import WorkProgramShortForExperiseSerializer, AcademicPlanShortSerializer, \
DisciplineBlockModuleSerializer, ImplementationAcademicPlanSerializer
# Folders containing work programmes (RPD)
class WorkProgramInFolderSerializer(serializers.ModelSerializer):
    """Serializer for a work programme placed in a folder."""
    class Meta:
        model = WorkProgramInFolder
        fields = "__all__"

    def to_representation(self, value):
        # Expand the FK into a nested short work-programme representation.
        self.fields['work_program'] = WorkProgramShortForExperiseSerializer(many=False)
        return super().to_representation(value)
class FolderCreateSerializer(serializers.ModelSerializer):
    """Minimal folder payload for creation: id, name and description only."""
    class Meta:
        model = Folder
        fields = ["id", "name", "description"]
class FolderSerializer(serializers.ModelSerializer):
    """Full folder representation, expanding everything the folder contains."""
    class Meta:
        model = Folder
        fields = ["id", "name", "description", "owner", 'work_program_in_folder']

    def update(self, instance, validated_data):
        # NOTE(review): debug print and no field update — the incoming
        # validated_data is discarded.  Confirm this is intentional.
        print(validated_data)
        # ... logic to save ingredients for this recipe instance
        return instance

    def to_representation(self, value):
        # Swap in nested serializers for every related collection.
        self.fields['owner'] = userProfileSerializer(many=False)
        # self.fields['work_program'] = WorkProgramShortForExperiseSerializer(many=True)
        self.fields['work_program_in_folder'] = WorkProgramInFolderSerializer(many=True)
        self.fields['academic_plan_in_folder'] = AcademicPlanInFolderSerializer(many=True)
        self.fields['block_module_in_folder'] = ModuleInFolderSerializer(many=True)
        self.fields['individual_implementation_of_academic_plan_in_folder'] = IndividualImplementationAcademicPlanInFolderSerializer(many=True)
        return super().to_representation(value)
# Folders containing academic plans
class AcademicPlanInFolderSerializer(serializers.ModelSerializer):
    """Serializer for an academic plan placed in a folder."""
    class Meta:
        model = AcademicPlanInFolder
        fields = "__all__"

    def to_representation(self, value):
        # Expand the FK into a nested short academic-plan representation.
        self.fields['academic_plan'] = AcademicPlanShortSerializer(many=False)
        return super().to_representation(value)
# Folders containing modules
class ModuleInFolderSerializer(serializers.ModelSerializer):
    """Serializer for a discipline-block module placed in a folder."""
    class Meta:
        model = DisciplineBlockModuleInFolder
        fields = "__all__"

    def to_representation(self, value):
        # Expand the FK into a nested module representation.
        self.fields['block_module'] = DisciplineBlockModuleSerializer(many=False)
        return super().to_representation(value)
# Folders containing individual trajectories
class IndividualImplementationAcademicPlanInFolderSerializer(serializers.ModelSerializer):
    """Serializer for an individual academic-plan implementation in a folder."""
    class Meta:
        model = IndividualImplementationAcademicPlanInFolder
        fields = "__all__"

    def to_representation(self, value):
        # Expand the FK into a nested short representation.
        self.fields['individual_implementation_of_academic_plan'] = ShortIndividualImplementationAcademicPlanSerializer(many=False)
        return super().to_representation(value)
# Statistics
|
#!/usr/bin/env python
""" scps noise trained weka .model files neede for classification
"""
import sys, os
import glob
client_defs = [ \
{'name':'__local__',
'hostname':'127.0.0.1',
'furl_dirpath':'/home/pteluser/.ipython/security',
'username':'pteluser',
'ssh_port':22,
'n_engines':10},
{'name':'__worms2__',
'hostname':'localhost',
'furl_dirpath':'/home/starr/.ipython/security',
'username':'starr',
'ssh_port':32151,
'n_engines':0},
{'name':'__cch1__',
'hostname':'localhost',
'furl_dirpath':'/home/dstarr/.ipython/security',
'username':'dstarr',
'nice':19,
'ssh_port':32161,
'n_engines':1},
]
"""
{'name':'__trans1__',
'hostname':'192.168.1.45',
'furl_dirpath':'/home/pteluser/.ipython/security',
'username':'pteluser',
'ssh_port':22,
'n_engines':0},
{'name':'__trans2__',
'hostname':'192.168.1.55',
'furl_dirpath':'/home/pteluser/.ipython/security',
'username':'pteluser',
'ssh_port':22,
'n_engines':0},
{'name':'__trans3__',
'hostname':'192.168.1.65',
'furl_dirpath':'/home/pteluser/.ipython/security',
'username':'pteluser',
'ssh_port':22,
'n_engines':0},
{'name':'__sgn02__',
'hostname':'sgn02.nersc.gov',
'furl_dirpath':'/global/homes/d/dstarr/datatran/.ipython/security',
'username':'dstarr',
'ssh_port':22,
'n_engines':0},
"""
def send_to_other_nodes(glob_mask, dirnames, client_defs):
    """Copy local ~/scratch/Noisification/<dir>/*arff and *model files to
    every remote node in client_defs (the __local__ entry is skipped).
    """
    for client_def in client_defs:
        if client_def['name'] == '__local__':
            continue
        port = client_def['ssh_port']
        user = client_def['username']
        host = client_def['hostname']
        for dirname in dirnames:
            # Ensure the target directory exists on the remote side.
            os.system("ssh -tp %d %s@%s mkdir scratch/Noisification/%s" % (
                port, user, host, dirname))
            # Push the arff files first, then the trained weka models.
            for pattern in ('*arff', '*model'):
                os.system(
                    "scp -CP %d ~/scratch/Noisification/%s/%s %s@%s:scratch/Noisification/%s/" % (
                        port, dirname, pattern, user, host, dirname))
def retrieve_from_other_node(glob_mask, dirnames, retrieve_host_dict):
    """Copy *arff and *model files for each dirname from one remote node
    into ~/scratch/Noisification/ on this machine.

    :param glob_mask: unused here; kept for interface compatibility.
    :param dirnames: directory names under scratch/Noisification to fetch.
    :param retrieve_host_dict: dict with 'ssh_port', 'username', 'hostname'.
    """
    # scp to ~/scratch/Noisification/ files: scratch/Noisification/<dir>/*arff *model
    for dirname in dirnames:
        os.system("mkdir -p ~/scratch/Noisification/%s" % (dirname))
        exec_str = "scp -CP %d %s@%s:scratch/Noisification/%s/{*arff,*model} ~/scratch/Noisification/%s/" % ( \
            retrieve_host_dict['ssh_port'],
            retrieve_host_dict['username'],
            retrieve_host_dict['hostname'],
            dirname,
            dirname)
        # Fixed: Python 2 `print exec_str` statement (a SyntaxError under
        # Python 3) replaced with a print() call, valid on both versions.
        print(exec_str)
        os.system(exec_str)
if __name__ == '__main__':
    #glob_mask = sys.argv[1] # eg: 50nois_*short1
    glob_mask = "50nois_*qk17.9"
    dirnames = glob.glob(glob_mask)
    #send_to_other_nodes(glob_mask, dirnames, client_defs)
    # Pull results back from the cch1 node rather than pushing out.
    retrieve_host_dict = \
        {'name':'__cch1__',
         'hostname':'localhost',
         'furl_dirpath':'/home/dstarr/.ipython/security',
         'username':'dstarr',
         'nice':19,
         'ssh_port':32161,
         'n_engines':1}
    # NOTE: overrides the globbed dirnames above with an explicit list.
    dirnames = ['20nois_19epch_040need_0.050mtrc_j48_17.9',
                '20nois_15epch_040need_0.050mtrc_j48_17.9',
                '20nois_11epch_040need_0.050mtrc_j48_17.9',
                '20nois_21epch_040need_0.050mtrc_j48_17.9',
                '20nois_25epch_040need_0.050mtrc_j48_17.9',
                '20nois_29epch_040need_0.050mtrc_j48_17.9',
                '20nois_17epch_040need_0.050mtrc_j48_17.9',
                '20nois_13epch_040need_0.050mtrc_j48_17.9',
                '20nois_20epch_040need_0.050mtrc_j48_17.9',
                '20nois_27epch_040need_0.050mtrc_j48_17.9',
                '20nois_23epch_040need_0.050mtrc_j48_17.9',
                '20nois_09epch_040need_0.050mtrc_j48_17.9',
                '20nois_10epch_040need_0.050mtrc_j48_17.9',
                '20nois_33epch_040need_0.050mtrc_j48_17.9']
    retrieve_from_other_node(glob_mask, dirnames, retrieve_host_dict)
|
import numpy as np
import cv2
import sys
import yaml
import json
from PIL import Image
from mtcnn.mtcnn import MTCNN
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import Normalizer
from sklearn.svm import SVC
from function import get_embedding
import boto3
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
def get_people():
    """Load every registered user's face embeddings from DynamoDB.

    :returns: (emdTrainX, trainy, count_people) — a list of 128-d embedding
        vectors, the matching user-id label per vector, and the number of
        user records scanned.
    """
    # Init DynamoDB
    session = boto3.Session(profile_name=config['profile_name'])
    dynamodb = session.resource('dynamodb')
    # select table from DynamoDB
    table = dynamodb.Table(config['table_users'])
    response = table.scan()
    count_people = len(response['Items'])
    emdTrainX, trainy = list(), list()
    for people in response['Items']:
        print(people['userid'])
        # Embeddings are stored as raw float32 bytes: countPhoto rows of 128.
        encoded = people['embeddingFace'].value
        encode_array = np.frombuffer(encoded, dtype='float32')
        i = int(people['countPhoto'])
        embds_arr = encode_array.reshape(i, 128)
        for face in embds_arr:
            emdTrainX.append(face)
        # One label per stored photo of this user.
        for x in range(i):
            trainy.append(people['userid'])
    return emdTrainX, trainy, count_people
# NOTE(review): yaml.load without an explicit Loader is deprecated and unsafe
# on untrusted input; prefer yaml.safe_load for a plain config file.
config = yaml.load(open('config.yaml'))
# Init AWSIoTMQTTClient
myAWSIoTMQTTClient = AWSIoTMQTTClient(config['client_id'])
myAWSIoTMQTTClient.configureEndpoint(config['endpoint'], config['port'])
myAWSIoTMQTTClient.configureCredentials(config['root_ca_path'], config['private_key_path'], config['cert_path'])
# Connect and subscribe to AWS IoT
myAWSIoTMQTTClient.connect()
# Build the training set from DynamoDB and fit an SVM over the embeddings.
emdTrainX, trainy, count_people = get_people()
emdTrainX = np.asarray(emdTrainX)
trainy = np.asarray(trainy)
print(emdTrainX.shape, trainy.shape)
# normalize input vectors
in_encoder = Normalizer()
emdTrainX_norm = in_encoder.transform(emdTrainX)
out_encoder = LabelEncoder()
out_encoder.fit(trainy)
trainy_enc = out_encoder.transform(trainy)
# fit model
model = SVC(kernel='linear', probability=True)
model.fit(emdTrainX_norm, trainy_enc)
from inception_resnet_v1 import *
facenet_model = InceptionResNetV1()
print("model built")
facenet_model.load_weights('facenet_weights.h5')
print("weights loaded")
cap = cv2.VideoCapture(0) #webcam
while(True):
    ret, img = cap.read()
    # NOTE(review): constructing MTCNN() every frame is very slow; hoist it
    # above the loop unless per-frame construction is intentional.
    detector = MTCNN()
    # detect faces in the image
    results = detector.detect_faces(img)
    print('results')
    #print(results)
    for i in range(len(results)):
        x, y, w, h = results[i]['box']
        if w > 130: #discard small detected faces
            cv2.rectangle(img, (x,y), (x+w,y+h), (67, 67, 67), 1) #draw rectangle to main image
            detected_face = img[int(y):int(y+h), int(x):int(x+w)] #crop detected face
            detected_face = cv2.resize(detected_face, (160, 160)) #resize to 224x224
            if detected_face is not None:
                random_face_emd = get_embedding(facenet_model, detected_face)
                # prediction for the face
                samples = np.expand_dims(random_face_emd, axis=0)
                yhat_class = model.predict(samples)
                yhat_prob = model.predict_proba(samples)
                # get name
                class_index = yhat_class[0]
                class_probability = yhat_prob[0,class_index] * 100
                predict_names = out_encoder.inverse_transform(yhat_class)
                label_name = 'unknown'
                # NOTE(review): this inner loop variable `i` shadows the outer
                # face-index loop variable of the same name.
                labels = []
                for i in range(count_people):
                    labels.append(i)
                all_names = out_encoder.inverse_transform(labels)
                index_max = np.argmax(yhat_prob[0])
                label_name = all_names[index_max]
                print('Predicted: \n%s \n%s' % (all_names, yhat_prob[0]))
                # NOTE(review): this compares the *class index* with the
                # threshold, not a probability; likely should be
                # yhat_prob[0][index_max] > config['threshold'] — confirm.
                if index_max > config['threshold']:
                    cv2.putText(img, label_name, (int(x+w+15), int(y-64)), cv2.FONT_HERSHEY_SIMPLEX, 1, (67,67,67), 2)
                    #connect face and text
                    cv2.line(img,(x+w, y-64),(x+w-25, y-64),(67,67,67),1)
                    cv2.line(img,(int(x+w/2),y),(x+w-25,y-64),(67,67,67),1)
                    # open the door
                    message = {}
                    message['pin'] = config['door_pin']
                    message['command'] = 'open'
                    message['requester'] = label_name
                    messageJson = json.dumps(message)
                    myAWSIoTMQTTClient.publish(config['topic'], messageJson, 1)
    cv2.imshow('img',img)
    if cv2.waitKey(1) & 0xFF == ord('q'): #press q to quit
        break
#kill open cv things
cap.release()
cv2.destroyAllWindows()
|
# The classes below demonstrate the concept of inheritance.
class User:
    """Base account type holding the user's email; parent of Wizard and Archer."""

    def __init__(self, email):
        self.email = email

    def sign_in(self):
        # Fixed typo in the user-facing message: "Your are" -> "You are".
        print("You are logged in")
class Wizard(User):
    """A User that attacks with a magical power level."""

    def __init__(self, name, power, email):
        # Use super() for consistency with Archer (was User.__init__(self, email)).
        super().__init__(email)
        self.name = name
        self.power = power

    def attack(self):
        print(f"Attacking with the power of {self.power}")
class Archer(User):
    """A User that attacks with a stock of arrows."""

    def __init__(self, name, num_arrows, email):
        super().__init__(email)
        self.num_arrows = num_arrows
        self.name = name

    def attack(self):
        # Announce an attack and the remaining arrow count.
        print(f"Attacking with arrows: Arrows left - {self.num_arrows} arrows")
# Demo: exercise both subclasses through the shared User interface.
wizard1 = Wizard('Merlin', 50, 'merlin@gmail.com')
archer1 = Archer('Arrow', 500, 'arrow@gmail.com')
for player in (wizard1, archer1):
    print(player.email)
for player in (wizard1, archer1):
    player.attack()
    player.sign_in()
|
import functools
def f(s):
    """Return the longest palindromic substring of *s* (brute force).

    For each end index, the longest palindrome ending there starts at the
    smallest possible start index, so the inner scan can stop at the first
    palindromic slice it finds.  Returns "" for an empty string; among
    equal-length answers the earliest-ending one wins (same tie-break as
    the original implementation).

    Fixes over the original: the per-call ``functools.lru_cache`` was useless
    (each inner index was evaluated exactly once), the ``nonlocal s`` was
    unnecessary (read-only closure), and the inner loop kept scanning after
    the longest palindrome for an end index had already been found.
    """
    longest = ""
    for end in range(len(s)):
        # Longest palindrome ending at `end`; a single char is the fallback.
        candidate = s[end]
        for start in range(end):
            piece = s[start:end + 1]
            if piece == piece[::-1]:
                # Smaller start => longer slice, so the first hit is maximal.
                candidate = piece
                break
        if len(candidate) > len(longest):
            longest = candidate
    return longest
class Solution:
    """Longest palindromic substring via an LCS-style grid of s vs reversed(s)."""

    def longestPalindrome(self, s: str) -> str:
        """Return the longest palindromic substring of *s*.

        Marks dp[i][j] = 1 wherever s[i] equals the j-th char of the reversed
        string, then walks each diagonal run of 1s.  A diagonal run is a
        common substring of s and its reverse, which is NOT automatically a
        palindrome, so every run is re-checked with ``tempS[::-1] == tempS``
        before it may become the answer.
        """
        # Trivial cases: 0/1-char input, or the whole string is a palindrome.
        if len(s) <= 1: return s
        revs = "".join(reversed(s))
        if s == revs: return s
        # dp[i][j] == 1 where s[i] matches revs[j].
        dp = [[0]*len(s) for j in range(len(s))]
        for i in range(len(s)):
            for j in range(len(s)):
                if s[i] == revs[j]:
                    dp[i][j] = 1
        m = ""  # best palindrome found so far
        for i in range(len(s)):
            for j in range(len(s)):
                if dp[i][j] == 1:
                    tempS = []  # characters of the diagonal run starting at (i, j)
                    tempi, tempj = i,j
                    cnt = 0  # run length; not read after the loop
                    while tempi < len(s) and tempj < len(s) and dp[tempi][tempj] == 1:
                        tempS.append(s[tempi])
                        # we don't want to investigate this diagonal again;
                        # the real palindrome will be somewhere else
                        dp[tempi][tempj] = 0
                        cnt += 1
                        tempi += 1
                        tempj += 1
                    # Filter out common substrings that are not palindromes.
                    if tempS[::-1] == tempS:
                        if len(tempS) > len(m): m = "".join(tempS)
        return m
# Quick manual comparison of both implementations (inspect in a REPL/debugger).
ans = f("babadada")
ans2 = Solution().longestPalindrome("babadada")
x = 2  # NOTE(review): leftover debug value; appears unused
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bolinha import *
def main(inic):
    """Start the animation loop (all names come from ``from bolinha import *``).

    inic: initial state passed to big_bang -- presumably the Posicao
    constructor/value; TODO confirm against the bolinha module.
    """
    big_bang(inic, frequencia=FREQUENCIA,
             a_cada_tick=mover,  # tick handler: Posicao -> Posicao
             desenhar=desenha    # renderer: Posicao -> image
             )

main(Posicao)
from random import randint
from time import sleep
from operator import itemgetter

# Each player rolls one six-sided die.  random.randint's bounds are BOTH
# inclusive, so a 1-6 die is randint(1, 6); the original randint(1, 7)
# could roll an impossible 7.  (The dead `ranking = list()` that was
# immediately overwritten has also been removed.)
dados = {'Jogador 1': randint(1, 6),
         'Jogador 2': randint(1, 6),
         'Jogador 3': randint(1, 6),
         'Jogador 4': randint(1, 6)}
print('Valores sorteados: ')
for k, v in dados.items():
    print(f' O {k} tirou {v}')
    sleep(1)  # dramatic pause between announcements
# Rank the players by their roll, highest first.
ranking = sorted(dados.items(), key=itemgetter(1), reverse=True)
print('Ranking')
for k, v in enumerate(ranking):
    print(f' {k+1}° lugar: {v[0]} com {v[1]}')
    sleep(1)
mapbox_access_token = 'pk.eyJ1IjoidGFubW95c3IiLCJhIjoiY2s5aDc2cjZoMHMzMTNscGhtcTA0MHZkOSJ9.ElGEgw3N2aEk1hFLjB7vng' |
import turtle

# Draw an n-sided polygon, cycling the pen colour per side.
# Fixes: the colour list was indexed directly with the side number, raising
# IndexError for any n > 4 -- it now wraps with modulo.  The list was also
# named `list`, shadowing the builtin; renamed to `colors`.
n = int(input("number of sides:"))
colors = ["red", "green", "blue", "yellow"]
paper = turtle.Screen()
pen = turtle.Turtle()
for i in range(n):
    pen.color(colors[i % len(colors)])
    pen.fillcolor("black")
    pen.forward(50)
    pen.right(360 / n)  # exterior angle of a regular n-gon
# Generated by Django 2.0.2 on 2018-06-25 02:08
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Initial migration creating the Inquiry model."""

    initial = True

    dependencies = [
        ('core', '0001_initial'),
        ('custom_auth', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Inquiry',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('ip_address', models.GenericIPAddressField(null=True)),
                ('property_type', models.CharField(choices=[('sf', 'Single-Family Home'), ('mf', 'Muli-Family Home'), ('co', 'Condo, Townhouse or Apartment'), ('va', 'Vacation or Rental Property')], max_length=2)),
                ('rent_type', models.CharField(choices=[('no', 'No'), ('under_14', 'Yes, 14 days or less per year'), ('over_14', 'Yes, more than 14 days per year')], max_length=8)),
                ('primary_residence', models.BooleanField()),
                ('bedrooms', models.PositiveSmallIntegerField(validators=[django.core.validators.MaxValueValidator(50)])),
                ('bathrooms', models.FloatField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(50)])),
                # Fixed: the validator list was passed as verbose_name= instead
                # of validators=, so sqft was never validated (and its verbose
                # name was a list).  Validators do not affect the schema, so
                # this edit is safe in a historical migration.
                ('sqft', models.PositiveSmallIntegerField(validators=[django.core.validators.MinValueValidator(1)])),
                ('year_bought', models.PositiveSmallIntegerField(validators=[django.core.validators.MinValueValidator(1900), django.core.validators.MaxValueValidator(2018)])),
                ('duration_years', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('duration_forever', models.BooleanField(default=False)),
                ('home_value', models.PositiveIntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(99999999)])),
                ('mortgage_payment', models.PositiveIntegerField(validators=[django.core.validators.MaxValueValidator(99999)])),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=30)),
                ('birth_date', models.DateField()),
                ('gender', models.CharField(choices=[('m', 'Male'), ('f', 'Female'), ('o', 'Other')], default=None, max_length=1)),
                ('referrer_name', models.CharField(blank=True, max_length=60)),
                ('credit_score', models.CharField(choices=[('excellent', 'Excellent (750+)'), ('good', 'Quite Good (700-749)'), ('fair', 'Alright (650-699)'), ('poor', 'Not Great (550-649)'), ('bad', 'In Trouble (300-549)'), ('i_dont_know', "I Don't Know")], max_length=15)),
                ('household_income_last_year', models.PositiveIntegerField(validators=[django.core.validators.MaxValueValidator(99999999)])),
                ('household_income_next_year', models.PositiveIntegerField(validators=[django.core.validators.MaxValueValidator(99999999)])),
                ('household_debt', models.PositiveIntegerField(validators=[django.core.validators.MaxValueValidator(99999999)])),
                ('investment_size', models.PositiveIntegerField(validators=[django.core.validators.MinValueValidator(1000), django.core.validators.MaxValueValidator(1000000)])),
                ('use_case_debts', models.BooleanField(verbose_name='Needs help with debts')),
                ('use_case_diversify', models.BooleanField(verbose_name='Wants to diversify')),
                ('use_case_renovate', models.BooleanField(verbose_name='Wants to renovate')),
                ('use_case_education', models.BooleanField(verbose_name='Needs help paying for education')),
                ('use_case_buy_home', models.BooleanField(verbose_name='Wants to buy another home')),
                # NOTE(review): "buisness" typo kept as-is; fix it in the model
                # (and a new migration), not in this historical record.
                ('use_case_business', models.BooleanField(verbose_name='Wants to fund a buisness')),
                ('use_case_other', models.CharField(blank=True, max_length=200)),
                ('notes', models.CharField(blank=True, max_length=1000)),
                ('address', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.Address')),
                ('client', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='inquiry', to='custom_auth.Client')),
            ],
            options={
                'verbose_name_plural': 'inquiries',
            },
        ),
    ]
|
"""Test GraphQL."""
import requests
from pprint import pprint
GRAPHQL_URL = "https://data.rcsb.org/graphql"
GRAPHQL_QUERY = (
"{{"
" entries(entry_ids: {pdb_ids}) {{"
" rcsb_id"
" struct {{ title pdbx_descriptor }}"
" exptl {{ method }}"
" refine {{ ls_d_res_high }}"
" polymer_entities {{"
" rcsb_id"
" entity_poly {{"
" pdbx_strand_id"
" rcsb_entity_polymer_type"
" pdbx_seq_one_letter_code_can"
" }}"
" uniprots {{ rcsb_id }}"
" }}"
" }}"
"}}"
)
def main():
    """Main driver: fetch annotations for a fixed set of PDB IDs and print them.

    Each ``pop`` below both reads a field and removes it from the response
    dict, so whatever remains in ``result`` at the end is the unprocessed
    leftover (printed for debugging).
    """
    # query = '{ entries(entry_ids: ["4HHB", "12CA", "3PQR"]) { exptl { method } } }'
    query = GRAPHQL_QUERY.format(
        pdb_ids=["1FAS", "7BLO", "1MAH", "2AAI", "2OS6"]
    )
    # str(list) renders with single quotes; GraphQL needs double quotes.
    query = query.replace("'", '"')
    params = {"query": query}
    req = requests.get(GRAPHQL_URL, params=params)
    results = req.json()
    for result in results["data"]["entries"]:
        pdb_id = result.pop("rcsb_id")
        struct = result.pop("struct")
        descriptor = struct.pop("pdbx_descriptor")
        title = struct.pop("title")
        experiments = result.pop("exptl")
        if len(experiments) > 1:
            print("Only using first experiment for annotation.")
        experiment = experiments[0]
        method = experiment.pop("method")
        # "refine" may be absent (e.g. non-diffraction structures) -- TODO
        # confirm it is null rather than an empty list in that case.
        refinements = result.pop("refine")
        if refinements is not None:
            if len(refinements) > 1:
                print("Only using first refinement for annotation.")
            refinement = refinements[0]
            resolution = refinement.pop("ls_d_res_high")
        else:
            resolution = None
        # One row per polymer entity (chain group) of the entry.
        for polymer in result.pop("polymer_entities"):
            chain_id = polymer.pop("rcsb_id")
            entity = polymer.pop("entity_poly")
            strand_ids = entity.pop("pdbx_strand_id")
            strand_type = entity.pop("rcsb_entity_polymer_type")
            sequence = entity.pop("pdbx_seq_one_letter_code_can")
            uniprots = polymer.pop("uniprots")
            if uniprots is not None:
                if len(uniprots) > 1:
                    print("Only using first UniProt ID for annotation")
                uniprot = uniprots[0].pop("rcsb_id")
            else:
                uniprot = None
            row = {
                "PDB ID": pdb_id,
                "PDB method": method,
                "PDB resolution (A)": resolution,
                "PDB description": descriptor,
                "PDB title": title,
                "PDB chain ID": chain_id,
                "PDB strand ID(s)": strand_ids,
                "PDB strand type": strand_type,
                "PDB strand sequence": sequence,
                "PDB strand UniProt": uniprot,
            }
            pprint(row)
        # Anything not popped above is leftover; printed for debugging.
        print(result)

if __name__ == "__main__":
    main()
|
# Python program converting Celsius <-> Fahrenheit.
# Fix: in Python 3, input() returns a str, so `celcius * 1.8` raised
# TypeError -- user input is now converted with float().  The arithmetic is
# extracted into pure helpers, and the prompts run only when executed as a
# script.

def celsius_to_fahrenheit(celsius):
    """Return *celsius* (degrees C) converted to degrees Fahrenheit."""
    return (celsius * 1.8) + 32


def fahrenheit_to_celsius(fahrenheit):
    """Return *fahrenheit* (degrees F) converted to degrees Celsius."""
    return (fahrenheit - 32) / 1.8


if __name__ == "__main__":
    celcius = float(input("Type Celcius degree: "))
    print("%0.1f is equal to %0.1f degree Fahrenheit " % (celcius, celsius_to_fahrenheit(celcius)))
    fah = float(input("Type Fahrenheit degree: "))
    print("%0.1f is equal to %0.1f degree Celcius" % (fah, fahrenheit_to_celsius(fah)))
|
import os
import pandas as pd
# Input/output locations.  Each comment category pairs a reference file
# (ground-truth comments), a hypothesis file (model output) and the matching
# code file; the directory name records which model produced the hypothesis
# (deepcom, nngen or code2seq).
DATA_DIR = "/home/qiuyuanchen/Onedrive/my_parser/src/main/resources/merge_data"
DEV_DIR = "/home/qiuyuanchen/Onedrive/my_parser/src/main/resources/dev"
# input
# what
what_reference = os.path.join(DATA_DIR, "what(deepcom)", "test.token.nl")
what_hypothesis = os.path.join(DATA_DIR, "what(deepcom)", "translate")
what_code = os.path.join(DATA_DIR, "what(deepcom)", "test.token.code")
# why
why_reference = os.path.join(DATA_DIR, "why(nngen)", "test.token.nl")
why_hypothesis = os.path.join(DATA_DIR, "why(nngen)", "why.nl")
why_code = os.path.join(DATA_DIR, "why(nngen)", "why.code")
# how_to_use
how_to_use_reference = os.path.join(
    DATA_DIR, "how-to-use(code2seq)", "ref.txt")
how_to_use_hypothesis = os.path.join(
    DATA_DIR, "how-to-use(code2seq)", "pred.txt")
how_to_use_code = os.path.join(DATA_DIR, "how-to-use(code2seq)", "code.txt")
# how_it_is_done
how_it_is_done_reference = os.path.join(
    DATA_DIR, "how-it-is-done(nngen)", "test.token.nl")
how_it_is_done_hypothesis = os.path.join(
    DATA_DIR, "how-it-is-done(nngen)", "how-it-is-done.nl")
how_it_is_done_code = os.path.join(
    DATA_DIR, "how-it-is-done(nngen)", "how-it-is-done.code")
# property
property_reference = os.path.join(
    DATA_DIR, "property(deepcom)", "test.token.nl")
property_hypothesis = os.path.join(DATA_DIR, "property(deepcom)", "translate")
property_code = os.path.join(DATA_DIR, "property(deepcom)", "test.token.code")
# others
others_reference = os.path.join(DATA_DIR, "others(nngen)", "test.token.nl")
others_hypothesis = os.path.join(DATA_DIR, "others(nngen)", "others.nl")
others_code = os.path.join(DATA_DIR, "others(nngen)", "others.code")
# output: merged files, one sample per line, same category order in each
merge_reference = os.path.join(DATA_DIR, "merge_reference.txt")
merge_hypothesis = os.path.join(DATA_DIR, "merge_hypothesis.txt")
merge_code = os.path.join(DATA_DIR, "merge_code.txt")
merge_category = os.path.join(DATA_DIR, "merge_category.txt")
def merge():
with open(merge_reference, "w") as f:
total_reference = 0
with open(what_reference) as data:
for line in data.readlines():
f.write(line)
total_reference += 1
with open(why_reference) as data:
for line in data.readlines():
f.write(line)
total_reference += 1
with open(how_to_use_reference) as data:
for line in data.readlines():
f.write(line)
total_reference += 1
with open(how_it_is_done_reference) as data:
for line in data.readlines():
f.write(line)
total_reference += 1
with open(property_reference) as data:
for line in data.readlines():
f.write(line)
total_reference += 1
with open(others_reference) as data:
for line in data.readlines():
f.write(line)
total_reference += 1
# assert total_reference == 5000, "wrong number"+str(total_reference)
with open(merge_hypothesis, "w") as f:
total_hypothesis = 0
with open(what_hypothesis) as data:
for line in data.readlines():
f.write(line)
total_hypothesis += 1
with open(why_hypothesis) as data:
for line in data.readlines():
f.write(line)
total_hypothesis += 1
with open(how_to_use_hypothesis) as data:
for line in data.readlines():
f.write(line)
total_hypothesis += 1
with open(how_it_is_done_hypothesis) as data:
for line in data.readlines():
f.write(line)
total_hypothesis += 1
with open(property_hypothesis) as data:
for line in data.readlines():
f.write(line)
total_hypothesis += 1
with open(others_hypothesis) as data:
for line in data.readlines():
f.write(line)
total_hypothesis += 1
# assert total_hypothesis == 5000, "wrong number"+str(total_hypothesis)
with open(merge_code, "w") as f:
total_code = 0
with open(what_code) as data:
for line in data.readlines():
f.write(line)
total_code += 1
with open(why_code) as data:
for line in data.readlines():
f.write(line)
total_code += 1
with open(how_to_use_code) as data:
for line in data.readlines():
f.write(line)
total_code += 1
with open(how_it_is_done_code) as data:
for line in data.readlines():
f.write(line)
total_code += 1
with open(property_code) as data:
for line in data.readlines():
f.write(line)
total_code += 1
with open(others_code) as data:
for line in data.readlines():
f.write(line)
total_code += 1
assert total_code == total_reference
print(total_code)
print("代码补齐搞定")
def output_category():
    """Write one category label per merged sample to merge_category.

    The (file, label) order below must match the concatenation order used by
    merge(), so label N lines up with line N of the merged files.  The
    original repeated the same read/extend stanza six times; it is now a
    single loop over labelled sources.
    """
    labelled_sources = [
        (what_reference, "what"),
        (why_reference, "why"),
        (how_to_use_reference, "how_to_use"),
        (how_it_is_done_reference, "how_it_is_done"),
        (property_reference, "property"),
        (others_reference, "others"),
    ]
    categories = []
    for path, category in labelled_sources:
        with open(path) as data:
            categories += [category] * len(data.readlines())
    with open(merge_category, "w") as f:
        for category in categories:
            f.write(category)
            f.write("\n")
    # Sanity-check the file that was just written.
    if os.path.exists(merge_category):
        with open(merge_category) as f:
            length = len(f.readlines())
        assert length == len(categories), str(
            length) + "!=" + str(len(categories))
        # The assert guarantees equality, so the original redundant
        # `if length == len(categories)` check is dropped.
        print("成功,一共{}条数据".format(length))
        print("类别分布")
        count = pd.Series(categories).value_counts()
        print(count)
def evaluate():
    """Shell out to nlg-eval on the merged reference/hypothesis files."""
    # Best scores: the slow skip-thoughts and GloVe metrics are skipped.
    template = "nlg-eval --references {} --hypothesis {} --no-skipthoughts --no-glove"
    os.system(template.format(merge_reference, merge_hypothesis))
def code2seq_data():
    """Rebuild the how-to-use code file for code2seq.

    code2seq's preprocessing dropped/normalised some samples, so each
    extracted comment is matched back to its line number in the full labelled
    set to recover the corresponding code line.
    """
    with open(how_to_use_reference) as f:
        data = list(f.readlines())
    code_file = os.path.join(DATA_DIR, "how-to-use(code2seq)", "test.source")
    nl_file = os.path.join(DATA_DIR, "how-to-use(code2seq)", "test.token.nl")
    with open(code_file) as f:
        code = list(f.readlines())
    with open(nl_file) as f:
        nl = list(f.readlines())
    assert len(code) == len(nl)
    print("Code2Seq抽取数量:")
    print(len(data))
    print("实际标记数量:")
    print(len(nl))
    print("重复的")
    print(len(nl) - len(list(set(nl))))
    # Truncate each comment at its first tokenised sentence end (" .", " ?",
    # " !"), mirroring the truncation the extraction step applied.
    nl = [s.split(" .")[0] + "\n" if "." in s else s for s in nl]
    nl = [s.split(" ?")[0] + "\n" if "?" in s else s for s in nl]
    nl = [s.split(" !")[0] + "\n" if "!" in s else s for s in nl]
    index = []
    # NOTE(review): list.index returns the FIRST match, so duplicated comments
    # (counted above) can map several samples to the same code line.
    for comment in data:
        if comment in nl:
            line_num = nl.index(comment)
            index.append(line_num)
    print(len(index))
    with open(how_to_use_code, "w") as f:
        for num in index:
            f.write(code[num])
    print("补全成功")
def split_dev(split_index=3000):
    """Split the merged code/NL files into dev train/test sets.

    Bug fixed: the original assigned ``train_nl``/``test_nl`` from ``code``
    instead of ``nl``, so the NL outputs duplicated the code files.  The
    split point is now a parameter (default 3000 preserves the original
    behaviour).  The author's note says this is no longer needed.

    :param split_index: number of leading samples used for training
    """
    with open(merge_code) as f:
        code = list(f.readlines())
    with open(merge_reference) as f:
        nl = list(f.readlines())
    print(len(code))
    # output
    train_code = code[:split_index]
    train_nl = nl[:split_index]
    test_code = code[split_index:]
    test_nl = nl[split_index:]
    dev_train_code = os.path.join(DEV_DIR, "dev_train_code.txt")
    dev_train_nl = os.path.join(DEV_DIR, "dev_train_nl.txt")
    dev_test_code = os.path.join(DEV_DIR, "dev_test_code.txt")
    dev_test_nl = os.path.join(DEV_DIR, "dev_test_nl.txt")

    def write(data, path):
        # Small helper so the four writes stay symmetric.
        with open(path, "w") as f:
            for line in data:
                f.write(line)

    write(train_code, dev_train_code)
    write(train_nl, dev_train_nl)
    write(test_code, dev_test_code)
    write(test_nl, dev_test_nl)
    print("finished")
def label_5000():
    """Dump the ' category' column of dev/test.csv to label_5000.txt, one per line."""
    source_csv = os.path.join(DEV_DIR, "test.csv")
    target_txt = os.path.join(DEV_DIR, "label_5000.txt")
    frame = pd.read_csv(source_csv, index_col=0)
    print(frame.columns)
    # NOTE: the leading space in ' category' matches the CSV header as-is.
    labels = frame[' category']
    with open(target_txt, "w") as out:
        for label in labels:
            out.write(label)
            out.write("\n")
def main():
    """Driver kept for manual experimentation; steps are toggled by (un)commenting."""
    # merge()
    # evaluate()
    # split_dev()
    output_category()

if __name__ == "__main__":
    # main()
    # code2seq_data()
    label_5000()
|
"""
Parent Class for Model and Parallel_Link_Model objects.
A class that defines the network being modeled and that contains all
modeled objects in the network such as Nodes, Interfaces, Circuits,
and Demands.
"""
from .demand import Demand
from .exceptions import ModelException
from .node import Node
from .rsvp import RSVP_LSP
class MasterModel(object):
"""
Parent class for Model and Parallel_Link_Model subclasses; holds common defs
"""
def __init__(self, interface_objects=set(), node_objects=set(),
demand_objects=set(), rsvp_lsp_objects=set()):
self.interface_objects = interface_objects
self.node_objects = node_objects
self.demand_objects = demand_objects
self.circuit_objects = set()
self.rsvp_lsp_objects = rsvp_lsp_objects
self.srlg_objects = set()
self._parallel_lsp_groups = {}
def simulation_diagnostics(self): # TODO - make unit test for this
"""
Analyzes simulation results and looks for the following:
- Number of routed LSPs carrying Demands
- Number of routed LSPs with no Demands
- Number of Demands riding LSPs
- Number of Demands not riding LSPs
- Number of unrouted LSPs
- Number of unrouted Demands
:return: dict with the above as keys and the quantity of each for values and generators for
routed LSPs with no Demands, routed LSPs carrying Demands, Demands riding LSPs
This is not cached currently and my be expensive to (re)run on a very large model. Current best
practice is to assign the output of this to a variable:
ex: sim_diag1 = model1.simulation_diagnostics()
"""
simulation_data = {'Number of routed LSPs carrying Demands': 'TBD',
'Number of routed LSPs with no Demands': 'TBD',
'Number of Demands riding LSPs': 'TBD',
'Number of Demands not riding LSPs': 'TBD',
'Number of unrouted LSPs': 'TBD',
'Number of unrouted Demands': 'TBD',
'routed LSPs with no demands generator': 'TBD',
'routed LSPs with demands generator': 'TBD',
'demands riding LSPs generator': 'TBD'}
# Find LSPs with and without demands
lsps_routed_no_demands = [lsp for lsp in self.rsvp_lsp_objects if lsp.path != 'Unrouted' and
lsp.demands_on_lsp(self) == []]
lsps_routed_with_demands = [lsp for lsp in self.rsvp_lsp_objects if lsp.path != 'Unrouted' and
lsp.demands_on_lsp(self) != []]
# Find demands riding LSPs
dmds_riding_lsps = set()
# Find unrouted LSPs
for dmd in (dmd for dmd in self.demand_objects):
for object in dmd.path:
if isinstance(object, RSVP_LSP):
dmds_riding_lsps.add(dmd)
unrouted_lsps = [lsp for lsp in self.rsvp_lsp_objects if lsp.path == 'Unrouted']
# Update the quantities in simulation_data
simulation_data['Number of routed LSPs carrying Demands'] = len(lsps_routed_with_demands)
simulation_data['Number of routed LSPs with no Demands'] = len(lsps_routed_no_demands)
simulation_data['Number of Demands riding LSPs'] = len(dmds_riding_lsps)
simulation_data['Number of Demands not riding LSPs'] = len(self.demand_objects) - len(dmds_riding_lsps)
simulation_data['Number of unrouted LSPs'] = len(unrouted_lsps)
simulation_data['Number of unrouted Demands'] = len(self.get_unrouted_demand_objects())
# Create generators to be returned
dmds_riding_lsps_gen = (dmd for dmd in dmds_riding_lsps)
lsps_routed_no_demands_gen = (lsp for lsp in lsps_routed_no_demands)
lsps_routed_with_demands_gen = (lsp for lsp in lsps_routed_with_demands)
# Update generators in simulation_data
simulation_data['routed LSPs with no demands generator'] = lsps_routed_no_demands_gen
simulation_data['routed LSPs with demands generator'] = lsps_routed_with_demands_gen
simulation_data['demands riding LSPs generator'] = dmds_riding_lsps_gen
return simulation_data
def _make_int_info_dict(self):
"""
Makes dict of information for each interface. Most of this information
is derived from the simulation.
Returns dict object. Keys are the _key for each Interface; values are
dicts for each interface_ key that hold information about the Interface.
:return: int_info
"""
keys = (interface._key for interface in self.interface_objects)
int_info = {key: {'lsps': [], 'reserved_bandwidth': 0} for key in keys}
for lsp in (lsp for lsp in self.rsvp_lsp_objects if 'Unrouted' not in lsp.path):
for interface in lsp.path['interfaces']:
int_info[interface._key]['lsps'].append(lsp)
int_info[interface._key]['reserved_bandwidth'] += round(lsp.reserved_bandwidth, 1)
return int_info
    def _validate_circuit_interface_capacity(self, circuits_with_mismatched_interface_capacity, ckt):
        """
        Checks ckt's component Interfaces for matching capacity

        :param circuits_with_mismatched_interface_capacity: list that will store
        Circuits that have mismatched Interface capacity
        :param ckt: Circuit object to check
        :return: None (appends to the list passed in)
        """
        int1 = ckt.get_circuit_interfaces(self)[0]
        int2 = ckt.get_circuit_interfaces(self)[1]
        # A circuit fails as a unit: if the two sides disagree on failed
        # state, force both sides to failed.
        if int1.failed != int2.failed:
            int1.failed = True  # pragma: no cover
            int2.failed = True  # pragma: no cover
        # Make sure the interface capacities in the circuit match
        if int1.capacity != int2.capacity:
            circuits_with_mismatched_interface_capacity.append(ckt)
def _reserved_bw_error_checks(self, int_info, int_res_bw_sum_error, int_res_bw_too_high, interface):
"""
Checks interface for the following:
- Is reserved_bandwidth > capacity?
- Does reserved_bandwidth for interface match the sum of the
reserved_bandwidth for the LSPs egressing interface?
:param int_info: dict that holds int_res_bw_sum_error and
int_res_bw_too_high sets. Has the following format for a given
entry:
int_info[interface._key] = {'lsps': [], 'reserved_bandwidth': 0}
Where 'lsps' is a list of RSVP LSPs egressing the Interface and
'reserved_bandwidth' is the reserved_bandwidth value generated
by the simulation
:param int_res_bw_sum_error: set that will hold Interface objects
whose reserved_bandwidth does not match the sum of the
reserved_bandwidth for the LSPs egressing interface
:param int_res_bw_too_high: set that will hold Interface objects
whose reserved_bandwidth is > the capacity of the Interface
:param interface: Interface object to inspect
:return: None
"""
if interface.reserved_bandwidth > interface.capacity:
int_res_bw_too_high.add(interface)
if round(interface.reserved_bandwidth, 1) != round(int_info[interface._key][
'reserved_bandwidth'], 1): # pragma: no cover # noqa
int_res_bw_sum_error.add((interface, interface.reserved_bandwidth, tuple(interface.lsps(self))))
    def _demand_traffic_per_int(self, demand):  # common between model and parallel_link_model
        """
        Given a Demand object, return the (key, value) pairs for how much traffic each
        Interface gets from the routing of the traffic load over Model Interfaces.

        : demand: Demand object
        : return: dict of (Interface: <traffic from demand>) k, v pairs

        Example: an Interface from node G to node D might carry 2.5 units of
        this demand's traffic while the Interface from A to B carries 10.0,
        depending on how the ECMP splits below divide the load.
        """
        shortest_path_int_list = []
        for path in demand.path:
            shortest_path_int_list += path
        # Unique interfaces across all shortest paths
        shortest_path_int_set = set(shortest_path_int_list)
        # For each node on the shortest paths, collect one entry per parallel
        # egress Interface on that node; the list *length* is the ECMP fan-out
        # at that node.
        unique_next_hops = {}
        # Iterate through all the interfaces
        for interface in shortest_path_int_set:
            # For a given Interface's node_object, determine how many
            # Interfaces on that Node are facing next hops
            unique_next_hops[interface.node_object.name] = [intf.node_object.name for intf in shortest_path_int_set
                                                            if intf.node_object.name == interface.node_object.name]
        # shortest_path_info holds, per path: the ordered interface list
        # ('interfaces'), the cumulative ECMP split at each hop ('splits'),
        # and the share of the demand's traffic on that path ('path_traffic').
        # Example (one entry): {'path_0': {'interfaces': [A-to-B_2, B-to-E_2],
        #                                  'splits': {A-to-B_2: 2, B-to-E_2: 6},
        #                                  'path_traffic': 4.0}, ...}
        shortest_path_info = {}
        path_counter = 0
        # Iterate thru each path for the demand
        for path in demand.path:
            # Dict of cumulative splits per interface
            traffic_splits_per_interface = {}
            path_key = 'path_' + str(path_counter)
            shortest_path_info[path_key] = {}
            # Create cumulative path splits for each interface
            total_splits = 1
            for interface in path:
                total_splits = total_splits * len(unique_next_hops[interface.node_object.name])
                traffic_splits_per_interface[interface] = total_splits
            # Find path traffic: the demand's traffic divided by the largest
            # cumulative split along the path
            max_split = max([split for split in traffic_splits_per_interface.values()])
            path_traffic = float(demand.traffic) / float(max_split)
            shortest_path_info[path_key]['interfaces'] = path
            shortest_path_info[path_key]['splits'] = traffic_splits_per_interface
            shortest_path_info[path_key]['path_traffic'] = path_traffic
            path_counter += 1
        # For each path, determine which interfaces it transits and add
        # that path's traffic to the interface
        # Create dict to hold cumulative traffic for each interface for demand
        traff_per_int = dict.fromkeys(shortest_path_int_set, 0)
        for path, info in shortest_path_info.items():
            for interface in info['interfaces']:
                traff_per_int[interface] += info['path_traffic']
        # Round all traffic values to 1 decimal place
        traff_per_int = {interface: round(traffic, 1) for interface, traffic in traff_per_int.items()}
        return traff_per_int
    def _update_interface_utilization(self):  # common between model and parallel_link_model
        """Updates each interface's utilization; returns Model object with
        updated interface utilization.

        Failed interfaces get traffic 'Down'; live interfaces accumulate
        traffic from every routed demand, either via the LSPs the demand
        rides or via hop-by-hop ECMP IGP routing.
        """
        # In the model, if an interface is failed, set the traffic attribute
        # to 'Down', otherwise, initialize the traffic to zero
        for interface_object in self.interface_objects:
            if interface_object.failed:
                interface_object.traffic = 'Down'
            else:
                interface_object.traffic = 0.0
        routed_demand_object_generator = (demand_object for demand_object in self.demand_objects if
                                          'Unrouted' not in demand_object.path)
        # For each demand that is not Unrouted, add its traffic value to each
        # interface object in the path
        for demand_object in routed_demand_object_generator:
            # This model only allows demands to take RSVP LSPs if
            # the demand's source/dest nodes match the LSP's source/dest nodes.
            # Expand each LSP into its interfaces and add that the traffic per LSP
            # to the LSP's path interfaces.
            # Can demand take LSP?
            # NOTE: this generator is rebuilt per demand, so the LSP set is
            # re-scanned for every routed demand.
            routed_lsp_generator = (lsp for lsp in self.rsvp_lsp_objects if 'Unrouted' not in lsp.path)
            lsps_for_demand = []
            for lsp in routed_lsp_generator:
                if (lsp.source_node_object == demand_object.source_node_object and
                        lsp.dest_node_object == demand_object.dest_node_object):
                    lsps_for_demand.append(lsp)
            if lsps_for_demand != []:
                # Find each demands path list, determine the ECMP split across the
                # routed LSPs, and find the traffic per path (LSP)
                num_routed_lsps_for_demand = len(lsps_for_demand)
                traffic_per_demand_path = demand_object.traffic / num_routed_lsps_for_demand
                # Get the interfaces for each LSP in the demand's path
                for lsp in lsps_for_demand:
                    lsp_path_interfaces = lsp.path['interfaces']
                    # Now that all interfaces are known,
                    # update traffic on interfaces demand touches
                    for interface in lsp_path_interfaces:
                        # Get the interface's existing traffic and add the
                        # portion of the demand's traffic
                        interface.traffic += traffic_per_demand_path
            # If demand_object is not taking LSPs, IGP route it, using hop by hop ECMP
            else:
                # demand_traffic_per_int will be dict of
                # (Interface: <traffic from demand>) k, v pairs, e.g. the
                # interface from node G to node D carrying 2.5 units.
                demand_traffic_per_int = self._demand_traffic_per_int(demand_object)
                # Get the interface objects and update them with the traffic
                for interface, traffic_from_demand in demand_traffic_per_int.items():
                    interface.traffic += traffic_from_demand
        return self
def _route_demands(self, demands, input_model): # common between model and parallel_link_model
"""
Routes demands through input_model Model object
:param demands: iterable of Demand objects to be routed
:param input_model: Model object in which to route the demands
:return:
"""
for demand_object in demands:
demand_object = demand_object._add_demand_path(input_model)
return self._update_interface_utilization()
    def _route_lsps(self, input_model):
        """Route the LSPs in the model.

        1. Get LSPs into groups with matching source/dest
        2. Find all the demands that take the LSP group
        3. Route the LSP group, one at a time

        :param input_model: Model object; this may have different parameters than 'self'
        :return: self, with updated LSP paths
        """
        # Find parallel LSP groups
        parallel_lsp_groups = self.parallel_lsp_groups()  # TODO - can this be optimized?

        # Find all the parallel demand groups
        parallel_demand_groups = self.parallel_demand_groups()  # TODO - can this be optimized?

        # Find the amount of bandwidth each LSP in each parallel group will carry
        counter = 1
        for group, lsps in parallel_lsp_groups.items():
            print("Routing {} LSPs in parallel LSP group {}; {}/{}".format(len(lsps), group, counter,
                                                                           len(parallel_lsp_groups)))
            # Traffic each LSP in a parallel LSP group will carry; initialize
            traff_on_each_group_lsp = 0

            try:
                # Get all demands that would ride the parallel LSP group
                dmds_on_lsp_group = parallel_demand_groups[group]

                # ECMP split: aggregate demand traffic divides evenly over the group
                traffic_in_demand_group = sum([dmd.traffic for dmd in dmds_on_lsp_group])
                if traffic_in_demand_group > 0:
                    traff_on_each_group_lsp = traffic_in_demand_group / len(lsps)
            except KeyError:
                # LSPs with no demands will cause a KeyError in parallel_demand_groups[group]
                # since parallel_demand_group will have no entry for 'group'
                pass

            # Now route each LSP in the group (first routing iteration)
            for lsp in lsps:  # TODO - can this be optimized?
                # Route each LSP one at a time
                lsp.route_lsp(input_model, traff_on_each_group_lsp)

            routed_lsps_in_group = [lsp for lsp in lsps if lsp.path != 'Unrouted']

            # ##### Optimize the LSP group reserved bandwidth #####
            # If not all the LSPs in the group can route at the lowest (initial)
            # setup bandwidth, determine which LSPs can signal and for how much traffic
            # NOTE(review): if the KeyError branch above ran, traffic_in_demand_group
            # is unbound here; presumably zero-bandwidth LSPs always route so this
            # branch is never reached in that case -- verify.
            if len(routed_lsps_in_group) != len(lsps) and len(routed_lsps_in_group) > 0:
                self._optimize_parallel_lsp_group_res_bw(input_model, routed_lsps_in_group, traffic_in_demand_group)

            counter += 1

        return self
    def _optimize_parallel_lsp_group_res_bw(self, input_model, routed_lsps_in_group, traffic_in_demand_group):
        """
        If not all LSPs in a parallel LSP group can route, some of the LSPs that did
        route may be able to signal for a new, optimal setup_bandwidth, one based
        on more than one parallel LSP not routing.  This new path would natively
        have enough bandwidth to signal the new LSP setup bandwidth, regardless of
        how much setup bandwidth an LSP was already consuming on a common interface.

        :param input_model: Model object containing the parallel LSP group; typically
            this Model will consist only of non-failed interfaces from self.
        :param routed_lsps_in_group: LSPs in parallel LSP group with a path
        :param traffic_in_demand_group: aggregate traffic for all demands with
            the same source node and destination node as the parallel LSP group
        :return: None; LSP and Interface reserved-bandwidth state is mutated in place
        """
        # This value would be the optimal setup bandwidth for each LSP
        # as it would allow the LSP to reserve bandwidth for the amount
        # of traffic it carries
        setup_bandwidth_optimized = traffic_in_demand_group / len(routed_lsps_in_group)

        # Determine if any of the LSPs can signal for the amount of
        # traffic they would carry (setup_bandwidth_optimized)
        for lsp in routed_lsps_in_group:

            # traffic_in_demand_group will ECMP split over routed_lsps_in_group
            # For each lsp in routed_lsp_group, see if it can signal for
            # a 'setup_bandwidth_optimized' amount of setup_bandwidth

            # Remember the pre-resignal path and reservation so the old
            # reservation can be backed out if the LSP moves
            lsp_path_interfaces_before = lsp.path['interfaces']
            lsp_res_bw_before = lsp.reserved_bandwidth

            # See if LSP can resignal for setup_bandwidth_optimized
            lsp = lsp.find_rsvp_path_w_bw(setup_bandwidth_optimized, input_model)

            # If the LSP reserved_bandwidth changes, restore the old
            # reserved_bandwidth value to the interfaces in its
            # prior path['interfaces'] list
            if lsp_res_bw_before != lsp.reserved_bandwidth:
                for interface in lsp_path_interfaces_before:
                    interface.reserved_bandwidth -= lsp_res_bw_before

                # . . . and then apply (add) the new reserved bandwidth to the
                # new path interfaces
                for interface in lsp.path['interfaces']:
                    interface.reserved_bandwidth += lsp.reserved_bandwidth
def parallel_lsp_groups(self):
"""
Determine LSPs with same source and dest nodes
:return: dict with entries where key is 'source_node_name-dest_node_name' and value is a list of LSPs
with matching source/dest nodes
"""
if self._parallel_lsp_groups == {}:
src_node_names = set([lsp.source_node_object.name for lsp in self.rsvp_lsp_objects])
dest_node_names = set([lsp.dest_node_object.name for lsp in self.rsvp_lsp_objects])
parallel_lsp_groups = {}
for src_node_name in src_node_names:
for dest_node_name in dest_node_names:
key = '{}-{}'.format(src_node_name, dest_node_name)
parallel_lsp_groups[key] = []
for lsp in self.rsvp_lsp_objects:
if (lsp.source_node_object.name == src_node_name and
lsp.dest_node_object.name == dest_node_name):
parallel_lsp_groups[key].append(lsp)
if parallel_lsp_groups[key] == []:
del parallel_lsp_groups[key]
self._parallel_lsp_groups = parallel_lsp_groups
return parallel_lsp_groups
else:
return self._parallel_lsp_groups
def parallel_demand_groups(self):
"""
Determine demands with same source and dest nodes
:return: dict with entries where key is 'source_node_name-dest_node_name' and value is a list of demands
with matching source/dest nodes
"""
src_node_names = set([dmd.source_node_object.name for dmd in self.demand_objects])
dest_node_names = set([dmd.dest_node_object.name for dmd in self.demand_objects])
parallel_demand_groups = {}
for src_node_name in src_node_names:
for dest_node_name in dest_node_names:
key = '{}-{}'.format(src_node_name, dest_node_name)
parallel_demand_groups[key] = []
for dmd in self.demand_objects:
if (dmd.source_node_object.name == src_node_name and
dmd.dest_node_object.name == dest_node_name):
parallel_demand_groups[key].append(dmd)
if parallel_demand_groups[key] == []:
del parallel_demand_groups[key]
return parallel_demand_groups
def _unique_interface_per_node(self):
"""
Checks that the interface names on each node are unique; returns
a message if a duplicate interface name is found on the same node
"""
exception_interfaces = set() # duplicate interfaces
for node in (node for node in self.node_objects):
node_int_list = [interface.name for interface in node.interfaces(self)]
node_int_set = set(node_int_list)
if len(node_int_list) > len(node_int_set):
# Find which ints are duplicate
for item in node_int_set:
node_int_list.remove(item)
# Add the remaining node and interface name to exception_interfaces
for item in node_int_list:
exception_interfaces.add((node, item))
if len(exception_interfaces) > 0:
message = ("Interface names must be unique per node. The following"
" nodes have duplicate interface names {}".format(exception_interfaces))
raise ModelException(message)
else:
return True
@property
def all_interface_circuit_ids(self):
"""
Returns all interface circuit_ids
"""
return set(interface.circuit_id for interface in self.interface_objects)
def add_demand(self, source_node_name, dest_node_name, traffic=0, name='none'):
"""
Adds a traffic load (Demand) from point A to point B in the
model and validates model.
:param source_node_name: name of Demand's source Node
:param dest_node_name: name of Demand's destination Node
:param traffic: amount of traffic (magnitude) of the Demand
:param name: Demand name
:return: A validated Model object with the new demand
"""
source_node_object = self.get_node_object(source_node_name)
dest_node_object = self.get_node_object(dest_node_name)
added_demand = Demand(source_node_object, dest_node_object, traffic, name)
if added_demand._key in set([demand._key for demand in self.demand_objects]):
message = '{} already exists in demand_objects'.format(added_demand)
raise ModelException(message)
self.demand_objects.add(added_demand)
self.validate_model()
    @classmethod
    def _add_lsp_from_data(cls, demands_info_end_index, lines, lsp_set, node_set):  # TODO - same as model
        """
        Parse the LSP table of the data file and add RSVP_LSP objects to lsp_set.

        Each LSP line is whitespace-separated: 'source dest name [setup_bw]'.

        :param demands_info_end_index: index of the last demand line; the LSP
            table is assumed to begin 3 lines later
        :param lines: all lines of the data file
        :param lsp_set: set that receives the created RSVP_LSP objects
        :param node_set: existing Node objects used to resolve node names
        :raises ModelException: when a referenced node name is not in node_set
        """
        lsp_info_begin_index = demands_info_end_index + 3
        lsp_lines = lines[lsp_info_begin_index:]

        for lsp_line in lsp_lines:
            lsp_info = lsp_line.split()
            source = lsp_info[0]
            try:
                source_node = [node for node in node_set if node.name == source][0]
            except IndexError:
                err_msg = "No Node with name {} in Model; {}".format(source, lsp_info)
                raise ModelException(err_msg)
            dest = lsp_info[1]
            try:
                dest_node = [node for node in node_set if node.name == dest][0]
            except IndexError:
                err_msg = "No Node with name {} in Model; {}".format(dest, lsp_info)
                raise ModelException(err_msg)
            name = lsp_info[2]
            try:
                configured_setup_bw = lsp_info[3]
            except IndexError:
                # The setup bandwidth column is optional
                configured_setup_bw = None
            new_lsp = RSVP_LSP(source_node, dest_node, name, configured_setup_bandwidth=configured_setup_bw)

            if new_lsp._key not in set([lsp._key for lsp in lsp_set]):
                lsp_set.add(new_lsp)
            else:
                # Duplicate source/dest/name combination: keep the first one seen
                print("{} already exists in model; disregarding line {}".format(new_lsp, lines.index(lsp_line)))
@classmethod
def _add_demand_from_data(cls, demand_line, demand_set, lines, node_set): # same as Model call
demand_info = demand_line.split()
source = demand_info[0]
try:
source_node = [node for node in node_set if node.name == source][0]
except IndexError:
err_msg = "No Node with name {} in Model; {}".format(source, demand_info)
raise ModelException(err_msg)
dest = demand_info[1]
try:
dest_node = [node for node in node_set if node.name == dest][0]
except IndexError:
err_msg = "No Node with name {} in Model; {}".format(dest, demand_info)
raise ModelException(err_msg)
traffic = int(demand_info[2])
name = demand_info[3]
if name == '':
demand_name = 'none'
else:
demand_name = name
new_demand = Demand(source_node, dest_node, traffic, demand_name)
if new_demand._key not in set([dmd._key for dmd in demand_set]):
demand_set.add(new_demand)
else:
print("{} already exists in model; disregarding line {}".format(new_demand,
lines.index(demand_line)))
    @classmethod
    def _add_node_from_data(cls, demand_set, interface_set, lines, lsp_set, node_line, node_set):
        """
        Parse a node line and add (or update) the Node in node_set.

        Missing or non-integer coordinate tokens default to 0.

        :param node_line: whitespace-separated node data line
        :param node_set: set of Node objects to add to / update
        """
        node_info = node_line.split()
        node_name = node_info[0]
        # NOTE(review): lat is read from token index 2 and lon from index 1,
        # i.e. the line format appears to be 'name lon lat' -- confirm against
        # the documented file format.
        try:
            node_lat = int(node_info[2])
        except (ValueError, IndexError):
            node_lat = 0
        try:
            node_lon = int(node_info[1])
        except (ValueError, IndexError):
            node_lon = 0
        new_node = Node(node_name)
        if new_node.name not in set([node.name for node in node_set]):  # Pick up orphan nodes
            node_set.add(new_node)
            new_node.lat = node_lat
            new_node.lon = node_lon
        else:
            # Node already exists (e.g. created from an interface line); refresh
            # the coordinates on the existing object instead of replacing it
            existing_node = cls(interface_set, node_set, demand_set, lsp_set).get_node_object(node_name=node_name)
            existing_node.lat = node_lat
            existing_node.lon = node_lon
|
from bs4 import BeautifulSoup
import requests
from tkinter import *
# Build the main window and the city-entry widgets
root = Tk()
root.geometry('500x400')

cityNameLabel = Label(root, text='please enter your city name')
cityNameLabel.pack()

cityNameEntry = Entry(root)
cityNameEntry.pack()

# BUGFIX: pack() returns None, so the original
# 'degreeLabel = Label(...).pack()' bound None to degreeLabel.
# Create and pack on separate lines so the variable holds the widget.
degreeLabel = Label(root, text='please choose')
degreeLabel.pack()

# Holds the temperature-unit choice shown by the OptionMenu below
choosed = StringVar()
choosed.set('choose')
def save():
    """Store the entered city and chosen unit in module globals, then process."""
    global city, value
    city = cityNameEntry.get()
    value = choosed.get()
    procces()
# Drop-down for picking the temperature unit (value lands in `choosed`)
degreeMenu = OptionMenu(root, choosed, "Celsius", "Farenheit")
degreeMenu.pack()
def procces():
    """Scrape Google's weather answer box for `city`/`value` and cache the nodes.

    Stores the matched BeautifulSoup nodes in the globals cityName, date and
    degree for status() to display.  NOTE(review): this depends on Google's
    markup (class names like 'BNeawe tAd8D AP7Wnd') staying stable and will
    silently break when that HTML changes.
    """
    # The search URL embeds city and unit twice (q= and oq= parameters)
    address = '''
    https://www.google.com/search?rlz=1C1CHBF_enIR891IR891&sxsrf=ALeKk03m9OMWKOC2j_DtHgAfvdyH4BmfXQ%3A1594136371743&ei=M5cEX-PtLMLikgXfmoGgCw&q=weather+{}+{}&oq=weather+{}+{}&gs_lcp=CgZwc3ktYWIQAzIECCMQJzIGCAAQCBAeMgYIABAIEB4yBggAEAgQHjIGCAAQCBAeUJFTWMBXYMhZaABwAHgAgAHgAYgB8QmSAQMyLTaYAQCgAQGqAQdnd3Mtd2l6&sclient=psy-ab&ved=0ahUKEwij06ThvLvqAhVCsaQKHV9NALQQ4dUDCAw&uact=5
    '''.format(city,value,city,value)
    source = requests.get(address)
    soup = BeautifulSoup(source.content, 'html.parser')
    global cityName, date, degree
    # find() returns None when the city is unknown; status() handles that
    cityName = soup.find('span', class_='BNeawe tAd8D AP7Wnd')
    date = soup.find('div', class_='BNeawe tAd8D AP7Wnd')
    degree = soup.find('div', class_='BNeawe iBp4i AP7Wnd')
    status()
def myDelete():
    """Remove whichever result label is showing, then re-enable Confirm."""
    try:
        weatherStatus.destroy()
    except Exception:
        # weatherStatus was never created (lookup failed) -> remove the
        # 'city not found' label instead
        labelError.destroy()
    # Swap the buttons back: a new lookup is allowed, nothing left to delete
    confirmButton['state']=NORMAL
    deleteButton['state']= DISABLED
def status():
    """Show the scraped weather in a label, or an error label when lookup failed."""
    global weatherStatus
    try:
        # Accessing .text on a None find() result raises AttributeError,
        # which lands in the except branch below
        weatherStatus = Label(root, text="the weather in {} on {} is {}".format(cityName.text,date.text,degree.text))
        weatherStatus.pack()
    except Exception:
        global labelError
        labelError= Label(root, text='city not found')
        labelError.pack()
    # A result (or error) label is now showing: block new lookups until
    # the user deletes it
    confirmButton['state']=DISABLED
    deleteButton['state']= NORMAL
# Confirm triggers the lookup; Delete clears the displayed result label
confirmButton = Button(root, text='Confirm', command=save)
confirmButton.pack()
deleteButton = Button(root, text='Delete', command=myDelete)
deleteButton.pack()

root.mainloop()
|
# Reads three lines from stdin: a count (unused), list a, list b.
# For each element of b, verifies it either appears in a or can be written as
# the sum of two earlier elements of b (the same element may be used twice).
# Prints the first element that fails and stops; prints "sim" ("yes" in
# Portuguese) when every element passes.
inutil = input()
a = [int(x) for x in input().split()]
b = [int(x) for x in input().split()]
temp = True
temp2 = True
for index,i in enumerate(b):
    if i not in a:
        temp = False
        # Try all ordered pairs from the prefix b[:index]
        for n in b[0:index]:
            for m in b[0:index]:
                if m+n == i:
                    temp = True
    if not temp:
        print(i)
        temp2 = False
        break
if temp2:
    print("sim")
|
A = [15, 10, 3]
B = [75, 30, 5]


def solution(A, B):
    """Count positions i where A[i] and B[i] share exactly the same prime divisors.

    :param A: list of positive ints
    :param B: list of positive ints, same length as A
    :return: number of index pairs with identical prime-divisor sets
    """
    count = 0
    for x, y in zip(A, B):
        if has_same_prime_divisors(x, y):
            count += 1
    return count


def has_same_prime_divisors(x, y):
    """Return True iff x and y are built from exactly the same set of primes."""
    gcd_value = gcd(x, y)  # The gcd contains all the common prime divisors
    x = remove_common_prime_divisors(x, gcd_value)
    if x != 1:
        # If x and y share exactly the same prime divisors, x is composed
        # solely of the primes in gcd_value, so it must reduce to 1.
        return False
    y = remove_common_prime_divisors(y, gcd_value)
    return y == 1


def gcd(x, y):
    """Greatest common divisor via Euclid's algorithm."""
    if x % y == 0:
        return y
    else:
        return gcd(y, x % y)


def remove_common_prime_divisors(x, y):
    """Divide out of x every prime divisor it shares with y; return what remains.

    BUGFIX: the original used ``x /= gcd_value`` which is *float* division in
    Python 3 -- the function returned a float and risked precision loss for
    large inputs.  gcd_value always divides x exactly, so integer floor
    division is the correct operation.
    """
    while x != 1:
        gcd_value = gcd(x, y)
        if gcd_value == 1:
            # x has no more prime divisors in common with y
            break
        x //= gcd_value
    return x


assert (solution(A, B) == 1)
# assert (solution([6059, 551], [442307, 303601]) == ?)
|
class Sudoku():
    """Holds the Sudoku game board, its status, and the game's methods."""

    def __init__(self):
        """Initiate the board with a hardcoded puzzle and set status 'Unfinished'.

        '*' marks a blank square the player still needs to fill.
        """
        # BUGFIX: row 1 previously held the *string* "9" among ints, which
        # defeats duplicate detection against an inserted int 9.
        self.board = [
            ["*", 8, 4, "*", 5, 9, 2, "*", "*"],        # row 1
            [6, "*", "*", 1, "*", 3, 7, "*", 8],        # row 2
            ["*", "*", "*", 7, "*", "*", 4, "*", "*"],  # row 3
            [1, "*", 2, "*", "*", "*", 3, "*", "*"],    # row 4
            ["*", "*", "*", "*", "*", "*", "*", "*", "*"],  # row 5
            ["*", "*", 7, "*", "*", "*", 6, "*", 5],    # row 6
            ["*", "*", 1, "*", "*", 6, "*", "*", "*"],  # row 7
            [4, "*", 6, 9, "*", 5, "*", "*", 1],        # row 8
            ["*", "*", 9, "*", 8, "*", 5, 6, "*"]]      # row 9
        self.status = "Unfinished"

    def getBoard(self):
        """Returns the game board."""
        return self.board

    def getStatus(self):
        """Returns the status of the game ('Unfinished' or 'Finished')."""
        return self.status

    def setStatus(self, value):
        """Sets the game status."""
        self.status = value

    def giveRules(self):
        """Prints the rules of Sudoku."""
        rules1 = "Every row, every column, and every 3x3 section must contain the numbers 1 to 9, with no duplications."
        rules1a = "A '*' represents a blank space that needs to be filled in."
        rules2 = "Once you have filled out the entire grid, the program will check your attempt with it’s encoded algorithm,"
        rules3 = "checkSolution(), to see if that solution is valid."
        print(rules1)
        print(rules1a)
        print(rules2)
        print(rules3)

    def uniqueRow(self, row):
        """Return True when the given list contains no repeated values."""
        # A set collapses duplicates, so equal lengths <=> all values unique
        return len(row) == len(set(row))

    def checkRows(self, list):
        """Return True when every row in the given list of rows is duplicate-free."""
        for row in list:
            if not self.uniqueRow(row):
                return False
        return True

    def checkColumns(self, list):
        """Check column uniqueness; columns must be pre-transposed into rows."""
        return self.checkRows(list)

    def gatherColumns(self, board):
        """Transpose the 9x9 board: return a list whose entries are the columns."""
        columnsList = [[], [], [], [], [], [], [], [], []]
        start = 0
        while start < 9:
            for x in board:
                columnsList[start].append(x[start])
            start += 1
        return columnsList

    def gatherSections(self, board):
        """Return the nine 3x3 sections of the board, each flattened to a list.

        Sections are produced left-to-right, top-to-bottom, matching the order
        of the original nested-counter implementation.
        """
        sectionList = []
        for rowStart in range(0, 9, 3):
            for colStart in range(0, 9, 3):
                sectionList.append(self.getOneSection(board, rowStart + 3, colStart + 3))
        return sectionList

    def checkSections(self, board):
        """Return True when every 3x3 section (flattened to a row) is duplicate-free."""
        return self.checkRows(board)

    def getOneSection(self, board, rowMax, columnMax):
        """Return the 3x3 section ending at (rowMax-1, columnMax-1), flattened."""
        section = []
        for x in range(rowMax - 3, rowMax):
            for y in range(columnMax - 3, columnMax):
                section.append(board[x][y])
        return section

    def checkSolution(self):
        """Decide whether the filled-in board is a valid Sudoku solution.

        Valid iff every row, every column, and every 3x3 section holds nine
        distinct values.  Prints the verdict.
        """
        board = self.getBoard()
        columnList = self.gatherColumns(board)
        sectionList = self.gatherSections(board)
        solution = self.checkRows(board) and self.checkColumns(columnList) and self.checkSections(sectionList)
        if solution == True:
            print("Your solution is correct! Congratulations!")
        else:
            print("Your solution is incorrect. Please try again.")

    def printBoard(self):
        """Prints the sudoku board with column letters, row numbers and dividers."""
        list = self.getBoard()
        currentRow = 0
        print(" A B C D E F G H I")
        print()
        # One band of three rows per 3x3 section-row
        for section in range(0, 3):
            for k in range(currentRow, currentRow + 3):
                currentColumn = 0
                print(k, " ", end=" ")
                # Three column-groups of three values each
                for x in range(0, 3):
                    for y in range(currentColumn, currentColumn + 3):
                        value = list[k][y]
                        print(value, end=" ")
                    print("", end=" ")
                    # Vertical divider between column groups
                    if x != 2:
                        print("| ", end=" ")
                    else:
                        print("")
                    currentColumn += 3
            currentRow += 3
            # Horizontal divider between section bands
            if section != 2:
                print(" _______________________________")
                print("")
            print("")

    def convertCol(self, column):
        """Convert a column letter 'A'-'I' into its 0-8 index; None when invalid."""
        colList = ["A", "B", "C", "D", "E", "F", "G", "H", "I"]
        if column in colList:
            return colList.index(column)
        # Invalid column letter
        return None

    def insert(self, column, row, value):
        """Insert value at (column letter, row index) after validating both.

        Re-checks board completion / solution and reprints the board.
        """
        column = self.convertCol(column)
        if column == None:
            print("That is not a valid Column input. Please try again.")
            return
        # Sudoku squares only take 1-9.  BUGFIX: the original printed a warning
        # for out-of-range values (and its bounds even admitted 0 and 10) but
        # inserted the bad value anyway; now the board is left untouched.
        if value < 1 or value > 9:
            print("That is not a valid number input. Please try again")
            return
        # Inserts value into the 9X9 matrix.
        self.board[row][column] = value
        self.checkBoard()
        self.checkStatus()
        self.printBoard()

    def checkBoard(self):
        """Mark the game 'Finished' once no blank ('*') squares remain."""
        board = self.getBoard()
        for row in board:
            if "*" in row:
                return  # at least one blank left; stay 'Unfinished'
        self.setStatus("Finished")

    def checkStatus(self):
        """When the board is full, run the solution check."""
        status = self.getStatus()
        if status == "Unfinished":
            pass
        else:
            self.checkSolution()

    def start(self):
        """Interactive entry point: greet, show rules, then loop on user moves.

        BUGFIX: the original attached self.insert(...) to the while-loop's
        'else' clause, so inputs were collected forever but never inserted and
        the local status variable never changed -- an infinite loop.  Each move
        is now inserted inside the loop and the status re-read every pass.
        """
        print("Welcome to Sudoku!")
        self.giveRules()
        ready = input("Type 'yes' when you are ready. ")
        if ready.lower() == 'yes':
            self.printBoard()
            # insert() -> checkBoard() flips the status once the board is full
            while self.getStatus() == "Unfinished":
                print("Please type the column, row, and number you wish to insert.")
                columnVal = input("Column: ")
                columnVal = columnVal.upper()
                rowVal = int(input("Row: "))
                value = int(input("Number: "))
                self.insert(columnVal, rowVal, value)
def main():
    """A main function to be used if the file is run as a script."""
    game = Sudoku()
    game.start()


# Only launch the interactive game when executed directly, not on import
if __name__ == '__main__':
    main()
def tester(start):
def nested(label):
print(label, nested.state)
nested.state +=1
nested.state = start
return nested
# Demo: each call prints 'stamp <n>' with n advancing 0, 1, 2
f = tester(0)
f('stamp')
f('stamp')
f('stamp')
|
from flask import (
Blueprint, redirect, render_template, request, session, url_for, g, jsonify, Response, current_app,send_from_directory
)
from werkzeug.security import check_password_hash
from werkzeug.utils import secure_filename
from datetime import datetime, timedelta
from ISRS.auth import force_login
from ISRS.model import db, User, Sheet
from ISRS.model import Response as db_Response
from ISRS.color import colors
from ISRS.sheet_detect.detect_row import sheet_recognition_type_one
from ISRS.sheet_detect.hough import sheet_recognition_type_one_sam
import os
# All mobile-app endpoints are registered under the /mobile URL prefix
bp = Blueprint('mobile', __name__, url_prefix='/mobile')
@bp.route('/recognition/<username>/', methods=['POST'])
def upload_photo(username):
    """
    Upload photo from mobile phone and recognize this photo.

    Saves the uploaded sheet image, runs two independent recognition passes,
    and records the answers only when both passes agree.

    args:
        username: For checking login!

    Returns plain-text 'file_failed', 'recognition_failed' or 'success'.
    """
    print(colors.GREEN + '----- Mobile Upload file -----' + colors.END)
    print(request.files)  # many file
    if 'file' not in request.files:
        print(colors.RED + 'No file' + colors.END)
        return Response('file_failed')
    uploaded_file = request.files['file']
    if uploaded_file.filename == '':
        print(colors.RED + 'No selected file' + colors.END)
        return Response('file_failed')
    if uploaded_file and allowed_file(uploaded_file.filename):
        filename = secure_filename(uploaded_file.filename)
        saved_filename = os.path.join(current_app.config['UPLOAD_FOLDER'], filename)
        uploaded_file.save(saved_filename)

        # Filename convention: '<sheet_id>_...' -- the sheet id rides in the name
        sheet_id = int(filename.split('_')[0])
        print(colors.BLUE + 'File saved to '
              + saved_filename + ', sheet_id: ' + str(sheet_id) + colors.END)

        sheet = Sheet.query.filter_by(id=sheet_id).first()
        sheet_type = sheet.sheet_type

        # TODO : check whether answer is valid -> recognition_failed
        # OK TODO : os.path
        # TODO sheet_type 1 or 2,
        # OK TODO : login user

        # Two independent recognizers; their results are cross-checked below
        sheet_answer_wei = sheet_recognition_type_one(saved_filename, sheet.question_number, sheet.option_number)
        #sheet_answer_wei = 1
        print(colors.BLUE + 'Wei answer: ', end='')
        print(sheet_answer_wei, end='')
        print(colors.END)
        sheet_answer_sam = sheet_recognition_type_one_sam(saved_filename, sheet.question_number, sheet.option_number)
        #sheet_answer_sam = 1
        print(colors.BLUE + 'Sam answer: ', end='')
        print(sheet_answer_sam, end='')
        print(colors.END)

        if not sheet_answer_wei or not sheet_answer_sam:
            print(colors.YELLOW + 'Recognition failed' + colors.END)
            return Response('recognition_failed')
        elif sheet_answer_wei != sheet_answer_sam:
            # The recognizers disagree -- trust neither result
            print(colors.YELLOW + 'Recognition failed' + colors.END)
            return Response('recognition_failed')
        else:
            print(colors.GREEN + 'Recognition successed' + colors.END)
            add_response_record(sheet_id, sheet_answer_wei)
            return Response('success')  # recognition success

    print(colors.RED + 'File extension not allowed or file not exist' + colors.END)
    return Response('file_failed')
def allowed_file(filename):
    """
    Check filename allowed.

    Returns a truthy value only when the filename has an extension listed in
    the app's ALLOWED_EXTENSIONS config.
    rsplit(seprator, max) : return list with length = max+1
    """
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in current_app.config['ALLOWED_EXTENSIONS']
@bp.route('/answer/', methods=['POST'])
def add_response():
    """Accept a JSON body {'sheet_id': ..., 'answer_list': ...} and persist it."""
    data = request.get_json()
    print(data)
    add_response_record(data['sheet_id'], data['answer_list'])
    return 'OK'
def add_response_record(sheet_id, answer_list):
    """
    Persist one response row for the given sheet.

    :param sheet_id: Sheet id; must be positive
    :param answer_list: recognized/submitted answers; must be truthy
    :return: True when a record was written, False otherwise
    """
    if sheet_id > 0 and answer_list:
        sheet = Sheet.query.filter_by(id=sheet_id).first()
        # Timestamps are stored as UTC+8 local-time strings
        new_response = db_Response(response_list=answer_list,
                                   created_at=(datetime.utcnow()+timedelta(hours=8)).strftime('%Y/%m/%d %H:%M:%S'),
                                   sheets=sheet)
        db.session.add(new_response)
        db.session.commit()
        return True
    return False
@bp.route('/check_login/', methods=['POST'])
def check_login():
    """
    check user login.

    Expects form fields 'username' and 'password'; returns plain-text
    'login_success' or 'login_fail'.
    """
    print(colors.GREEN + '----- Mobile check login -----' + colors.END)
    username = request.form['username']
    password = request.form['password']
    print(colors.BLUE + 'username: {}'.format(username) + colors.END)
    # SECURITY(review): this prints the raw password to stdout/logs,
    # leaking credentials -- consider removing.
    print(colors.BLUE + 'password: {}'.format(password) + colors.END)
    user = User.query.filter_by(username=username).first()
    if user is None or not check_password_hash(user.password, password):
        print(colors.RED + 'Login fail!' + colors.END)
        return Response("login_fail")
    else:
        print(colors.BLUE + 'Login success!' + colors.END)
        return Response("login_success")
@bp.route('/list/<username>/')
def mobile_list_sheet(username):
    """
    Return user's sheets' id and title

    Responds with JSON: {'ids': [...], 'titles': [...]} in matching order.
    """
    print(colors.GREEN + '----- Mobile list -----' + colors.END)
    print('username ', username)
    user = User.query.filter_by(username=username).first()
    # Build both parallel lists in a single pass over the user's sheets
    ids = []
    titles = []
    for sheet in user.sheets:
        ids.append(sheet.id)
        titles.append(sheet.title)
    return jsonify(ids=ids, titles=titles)
@bp.route('/app/')
def return_apk():
    """Serve the Android APK from ANDROID_FOLDER as a download.

    NOTE(review): send_from_directory's 'filename' keyword was renamed to
    'path' in Flask 2.0 -- confirm the pinned Flask version.
    """
    return send_from_directory(directory=current_app.config['ANDROID_FOLDER'], filename='ISRS.apk', as_attachment=True)
# BUGFIX: the rule was 'get_img' (no leading slash); Flask/werkzeug raise
# "urls must start with a leading slash" when registering such a rule, which
# breaks loading of the whole blueprint.
@bp.route('/get_img', methods=['GET'])
def get_img():
    """Render a simple browser page listing the uploaded image files."""
    files = os.listdir(current_app.config['UPLOAD_FOLDER'])
    print(files)
    return render_template('browser.html', files=files, current_app=current_app)
@bp.route('/imgs/<filename>/')
def imgs(filename):
    """Serve an uploaded image as an attachment (download)."""
    return send_from_directory(directory=current_app.config['UPLOAD_FOLDER'], filename=filename, as_attachment=True)
@bp.route('/show_img/<filename>/')
def show_img(filename):
    """Serve an uploaded image inline (rendered in the browser, not downloaded)."""
    return send_from_directory(directory=current_app.config['UPLOAD_FOLDER'], filename=filename, as_attachment=False)
|
#!/usr/bin/env python3
# pylint: disable=no-member
#
# This file is part of LUNA.
#
# Copyright (c) 2020 Great Scott Gadgets <info@greatscottgadgets.com>
# SPDX-License-Identifier: BSD-3-Clause
import os
import sys
import logging
import time
import usb1
from luna.gateware.applets.speed_test import USBInSpeedTestDevice, USBInSuperSpeedTestDevice, BULK_ENDPOINT_NUMBER
from luna import top_level_cli, configure_default_logging
# Set the total amount of data to be used in our speed test.
TEST_DATA_SIZE = 1 * 1024 * 1024

# Size of each individual bulk transfer, in bytes.
TEST_TRANSFER_SIZE = 16 * 1024

# Size of the host-size "transfer queue" -- this is effectively the number of async transfers we'll
# have scheduled at a given time.
TRANSFER_QUEUE_DEPTH = 16
def run_speed_test():
    """ Runs a simple speed test, and reports throughput.

    Queues TRANSFER_QUEUE_DEPTH asynchronous bulk IN transfers, keeps
    re-submitting them until more than TEST_DATA_SIZE bytes have been moved,
    then logs the achieved MB/s.  On a transfer failure, logs the reason and
    exits the process with the failing usb1 status code.
    """

    total_data_exchanged = 0
    failed_out = False

    # Human-readable failure reasons keyed by the usb1 transfer status codes
    # we may store in failed_out below.
    _messages = {
        1: "error'd out",
        2: "timed out",
        3: "was prematurely cancelled",
        4: "was stalled",
        5: "lost the device it was connected to",
        6: "sent more data than expected."
    }

    def _should_terminate():
        """ Returns true iff our test should terminate. """
        return (total_data_exchanged > TEST_DATA_SIZE) or failed_out

    def _transfer_completed(transfer: usb1.USBTransfer):
        """ Callback executed when an async transfer completes. """
        nonlocal total_data_exchanged, failed_out

        status = transfer.getStatus()

        # If the transfer completed.
        if status in (usb1.TRANSFER_COMPLETED,):

            # Count the data exchanged in this packet...
            total_data_exchanged += transfer.getActualLength()

            # ... and if we should terminate, abort.
            if _should_terminate():
                return

            # Otherwise, re-submit the transfer.
            transfer.submit()

        else:
            # Remember the failing status; it doubles as the _messages key
            # and the process exit code.
            failed_out = status

    with usb1.USBContext() as context:

        # Grab a reference to our device...
        device = context.openByVendorIDAndProductID(0x16d0, 0x0f3b)

        # ... and claim its bulk interface.
        device.claimInterface(0)

        # Submit a set of transfers to perform async comms with.
        active_transfers = []
        for _ in range(TRANSFER_QUEUE_DEPTH):

            # Allocate the transfer...
            transfer = device.getTransfer()
            transfer.setBulk(0x80 | BULK_ENDPOINT_NUMBER, TEST_TRANSFER_SIZE, callback=_transfer_completed, timeout=1000)

            # ... and store it.
            active_transfers.append(transfer)

        # Start our benchmark timer.
        start_time = time.time()

        # Submit our transfers all at once.
        for transfer in active_transfers:
            transfer.submit()

        # Run our transfers until we get enough data.
        while not _should_terminate():
            context.handleEvents()

        # Figure out how long this took us.
        end_time = time.time()
        elapsed = end_time - start_time

        # Cancel all of our active transfers.
        for transfer in active_transfers:
            if transfer.isSubmitted():
                transfer.cancel()

        # If we failed out; indicate it.
        if (failed_out):
            logging.error(f"Test failed because a transfer {_messages[failed_out]}.")
            sys.exit(failed_out)

        bytes_per_second = total_data_exchanged / elapsed
        logging.info(f"Exchanged {total_data_exchanged / 1000000}MB total at {bytes_per_second / 1000000}MB/s.")
if __name__ == "__main__":

    # If our environment is suggesting we rerun tests, do so.
    if os.getenv('LUNA_RERUN_TEST'):
        configure_default_logging()
        logging.info("Running speed test without rebuilding...")
        run_speed_test()

    # Otherwise, build and run our tests.
    else:
        # Selectively create our device to be either USB3 or USB2 based on the
        # SuperSpeed variable.
        if os.getenv('LUNA_SUPERSPEED'):
            device = top_level_cli(USBInSuperSpeedTestDevice)
        else:
            device = top_level_cli(USBInSpeedTestDevice,
                fs_only=bool(os.getenv('LUNA_FULL_ONLY')))

        # Give the hardware a moment to enumerate before talking to it.
        logging.info("Giving the device time to connect...")
        time.sleep(5)

        if device is not None:
            # Plain string literal: the original used an f-string with no
            # placeholders (lint F541), which serves no purpose.
            logging.info("Starting bulk in speed test.")
            run_speed_test()
|
# -*- coding: utf-8 -*-
"""
flask_store
===========
Adds simple file handling for different providers to your application. Provides
the following providers out of the box:
* Local file storeage
* Amazon Simple File Storage (requires ``boto`` to be installed)
"""
# Python 2/3 imports
try:
    # Python 3
    from urllib.parse import urlparse
    from urllib.parse import urljoin
except ImportError:
    # Python 2: both names live directly in the urlparse module.
    # BUGFIX: the original 'from urlparse.urlparse import urljoin' tried to
    # import from a *function* (urlparse.urlparse) and always raised
    # ImportError, so the Python 2 fallback could never work.
    from urlparse import urljoin
    from urlparse import urlparse

from flask import current_app, send_from_directory
from flask_store.exceptions import NotConfiguredError
from importlib import import_module
from werkzeug.local import LocalProxy

# Dotted path of the provider used when STORE_PROVIDER is not configured
DEFAULT_PROVIDER = "flask_store.providers.local.LocalProvider"

# Lazy proxy resolving to the active application's provider class
Provider = LocalProxy(lambda: store_provider())
def store_provider():
    """Returns the default provider class as defined in the application
    configuration.

    Returns
    -------
    class
        The provider class
    """
    # The extension state was stashed on the app by Store.init_app
    state = current_app.extensions["store"]
    return state.store.Provider
class StoreState(object):
    """Stores the state of Flask-Store from application init."""

    def __init__(self, store, app):
        """Record the Store extension instance and the Flask app it is bound to."""
        self.app = app
        self.store = store
class Store(object):
"""Flask-Store integration into Flask applications. Flask-Store can
be integrated in two different ways depending on how you have setup your
Flask application.
You can bind to a specific flask application::
app = Flask(__name__)
store = Store(app)
Or if you use an application factory you can use
:meth:`flask_store.Store.init_app`::
store = Store()
def create_app():
app = Flask(__name__)
store.init_app(app)
return app
"""
    def __init__(self, app=None):
        """Constructor. Basically acts as a proxy to
        :meth:`flask_store.Store.init_app`.

        Key Arguments
        -------------
        app : flask.app.Flask, optional
            Optional Flask application instance, default None
        """
        # Defer all setup to init_app so the factory pattern also works
        if app:
            self.init_app(app)
def init_app(self, app):
"""Sets up application default confugration options and sets a
``Provider`` property which can be used to access the default
provider class which handles the saving of files.
Arguments
---------
app : flask.app.Flask
Flask application instance
"""
app.config.setdefault("STORE_DOMAIN", None)
app.config.setdefault("STORE_PROVIDER", DEFAULT_PROVIDER)
if not hasattr(app, "extensions"):
app.extensions = {}
app.extensions["store"] = StoreState(self, app)
# Set the provider class
self.Provider = self.provider(app)
# Set configuration defaults based on provider
self.set_provider_defaults(app)
# Ensure that any required configuration vars exist
self.check_config(app)
# Register a flask route - the provider must have register_route = True
self.register_route(app)
def check_config(self, app):
"""Checks the required application configuration variables are set
in the flask application.
Arguments
---------
app : flask.app.Flask
Flask application instance
Raises
------
NotConfiguredError
In the event a required config parameter is required by the
Store.
"""
if hasattr(self.Provider, "REQUIRED_CONFIGURATION"):
for name in self.Provider.REQUIRED_CONFIGURATION:
if not app.config.get(name):
raise NotConfiguredError(
"{0} must be configured in your flask application "
"configuration".format(name)
)
def provider(self, app):
"""Fetches the provider class as defined by the application
configuration.
Arguments
---------
app : flask.app.Flask
Flask application instance
Raises
------
ImportError
If the class or module cannot be imported
Returns
-------
class
The provider class
"""
if not hasattr(self, "_provider"):
parts = app.config["STORE_PROVIDER"].split(".")
klass = parts.pop()
path = ".".join(parts)
module = import_module(path)
if not hasattr(module, klass):
raise ImportError("{0} provider not found at {1}".format(klass, path))
self._provider = getattr(module, klass)
return getattr(self, "_provider")
def set_provider_defaults(self, app):
"""If the provider has a ``app_defaults`` static method then this
simply calls that method. This will set sensible application
configuration options for the provider.
Arguments
---------
app : flask.app.Flask
Flask application instance
"""
if hasattr(self.Provider, "app_defaults"):
self.Provider.app_defaults(app)
def register_route(self, app):
"""Registers a default route for serving uploaded assets via
Flask-Store, this is based on the absolute and relative paths
defined in the app configuration.
Arguments
---------
app : flask.app.Flask
Flask application instance
"""
def serve(filename):
return send_from_directory(app.config["STORE_PATH"], filename)
# Only do this if the Provider says so
if self.Provider.register_route:
url = urljoin(
app.config["STORE_URL_PREFIX"].lstrip("/") + "/", "<path:filename>"
)
app.add_url_rule("/" + url, "flask.store.file", serve)
|
class MstrClientException(Exception):
    """Raised for errors encountered by the MstrClient class.

    Parameters
    ----------
    msg : str
        Human-readable description of the error.
    request : optional
        The request that triggered the error, if any.
    """
    def __init__(self, msg, request=None):
        self.msg = msg
        self.request = request
    def __str__(self):
        return str(self.msg)
    def __repr__(self):
        # Fix: the original format string was missing the closing parenthesis.
        return "{cls}(msg={msg}, request={request})".format(
            cls=self.__class__,
            msg=self.msg,
            request=self.request
        )
class MstrReportException(MstrClientException):
    """Raised for errors encountered by the MstrReport class."""
    pass
class MstrDocumentException(MstrClientException):
    """Raised for errors encountered by the Document class."""
    pass
import os
import argparse
import json
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from trainner import Trainer
from model import Model
from dataset import SingleDataset
from tokenizer import Tokenizer, load_vocab
# Command-line interface for the training script.
parser = argparse.ArgumentParser()
parser.add_argument('--char_vocab', required=True)  # path to the character vocabulary file
parser.add_argument('--word_vocab', required=True)  # path to the word vocabulary file
parser.add_argument('--train_file', required=True)  # training data file
parser.add_argument('--test_file', required=True)  # evaluation data file
parser.add_argument('--model_config')  # optional; not read by the code below
parser.add_argument('--batch_size', default=32, type=int)
parser.add_argument('--learning_rate', default=1e-4, type=float)
parser.add_argument('--num_epoch', default=10, type=int)
parser.add_argument('--device', default='cpu')  # torch device string
parser.add_argument('--log_dir', default='logs')  # log output directory
parser.add_argument('--weight_dir', default='weight')  # checkpoint output directory
if __name__ == "__main__":
    args = parser.parse_args()
    print("Load vocab")
    tokenizer = load_vocab(args.char_vocab, args.word_vocab)
    print("Prepare data")
    train_ds = SingleDataset(args.train_file, tokenizer)
    test_ds = SingleDataset(args.test_file, tokenizer)
    # Shuffle only the training split; evaluation order stays deterministic.
    train_dl = DataLoader(train_ds, shuffle=True, batch_size=args.batch_size)
    test_dl = DataLoader(test_ds, shuffle=False, batch_size=args.batch_size)
    print("Init model")
    char_vocab_len = len(tokenizer.char_stoi)
    word_vocab_len = len(tokenizer.word_stoi)
    model = Model(char_vocab_len, word_vocab_len)
    optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate, betas=(0.9, 0.999), eps=1e-9)
    # T_0 = number of batches per epoch; presumably the scheduler is stepped
    # once per batch so each warm restart spans one epoch — confirm in Trainer.
    sched = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=len(train_dl))
    trainner = Trainer(
        model, optimizer, train_dl, test_dl,
        device=args.device, scheduler=sched,
        log_dir=args.log_dir,
        weight_dir=args.weight_dir
    )
    print("Start training")
    trainner.train(args.num_epoch)
|
import csv
import nltk
import re
positive_list = []
negative_list = []
poslist = []          # POS-tagged tokens, one entry per tweet
cleaned_tweets = []   # cleaned tweet text, aligned index-for-index with poslist
# Positive wordlist
with open("Pos_Words.csv", 'r', encoding="utf8") as positive_words:
    reader = csv.reader(positive_words)
    for row in reader:
        positive_list.append(row[0])
# Negative wordlist
with open("Neg_Words.csv", 'r', encoding="utf8") as negative_words:
    reader = csv.reader(negative_words)
    for row in reader:
        negative_list.append(row[0])
# Sets give O(1) membership tests in the scoring loop below.
positive_set = set(positive_list)
negative_set = set(negative_list)
## Cleaning tweets
with open("processedTweets.csv", 'r', encoding="utf8") as file:
    reader = csv.reader(file)
    for row in reader:
        # Remove links
        tweet = re.sub(r"http\S+", "", row[0])
        # Remove special characters.
        # Fix: the original applied this (and tokenization) to `tweet[0]`,
        # i.e. only the FIRST CHARACTER of the tweet, discarding the rest.
        tweet = re.sub('[^A-Za-z0-9]+', ' ', tweet)
        # Part-of-speech tagging using nltk
        tokenize = nltk.word_tokenize(tweet)
        pos = nltk.pos_tag(tokenize)
        poslist.append(pos)
        cleaned_tweets.append(tweet)
# Tag polarity based on the word lists and write one row per tweet.
# Fix: the original loop wrote `tweet[0]` (a single character of the LAST
# tweet processed) for every output row, and reopened the file per row;
# pair each tweet with its own tags and open the output once instead.
with open("Sentimental_Polarity.csv", "a", newline="", encoding='utf-8') as Result_file:
    writer = csv.writer(Result_file, delimiter=",")
    for tweet_text, tags in zip(cleaned_tweets, poslist):
        count = 0
        for word in tags:
            word_lower_case = word[0].lower()
            if word_lower_case in negative_set:
                count = count - 1
            if word_lower_case in positive_set:
                count = count + 1
        if count > 0:
            result = 'positive'
        elif count < 0:
            result = 'negative'
        else:
            result = 'neutral'
        writer.writerow([tweet_text, result])
|
import json
import threading
import time
import requests
from reload.filter_ip import manage_ip
from reload.url_load import load_url
from sql_server import MySql
class func_res:
    """Fetches Bilibili API responses (optionally through a proxy) and
    persists them to MySQL.

    The ``manager*`` entry points load pending URLs and dispatch each
    decoded JSON payload to the matching ``deal_url_*_res`` handler.
    """
    # Video fields of interest in the JSON payload.
    videoneed = ["created", "length", "is_union_video", "typeid"]
    # User ("up") profile fields of interest in the JSON payload.
    need = ["mid", "name", "sex", "sign", "level", "birthday", "coins", "following", "follower", "black"]
    def __init__(self):
        self.proxy = ""    # currently selected proxy; "" means direct connection
        self.ip_pool = []  # pool of candidate proxy IPs
    # def starts(self,url):
    #     self.ip_manager = manage_ip()
    #     self.url = url
    #     if self.ip_manager.ip_pool != []:
    #         self.proxy =self.ip_manager.ip_pool.pop()
    def manager(self, name):
        """Load the not-yet-processed user URLs for *name* and fetch each."""
        self.load_url(name)
        for mid, url in self.urls:
            self.starts(url, 3, mid, name)
    def manager_video(self, name):
        """Load the not-yet-processed video URLs for *name* and fetch each."""
        self.load_url(name)
        for av, url in self.urls:
            self.starts(url, 3, av, name)
    def load_url(self, name):
        """Populate ``self.urls`` with the pending (id, url) pairs for *name*."""
        loads = load_url()
        self.urls = loads.load_manager(name)
    def starts(self, url, times, mid, name):
        """Fetch *url* (rotating proxies on failure) and dispatch the JSON body.

        times : request timeout in seconds, forwarded to ``init_head``.
        """
        print("dd")
        time2 = 0
        while True:
            self.response = self.init_head(url, times)
            if self.response is None or self.response.status_code != 200:
                if self.ip_pool != []:
                    # Rotate to the next proxy. The used IP's `valid` flag
                    # should be updated in the database (not implemented).
                    self.proxy = self.ip_pool.pop()
                    if len(self.ip_pool) < 5:
                        # TODO: refill the pool with fresh IPs.
                        pass
                else:
                    time2 += 1
                    time.sleep(10)
            else:
                res = self.response.content.decode()
                res = json.loads(res)
                self.manager_deal_url(name, res, mid, url)
                break
            if time2 > 10:
                print("等待下一次进入")
                # Fix: `assert "<non-empty string>"` is always truthy and
                # never fired; raise so the run really stops when no usable
                # proxy IPs remain in the database.
                raise RuntimeError("数据库里没可用ip了,结束运行")
    def manager_deal_url(self, name, res, mid, url):
        """Dispatch the decoded JSON *res* to the handler matching *name*."""
        if name == "load_url_1":
            self.deal_url_1_res(res, mid)
        elif name == "load_url_2":
            self.deal_url_2_res(res, mid)
        elif name == "load_url_3":
            self.deal_url_3_res(res, mid, url)
        elif name == "load_url_4":
            av = mid
            self.deal_url_4_res(res, av, url)
        elif name == "load_url_5":
            av = mid
            self.deal_url_5_res(res, av, url)
        else:
            # Fix: `assert "<non-empty string>"` was a no-op; raise on an
            # unknown dispatch name instead of silently returning.
            raise ValueError("manager_deal_url name 错了")
    def deal_url_1_res(self, res, mid):
        """Insert basic profile data for user *mid* into `upbasic`."""
        info = {}
        for i, j in res["data"].items():
            if i in self.need:
                if j == "":
                    j = " "  # empty strings would break the interpolated SQL
                info[i] = j
        sql2 = f'''insert into upbasic values({info["mid"]},"{info["name"]}","{info["sex"]}","null",{info["level"]},"{info["birthday"]}",{info["coins"]},null,null,null)'''
        sql1 = f'''select mid from upbasic where mid = {mid}'''
        sql3 = f'''update baida_up set url_1_valid = 0 where mid = {mid}'''
        db = MySql()
        call = db.fetch_one(sql1)
        print(call)
        if call is None:
            print(sql2)
            db.execute(sql2)
        # Mark the URL as processed whether or not the row already existed
        # (mirrors deal_url_2_res, which always clears its valid flag).
        db.execute(sql3)
        print("ok")
    def deal_url_2_res(self, res, mid):
        """Update follower/following/blacklist counts for user *mid*."""
        info = {}
        print("res", res)
        for i, j in res["data"].items():
            if i in self.need:
                if j == "":
                    j = " "
                info[i] = j
        sql2 = f'''update upbasic set following={info["following"]},black={info["black"]},follower={info["follower"]} where mid = {mid}'''
        sql3 = f'''update baida_up set url_2_valid = 0 where mid = {mid}'''
        db = MySql()
        print(sql2)
        db.execute(sql2)
        db.execute(sql3)
        print("ok")
    def deal_url_3_res(self, dict_json, mid, url):
        """Insert the user's video list into `bvlist` and mark *url* done."""
        videolist = []
        moreinfo = {}
        for i in dict_json["data"]["list"]["vlist"]:
            videolist.append(str(i["aid"]))
            moreinfo[str(i["aid"])] = {}
            for keyj, valuej in i.items():
                if keyj in self.videoneed:
                    moreinfo[str(i["aid"])][keyj] = valuej
        db = MySql()
        for i in videolist:
            sql_check = f'''select mid from bvlist where av = "{i}" '''
            res = db.execute(sql_check)
            print("res", res)
            if res == 0:
                # Insert the video metadata into bvlist.
                sql = f'''Insert into bvlist values({mid},null,"{i}",1,{moreinfo[i]["typeid"]},{moreinfo[i]["created"]},"{moreinfo[i]["length"]}",{moreinfo[i]["is_union_video"]})'''
                print(sql)
                db.execute(sql)
        sql2 = f'''update getbvlist set valid=0 where url="{url}"'''
        db.execute(sql2)
        print("videolist", videolist)
        print("moreinfo", moreinfo)
    def deal_url_4_res(self, dict_json, av, url):
        """Update play/interaction statistics for video *av* in `bvlist`."""
        print(av)
        if av == 0 or av == 1299:
            return  # known bad / placeholder ids — skip
        db = MySql()
        i = dict_json["data"]
        view = i["view"]
        if isinstance(view, str):
            # The API returns a string here when the video is gone.
            view = -1
        danmaku = i["danmaku"]
        reply = i["reply"]
        favorite = i["favorite"]
        coin = i["coin"]
        share = i["share"]
        his_rank = i["his_rank"]
        like = i["like"]
        sql = f'''Update bvlist set view={view},danmaku={danmaku},reply={reply},favorite={favorite},coin={coin},share={share},his_rank={his_rank},likes={like} where av={av}'''
        print(sql)
        db.execute(sql)
        sql2 = f'''Update video_url set valid_1=0 where av={av}'''
        db.execute(sql2)
    def deal_url_5_res(self, dict_json, av, url):
        """Store the title of video *av* and mark its url_5 task done."""
        db = MySql()
        for i in dict_json["data"]:
            # Fix: `i[title]` referenced an undefined name; the key is the
            # string "title".
            title = i["title"]
            # Fix: the closing quote was misplaced, producing
            # `set title="{title} where av = {av}"` — an always-broken query.
            sql = f'''Update bvlist set title="{title}" where av = {av}'''
            db.execute(sql)
        # Fix: the original statement was missing the SET keyword.
        sql2 = f'''Update video_url set valid_2=0 where av={av}'''
        db.execute(sql2)
    def init_head(self, url, time):
        """GET *url* with a *time*-second timeout, via the proxy if one is set.

        Returns the response object, or None when a proxied request failed.
        NOTE: the parameter name shadows the `time` module inside this method.
        """
        if self.proxy != "":
            proxies = {
                'http': 'http://' + self.proxy,
                'https': 'https://' + self.proxy,
            }
            response = None
            try:
                # Best-effort: any request failure is logged and None returned
                # so the caller can rotate to another proxy.
                response = requests.get(url, proxies=proxies, timeout=time)
            except Exception as e:
                print(e)
            return response
        else:
            return requests.get(url, timeout=time)
if __name__ == '__main__':
    p =func_res()
    # Process the pending video-statistics URLs (the url_4 batch).
    p.manager_video("load_url_4")
## http://x.fanqieip.com/index.php?s=/Api/IpManager/adminFetchFreeIpRegionInfoList&uid=12522&ukey=caa11eef0b28fa88056de334a5c0c2e3&limit=10&format=0&page=1
# Имеется реализованная функция f(x)f(x), принимающая на вход целое число xx, которая вычисляет некоторое целочисленное значение и возвращает его в качестве результата работы.
# Функция вычисляется достаточно долго, ничего не выводит на экран, не пишет в файлы и зависит только от переданного аргумента xx.
# Напишите программу, которой на вход в первой строке подаётся число nn — количество значений xx, для которых требуется узнать значение функции f(x)f(x), после чего сами эти nn значений, каждое на отдельной строке. Программа должна после каждого введённого значения аргумента вывести соответствующие значения функции ff на отдельной строке.
# Для ускорения вычисления необходимо сохранять уже вычисленные значения функции при известных аргументах.
# Обратите внимание, что в этой задаче установлено достаточно сильное ограничение в две секунды по времени исполнения кода на тесте.
# Sample Input:
# 5
# 5
# 12
# 9
# 20
# 12
# Sample Output:
# 11
# 41
# 47
# 61
# 41
# Memoize f: recompute only for arguments not seen before.
cache = {}
for _ in range(int(input())):
    arg = int(input())
    if arg not in cache:
        cache[arg] = f(arg)
    print(cache[arg])
from collections import Counter
def solution(str1, str2):
    """Jaccard similarity of the two strings' letter-bigram multisets,
    scaled by 65536 and truncated to an int.

    Bigrams are case-insensitive and only pairs of ASCII letters count.
    When both multisets are empty the similarity is defined as 1 (65536).
    """
    def bigrams(s):
        # Multiset of all two-letter slices consisting solely of A-Z.
        s = s.upper()
        return Counter(
            s[i:i + 2]
            for i in range(len(s) - 1)
            if 'A' <= s[i] <= 'Z' and 'A' <= s[i + 1] <= 'Z'
        )
    c1, c2 = bigrams(str1), bigrams(str2)
    # Counter's | and & implement multiset union/intersection directly,
    # replacing the original's duplicated set-vs-multiset branches and its
    # error-prone hand-typed (scrambled) alphabet constant.
    union = sum((c1 | c2).values())
    if union == 0:
        return 65536  # both multisets empty: treated as identical
    intersection = sum((c1 & c2).values())
    return int(intersection / union * 65536)
class Question:
    """A quiz question pairing prompt text with its correct answer."""

    def __init__(self, q_text, q_answer):
        # Store the prompt and the expected answer as plain attributes.
        self.text = q_text
        self.answer = q_answer
# -*- coding: utf-8 -*-
#密碼規則
# Password rule: at least 8 characters, containing at least one digit
# and at least one letter.
pwd = input()
digit_count = sum(1 for ch in pwd if ch.isdigit())
letter_count = sum(1 for ch in pwd if ch.isalpha())
if len(pwd) >= 8 and digit_count >= 1 and letter_count >= 1:
    print("Valid password")
else:
    print("Invalid password")
|
"""
find in string like this 'ABJBWLDILWUDarbsbraBWUDBUWIBDIUWBDUIDWIU' something like this 'arbsbra'
"""
income = 'ABJBWLDILWUDarbsbraBWUDBUWIBDIUWBDUIDWIUIWDI'.upper()  # string to search
# variant 1
def create_main_dict(income: str, clean: bool = True) -> dict:
    """Map each character of *income* to the list of indices where it occurs.

    When *clean* is True, characters occurring only once are dropped.

    :param income: e.g. "abchjab"
    :return: e.g. {'a': [0, 5], 'b': [1, 6]}
    """
    positions = {}
    for idx, ch in enumerate(income):
        positions.setdefault(ch, []).append(idx)
    if clean:
        # Remove characters that cannot anchor a palindrome (one occurrence).
        for ch in [c for c, idxs in positions.items() if len(idxs) < 2]:
            positions.pop(ch)
    return positions
def unique_sets(l: list) -> list:
    """Return all unordered index pairs from *l*, preserving input order.

    :param l: list like [1, 2, 4, 5]
    :return: [(1, 2), (1, 4), (1, 5), (2, 4), (2, 5), (4, 5)]

    Fix: the original docstring showed set literals, but the function has
    always returned tuples; the docstring now matches the behavior.
    """
    from itertools import combinations
    # combinations() yields exactly the same (i, j) ordering as the manual
    # nested loop it replaces.
    return list(combinations(l, 2))
def is_it_abcba(st: str) -> bool:
    """
    :param st: if == abcba; if == abcca
    :return: true; false
    """
    # A string is a palindrome exactly when it equals its own reverse; this
    # replaces the explicit half-versus-reversed-half comparison.
    return st == st[::-1]
def get_word_to_check(idx1: int, idx2: int) -> str:
    """Slice of the global `income` from idx1 through idx2 inclusive."""
    # The original's two branches (idx2 == last index vs idx2 < last index)
    # collapse into one inclusive slice; an out-of-range idx2 still falls
    # through and returns None, as before.
    if idx2 < len(income):
        return income[idx1:idx2 + 1]
def main():
    """Variant 1: try every pair of equal characters as palindrome ends."""
    best = ''
    for positions in create_main_dict(income).values():
        for start, stop in unique_sets(positions):
            candidate = get_word_to_check(start, stop)
            if is_it_abcba(candidate) and len(best) < len(candidate):
                best = candidate
    return best
res = main()  # variant 1 answer: longest palindromic substring
# variant2
income_list = [i for i in income]  # the input as a list of single characters
last_result = ''
def find_word(l1: list, l2: list, cental_letter= '') -> str:
    """Grow a palindrome outwards from the centre.

    l1 is the mirrored left context, l2 the right context; matching pairs
    are wrapped around *cental_letter* until the first mismatch.
    """
    word = cental_letter
    for left, right in zip(l1, l2):
        if left != right:
            break
        word = left + word + right
    return word
def main():
    """Variant 2: expand around every possible palindrome centre."""
    best = ''
    for i in range(len(income_list)):
        # Even-length palindrome centred between positions i-1 and i.
        left = income_list[:i]
        left.reverse()
        even = find_word(left, income_list[i:])
        # Odd-length palindrome centred on the character at position i.
        mirrored = income_list[:i]
        mirrored.reverse()
        odd = find_word(mirrored, income_list[i + 1:], income_list[i])
        for candidate in (even, odd):
            if candidate and len(best) < len(candidate):
                best = candidate
    return best
last_result = main()  # variant 2 answer
# variant 3
# reuse is_it_abcba function
def main():
    """Variant 3: scan windows of decreasing width over `income`; the first
    palindromic window found is necessarily the longest one."""
    n = len(income)
    for width in range(n, 0, -1):
        for start in range(n - width + 1):
            window = income[start:start + width]
            if is_it_abcba(window):
                return window
    return ''
last_result = main()  # variant 3 answer
import os
import re
import sys
import platform
import subprocess
import argparse
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
from distutils.version import LooseVersion
# Extract cmake arguments: `-D key=value` options are consumed here and
# forwarded to cmake; everything else is handed back to setuptools.
parser = argparse.ArgumentParser()
parser.add_argument("-D", action='append', dest='cmake',
                    help="CMake Options")
args, other_args = parser.parse_known_args(sys.argv)
cmake_clargs = args.cmake
sys.argv = other_args  # let setuptools parse the remaining arguments
sourcedir = os.path.dirname(os.path.abspath(__file__))
# Pinned versions and vendoring locations of the bundled external modules.
pybind_url = "https://github.com/pybind/pybind11.git"
pybind_ver = "v2.5.0"
pybind_dir = os.path.join(sourcedir, 'pydiscregrid/pybind11')
eigen_url = "https://gitlab.com/libeigen/eigen.git"
eigen_ver = "3.3.7"
eigen_dir = os.path.join(sourcedir, 'pydiscregrid/eigen')
external_modules = {"pybind": {"url": pybind_url, "ver": pybind_ver, "dir": pybind_dir},
                    "eigen": {"url": eigen_url, "ver": eigen_ver, "dir": eigen_dir}}
class CMakeExtension(Extension):
    """A setuptools Extension whose binary is produced by CMake."""

    def __init__(self, name, sourcedir=''):
        # No Python-visible sources: the shared library is built by CMake.
        super().__init__(name, sources=[])
        self.sourcedir = os.path.abspath(sourcedir)
class CMakeBuild(build_ext):
    """build_ext command that delegates compilation to CMake."""
    def run(self):
        """Verify git and cmake are available, then build every extension."""
        try:
            out = subprocess.check_output(['git', '--version'])
            # NOTE: `out` is overwritten — after this it holds cmake's
            # version output, which is what the Windows check below parses.
            out = subprocess.check_output(['cmake', '--version'])
        except OSError:
            raise RuntimeError("CMake and git must be in PATH to build the following extensions: " +
                               ", ".join(e.name for e in self.extensions))
        if platform.system() == "Windows":
            cmake_version = LooseVersion(re.search(r'version\s*([\d.]+)', out.decode()).group(1))
            if cmake_version < '3.1.0':
                raise RuntimeError("CMake >= 3.1.0 is required on Windows")
        for ext in self.extensions:
            self.build_extension(ext)
    def build_extension(self, ext):
        """Configure and build *ext* with CMake inside ``self.build_temp``."""
        extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
        # required for auto-detection of auxiliary "native" libs
        if not extdir.endswith(os.path.sep):
            extdir += os.path.sep
        cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
                      '-DPYTHON_EXECUTABLE=' + sys.executable]
        cmake_args += ['-DEIGEN3_INCLUDE_DIR={}'.format(os.path.join(sourcedir, 'pydiscregrid/eigen'))]
        cfg = 'Debug' if self.debug else 'Release'
        # build_args = ['--config', cfg]
        build_args = []
        cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]
        # Add cmake command line arguments
        if cmake_clargs is not None:
            cmake_args += ['-D{}'.format(arg) for arg in cmake_clargs]
        if platform.system() == "Windows":
            # Multi-config generators place outputs in per-config dirs.
            cmake_args += ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]
            if sys.maxsize > 2**32:
                cmake_args += ['-A', 'x64']
            build_args += ['--', '/m']
        else:
            # cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]
            build_args += ['--', '-j4']
        # Add position independent code flags if using gcc on linux probably
        if platform.system() == "Linux":
            cmake_args += ['-DCMAKE_CXX_FLAGS=-fPIC', '-DCMAKE_C_FLAGS=-fPIC']
        env = os.environ.copy()
        env['CXXFLAGS'] = '{} -DVERSION_INFO=\\"{}\\"'.format(env.get('CXXFLAGS', ''),
                                                              self.distribution.get_version())
        if not os.path.exists(self.build_temp):
            os.makedirs(self.build_temp)
        # Update submodules if .git folder is present. Otherwise clone them manually
        try:
            subprocess.check_call(['git', 'submodule', 'update', '--init', '--recursive'], cwd=sourcedir)
        except subprocess.CalledProcessError:
            for module in external_modules:
                # Skip directories that already contain a checkout.
                if len(os.listdir(external_modules[module]["dir"])) > 0:
                    continue
                url, ver, dir = external_modules[module]["url"], external_modules[module]["ver"], external_modules[module]["dir"]
                subprocess.check_call(['git', 'clone', url, '--branch', ver, '--single-branch', dir], cwd=sourcedir)
        subprocess.check_call(['cmake', ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env)
        subprocess.check_call(['cmake', '--build', '.', "--target", "pydiscregrid"] + build_args, cwd=self.build_temp)
# Package metadata; the single extension is built by CMakeBuild above.
setup(
    name='Discregrid',
    version='0.0.1',
    author='Dan Koschier',
    author_email='',
    description='Discregrid is a static C++ library for the parallel discretization of (preferably smooth) functions on regular grids.',
    license="MIT",
    ext_modules=[CMakeExtension('Discregrid')],
    cmdclass=dict(build_ext=CMakeBuild),
    zip_safe=False,
)
|
a =3  # module-level variable, rebound inside test() below
def test():
    # `global a` makes the assignment below rebind the module-level `a`
    # instead of creating a function-local variable.
    global a
    a= 5
    # NOTE(review): the label "local a" is misleading — this prints the
    # global `a`, which was just set to 5; there is no local variable here.
    print("local a ",a)
test()
print("global a ",a)  # prints 5: test() rebound the global
print("end")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.