Dataset schema (one entry per column, in row order):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 2 to 616)
- content_id: string (length 40)
- detected_licenses: list (length 0 to 69)
- license_type: string (2 classes)
- repo_name: string (length 5 to 118)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (length 4 to 63)
- visit_date: timestamp[us]
- revision_date: timestamp[us]
- committer_date: timestamp[us]
- github_id: int64 (2.91k to 686M, nullable)
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (23 classes)
- gha_event_created_at: timestamp[us]
- gha_created_at: timestamp[us]
- gha_language: string (213 classes)
- src_encoding: string (30 classes)
- language: string (1 class)
- is_vendor: bool (2 classes)
- is_generated: bool (2 classes)
- length_bytes: int64 (2 to 10.3M)
- extension: string (246 classes)
- content: string (length 2 to 10.3M)
- authors: list (length 1)
- author_id: string (length 0 to 212)

The rows below list each record's cell values in this column order, separated by `|`.
111859fe0f3c3ce7e9ce96a399d4144f3ca12f60
|
9dca27bf3f45fe55d4ad557ec377836eb5b32611
|
/plottingScripts/myKalman.py
|
65ace041584e5b6767edd4e2309eab98d3e93e59
|
[] |
no_license
|
Hayels406/Tracking
|
42441ffa5724c8c5adece74b45755eccbbd16f5e
|
c82908147f1084937b80943725001f870f4125aa
|
refs/heads/master
| 2021-03-22T04:04:05.040970
| 2018-08-09T12:43:05
| 2018-08-09T12:43:05
| 109,285,863
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 45
|
py
|
/home/b1033128/Documents/Tracking/myKalman.py
|
[
"h.e.l.moore@newcastle.ac.uk"
] |
h.e.l.moore@newcastle.ac.uk
|
0237b7a65a84dd7422316e2658be58ff39456d31
|
4f8bdddd8a6a9b6bd7ef42ee4c2f89a73ce5564b
|
/Image Recognition.py
|
ea31c007b23ba6ffaae6cb9b3c0bed4011e28785
|
[] |
no_license
|
KshitizzB/Image-Recognition
|
b169233d3e10362d32c8df2f2c7b0a3cf85f856d
|
951ab0b08fcf443bd5df58a474f9ff821f56eb21
|
refs/heads/master
| 2020-03-22T11:56:53.908138
| 2018-07-06T16:23:00
| 2018-07-06T16:23:00
| 140,006,522
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,962
|
py
|
# Importing the Keras libraries and packages
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense
# Initialising the CNN
classifier = Sequential()
# Step 1 - Convolution
classifier.add(Conv2D(32, (3, 3), input_shape = (64, 64, 3), activation = 'relu'))
# Step 2 - Pooling
classifier.add(MaxPooling2D(pool_size = (2, 2)))
# Adding a second convolutional layer
classifier.add(Conv2D(32, (3, 3), activation = 'relu'))
classifier.add(MaxPooling2D(pool_size = (2, 2)))
# Step 3 - Flattening
classifier.add(Flatten())
# Step 4 - Full connection
classifier.add(Dense(units = 128, activation = 'relu'))
classifier.add(Dense(units = 1, activation = 'sigmoid'))
# Compiling the CNN
classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
# Part 2 - Fitting the CNN to the images
from keras.preprocessing.image import ImageDataGenerator
train_datagen = ImageDataGenerator(rescale = 1./255,shear_range = 0.2,zoom_range = 0.2,horizontal_flip = True)
test_datagen = ImageDataGenerator(rescale = 1./255)
training_set = train_datagen.flow_from_directory('dataset/training_set',target_size = (64, 64),batch_size = 32,class_mode = 'binary')
test_set = test_datagen.flow_from_directory('dataset/test_set',target_size = (64, 64),batch_size = 32,class_mode = 'binary')
classifier.fit_generator(training_set,steps_per_epoch = 8000,epochs = 1,validation_data = test_set,validation_steps = 2000)
# Part 3 - Making new predictions
import numpy as np
from keras.preprocessing import image
test_image = image.load_img('dataset/single_prediction/cat_or_dog_1.jpg', target_size = (64, 64))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis = 0)
result = classifier.predict(test_image)
# class_indices maps each class label to its index, e.g. {'cats': 0, 'dogs': 1}
training_set.class_indices
if result[0][0] == 1:
    prediction = 'dog'
else:
    prediction = 'cat'
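
# Illustrative sketch, not part of the original file: instead of hardcoding
# 1 -> 'dog', the label can be looked up from the generator's own class
# mapping. Assumes the `training_set` generator and `result` array above;
# the label names come from the training subdirectory names.
label_map = {index: label for label, index in training_set.class_indices.items()}
predicted_index = int(result[0][0] > 0.5)
print(label_map[predicted_index])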
|
[
"noreply@github.com"
] |
KshitizzB.noreply@github.com
|
58eef6f7c5bccd89ee0ccd34eafd3e455e420680
|
d40a047b3f409760e5dbcf6768df7047e5f487ff
|
/web/adoption_stories/adopteeStories/urls.py
|
678426b7b507245babddb10a3858b928c423d22e
|
[] |
no_license
|
ourchinastories/ourchinastories
|
72629aed44ea15c43f75f753180e57be9566f87c
|
593904786abac0294c3e933456987ffeff9026e2
|
refs/heads/master
| 2021-06-09T05:20:31.304499
| 2016-10-26T10:28:37
| 2016-10-26T10:28:37
| 71,987,907
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,161
|
py
|
from adopteeStories import views
from django.conf.urls import url

urlpatterns = [
    url(r'^adoptee/(?P<pk>[0-9]+)/$', views.AdopteeDetail.as_view(),
        name="adopteeDetail"),
    # TODO: Eliminate tech debt and have better REST behavior here
    url(r'^adopteeCreate/$', views.AdopteeCreate.as_view(),
        name="adopteeCreate"),
    url(r'^storytellerCreate/$', views.StoryTellerCreate.as_view(),
        name="storytellerCreate"),
    # url(r'^adoptee/$', views.AdopteeList.as_view(),
    #     name="adopteeList"),
    url(r'^adopteeList/(?P<q>[^/]+)/$', views.AdopteeList.as_view(),
        name="adopteeList"),
    url(r'^search/adoptee/(?P<q>[^/]+)/$', views.AdopteeSearch.as_view(),
        name="adopteeSearch"),
    url(r'^category/$', views.CategoryListAndCreate.as_view(),
        name="categoryListAndCreate"),
    url(r'^photoUpload/$', views.PhotoFileCreate.as_view(),
        name="photoCreate"),
    url(r'^audio/$', views.AudioCreate.as_view(),
        name="audioCreate"),
    url(r'^video/$', views.VideoCreate.as_view(),
        name="videoCreate"),
    url(r'^about/$', views.AboutPersonList.as_view(),
        name="aboutList")
]
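
# Illustrative sketch, not part of the original file: the named routes above can
# be resolved with Django's reverse(); the pk/q values are placeholders and the
# resulting paths assume this URLconf is included at the project root.
#
#   from django.core.urlresolvers import reverse
#   reverse('adopteeDetail', kwargs={'pk': 1})          # -> '/adoptee/1/'
#   reverse('adopteeList', kwargs={'q': 'searchterm'})  # -> '/adopteeList/searchterm/'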
|
[
"jennah@stedwards.edu"
] |
jennah@stedwards.edu
|
4aa4a264e521e8263d040372856d3cbd5cf179fc
|
d6f3a38313fa132bf882173602a58b4e88615230
|
/compare_image/compare4.py
|
2517d6f51e53ad79f2ac004f76f4bd854d624f25
|
[] |
no_license
|
thouger/FGO_Battle
|
531173ab209f30a915dc0e6b75e65babded87364
|
7ab00b6b939a1b3d8e7de89df73c5fe9e6f17285
|
refs/heads/master
| 2020-03-30T00:51:18.842574
| 2018-11-26T10:09:49
| 2018-11-26T10:09:49
| 150,547,308
| 1
| 0
| null | 2018-09-29T05:06:52
| 2018-09-27T07:32:44
|
Python
|
UTF-8
|
Python
| false
| false
| 3,771
|
py
|
import cv2
import imageio
import numpy as np
import scipy
import scipy.spatial
import pickle
import random
import os
import matplotlib.pyplot as plt


# Feature extractor
def extract_features(image_path, vector_size=32):
    image = imageio.imread(image_path, pilmode="RGB")
    try:
        # Using KAZE, because SIFT, ORB and others were moved to an additional
        # module, which adds extra pain during installation
        alg = cv2.KAZE_create()
        # Finding image keypoints
        kps = alg.detect(image)
        # Keeping the first 32 of them.
        # The number of keypoints varies with image size and colour palette.
        # Sorting them by keypoint response value (bigger is better)
        kps = sorted(kps, key=lambda x: -x.response)[:vector_size]
        # Computing the descriptors vector
        kps, dsc = alg.compute(image, kps)
        # Flatten all of them into one big vector - our feature vector
        dsc = dsc.flatten()
        # Making the descriptor a fixed size
        # Each KAZE descriptor has 64 values
        needed_size = (vector_size * 64)
        if dsc.size < needed_size:
            # If we have fewer than 32 descriptors, pad the feature vector
            # with zeros at the end
            dsc = np.concatenate([dsc, np.zeros(needed_size - dsc.size)])
    except cv2.error as e:
        print('Error: ', e)
        return None
    return dsc


def batch_extractor(images_path, pickled_db_path="features.pck"):
    files = [os.path.join(images_path, p) for p in sorted(os.listdir(images_path))]
    result = {}
    for f in files:
        print('Extracting features from image %s' % f)
        name = f.split('/')[-1].lower()
        result[name] = extract_features(f)
    # Saving all our feature vectors in a pickled file
    with open(pickled_db_path, 'wb') as fp:
        pickle.dump(result, fp)


class Matcher(object):
    def __init__(self, pickled_db_path="features.pck"):
        with open(pickled_db_path, 'rb') as fp:
            self.data = pickle.load(fp)
        self.names = []
        self.matrix = []
        for k, v in self.data.items():
            self.names.append(k)
            self.matrix.append(v)
        self.matrix = np.array(self.matrix)
        self.names = np.array(self.names)

    def cos_cdist(self, vector):
        # Getting the cosine distance between the search image and the image database
        v = vector.reshape(1, -1)
        return scipy.spatial.distance.cdist(self.matrix, v, 'cosine').reshape(-1)

    def match(self, image_path, topn=5):
        features = extract_features(image_path)
        img_distances = self.cos_cdist(features)
        # Getting the top `topn` records
        nearest_ids = np.argsort(img_distances)[:topn].tolist()
        nearest_img_paths = self.names[nearest_ids].tolist()
        return nearest_img_paths, img_distances[nearest_ids].tolist()


def show_img(path):
    img = imageio.imread(path, pilmode="RGB")
    plt.imshow(img)
    plt.show()


def run():
    images_path = './resources/images/'
    files = [os.path.join(images_path, p) for p in sorted(os.listdir(images_path))]
    # Picking 9 random images as queries
    sample = random.sample(files, 9)
    batch_extractor(images_path)
    ma = Matcher('features.pck')
    for s in sample:
        print('Query image ==========================================')
        show_img(s)
        names, match = ma.match(s, topn=3)
        print('Result images ========================================')
        for i in range(3):
            # We got cosine distances; the smaller the distance between the
            # vectors, the more similar they are, so we subtract from 1 to get
            # a match value
            print('Match %s' % (1 - match[i]))
            show_img(os.path.join(images_path, names[i]))


if __name__ == '__main__':
    run()
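
# Illustrative sketch, not part of the original file: matching a single query
# image against a previously built features.pck database; the query path is a
# placeholder.
if __name__ == '__main__' and os.path.exists('features.pck'):
    matcher = Matcher('features.pck')
    names, distances = matcher.match('./resources/images/query.png', topn=3)
    for name, dist in zip(names, distances):
        print('%s -> match value %.3f' % (name, 1.0 - dist))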
|
[
"t@e.com"
] |
t@e.com
|
b1b10f74c7b2b141fab2f67520ef2bafb047a1f3
|
051d25888b6a36e50714fa5940f6a31ee951ce77
|
/gentb_website/tb_website/apps/dropbox_helper/dropbox_util.py
|
f43f6ff658915949644e878347d6b70ddd524912
|
[
"MIT"
] |
permissive
|
cchoirat/gentb-site
|
d0d627ffc160c53b61d92dc8f02a11f930a2b09a
|
24ebce58cd5f5e0a2f1449e2f14b1f75b592f28f
|
refs/heads/master
| 2021-01-21T02:20:55.909012
| 2015-11-25T18:27:23
| 2015-11-25T18:27:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,476
|
py
|
from os.path import join, isdir
import os

from apps.dropbox_helper.dropbox_retriever import DropboxRetriever
from apps.dropbox_helper.models import DropboxRetrievalLog

from django.conf import settings


def get_dropbox_metadata(predict_dataset):
    """
    Wrap the DropboxRetriever function.

    Returns either:
        - (True, DropboxRetriever object)
        - (False, error message string)
    """
    if predict_dataset is None:
        return (False, "The dataset was not found.")

    # Initialize
    #
    dr = DropboxRetriever(predict_dataset.dropbox_url,
                          destination_dir=predict_dataset.file_directory,
                          file_patterns=predict_dataset.get_file_patterns())

    db_log = DropboxRetrievalLog(dataset=predict_dataset)

    if dr.err_found:
        db_log.file_metadata_err_msg = dr.err_msg
        db_log.save()
        return (False, dr.err_msg)

    # Get the metadata
    #
    if not dr.step1_retrieve_metadata():
        db_log.file_metadata_err_msg = dr.err_msg
        db_log.save()
        return (False, dr.err_msg)

    # Does it have what we want?
    #
    if not dr.step2_check_file_matches():
        db_log.file_metadata_err_msg = dr.err_msg
        db_log.save()
        return (False, dr.err_msg)

    # Yes!
    db_log.file_metadata = dr.dropbox_link_metadata
    db_log.selected_files = dr.matching_files_metadata
    db_log.save()

    return (True, dr)


def get_dropbox_metadata_from_link(dropbox_link, file_patterns=None):
    """
    Wrap the DropboxRetriever function.

    Returns either:
        - (True, DropboxRetriever object)
        - (False, error message string)
    """
    if dropbox_link is None:
        return (False, "The dataset was not found.")

    # This directory doesn't actually get used
    #
    tmp_dir = join(settings.TB_SHARED_DATAFILE_DIRECTORY, 'tmp')
    if not isdir(tmp_dir):
        os.makedirs(tmp_dir)

    # Initialize
    #
    if file_patterns:
        dr = DropboxRetriever(dropbox_link,
                              destination_dir=tmp_dir,
                              file_patterns=file_patterns)
    else:
        dr = DropboxRetriever(dropbox_link,
                              destination_dir=tmp_dir)

    if dr.err_found:
        return (False, dr.err_msg)

    # Get the metadata
    #
    if not dr.step1_retrieve_metadata():
        return (False, dr.err_msg)

    # Does it have what we want?
    #
    if not dr.step2_check_file_matches():
        return (False, dr.err_msg)

    # Yes!
    return (True, dr)
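
# Illustrative sketch, not part of the original file: how a caller might check a
# share link before retrieving files. The URL is a placeholder and Django
# settings are assumed to be configured.
#
#   ok, result = get_dropbox_metadata_from_link(
#       'https://www.dropbox.com/sh/example/files?dl=0')
#   if ok:
#       print(result.matching_files_metadata)  # files that matched the patterns
#   else:
#       print('error:', result)                # result is the error message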
|
[
"raman_prasad@harvard.edu"
] |
raman_prasad@harvard.edu
|
3b973d3e09c9f97de38156ab5d5c65f3f37eac88
|
3b9898c95102c35ac05079ebbff68a2553951c65
|
/Ch5.DePitta/pycustommodules/solvers/solver_utils.py
|
797b9b1319b828083195c02d56ad4b284c422b59
|
[] |
no_license
|
savagedude3/comp-glia-book
|
902460f1137c4ee0d9fc53993412376eec08668e
|
c2e3fa54c22c9c2e04376e9af4cb3491ad1dfe91
|
refs/heads/master
| 2022-03-29T04:38:16.565159
| 2019-04-27T20:48:29
| 2019-04-27T20:48:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,196
|
py
|
"""
solvers_utils.py
Library that defines solver related data structures.
Methods:
- solver_opts : create dictionary for settings of "continuous solvers such as
Euler or RK4 solvers (in related H and CPP files).
v1.0
Maurizio De Pitta', The University of Chicago, March 1st, 2015.
"""
import numpy as np
from scipy import *
import sys, os
base_dir = '/Ch4.DePitta'
sys.path.append(os.path.join(os.path.expanduser('~'),base_dir+'/pycustommodules'))
import pycustommodules.general_utils as gu
def solver_opts(method='euler',**kwargs):
"""
Define a standard dictionary to use in C/C++ simulators.
Use:
options = solver_opts(...)
Input:
- method : {'euler'} | 'rk4' | 'gsl' | 'gsl_rk8pd' | 'gsl_msadams'
- ndims : {1} | Integer Number of equations (i.e. dimension) of the system to integrate
**kwargs:
- t0 : initial instant of integration [s]
- tfin : final instant of integration [s]
- transient : transient to drop from output result [s]
- tbin : time bin of solution [s]
- dt : step of integration [s]
- solver : string {"euler"} | "rk4"
Output:
- options : dictionary of solver settings (keys are inputs).
v1.3
Added options for GSL solvers.
Append 'solver' key at the end for all methods.
Maurizio De Pitta', INRIA Rhone-Alpes, November 1st, 2017.
"""
## User-defined parameters
if method in ['euler','rk4']:
opts = {'t0' : 0.0,
'tfin' : 20.0,
'transient' : 0.0,
'tbin' : 1e-2,
'dt' : 1e-3
}
elif method in ['gsl', 'gsl_rk8pd', 'gsl_msadams']:
opts = {'t0' : 0.0,
'tfin' : 1.0,
'dt' : 1e-3,
'atol' : 1e-8,
'rtol' : 1e-6
}
else:
# This is the case of solver 'None','none','steady_state'
opts = {'nmax' : 1000, # Max number of iterations for the solver
'atol' : 1e-10,# Absolute tolerance on error
'rtol' : 0.0 # Relative tolerance on error (default: 0.0: not considered)
}
opts = gu.varargin(opts, **kwargs)
for k,item in opts.iteritems():
if (not isinstance(item, (basestring, bool)))&(not hasattr(item, '__len__'))&(item != None):
opts[k] = float(item)
if method in ['gsl','gsl_rk8pd']:
opts['nstep'] = (opts['tfin']-opts['t0'])//opts['dt'] + 1 # +1 if when we start counting from zero
if (not method) or (method in ['none','steady_state']):
for p in ['nmax']:
opts[p] = np.intc(opts[p])
# Include solver specifier in the final dictionary
opts['solver'] = method
return opts
#-----------------------------------------------------------------------------------------------------------------------
# Testing
#-----------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
algparams = solver_opts(method='gsl_rk8pd',ndims=7)
print algparams
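    # Illustrative sketch, not part of the original file: defaults can be
    # overridden through **kwargs, e.g. a finer step and a longer horizon for
    # the Euler solver.
    euler_opts = solver_opts(method='euler', dt=1e-4, tfin=50.0)
    print(euler_opts['dt'], euler_opts['tfin'], euler_opts['solver'])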
|
[
"maurizio.depitta@gmail.com"
] |
maurizio.depitta@gmail.com
|
bcb660a70c23a57e06a129b7d5e0ac8a48ccc062
|
440736bf8cdfff0b5569105519204414414b455a
|
/reviewboard/hostingsvcs/tests/test_bitbucket.py
|
17ebf6a31a0ba0b0ec1a00ab52f35359a3acad75
|
[
"MIT"
] |
permissive
|
bbbbbgit/reviewboard
|
ef456bbd848a26a0babe9bbf9d1ccfdf6cafcffc
|
f9cff6699ec387600d1d0540db32d1a0517c203d
|
refs/heads/master
| 2022-07-17T01:38:12.774767
| 2020-05-12T02:43:03
| 2020-05-12T02:43:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 60,819
|
py
|
"""Unit tests for the Bitbucket hosting service."""
from __future__ import unicode_literals
import logging
from django.contrib.auth.models import User
from django.test.client import RequestFactory
from django.utils.safestring import SafeText
from djblets.testing.decorators import add_fixtures
from reviewboard.hostingsvcs.bitbucket import BitbucketAuthForm
from reviewboard.hostingsvcs.errors import (AuthorizationError,
RepositoryError)
from reviewboard.hostingsvcs.testing import HostingServiceTestCase
from reviewboard.reviews.models import ReviewRequest
from reviewboard.scmtools.core import Branch, Commit
from reviewboard.scmtools.crypto_utils import (decrypt_password,
encrypt_password)
from reviewboard.scmtools.errors import FileNotFoundError
from reviewboard.site.models import LocalSite
from reviewboard.site.urlresolvers import local_site_reverse
class BitbucketTestCase(HostingServiceTestCase):
"""Base class for Bitbucket test suites."""
service_name = 'bitbucket'
fixtures = ['test_scmtools']
default_account_data = {
'password': encrypt_password(HostingServiceTestCase.default_password),
}
default_repository_extra_data = {
'bitbucket_repo_name': 'myrepo',
}
class BitbucketTests(BitbucketTestCase):
"""Unit tests for the Bitbucket hosting service."""
def test_service_support(self):
"""Testing Bitbucket service support capabilities"""
self.assertTrue(self.service_class.supports_bug_trackers)
self.assertTrue(self.service_class.supports_repositories)
def test_get_repository_fields_with_git_and_personal_plan(self):
"""Testing Bitbucket.get_repository_fields for Git and plan=personal"""
self.assertEqual(
self.get_repository_fields(
'Git',
fields={
'bitbucket_repo_name': 'myrepo',
},
plan='personal'
),
{
'path': 'git@bitbucket.org:myuser/myrepo.git',
'mirror_path': ('https://myuser@bitbucket.org/myuser/'
'myrepo.git'),
})
def test_get_repository_fields_with_mercurial_and_personal_plan(self):
"""Testing Bitbucket.get_repository_fields for Mercurial and
plan=personal
"""
self.assertEqual(
self.get_repository_fields(
'Mercurial',
fields={
'bitbucket_repo_name': 'myrepo',
},
plan='personal'
),
{
'path': 'https://myuser@bitbucket.org/myuser/myrepo',
'mirror_path': 'ssh://hg@bitbucket.org/myuser/myrepo',
})
def test_get_repository_fields_with_git_and_team_plan(self):
"""Testing Bitbucket.get_repository_fields for Git and plan=team"""
self.assertEqual(
self.get_repository_fields(
'Git',
fields={
'bitbucket_team_name': 'myteam',
'bitbucket_team_repo_name': 'myrepo',
},
plan='team'
),
{
'path': 'git@bitbucket.org:myteam/myrepo.git',
'mirror_path': ('https://myuser@bitbucket.org/myteam/'
'myrepo.git'),
})
def test_get_repository_fields_with_mercurial_and_team_plan(self):
"""Testing Bitbucket.get_repository_fields for Mercurial and plan=team
"""
self.assertEqual(
self.get_repository_fields(
'Mercurial',
fields={
'bitbucket_team_name': 'myteam',
'bitbucket_team_repo_name': 'myrepo',
},
plan='team'
),
{
'path': 'https://myuser@bitbucket.org/myteam/myrepo',
'mirror_path': 'ssh://hg@bitbucket.org/myteam/myrepo',
})
def test_get_repository_fields_with_git_and_other_user_plan(self):
"""Testing Bitbucket.get_repository_fields for Git and plan=other-user
"""
self.assertEqual(
self.get_repository_fields(
'Git',
fields={
'bitbucket_other_user_username': 'someuser',
'bitbucket_other_user_repo_name': 'myrepo',
},
plan='other-user'
),
{
'path': 'git@bitbucket.org:someuser/myrepo.git',
'mirror_path': ('https://myuser@bitbucket.org/someuser/'
'myrepo.git'),
})
def test_get_repository_fields_with_mercurial_and_other_user_plan(self):
"""Testing Bitbucket.get_repository_fields for Mercurial and
plan=other-user
"""
self.assertEqual(
self.get_repository_fields(
'Mercurial',
fields={
'bitbucket_other_user_username': 'someuser',
'bitbucket_other_user_repo_name': 'myrepo',
},
plan='other-user'
),
{
'path': 'https://myuser@bitbucket.org/someuser/myrepo',
'mirror_path': 'ssh://hg@bitbucket.org/someuser/myrepo',
})
def test_get_bug_tracker_field_with_personal_plan(self):
"""Testing Bitbucket.get_bug_tracker_field with plan=personal"""
self.assertTrue(self.service_class.get_bug_tracker_requires_username(
plan='personal'))
self.assertEqual(
self.service_class.get_bug_tracker_field(
'personal',
{
'bitbucket_repo_name': 'myrepo',
'hosting_account_username': 'myuser',
}),
'https://bitbucket.org/myuser/myrepo/issue/%s/')
def test_get_bug_tracker_field_with_team_plan(self):
"""Testing Bitbucket.get_bug_tracker_field with plan=team"""
self.assertFalse(self.service_class.get_bug_tracker_requires_username(
plan='team'))
self.assertEqual(
self.service_class.get_bug_tracker_field(
'team',
{
'bitbucket_team_name': 'myteam',
'bitbucket_team_repo_name': 'myrepo',
}),
'https://bitbucket.org/myteam/myrepo/issue/%s/')
def test_get_bug_tracker_field_with_other_user_plan(self):
"""Testing Bitbucket.get_bug_tracker_field with plan=other-user"""
self.assertFalse(self.service_class.get_bug_tracker_requires_username(
plan='other-user'))
self.assertEqual(
self.service_class.get_bug_tracker_field(
'other-user',
{
'bitbucket_other_user_username': 'someuser',
'bitbucket_other_user_repo_name': 'myrepo',
}),
'https://bitbucket.org/someuser/myrepo/issue/%s/')
def test_get_repository_hook_instructions(self):
"""Testing BitBucket.get_repository_hook_instructions"""
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
hooks_uuid = repository.get_or_create_hooks_uuid()
request = RequestFactory().get(path='/')
request.user = User.objects.create(username='test-user')
content = repository.hosting_service.get_repository_hook_instructions(
request=request,
repository=repository)
self.assertIsInstance(content, SafeText)
self.assertIn(
('https://bitbucket.org/myuser/myrepo/admin/addon/admin/'
'bitbucket-webhooks/bb-webhooks-repo-admin'),
content)
self.assertIn(
('http://example.com/repos/1/bitbucket/hooks/%s/close-submitted/'
% hooks_uuid),
content)
self.assertIn('Review Board supports closing', content)
self.assertIn('<code>Review Board</code>', content)
def test_check_repository_with_personal_plan(self):
"""Testing Bitbucket.check_repository with plan=personal"""
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
ctx.service.check_repository(bitbucket_repo_name='myrepo',
plan='personal',
tool_name='Git')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo'
'?fields=scm'))
def test_check_repository_with_team_plan(self):
"""Testing Bitbucket.check_repository with plan=team"""
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
ctx.service.check_repository(bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo',
tool_name='Git',
plan='team')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myteam/myrepo'
'?fields=scm'))
def test_check_repository_with_other_user_plan(self):
"""Testing Bitbucket.check_repository with plan=other-user"""
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
ctx.service.check_repository(
bitbucket_other_user_username='someuser',
bitbucket_other_user_repo_name='myrepo',
plan='other-user',
tool_name='Git')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/someuser/myrepo'
'?fields=scm'))
def test_check_repository_with_slash(self):
"""Testing Bitbucket.check_repository with /"""
expected_message = \
'Please specify just the name of the repository, not a path.'
with self.setup_http_test(expected_http_calls=0) as ctx:
with self.assertRaisesMessage(RepositoryError, expected_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myteam/myrepo',
plan='team')
def test_check_repository_with_dot_git(self):
"""Testing Bitbucket.check_repository with .git"""
expected_message = \
'Please specify just the name of the repository without ".git".'
with self.setup_http_test(expected_http_calls=0) as ctx:
with self.assertRaisesMessage(RepositoryError, expected_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo.git',
plan='team')
def test_check_repository_with_type_mismatch(self):
"""Testing Bitbucket.check_repository with type mismatch"""
error_message = (
'The Bitbucket repository being configured does not match the '
'type of repository you have selected.'
)
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
# Check Git repositories.
with self.assertRaisesMessage(RepositoryError, error_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo',
plan='team',
tool_name='Mercurial')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myteam/myrepo'
'?fields=scm'))
# Now check Mercurial repositories.
with self.setup_http_test(payload=b'{"scm": "hg"}',
expected_http_calls=1) as ctx:
with self.assertRaisesMessage(RepositoryError, error_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo',
plan='team',
tool_name='Git')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myteam/myrepo'
'?fields=scm'))
def test_authorize(self):
"""Testing Bitbucket.authorize"""
hosting_account = self.create_hosting_account(data={})
with self.setup_http_test(payload=b'{}',
hosting_account=hosting_account,
expected_http_calls=1) as ctx:
self.assertFalse(ctx.service.is_authorized())
ctx.service.authorize(username='myuser',
password='abc123')
self.assertIn('password', hosting_account.data)
self.assertNotEqual(hosting_account.data['password'], 'abc123')
self.assertEqual(decrypt_password(hosting_account.data['password']),
'abc123')
self.assertTrue(ctx.service.is_authorized())
ctx.assertHTTPCall(
0,
url='https://bitbucket.org/api/2.0/user',
username='myuser',
password='abc123')
def test_authorize_with_bad_credentials(self):
"""Testing Bitbucket.authorize with bad credentials"""
hosting_account = self.create_hosting_account(data={})
expected_message = (
'Invalid Bitbucket username or password. Make sure you are using '
'your Bitbucket username and not e-mail address, and are using an '
'app password if two-factor authentication is enabled.'
)
with self.setup_http_test(status_code=401,
hosting_account=hosting_account,
expected_http_calls=1) as ctx:
self.assertFalse(ctx.service.is_authorized())
with self.assertRaisesMessage(AuthorizationError,
expected_message):
ctx.service.authorize(username='myuser',
password='abc123')
self.assertNotIn('password', hosting_account.data)
self.assertFalse(ctx.service.is_authorized())
ctx.assertHTTPCall(
0,
url='https://bitbucket.org/api/2.0/user',
username='myuser',
password='abc123')
def test_get_file_with_mercurial_and_base_commit_id(self):
"""Testing Bitbucket.get_file with Mercurial and base commit ID"""
self._test_get_file(
tool_name='Mercurial',
revision='123',
base_commit_id='456',
expected_revision='456')
def test_get_file_with_mercurial_and_revision(self):
"""Testing Bitbucket.get_file with Mercurial and revision"""
self._test_get_file(
tool_name='Mercurial',
revision='123',
base_commit_id=None,
expected_revision='123')
def test_get_file_with_git_and_base_commit_id(self):
"""Testing Bitbucket.get_file with Git and base commit ID"""
self._test_get_file(
tool_name='Git',
revision='123',
base_commit_id='456',
expected_revision='456')
def test_get_file_with_git_and_revision(self):
"""Testing Bitbucket.get_file with Git and revision"""
with self.assertRaises(FileNotFoundError):
self._test_get_file(tool_name='Git',
revision='123',
base_commit_id=None,
expected_revision='123')
def test_get_file_exists_with_mercurial_and_base_commit_id(self):
"""Testing Bitbucket.get_file_exists with Mercurial and base commit ID
"""
self._test_get_file_exists(
tool_name='Mercurial',
revision='123',
base_commit_id='456',
expected_revision='456',
expected_found=True)
def test_get_file_exists_with_mercurial_and_revision(self):
"""Testing Bitbucket.get_file_exists with Mercurial and revision"""
self._test_get_file_exists(
tool_name='Mercurial',
revision='123',
base_commit_id=None,
expected_revision='123',
expected_found=True)
def test_get_file_exists_with_git_and_base_commit_id(self):
"""Testing Bitbucket.get_file_exists with Git and base commit ID"""
self._test_get_file_exists(
tool_name='Git',
revision='123',
base_commit_id='456',
expected_revision='456',
expected_found=True)
def test_get_file_exists_with_git_and_revision(self):
"""Testing Bitbucket.get_file_exists with Git and revision"""
self._test_get_file_exists(
tool_name='Git',
revision='123',
base_commit_id=None,
expected_revision='123',
expected_found=False,
expected_http_called=False)
def test_get_file_exists_with_git_and_404(self):
"""Testing BitBucket.get_file_exists with Git and a 404 error"""
self._test_get_file_exists(
tool_name='Git',
revision='123',
base_commit_id='456',
expected_revision='456',
expected_found=False)
def test_get_branches(self):
"""Testing Bitbucket.get_branches"""
branches_api_response_1 = self.dump_json({
'next': ('https://bitbucket.org/api/2.0/repositories/myuser/'
'myrepo/refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext'
'&pagelen=100&page=2'),
'values': [
{
'name': 'branch1',
'target': {
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
},
},
{
'name': 'branch2',
'target': {
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
},
],
})
branches_api_response_2 = self.dump_json({
'values': [
{
'name': 'branch3',
'target': {
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
},
{
'name': 'branch4',
'target': {
'hash': 'd286691517e6325fea5c7a21d5e44568f7d33647',
},
},
],
})
get_repository_api_response = self.dump_json({
'mainbranch': {
'name': 'branch3',
},
})
paths = {
'/api/2.0/repositories/myuser/myrepo': {
'payload': get_repository_api_response,
},
('/api/2.0/repositories/myuser/myrepo/refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext&pagelen=100'): {
'payload': branches_api_response_1,
},
('/api/2.0/repositories/myuser/myrepo/refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext&page=2'
'&pagelen=100'): {
'payload': branches_api_response_2,
},
}
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=3) as ctx:
repository = self.create_repository(tool_name='Git')
branches = ctx.service.get_branches(repository)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo'
'?fields=mainbranch.name'))
ctx.assertHTTPCall(
1,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext'
'&pagelen=100'))
ctx.assertHTTPCall(
2,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext'
'&page=2&pagelen=100'))
self.assertEqual(
branches,
[
Branch(id='branch1',
commit='1c44b461cebe5874a857c51a4a13a849a4d1e52d'),
Branch(id='branch2',
commit='44568f7d33647d286691517e6325fea5c7a21d5e'),
Branch(id='branch3',
commit='e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
default=True),
Branch(id='branch4',
commit='d286691517e6325fea5c7a21d5e44568f7d33647'),
])
def test_get_commits(self):
"""Testing Bitbucket.get_commits"""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <user1@example.com>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <user2@example.com>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(repository)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <user1@example.com>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <user2@example.com>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_commits_with_start(self):
"""Testing Bitbucket.get_commits with start="""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <user1@example.com>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <user2@example.com>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(
repository,
start='1c44b461cebe5874a857c51a4a13a849a4d1e5')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits/1c44b461cebe5874a857c51a4a13a849a4d1e5'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <user1@example.com>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <user2@example.com>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_commits_with_branch(self):
"""Testing Bitbucket.get_commits with branch="""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <user1@example.com>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <user2@example.com>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(repository,
branch='master')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits/master'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <user1@example.com>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <user2@example.com>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_commits_with_start_and_branch(self):
"""Testing Bitbucket.get_commits with start= and branch="""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <user1@example.com>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <user2@example.com>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(
repository,
start='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
branch='master')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits/1c44b461cebe5874a857c51a4a13a849a4d1e52d'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <user1@example.com>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <user2@example.com>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_change(self):
"""Testing BitBucket.get_change"""
commit_sha = '1c44b461cebe5874a857c51a4a13a849a4d1e52d'
parent_sha = '44568f7d33647d286691517e6325fea5c7a21d5e'
paths = {
'/api/2.0/repositories/myuser/myrepo/commit/%s' % commit_sha: {
'payload': self.dump_json({
'hash': commit_sha,
'author': {
'raw': 'Some User <user@example.com>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is a message.',
'parents': [{'hash': parent_sha}],
}),
},
'/api/2.0/repositories/myuser/myrepo/diff/%s' % commit_sha: {
'payload': b'This is a test \xc7.',
},
}
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=2) as ctx:
repository = ctx.create_repository(tool_name='Git')
commit = ctx.service.get_change(repository, commit_sha)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commit/1c44b461cebe5874a857c51a4a13a849a4d1e52d'
'?fields=author.raw%2Chash%2Cdate%2Cmessage%2Cparents.hash'))
ctx.assertHTTPCall(
1,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'diff/1c44b461cebe5874a857c51a4a13a849a4d1e52d'))
self.assertEqual(
commit,
Commit(author_name='Some User <user@example.com>',
date='2017-01-24T13:11:22+00:00',
id=commit_sha,
message='This is a message.',
parent=parent_sha))
self.assertEqual(commit.diff, b'This is a test \xc7.\n')
def _test_get_file(self, tool_name, revision, base_commit_id,
expected_revision):
"""Test file fetching.
Args:
tool_name (unicode):
The name of the SCM Tool to test with.
revision (unicode, optional):
The revision to check.
base_commit_id (unicode, optional):
The base commit to fetch against.
expected_revision (unicode, optional):
The revision expected in the payload.
"""
with self.setup_http_test(payload=b'My data',
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name=tool_name)
result = ctx.service.get_file(repository=repository,
path='path',
revision=revision,
base_commit_id=base_commit_id)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'src/%s/path'
% expected_revision))
self.assertIsInstance(result, bytes)
self.assertEqual(result, b'My data')
def _test_get_file_exists(self, tool_name, revision, base_commit_id,
expected_revision, expected_found,
expected_http_called=True):
"""Test file existence checks.
Args:
tool_name (unicode):
The name of the SCM Tool to test with.
revision (unicode, optional):
The revision to check.
base_commit_id (unicode, optional):
The base commit to fetch against.
expected_revision (unicode, optional):
The revision expected in the payload.
expected_found (bool, optional):
Whether a truthy response should be expected.
expected_http_called (bool, optional):
Whether an HTTP request is expected to have been made.
"""
if expected_found:
payload = b'file...'
status_code = None
else:
payload = None
status_code = 404
if expected_http_called:
expected_calls = 1
else:
expected_calls = 0
with self.setup_http_test(payload=payload,
status_code=status_code,
expected_http_calls=expected_calls) as ctx:
repository = ctx.create_repository(tool_name=tool_name)
result = ctx.service.get_file_exists(repository=repository,
path='path',
revision=revision,
base_commit_id=base_commit_id)
if expected_http_called:
ctx.assertHTTPCall(
0,
method='HEAD',
url=('https://bitbucket.org/api/2.0/repositories/myuser/'
'myrepo/src/%s/path'
% expected_revision))
self.assertEqual(result, expected_found)
class BitbucketAuthFormTests(BitbucketTestCase):
"""Unit tests for BitbucketAuthForm."""
def test_clean_hosting_account_username_with_username(self):
"""Testing BitbucketAuthForm.clean_hosting_account_username with
username
"""
form = BitbucketAuthForm(
hosting_service_cls=self.service_class,
data={
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
})
self.assertTrue(form.is_valid())
def test_clean_hosting_account_username_with_email(self):
"""Testing BitbucketAuthForm.clean_hosting_account_username with
e-mail address
"""
form = BitbucketAuthForm(
hosting_service_cls=self.service_class,
data={
'hosting_account_username': 'myuser@example.com',
'hosting_account_password': 'mypass',
})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['hosting_account_username'],
['This must be your Bitbucket username (the same one '
'you would see in URLs for your own repositories), '
'not your Atlassian e-mail address.'])
class CloseSubmittedHookTests(BitbucketTestCase):
"""Unit tests for the Bitbucket close-submitted webhook."""
fixtures = ['test_users', 'test_scmtools']
COMMITS_URL = ('/api/2.0/repositories/test/test/commits'
'?exclude=abc123&include=def123')
def test_close_submitted_hook(self):
"""Testing BitBucket close_submitted hook"""
self._test_post_commit_hook()
@add_fixtures(['test_site'])
def test_close_submitted_hook_with_local_site(self):
"""Testing BitBucket close_submitted hook with a Local Site"""
self._test_post_commit_hook(
LocalSite.objects.get(name=self.local_site_name))
def test_close_submitted_hook_with_truncated_commits(self):
"""Testing BitBucket close_submitted hook with truncated list of
commits
"""
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
# Create two review requests: One per referenced commit.
review_request1 = self.create_review_request(id=99,
repository=repository,
publish=True)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
review_request2 = self.create_review_request(id=100,
repository=repository,
publish=True)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status,
review_request2.PENDING_REVIEW)
page2_url = '%s&page=2&pagelen=100' % self.COMMITS_URL
paths = {
'%s&pagelen=100' % self.COMMITS_URL: {
'payload': self.dump_json({
'next': page2_url,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a84'
'9a4d1e52d',
'message': 'This is my fancy commit.\n'
'\n'
'Reviewed at http://example.com%s'
% review_request1.get_absolute_url(),
},
],
}),
},
page2_url: {
'payload': self.dump_json({
'values': [
{
'hash': '9fad89712ebe5874a857c5112a3c9d1'
'87ada0dbc',
'message': 'This is another commit\n'
'\n'
'Reviewed at http://example.com%s'
% review_request2.get_absolute_url(),
},
],
}),
}
}
# Simulate the webhook.
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=2):
self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request1.get_absolute_url(),
truncated=True)
# Check the first review request.
#
# The first review request has an entry in the truncated list and the
# fetched list. We'll make sure we've only processed it once.
review_request1 = ReviewRequest.objects.get(pk=review_request1.pk)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status, review_request1.SUBMITTED)
self.assertEqual(review_request1.changedescs.count(), 1)
changedesc = review_request1.changedescs.get()
self.assertEqual(changedesc.text, 'Pushed to master (1c44b46)')
        # Check the second review request.
review_request2 = ReviewRequest.objects.get(pk=review_request2.pk)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status, review_request2.SUBMITTED)
self.assertEqual(review_request2.changedescs.count(), 1)
changedesc = review_request2.changedescs.get()
self.assertEqual(changedesc.text, 'Pushed to master (9fad897)')
def test_close_submitted_hook_with_truncated_commits_limits(self):
"""Testing BitBucket close_submitted hook with truncated list of
commits obeys limits
"""
paths = {
'%s&pagelen=100' % self.COMMITS_URL: {
'payload': self.dump_json({
'next': '%s&page=2' % self.COMMITS_URL,
'values': [],
}),
},
}
paths.update({
'%s&page=%s&pagelen=100' % (self.COMMITS_URL, i): {
'payload': self.dump_json({
'next': '%s&page=%s' % (self.COMMITS_URL, i + 1),
'values': [],
}),
}
for i in range(1, 10)
})
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
# Create two review requests: One per referenced commit.
review_request1 = self.create_review_request(id=99,
repository=repository,
publish=True)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
review_request2 = self.create_review_request(id=100,
repository=repository,
publish=True)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status,
review_request2.PENDING_REVIEW)
# Simulate the webhook.
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
# There should have been 5 API requests. We'll never hit the final
# page.
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=5):
self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request1.get_absolute_url(),
truncated=True)
# The review requests should not have been updated.
review_request1 = ReviewRequest.objects.get(pk=review_request1.pk)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
self.assertEqual(review_request1.changedescs.count(), 0)
        # Check the second review request.
        review_request2 = ReviewRequest.objects.get(pk=review_request2.pk)
        self.assertTrue(review_request2.public)
        self.assertEqual(review_request2.status,
                         review_request2.PENDING_REVIEW)
self.assertEqual(review_request2.changedescs.count(), 0)
def test_close_submitted_hook_with_truncated_and_auth_error(self):
"""Testing BitBucket close_submitted hook with truncated list of
commits and authentication error talking to Bitbucket
"""
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
# Create two review requests: One per referenced commit.
review_request1 = self.create_review_request(id=99,
repository=repository,
publish=True)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
review_request2 = self.create_review_request(id=100,
repository=repository,
publish=True)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status,
review_request2.PENDING_REVIEW)
# Simulate the webhook.
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
with self.setup_http_test(status_code=401,
hosting_account=account,
expected_http_calls=1):
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request1.get_absolute_url(),
truncated=True)
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content,
b'Incorrect username or password configured for '
b'this repository on Review Board.')
# The review requests should not have been updated.
review_request1 = ReviewRequest.objects.get(pk=review_request1.pk)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
self.assertEqual(review_request1.changedescs.count(), 0)
        # Check the second review request.
        review_request2 = ReviewRequest.objects.get(pk=review_request2.pk)
        self.assertTrue(review_request2.public)
        self.assertEqual(review_request2.status,
                         review_request2.PENDING_REVIEW)
self.assertEqual(review_request2.changedescs.count(), 0)
def test_close_submitted_hook_with_invalid_repo(self):
"""Testing BitBucket close_submitted hook with invalid repository"""
repository = self.create_repository()
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
self.assertEqual(response.status_code, 404)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
@add_fixtures(['test_site'])
def test_close_submitted_hook_with_invalid_site(self):
"""Testing BitBucket close_submitted hook with invalid Local Site"""
local_site = LocalSite.objects.get(name=self.local_site_name)
account = self.create_hosting_account(local_site=local_site)
account.save()
repository = self.create_repository(hosting_account=account,
local_site=local_site)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
local_site_name='badsite',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
self.assertEqual(response.status_code, 404)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
def test_close_submitted_hook_with_invalid_service_id(self):
"""Testing BitBucket close_submitted hook with invalid hosting
service ID
"""
# We'll test against GitHub for this test.
account = self.create_hosting_account()
account.service_name = 'github'
account.save()
repository = self.create_repository(hosting_account=account)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
self.assertEqual(response.status_code, 404)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
def test_close_submitted_hook_with_invalid_review_request(self):
"""Testing BitBucket close_submitted hook with invalid review request
"""
self.spy_on(logging.error)
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url='/r/9999/')
self.assertEqual(response.status_code, 200)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
self.assertTrue(logging.error.called_with(
'close_all_review_requests: Review request #%s does not exist.',
9999))
def _test_post_commit_hook(self, local_site=None):
"""Testing posting to a commit hook.
This will simulate pushing a commit and posting the resulting webhook
payload from Bitbucket to the handler for the hook.
Args:
local_site (reviewboard.site.models.LocalSite, optional):
The Local Site owning the review request.
"""
account = self.create_hosting_account(local_site=local_site)
repository = self.create_repository(hosting_account=account,
local_site=local_site)
review_request = self.create_review_request(repository=repository,
local_site=local_site,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
local_site=local_site,
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.SUBMITTED)
self.assertEqual(review_request.changedescs.count(), 1)
changedesc = review_request.changedescs.get()
self.assertEqual(changedesc.text, 'Pushed to master (1c44b46)')
def _post_commit_hook_payload(self, post_url, review_request_url,
truncated=False):
"""Post a payload for a hook for testing.
Args:
post_url (unicode):
The URL to post to.
review_request_url (unicode):
The URL of the review request being represented in the
payload.
truncated (bool, optional):
Whether the commit list should be marked truncated.
Results:
django.core.handlers.request.wsgi.WSGIRequest:
The post request.
"""
return self.client.post(
post_url,
content_type='application/json',
data=self.dump_json({
# NOTE: This payload only contains the content we make
# use of in the hook.
'push': {
'changes': [
{
'new': {
'type': 'branch',
'name': 'master',
},
'truncated': truncated,
'commits': [
{
'hash': '1c44b461cebe5874a857c51a4a13a84'
'9a4d1e52d',
'message': 'This is my fancy commit\n'
'\n'
'Reviewed at http://example.com'
'%s'
% review_request_url,
},
],
'links': {
'commits': {
'href': self.COMMITS_URL,
},
},
},
# Some entries containing missing keys.
{
'new': {
'type': 'frobblegobble',
'name': 'master',
},
'truncated': truncated,
'commits': [
{
'hash': '1c44b461cebe5874a857c51a4a13a84'
'9a4d1e52d',
'message': 'This is my fancy commit\n'
'\n'
'Reviewed at http://example.com'
'%s'
% review_request_url,
},
],
'links': {
'commits': {
'href': self.COMMITS_URL,
},
},
},
{
'new': {
'type': 'branch',
'name': 'other',
},
'truncated': truncated,
'commits': [
{
'hash': 'f46a13a1cc43bebea857c558741a484'
'1e52d9a4d',
'message': 'Ignored commit.'
},
],
'links': {},
},
{
'new': {},
'commits': [],
},
{
'new': None,
'commits': None,
},
{
}
],
}
}, for_response=False))
|
[
"christian@beanbaginc.com"
] |
christian@beanbaginc.com
|
b793c18b73cf718d4a56d2d86079e007fcdd1110
|
5b2525863ad7d02aae57328e598493a5dd816335
|
/oli_crm/cliente.py
|
7bc5f3e6aaed34d06c324b2800cf9c5c9473d4db
|
[] |
no_license
|
rejamen/oli_studio
|
4707a718baa27630571caadd0298ee654799ef6b
|
56fc6a2456dff51d9d9182648b3d62bb6cc69dee
|
refs/heads/master
| 2020-06-14T17:20:00.239332
| 2018-01-24T21:19:05
| 2018-01-24T21:19:05
| 195,070,415
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,536
|
py
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
import os, sys
from openerp.osv import osv
from openerp.tools.translate import _
from datetime import datetime, date
class cliente_cliente(models.Model):
_name = 'cliente.cliente'
def check_permission(self, cr, uid, ids, context=None):
for s in self.browse(cr, uid, ids, context=context):
res = False
# group_obj = self.pool.get('res.groups')
# manager_ids = group_obj.search(cr, uid, [('name','=', 'Configuration')])
# if uid == manager_ids[0]:
# res = True
s.update({'admin': res})
def set_vip(self, cr, uid, ids, context=None):
for c in self.browse(cr, uid, ids, context=context):
self.write(cr, uid, c.id, {'tipo': 'vip'})
return True
def set_normal(self, cr, uid, ids, context=None):
for c in self.browse(cr, uid, ids, context=context):
self.write(cr, uid, c.id, {'tipo': 'normal'})
return True
    @api.depends('tipo') # sets the color according to the state
def _get_color_from_type(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
color = 1
if record.tipo == 'vip':
color = 2
record.update({'color': color})
def clear_historial(self, cr, uid, ids, context=None):
for e in self.browse(cr, uid, ids, context=context):
self.write(cr, uid, [e.id], {'state': 'contratado'})
return True
@api.depends('historial_line.cliente_id')
def _get_total_trabajos(self):
for order in self:
total = 0
abonado = 0.0
for line in order.historial_line:
total += 1
abonado += line.costo
order.update({'total_abonado': abonado})
name = fields.Char('Nombre y Apellidos', required=True)
tipo = fields.Selection([
('normal', 'Normal'),
('vip', 'VIP'),
], 'Tipo', help="Los clientes VIP disfrutan de facilidades segun las estrategias de marketing", default='normal')
phone = fields.Char('Telefono')
mobile = fields.Char('Movil')
mail = fields.Char('e-mail')
address = fields.Char('Direccion')
color = fields.Char('Color', compute='_get_color_from_type')
historial_line = fields.One2many('historial.cliente', 'cliente_id', 'Historial Lines')
total_abonado = fields.Float(compute='_get_total_trabajos', string='Total abonado')
admin = fields.Boolean(compute='check_permission', string='Admin')
# @api.multi
# def unlink(self):
# if not self.admin:
# raise UserError(_('No se puede eliminar un cliente pues se afectan los registros que dependen de él.'))
# ret = super(cliente_cliente, self).unlink()
# return ret
def name_get(self, cr, uid, ids, context=None):
res = []
for e in self.browse(cr,uid, ids, context=context):
name = e.name
res.append((e['id'],name))
return res
class historial_cliente(models.Model):
_name="historial.cliente"
_order = "fecha desc"
def preparar_historial(self, cr, uid, cliente, fecha, description, costo, context=None):
historial_vals = {
'cliente_id': cliente.id,
'fecha': fecha,
'description': description,
'costo': costo,
}
return historial_vals
cliente_id = fields.Many2one('cliente.cliente', 'Cliente', select=True, ondelete='cascade')
fecha = fields.Date('Fecha', default=fields.Date.context_today)
description = fields.Char('Descripcion')
costo = fields.Float('Costo', help="Costo del trabajo realizado")
|
[
"rejamen@gmail.com"
] |
rejamen@gmail.com
|
0f575114c740a9d4d0a6ef18d6c3eed531df65f9
|
fff3c194287eea1260c18a638d2d0287b7ce80d7
|
/Fazt-Python/functions.py
|
c87f77e9c30fab3ed7fe2178a8fa86871f35eab8
|
[] |
no_license
|
jav2074/python-curso
|
8efecd848df9d9949220d1edcadbf546662c9972
|
8bbc1215a00efe80ecade8b7bc79607e9a201010
|
refs/heads/master
| 2022-05-23T03:09:11.874813
| 2020-04-28T04:05:20
| 2020-04-28T04:05:20
| 259,351,416
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 169
|
py
|
def funcion():
print("Hello dude!!") ## Declaración normal
sumar = lambda n1, n2: n1 + n2 ## Declaración más cool
print(f"La suma de 10 + 30 = {sumar(10,30)}")
|
[
"jav.electrochance@gmail.com"
] |
jav.electrochance@gmail.com
|
7d73a85d5a3225d378732273639e817589855b7f
|
1f6fcaba4fb7c5023e21105f3941ff921b48f9da
|
/manage.py
|
20465c7d94f9f938d476f83ea55f69edb8f3ccd8
|
[
"MIT"
] |
permissive
|
Hugo-mesk/sb-admin-django
|
79564c2cda609be1159413de16ddc103daa9efca
|
55cb7c6704db9ac287ead67c76b4e9666e55a231
|
refs/heads/master
| 2020-04-29T19:52:31.816341
| 2019-09-18T01:14:09
| 2019-09-18T01:14:09
| 176,368,959
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 628
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sb_admin.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"hugo.mesk@gmail.com"
] |
hugo.mesk@gmail.com
|
32f23cb372dfdf98567ae16228bdbb95e6934524
|
74549d7c57b4746ac2a9c275aa12bfc577b0e8af
|
/hogwartsEmailAdderss.py
|
245b738aa52c6cc12e18274915c1042e79fc0fa9
|
[] |
no_license
|
abidkhan484/hackerrank_solution
|
af9dbf6ec1ead920dc18df233f40db0c867720b4
|
b0a98e4bdfa71a4671999f16ab313cc5c76a1b7a
|
refs/heads/master
| 2022-05-02T11:13:29.447127
| 2022-04-13T03:02:59
| 2022-04-13T03:02:59
| 99,207,401
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 354
|
py
|
#!/bin/python3
import string
def isValid(email):
for i in range(5):
if email[i] not in string.ascii_lowercase:
return 'No'
else:
if email[5:] != '@hogwarts.com':
return 'No'
return 'Yes'
if __name__ == "__main__":
s = input().strip()
result = isValid(s)
print(result)
|
[
"noreply@github.com"
] |
abidkhan484.noreply@github.com
|
30be4f592bf60c640c8d02efec383730b5e1a5b5
|
8bcf166b364427c77daebaca28e30145ffefe72e
|
/v0.5/training/keyword_spotting/eval_quantized_model.py
|
9f9648df0cb1234ca6e78506c79f4b0a2fd1c2c3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
shahbazabbasi313/tiny
|
61992854194460a03b94b45ee5719d60ad7aa4a6
|
0b04bcd402ee28f84e79fa86d8bb8e731d9497b8
|
refs/heads/master
| 2023-07-22T13:04:03.353053
| 2021-08-16T15:30:00
| 2021-08-16T15:30:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,654
|
py
|
import tensorflow as tf
import os
import numpy as np
import argparse
import get_dataset as kws_data
import kws_util
def predict(interpreter, data):
# Get input and output tensors.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
# Test the model on input data.
input_shape = input_details[0]['shape']
input_data = np.array(data, dtype=np.int8)
output_data = np.empty_like(data)
  # Run inference one sample at a time over the batch dimension.
  for i in range(input_data.shape[0]):
    interpreter.set_tensor(input_details[0]['index'], input_data[i:i+1, :])
    interpreter.invoke()
    # The function `get_tensor()` returns a copy of the tensor data.
    # Use `tensor()` in order to get a pointer to the tensor.
    output_data[i:i+1, :] = interpreter.get_tensor(output_details[0]['index'])
return output_data
if __name__ == '__main__':
Flags, unparsed = kws_util.parse_command()
ds_train, ds_test, ds_val = kws_data.get_training_data(Flags)
interpreter = tf.lite.Interpreter(model_path=Flags.tfl_file_name)
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
input_shape = input_details[0]['shape']
output_data = []
labels = []
if Flags.target_set[0:3].lower() == 'val':
eval_data = ds_val
print("Evaluating on the validation set")
elif Flags.target_set[0:4].lower() == 'test':
eval_data = ds_test
print("Evaluating on the test set")
elif Flags.target_set[0:5].lower() == 'train':
eval_data = ds_train
print("Evaluating on the training set")
eval_data = eval_data.unbatch().batch(1).as_numpy_iterator()
input_scale, input_zero_point = input_details[0]["quantization"]
for dat, label in eval_data:
if input_details[0]['dtype'] == np.float32:
interpreter.set_tensor(input_details[0]['index'], dat)
elif input_details[0]['dtype'] == np.int8:
dat_q = np.array(dat/input_scale + input_zero_point, dtype=np.int8) # should match input type in quantize.py
interpreter.set_tensor(input_details[0]['index'], dat_q)
else:
raise ValueError("TFLite file has input dtype {:}. Only np.int8 and np.float32 are supported".format(
input_details[0]['dtype']))
interpreter.invoke()
# The function `get_tensor()` returns a copy of the tensor data.
# Use `tensor()` in order to get a pointer to the tensor.
output_data.append(np.argmax(interpreter.get_tensor(output_details[0]['index'])))
labels.append(label[0])
num_correct = np.sum(np.array(labels) == output_data)
acc = num_correct / len(labels)
print(f"Accuracy on {Flags.target_set} = {acc:5.3f} ({num_correct}/{len(labels)})")
|
[
"noreply@github.com"
] |
shahbazabbasi313.noreply@github.com
|
dbab2a986b7d8d5f43a5b9fe7805078b099fd92d
|
42b6dbe23ba392fe53d958e26d77a596ee5b7c5a
|
/RegistroHoras/Actividades/admin.py
|
6b324b6805062a400c7aa894be36788e987df86c
|
[] |
no_license
|
parraletz/RegistroHoras
|
611d0eb44542d595b65f97c66f17bfff9af167d3
|
6096f26af84932f4f2cb8764e76cc2bd54f8bb7a
|
refs/heads/master
| 2020-04-06T07:06:11.313349
| 2012-09-13T02:10:33
| 2012-09-13T02:10:33
| 5,774,219
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 347
|
py
|
from django.contrib import admin
from RegistroHoras.Actividades.models import Actividad
class ActividadAdmin(admin.ModelAdmin):
list_display = ('ticket', 'usuario' , 'fecha', )
ordering = ['ticket',]
class Media:
js = ('/media/js/tiny_mce/tiny_mce.js', '/media/js/textarea.js')
admin.site.register(Actividad, ActividadAdmin)
|
[
"parraletz@MacBook-de-Alex.local"
] |
parraletz@MacBook-de-Alex.local
|
6d77d6d52951c53f4d0ef07221cb6ae33d1286a2
|
2c904b9f2cdb7baf1f84dbd622312357e05de734
|
/config/wsgi.py
|
b6b2b1f00bf49f88e5a2ca9848f049a169f6e0c1
|
[] |
no_license
|
teresaliu20/Social-News-Backend
|
e026701531df198592138f11b1d5ab71bc0ea8b9
|
86d0823f27a265d6e977240de7fbbe9e524199f4
|
refs/heads/master
| 2021-09-09T09:33:50.202607
| 2019-05-06T03:02:42
| 2019-05-06T03:02:42
| 170,370,281
| 0
| 0
| null | 2021-09-08T00:59:58
| 2019-02-12T18:35:01
|
Python
|
UTF-8
|
Python
| false
| false
| 1,942
|
py
|
"""
WSGI config for paper project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# paper directory.
app_path = os.path.abspath(os.path.join(
os.path.dirname(os.path.abspath(__file__)), os.pardir))
sys.path.append(os.path.join(app_path, 'paper'))
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
application = Sentry(application)
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
[
"teresali@usc.edu"
] |
teresali@usc.edu
|
1fcb89c5c39abd1703dd85a4ef5a5f187e638036
|
e0985213177cb63350db6efa9c2be93af3dccc55
|
/1. Neural Networks and Deep Learning/Week 3/planar_utils.py
|
852c34d6eca94af4e2e40240ffd97764b5cd764b
|
[] |
no_license
|
Tinsae/Deep-Learning-Projects
|
c3e7172d31e6b4ebae63b23b7b8a4584b243c7b3
|
9853ed9d0e0675dab5d612bcc267ae9a666c17e2
|
refs/heads/master
| 2020-04-05T13:47:22.006512
| 2019-06-10T00:20:49
| 2019-06-10T00:20:49
| 156,909,625
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,716
|
py
|
import matplotlib.pyplot as plt
import numpy as np
import sklearn
import sklearn.datasets
import sklearn.linear_model
def plot_decision_boundary(model, X, y):
# Set min and max values and give it some padding
x_min, x_max = X[0, :].min() - 1, X[0, :].max() + 1
y_min, y_max = X[1, :].min() - 1, X[1, :].max() + 1
h = 0.01
# Generate a grid of points with distance h between them
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
# Predict the function value for the whole grid
# TGA Note: this will unroll xx into 1D array and also yy.
## Then it concatenates the 2 to create a 2 column table
## which correspond to a 2 feature dataset
## Remember that sklearn likes data to be in rows and
## features to be in columns.'model' is sklearn function
Z = model(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# Plot the contour and training examples
# TGA Note: this one is predicted class for range of values
# the range will surely cover the data in X
plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
plt.ylabel('x2')
plt.xlabel('x1')
## TGA Note: this one is a plot using the ground truth
plt.scatter(X[0, :], X[1, :], c=y, cmap=plt.cm.Spectral)
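# --- Added illustration of the grid unrolling described in the notes above ---
# A minimal sketch (not part of the original file): the two (ny, nx) meshgrid
# arrays are flattened with ravel() and stacked column-wise by np.c_ into an
# (ny*nx, 2) table, one (x, y) sample per row, which is the layout sklearn
# models expect.
def _demo_grid_unrolling():
    xx, yy = np.meshgrid(np.arange(0, 1, 0.5), np.arange(0, 1, 0.25))
    grid = np.c_[xx.ravel(), yy.ravel()]
    print(xx.shape, yy.shape)  # (4, 2) (4, 2)
    print(grid.shape)          # (8, 2)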
def sigmoid(x):
"""
Compute the sigmoid of x
Arguments:
x -- A scalar or numpy array of any size.
Return:
s -- sigmoid(x)
"""
s = 1/(1+np.exp(-x))
return s
def load_planar_dataset():
np.random.seed(1)
m = 400 # number of examples
N = int(m/2) # number of points per class
D = 2 # dimensionality
X = np.zeros((m,D)) # data matrix where each row is a single example
Y = np.zeros((m,1), dtype='uint8') # labels vector (0 for red, 1 for blue)
a = 4 # maximum ray of the flower
for j in range(2):
ix = range(N*j,N*(j+1))
t = np.linspace(j*3.12,(j+1)*3.12,N) + np.random.randn(N)*0.2 # theta
r = a*np.sin(4*t) + np.random.randn(N)*0.2 # radius
X[ix] = np.c_[r*np.sin(t), r*np.cos(t)]
Y[ix] = j
X = X.T
Y = Y.T
return X, Y
def load_extra_datasets():
N = 200
noisy_circles = sklearn.datasets.make_circles(n_samples=N, factor=.5, noise=.3)
noisy_moons = sklearn.datasets.make_moons(n_samples=N, noise=.2)
blobs = sklearn.datasets.make_blobs(n_samples=N, random_state=5, n_features=2, centers=6)
gaussian_quantiles = sklearn.datasets.make_gaussian_quantiles(mean=None, cov=0.5, n_samples=N, n_features=2, n_classes=2, shuffle=True, random_state=None)
no_structure = np.random.rand(N, 2), np.random.rand(N, 2)
return noisy_circles, noisy_moons, blobs, gaussian_quantiles, no_structure
|
[
"Tinsae@users.noreply.github.com"
] |
Tinsae@users.noreply.github.com
|
f401333e5549b41f09b8c1318936448c3a83d737
|
98e1716c1c3d071b2fedef0ac029eb410f55762c
|
/part9-Manipulating-DataFrames-with-pandas/No08-Changing-index-of-a-DataFrame.py
|
c7f11201fbcc7e64a48481ca0e8a27f8c2375844
|
[] |
no_license
|
iamashu/Data-Camp-exercise-PythonTrack
|
564531bcf1dff119949cbb75e1fd63d89cb2779f
|
c72a4e806494f0e263ced9594597dc8882c2131c
|
refs/heads/master
| 2020-07-22T00:23:12.024386
| 2019-04-12T09:24:42
| 2019-04-12T09:24:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,477
|
py
|
#Changing index of a DataFrame
'''
As you saw in the previous exercise, indexes are immutable objects. This means that if you want to change or modify the index in a DataFrame, then you need to change the whole index. You will do this now, using a list comprehension to create the new index.
A list comprehension is a succinct way to generate a list in one line. For example, the following list comprehension generates a list that contains the cubes of all numbers from 0 to 9: cubes = [i**3 for i in range(10)]. This is equivalent to the following code:
cubes = []
for i in range(10):
cubes.append(i**3)
Before getting started, print the sales DataFrame in the IPython Shell and verify that the index is given by month abbreviations containing lowercase characters.
Instructions
100 XP
Create a list new_idx with the same elements as in sales.index, but with all characters capitalized.
Assign new_idx to sales.index.
Print the sales dataframe. This has been done for you, so hit 'Submit Answer' and to see how the index changed.
'''
# Code
# Create the list of new indexes: new_idx
new_idx = [i.upper() for i in sales.index]
#my error: new_idx = [sales.index.upper() for sales.index in sales.index]
# Assign new_idx to sales.index
sales.index = new_idx
# Print the sales DataFrame
print(sales)
'''result
eggs salt spam
JAN 47 12.0 17
FEB 110 50.0 31
MAR 221 89.0 72
APR 77 87.0 20
MAY 132 NaN 52
JUN 205 60.0 55
'''
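# --- Added, self-contained illustration of the same index change ---
# The exercise assumes a preloaded `sales` DataFrame; the small frame built
# here is made up purely to demonstrate the list-comprehension technique.
import pandas as pd
sales_demo = pd.DataFrame(
    {'eggs': [47, 110, 221], 'salt': [12.0, 50.0, 89.0], 'spam': [17, 31, 72]},
    index=['jan', 'feb', 'mar'])
sales_demo.index = [month.upper() for month in sales_demo.index]
print(sales_demo) # index is now JAN, FEB, MAR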
|
[
"beiran@hotmail.com"
] |
beiran@hotmail.com
|
66f58fbdd6e05d8b3b7be819f956b8858a9e2dd3
|
df1ce1c88d5997259a8429dfbb07377a561113f0
|
/assignments/2008-2012/universityOfWaterloo/cs116/a6/a6q1.py
|
c984ed2cee674a280cf845e45c2f89b6f98d8798
|
[] |
no_license
|
watadarkstar/school-works
|
0b4c5603461b02cd573ac00caa04660ddd58ad78
|
d4b7c8a19415bc2d74c73eb4a5da0932020ee5a6
|
refs/heads/master
| 2016-09-05T22:53:27.341377
| 2013-08-26T06:04:43
| 2013-08-26T06:04:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 740
|
py
|
##
## *****************************************************
##
## CS 116 Assignment 6, Question 1
## Darren Poon
## (How_many_buckets)
##
## *****************************************************
## Contract: how_many_buckets: int[>0] int[>0] int[>0] -> int[>0]
## Purpose: consumes the height and radius of a cylindrical bucket and the number of cubic centimetres of sand available, and produces an integer indicating the number of complete buckets of sand possible.
## Examples:
## how_many_buckets(2,1,62.8) => 9
## how_many_buckets(1,1,1) => 0
## how_many_buckets(2,1,30) => 4
## Definition:
import math
def how_many_buckets(height,radius,cubic_cm):
return int(cubic_cm /(radius *radius *math.pi *height))
## No test is needed
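## Worked check (added illustration): for height=2 and radius=1 each bucket
## holds pi*radius*radius*height ~= 6.283 cubic centimetres, so 62.8 cubic
## centimetres of sand fill int(62.8 / 6.283) = 9 complete buckets.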
|
[
"dyhpoon@gmail.com"
] |
dyhpoon@gmail.com
|
ffcd183098cfe16aeeca43b435378c6bfc58a74c
|
586973165efb8ac1347b1d5e5187200d9fbaa4e1
|
/prediction/Long_short_term_memory_keras_in_tf.py
|
bd0fd73b9bf1c26a15965bd57caa0e27b27bdfba
|
[] |
no_license
|
Jaehoon-Cha/machine_learning
|
01dd84445fa646786b503122b21477209a574f8a
|
a0f84da3dc1df5a99a05ec09b4913c14bf4820f2
|
refs/heads/master
| 2020-06-10T20:20:33.194291
| 2019-07-17T03:05:58
| 2019-07-17T03:05:58
| 193,735,274
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,785
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 28 13:10:13 2019
@author: jaehooncha
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.preprocessing import scale
import tensorflow as tf
from tensorflow.keras.layers import Dense, Dropout, CuDNNLSTM, BatchNormalization
from tensorflow.keras.layers import TimeDistributed
### data load ###
features = pd.read_pickle('../datasets/four_features.pickle')
features = np.array(features)
features = scale(features)
time_seq = 10
n_features = features.shape[1]-1
def make_sequence(features, s_seq):
features_seq = []
m, n = features.shape
for i in range(m - s_seq + 1):
features_seq.append([np.array(features[i:i+s_seq, :-1]), features[i+s_seq-1, -1]])
return features_seq
features_seq = make_sequence(features, time_seq)
train = features_seq[:-365]
test = features_seq[-365:]
### seperate features and target ###
train_x = [item[0] for item in train]
train_x = np.concatenate(train_x)
train_x = train_x.reshape(-1,time_seq, n_features)
train_y = [item[1] for item in train]
train_y = np.array(train_y)
test_x = [item[0] for item in test]
test_x = np.concatenate(test_x)
test_x = test_x.reshape(-1,time_seq, n_features)
test_y = [item[1] for item in test]
test_y = np.array(test_y)
### LSTM ###
def rnn_train(X, Y, h1 = 64, h2 = 32, n_epochs = 100, s_batch = 100, Lr = 0.001):
model = tf.keras.models.Sequential()
model.add(CuDNNLSTM(h1, input_shape = (X.shape[1:]), return_sequences = True))
model.add(CuDNNLSTM(h2))
model.add(Dense(1))
optimizer = tf.keras.optimizers.Adam(lr=Lr)
model.compile(optimizer = optimizer,
loss = 'mse',
metrics =['mae', 'mse'])
hist = model.fit(X, Y, epochs = n_epochs, batch_size = s_batch)
return hist
def rnn_predict(rnn_model, X):
rnn = rnn_model
rnn_prediction = rnn.predict(X)
rnn_prediction = rnn_prediction.reshape(-1)
return rnn_prediction
### implement ###
rnn_model = rnn_train(train_x, train_y, h1 = 64, h2 = 32, n_epochs = 100, s_batch = 100).model
train_predict_y = rnn_predict(rnn_model, train_x)
test_predict_y = rnn_predict(rnn_model, test_x)
### root mean squared error ###
train_rmse = np.sqrt(np.mean((train_predict_y - train_y)**2))
test_rmse = np.sqrt(np.mean((test_predict_y - test_y)**2))
print('train RMSE is %.4f' %(train_rmse))
print('test RMSE is %.4f' %(test_rmse))
### font size ###
plt.rcParams.update({'font.size': 15})
### draw outputs ###
plt.figure(figsize=(15,7))
plt.plot(test_y, label = 'true', c = 'r', marker = '_')
plt.plot(test_predict_y, label = 'prediction', c = 'k')
plt.title('Long Short Term Memory Keras')
plt.xlabel('X', size = 20)
plt.ylabel('Y', size = 20)
plt.legend(loc = 1)
|
[
"Jaehoon.Cha@xjtlu.edu.cn"
] |
Jaehoon.Cha@xjtlu.edu.cn
|
317c96201c4bbb1152cd1d5db29ad9863b206e8f
|
e2e9714ed29e337598f104873e3ee6f476fd1030
|
/URET/uret/__init__.py
|
ff4f758dc2ce7d0931600204c606355fe8e9e787
|
[
"MIT"
] |
permissive
|
shekoelnawawy/Ohio_13
|
5cc776431e31fcb2d4b0aed43d35d3d4cf25dea8
|
14ec296603a3a0cbd0a0ff0dceebf4cd48e24de9
|
refs/heads/master
| 2023-08-09T21:28:13.880192
| 2023-07-25T19:04:46
| 2023-07-25T19:04:46
| 637,802,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 211
|
py
|
"""
Universal Robustness Evaluation Toolkit (URET).
"""
import logging.config
# Project Imports
from uret import core
from uret import transformers
from uret import utils
# Semantic Version
__version__ = "0.1"
|
[
"shekoelnawawy@gmail.com"
] |
shekoelnawawy@gmail.com
|
d027bb05b1862af384adc59f927dd4b314f9cd57
|
2ab1359e620d3cde6aa6dbbc4ac0052aa4d8b60d
|
/not_used/pd_dash.py
|
0176797fff5b79e89fe8824ed903ff1dd563d52b
|
[] |
no_license
|
lauratomkins/gis714_project
|
167fef58e09ae3d841dca07c96b6d1186c7e2541
|
5261b4751ba040e4b9732cdf553a80b886570408
|
refs/heads/master
| 2022-11-12T08:07:57.670108
| 2020-06-25T18:10:49
| 2020-06-25T18:10:49
| 258,236,602
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,010
|
py
|
# step3_map.py
# GIS 715 Student-led lab - Dash
# Add map of California fires by cause.
# Now we will add a second component to the Dash layout. We can use the scattermapbox plotly object to create a map of fire point locations.
# For now the map is not interactive (beyond the out of the box functionality).
# We will use a style sheet (CSS) to place the components within the layout into rows and columns.
import dash
from dash.dependencies import Input, Output
import dash_html_components as html
import dash_core_components as dcc
import numpy as np
import os
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
# Boostrap CSS for styling.
#app.css.append_css({'external_url': 'https://codepen.io/amyoshino/pen/jzXypZ.css'})
filepath = 'G://My Drive//phd//plotly//data//pd_waves//KTYX//20200218//'
filelist = os.listdir(filepath)
df = pd.read_pickle(filepath + filelist[0])
df = df.dropna(axis=0, how='all', subset=['ref', 'rho', 'vel']) # if all values are NaN then remove that row
# Mapbox API key
# Insert your key below.
mapbox_access_token = 'pk.eyJ1Ijoia2VsbHlubSIsImEiOiJjanM0eDF0ZngwMjZ0M3lwYjV5MWxqZm1xIn0.49GaijkpupewHcHf2niUDA'
fig = px.scatter_mapbox(
df,
lat='lat',
lon='lon',
color='ref',
color_continuous_scale=px.colors.sequential.Magma[::-1],
zoom=4,
opacity=0.8,
range_color=[0,40])
#fig.update_traces(marker_size=0.5)
fig.update_layout(
title="Reflectivity [dBZ]",
autosize=True,
height=800,
margin=dict(
l=35, r=35, b=35, t=25),
hovermode="closest",
mapbox=dict(
accesstoken=mapbox_access_token,
style="light",
center = dict(
lon=-75.6791,
lat=43.7558),
zoom=6)
)
fig2 = px.scatter_mapbox(
df,
lat='lat',
lon='lon',
color='rho',
color_continuous_scale=px.colors.sequential.deep,
zoom=4,
opacity=0.8,
#title="rhoHV",
range_color=[0.8,1])
fig2.update_layout(
title='rhoHV',
autosize=True,
height=800,
margin=dict(
l=35, r=35, b=35, t=25),
hovermode="closest",
mapbox=dict(
accesstoken=mapbox_access_token,
style="light",
center = dict(
lon=-75.6791,
lat=43.7558),
zoom=6)
)
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
app.title = 'Radar Image Muting'
app.layout = html.Div([
html.H1(children='Radar Image Muting'),
html.H2(children='GIS715 Class Project, Spring 2020'),
html.Div([
html.Div([html.Label(["Select file"]),dcc.Dropdown(
id="file-selector",
options=[
{'label': i, 'value': i} for i in filelist
],
value = filelist[0]
)], style={'padding':10,'backgroundColor':'transparent'},className = "six columns"),
], style={'padding':10},className = "row"),
html.Div([
html.Div([html.Label(["Select rhoHV threshold to 'mute' reflectivity"]),dcc.Slider(
id="threshold-slider",
min=0.5,
max=1,
step=0.01,
value=0,
marks={
i: '{:1.2f}'.format(i) for i in np.arange(0.5,1.05,0.05)
},
)], style={'padding':10,'backgroundColor':'transparent'},className = "six columns"),
html.Div([html.Label(["Select secondary variable to plot"]),dcc.Dropdown(
id='variable-checklist',
options=[
{'label': 'Correlation Coefficient', 'value': 'rho'},
{'label': 'Waves', 'value': 'waves'},
{'label': 'Velocity', 'value': 'vel'}
],
value= 'rho',
)],style={'padding':10,'backgroundColor':'transparent'},className = "six columns"),
], style={'padding':10},className = "row"),
html.Div([
html.Div(dcc.Graph(id = "ref_map", figure=fig), style={'padding':10},className = "six columns"),
html.Div(dcc.Graph(id = "rho-vel_map", figure=fig2), style={'padding': 10},className = "six columns")
], style={'padding':10},className = 'row')
])
@app.callback(Output('rho-vel_map', "figure"),
[Input('variable-checklist', 'value'),
Input('file-selector', 'value')])
def update_graph(variable_value, file_value):
df = pd.read_pickle(filepath + file_value)
df = df.dropna(axis=0, how='all', subset=['ref', 'rho', 'vel']) # if all values are NaN then remove that row
if variable_value == 'rho':
rng = [0.8,1]
cb = px.colors.sequential.deep
title_label='rhoHV'
elif variable_value == 'waves':
df = df[df['waves']==1]
cb = px.colors.sequential.gray[::-1]
rng = [0,1]
title_label = 'Waves'
elif variable_value == 'vel':
rng = [-30, 30]
cb = px.colors.sequential.RdBu[::-1]
title_label="Velocity [m/s]"
fig = px.scatter_mapbox(
df,
lat='lat',
lon='lon',
color=variable_value,
color_continuous_scale=cb,
zoom=4,
opacity=0.8,
range_color=rng)
fig.update_layout(
title=title_label,
autosize=True,
height=800,
margin=dict(
l=35, r=35, b=35, t=25
),
hovermode="closest",
mapbox=dict(
accesstoken=mapbox_access_token,
style="light",
center = dict(
lon=-75.6791,
lat=43.7558
),
zoom=6)
)
return fig
@app.callback(Output('ref_map', "figure"),
[Input('threshold-slider', 'value'),
Input('file-selector', 'value')])
def update_graph2(threshold_value, file_value):
df = pd.read_pickle(filepath + file_value)
df = df.dropna(axis=0, how='all', subset=['ref', 'rho', 'vel']) # if all values are NaN then remove that row
temp = df
temp.loc[df['rho'] < threshold_value, 'ref'] = np.nan
fig = px.scatter_mapbox(
temp,
lat='lat',
lon='lon',
color='ref',
color_continuous_scale=px.colors.sequential.Magma[::-1],
zoom=4,
opacity=0.8,
range_color=[0,40])
#fig.update_traces(marker_size=0.5)
fig.update_layout(
title="Reflectivity [dBZ]",
autosize=True,
height=800,
margin=dict(
l=35, r=35,b=35,t=25
),
hovermode="closest",
mapbox=dict(
accesstoken=mapbox_access_token,
style="light",
center = dict(
lon=-75.6791,
lat=43.7558
),
zoom=6)
)
return fig
if __name__ == '__main__':
app.run_server(debug=True)
|
[
"lmtomkin@ncsu.edu"
] |
lmtomkin@ncsu.edu
|
f66ad507a19057092e4b6df1111b5029c988e1a9
|
20448841fe75541d2f5b9879178e0f874daede38
|
/Whats Your Name.py
|
57d7231ef7080fc9104d81d3a0bc0a99dc7fff91
|
[] |
no_license
|
dnskvamsi/Hacker_rank_python_solutions
|
d421bfe231c3d6913fc5285c254ec67ea2c76845
|
d118734f2cee1df64df02e0421fff68aa5c51cc7
|
refs/heads/main
| 2023-05-03T23:28:20.035678
| 2021-05-26T10:59:31
| 2021-05-26T10:59:31
| 370,945,352
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 272
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
def print_full_name(first, last):
print("Hello "+first+" "+last+"! You just delved into python.")
if __name__ == '__main__':
first_name = input()
last_name = input()
print_full_name(first_name, last_name)
|
[
"noreply@github.com"
] |
dnskvamsi.noreply@github.com
|
844110cd890f6188de3e075fd83348fe08cb99db
|
14869a028857850c9847780a18949e40f23f2390
|
/format.py
|
1195d3754b01c9375087971705df0cceb8462387
|
[] |
no_license
|
djdevastate123/gittest
|
97c17428b3d7d05475c2094258c06f3afabecf8a
|
9b9f554ba8231cd3639cc9e80f2d6c75623c0534
|
refs/heads/master
| 2021-01-20T15:07:08.724089
| 2018-04-26T05:58:07
| 2018-04-26T05:58:07
| 82,796,154
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 480
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# format.py
#
def main(args):
print("{} {}".format(1,2))
print("{1:>20} {0:>10}".format(1,2))
print("{1:_>20} {0:.>10}".format(1,2))
print("{1:_^20} {0:.^10}".format(1,2))
print("{:10.5f}".format(3.14159265))
print("{:*^30.5f}".format(3.14159265))
print(hex(100))
print(oct(100))
print(bin(100))
return 0
if __name__ == '__main__':
import sys
sys.exit(main(sys.argv))
|
[
"krzysiek.st12@vp.pl"
] |
krzysiek.st12@vp.pl
|
9d5dc3893913b6b5006a5bcb85823c9e26f502f5
|
c923b6c128d59346b628f8e7efb44733dea75188
|
/serialize.py
|
20c0fad1302e25937454ce0dec9c66ff53788846
|
[] |
no_license
|
archcompyym/lab_1
|
391fc9d9e4aaa11d19c80d1b27c2a8fa0d867d37
|
29a0ceec5fefe41935b939670d7e7eaa0d4725ab
|
refs/heads/master
| 2021-01-19T02:50:48.285775
| 2017-06-07T09:55:16
| 2017-06-07T09:55:16
| 87,296,523
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 817
|
py
|
import pickle
import yaml
import json
pickle_type = "pickle"
yaml_type = "yaml"
json_type = "json"
def load(file_obj, serialize_method):
""" deserialize data"""
if serialize_method == pickle_type:
return pickle.load(file_obj)
elif serialize_method == yaml_type:
return yaml.load(file_obj)
elif serialize_method == json_type:
return json.load(file_obj)
def save(data, file_obj, serialize_method):
""" serialize data """
if serialize_method == pickle_type:
if file_obj:
return pickle.dump(data, file_obj)
else:
return pickle.dumps(data)
elif serialize_method == yaml_type:
return yaml.dump(data, file_obj, default_flow_style=False)
elif serialize_method == json_type:
        return json.dump(data, file_obj)
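# --- Added usage sketch (illustration only; the file name is made up) ---
if __name__ == "__main__":
    sample = {"a": 1, "b": [2, 3]}
    with open("example.json", "w") as f:
        save(sample, f, json_type)
    with open("example.json") as f:
        assert load(f, json_type) == sample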
|
[
"vimikeva@gmail.com"
] |
vimikeva@gmail.com
|
24558780b52d38e43180b755205c210b0960481d
|
80308dc6fcbc87294d4ae96c4b30d0aac6e88d6a
|
/multifield_batch_update/dialogs/change_log.py
|
33ff4545cf46192dd83b0ce3824a14148f628e55
|
[
"Apache-2.0"
] |
permissive
|
d3v3l0/anki_multifield_batch_update
|
0d2924b4d2c164d7aee5702fb6b648878da5aaea
|
4960a6e969256ff0df2ebf37312e80fa009fcddb
|
refs/heads/master
| 2022-01-07T00:08:58.959963
| 2019-07-16T04:54:29
| 2019-07-16T04:54:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,817
|
py
|
# Copyright 2019 Matthew Hayes
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import datetime
import os
import traceback
from aqt.qt import (QDialog, QDialogButtonBox, QFileDialog, QFontDatabase, QHBoxLayout, QLabel, QPlainTextEdit,
QStandardPaths, Qt, QVBoxLayout)
from aqt.utils import askUser, tooltip
from ..db.change_log import ChangeLog
class ChangeLogDialog(QDialog):
"""Dialog to view changelog"""
def __init__(self, browser):
super().__init__(parent=browser)
self.browser = browser
self.changelog = ChangeLog()
self.display_limit = 500
self._setup_ui()
def _setup_ui(self):
self.setWindowTitle("View Log")
self.setMinimumWidth(600)
self.setMinimumHeight(400)
vbox = QVBoxLayout()
vbox.addLayout(self._ui_top_row())
vbox.addWidget(self._ui_log())
vbox.addLayout(self._ui_bottom_row())
self.setLayout(vbox)
self.fillLog()
def _ui_top_row(self):
hbox = QHBoxLayout()
hbox.addWidget(QLabel("Last {} updates".format(self.display_limit)))
return hbox
def _ui_log(self):
self.log = QPlainTextEdit()
self.log.setTabChangesFocus(False)
self.log.setReadOnly(True)
font = QFontDatabase.systemFont(QFontDatabase.FixedFont)
font.setPointSize(self.log.font().pointSize() - 2)
self.log.setFont(font)
return self.log
def _ui_bottom_row(self):
hbox = QHBoxLayout()
buttons = QDialogButtonBox(Qt.Horizontal, self)
# Button to export changelog to a CSV file
export_btn = buttons.addButton("&Export full history",
QDialogButtonBox.ActionRole)
export_btn.setToolTip("Export full history to CSV")
export_btn.clicked.connect(lambda _: self.onExport())
# Button to close this dialog
close_btn = buttons.addButton("&Close",
QDialogButtonBox.RejectRole)
close_btn.clicked.connect(self.close)
hbox.addWidget(buttons)
return hbox
def fillLog(self):
append_to_log = self.log.appendPlainText
self.has_records = False
for rec in reversed(self.changelog.db.all("""
select op, ts, nid, fld, old, new from changelog
order by ts desc
limit {}
""".format(self.display_limit))):
self.has_records = True
op, ts, nid, fld, old, new = rec
dt = datetime.datetime.utcfromtimestamp(ts / 1000)
ts_formatted = dt.strftime("%Y-%m-%dT%H:%M:%S")
append_to_log("""{} [{}] Change {} of nid {}:\n{}\n=>\n{}\n""".format(
ts_formatted, op, fld, nid, old, new))
# Ensure QPlainTextEdit refreshes (not clear why this is necessary)
self.log.repaint()
def onExport(self):
append_to_log = self.log.appendPlainText
if not self.has_records:
tooltip("Log is empty")
return
try:
ext = ".csv"
default_path = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation)
path = os.path.join(default_path, f"changes{ext}")
options = QFileDialog.Options()
            # the native dialog doesn't seem to work
options |= QFileDialog.DontUseNativeDialog
# we'll confirm ourselves
options |= QFileDialog.DontConfirmOverwrite
result = QFileDialog.getSaveFileName(
self, "Save CSV", path, f"CSV (*{ext})",
options=options)
if not isinstance(result, tuple):
raise Exception("Expected a tuple from save dialog")
file = result[0]
if file:
do_save = True
if not file.lower().endswith(ext):
file += ext
if os.path.exists(file):
if not askUser("{} already exists. Are you sure you want to overwrite it?".format(file),
parent=self):
do_save = False
if do_save:
append_to_log("Saving to {}".format(file))
with open(file, "w", encoding="utf-8") as outf:
field_names = ["ts", "op", "nid", "fld", "old", "new"]
writer = csv.DictWriter(outf, fieldnames=field_names)
writer.writeheader()
for rec in self.changelog.db.all("""
select op, ts, nid, fld, old, new from changelog
order by ts asc
"""):
op, ts, nid, fld, old, new = rec
writer.writerow({
"op": op,
"ts": ts,
"nid": nid,
"fld": fld,
"old": old,
"new": new
})
append_to_log("Done")
except Exception:
append_to_log("Failed while writing CSV:\n{}".format(traceback.format_exc()))
|
[
"matthew.terence.hayes@gmail.com"
] |
matthew.terence.hayes@gmail.com
|
5dd3020194be4c171728f463e6545624855f4e17
|
0ce2222dea1171a118fa248eb4c5e5f0f6c653a3
|
/tests/main_test.py
|
cb849772a8efd556c8df21c09b3cbf5d4deb0f8b
|
[] |
no_license
|
ijandric97/pawtrails-api
|
b7e801edee740f1ba8e78d94c9bee147f09074a1
|
04f821ba92642d36eed7131388e52a8a7e17134c
|
refs/heads/master
| 2023-08-05T04:49:46.486757
| 2021-08-20T13:17:12
| 2021-08-20T13:17:12
| 365,889,839
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 210
|
py
|
from fastapi.testclient import TestClient
def test_healthcheck(client: TestClient) -> None:
response = client.get("/healthcheck")
assert response.status_code == 200
assert response.json() == "OK"
|
[
"ijandric97@gmail.com"
] |
ijandric97@gmail.com
|
8ba3cfe7aac4b2be3b6c7f45f3b277c2dfa3d565
|
4de1ebd9870e264128a9bbdce1f25af5256de543
|
/Functions2.py
|
453854b2456236d4ccdd5d8020a59e86f3b7aeb2
|
[] |
no_license
|
lhenslee/superscreener
|
80d2bbfcf750f3f7b0b42d2b21b223f1ba680dbc
|
b92374d8b392be2bca36191f36505234a2949492
|
refs/heads/master
| 2021-01-23T20:38:33.655630
| 2017-09-11T20:59:26
| 2017-09-11T20:59:26
| 102,870,037
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,430
|
py
|
from alpha_vantage.timeseries import TimeSeries
from bs4 import BeautifulSoup as bs
import datetime as dt
from decimal import *
import numpy as np
import os
import pandas as pd
import pandas_datareader as pdr
from pandas_datareader._utils import RemoteDataError
import pickle
import threading
from urllib.request import urlopen as uReq
from queue import Queue
my_api = 'SJVPVKHFF14WYJ5Q'
class C:
screen_folder = ''
pickle_file = ''
csv_folder = ''
screener_csv = ''
analysis_file = ''
# Used in setting up the list of tickers
def set_recent_screener(a,b):
C.screen_folder = a
C.pickle_file = b
def new_ticker_list(my_url):
count = 1
tickers = []
uClient = uReq(my_url)
page_html = uClient.read()
uClient.close()
soup = bs(page_html,'html.parser')
screen_name = soup.title.text.replace('Stock Screener - Overview','').title().strip()
list_name = screen_name.replace(" ","")+'.pickle'
C.screen_folder = 'Recent Screens/'+screen_name+'/'
C.pickle_file = C.screen_folder+list_name
if not os.path.exists(C.screen_folder):
os.makedirs(C.screen_folder)
page_classes = soup.findAll('a',{'class':'screener-pages'})
last_page_holder = len(page_classes)-1
last_page = page_classes[last_page_holder].text
print('You have '+str(last_page)+' pages to go through.')
for number in range(int(last_page)):
my_url = my_url+'&r='+str(count)
uClient = uReq(my_url)
page_html = uClient.read()
soup = bs(page_html,'html.parser')
count+=20
ticker_holders = soup.findAll('a',{'class':'screener-link-primary'})
for ticker_holder in ticker_holders:
ticker = ticker_holder.text
tickers.append(ticker)
with open(C.pickle_file,'wb') as f:
pickle.dump(tickers,f)
print(tickers)
# Used in gathering data for each ticker in the list
class V:
Date = []
Open = []
High = []
Low = []
Close = []
Vol = []
def pct_oc(i):
return str('%.2f' %(100*(V.Close[i]-V.Open[i])/V.Open[i]))+'%'
def pct_gap(i):
if i<1:
return 'nan'
else:
return str('%.2f' %(100*(V.Open[i]-V.Close[i-1])/V.Close[i-1]))+'%'
def pct_cc(i):
if i<1:
return 'nan'
else:
return str('%.2f' %(100*(V.Close[i]-V.Close[i-1])/V.Close[i-1]))+'%'
def pct_2(i):
if i<2:
return 'nan'
else:
return str('%.2f' %(100*(V.Close[i]-V.Close[i-2])/V.Close[i-2]))+'%'
def pct_3(i):
if i<3:
return 'nan'
else:
return str('%.2f' %(100*(V.Close[i]-V.Close[i-3])/V.Close[i-3]))+'%'
def make_percents(csv_file):
f = open(C.csv_folder+csv_file,'w')
headers = 'Date,Open,High,Low,Close,Volume,O/C%,Gap%,C/C%,2Days,3Days+\n'
f.write(headers)
for i in range(len(V.Date)):
line = (str(V.Date[i])+','+str(V.Open[i])+','+str(V.High[i])+','+str(V.Low[i])+','+str(V.Close[i])+','+
str(V.Vol[i])+','+pct_oc(i)+','+pct_gap(i)+','+pct_cc(i)+','+pct_2(i)+','+pct_3(i)+'\n')
f.write(line)
f.close()
def get_data(start_year,start_month,start_day,end_year,end_month,end_day):
with open(C.pickle_file,'rb') as f:
tickers = pickle.load(f)
start_string = str(start_year)+'-'+str(start_month)+'-'+str(start_day)
end_string = str(end_year)+'-'+str(end_month)+'-'+str(end_day)
start = dt.datetime(start_year,start_month,start_day)
end = dt.datetime(end_year,end_month,end_day)
C.csv_folder = 'stock_dfs/'+start_string+' to '+end_string+'/'
if not os.path.exists(C.csv_folder):
os.makedirs(C.csv_folder)
for ticker in tickers:
csv_file = ticker+'.csv'
if not os.path.exists(C.csv_folder+csv_file):
try:
df = pdr.DataReader(ticker,'google',start,end).reset_index()
V.Date = df['Date']
V.Open = df['Open']
V.High = df['High']
V.Low = df['Low']
V.Close = df['Close']
V.Vol = df['Volume']
make_percents(csv_file)
except RemoteDataError:
print('Unable to read '+ticker)
tickers.remove(ticker)
with open(C.pickle_file,'wb') as f:
pickle.dump(tickers,f)
''' This screener takes screen criteria that the user inputs
into the GUI.'''
class S:
Date,Open,High,Low,Close,Vol,oc,gap,cc,cc2,cc3 = ([] for i in range(11))
(d1min,d1max,d2min,d2max,d3min,d3max,
gmin,gmax,pmin,pmax,vmin,vmax) = ('' for i in range(12))
oo2ar,oc2ar,ocar,ccar,cc2ar,gapar =([] for i in range(6))
def set_entries(d1min,d1max,d2min,d2max,d3min,d3max,gmin,gmax,pmin,pmax,vmin,vmax):
S.d1min = d1min
S.d1max = d1max
S.d2min = d2min
S.d2max = d2max
S.d3min = d3min
S.d3max = d3max
S.gmin = gmin
S.gmax = gmax
S.pmin = pmin
S.pmax = pmax
S.vmin = vmin
S.vmax = vmax
def big_nan(var):
pos = str(1000000000000)
return Decimal(var.replace('%','').replace('nan',pos))
def small_nan(var):
neg = str(-1000000000000)
return Decimal(var.replace('%','').replace('nan',neg))
def makeD(var):
return Decimal(var.replace('%',''))
def comb_screens(f,ticker):
for i in range(1,len(S.Date)-2):
if (small_nan(S.cc[i])>S.d1min and big_nan(S.cc[i])<S.d1max and
small_nan(S.cc2[i])>S.d2min and big_nan(S.cc2[i])<S.d2max and
            small_nan(S.cc3[i])>S.d3min and big_nan(S.cc3[i])<S.d3max and
small_nan(S.gap[i+1])>S.gmin and big_nan(S.gap[i+1])<S.gmax and
small_nan(S.Open[i])>S.pmin and big_nan(S.Open[i])<S.pmax and
            small_nan(S.Vol[i])>S.vmin and big_nan(S.Vol[i])<S.vmax):
oc2 = str('%.2f' %(100*(makeD(S.Close[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
oo2 = str('%.2f' %(100*(makeD(S.Open[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
S.oo2ar.append(oo2)
S.oc2ar.append(oc2)
S.ocar.append(S.oc[i+1])
S.ccar.append(S.cc[i+1])
S.cc2ar.append(S.cc[i+2])
S.gapar.append(S.gap[i+1])
line = (ticker+','+S.Date[i]+','+S.Open[i]+','+S.High[i]+','+S.Low[i]+','+
S.Close[i]+','+S.Vol[i]+','+S.oc[i]+','+S.gap[i]+','+S.cc[i]+','+S.cc2[i]+
','+S.cc3[i]+',,'+S.Open[i+1]+','+S.High[i+1]+','+S.Low[i+1]+','+S.Close[i+1]+
','+S.Vol[i+1]+',,'+S.oc[i+1]+','+oo2+','+oc2+','+S.cc[i+1]+','+
S.cc[i+2]+','+S.gap[i+1]+'\n')
f.write(line)
def firstred(f,ticker):
for i in range(1,len(S.Date)-2):
if (small_nan(S.cc[i])>S.d1min and big_nan(S.cc[i])<S.d1max and
small_nan(S.cc2[i])>S.d2min and big_nan(S.cc2[i])<S.d2max and
            small_nan(S.cc3[i])>S.d3min and big_nan(S.cc3[i])<S.d3max and
small_nan(S.gap[i+1])>S.gmin and big_nan(S.gap[i+1])<S.gmax and
small_nan(S.Open[i])>S.pmin and big_nan(S.Open[i])<S.pmax and
            small_nan(S.Vol[i])>S.vmin and big_nan(S.Vol[i])<S.vmax and
small_nan(S.cc[i-1])>0):
oc2 = str('%.2f' %(100*(makeD(S.Close[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
oo2 = str('%.2f' %(100*(makeD(S.Open[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
S.oo2ar.append(oo2)
S.oc2ar.append(oc2)
S.ocar.append(S.oc[i+1])
S.ccar.append(S.cc[i+1])
S.cc2ar.append(S.cc[i+2])
S.gapar.append(S.gap[i+1])
line = (ticker+','+S.Date[i]+','+S.Open[i]+','+S.High[i]+','+S.Low[i]+','+
S.Close[i]+','+S.Vol[i]+','+S.oc[i]+','+S.gap[i]+','+S.cc[i]+','+S.cc2[i]+
','+S.cc3[i]+',,'+S.Open[i+1]+','+S.High[i+1]+','+S.Low[i+1]+','+S.Close[i+1]+
','+S.Vol[i+1]+',,'+S.oc[i+1]+','+oo2+','+oc2+','+S.cc[i+1]+','+
S.cc[i+2]+','+S.gap[i+1]+'\n')
f.write(line)
def firstgreen(f,ticker):
for i in range(1,len(S.Date)-2):
if (small_nan(S.cc[i])>S.d1min and big_nan(S.cc[i])<S.d1max and
small_nan(S.cc2[i])>S.d2min and big_nan(S.cc2[i])<S.d2max and
            small_nan(S.cc3[i])>S.d3min and big_nan(S.cc3[i])<S.d3max and
small_nan(S.gap[i+1])>S.gmin and big_nan(S.gap[i+1])<S.gmax and
small_nan(S.Open[i])>S.pmin and big_nan(S.Open[i])<S.pmax and
            small_nan(S.Vol[i])>S.vmin and big_nan(S.Vol[i])<S.vmax and
big_nan(S.cc[i-1])<0):
oc2 = str('%.2f' %(100*(makeD(S.Close[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
oo2 = str('%.2f' %(100*(makeD(S.Open[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
S.oo2ar.append(oo2)
S.oc2ar.append(oc2)
S.ocar.append(S.oc[i+1])
S.ccar.append(S.cc[i+1])
S.cc2ar.append(S.cc[i+2])
S.gapar.append(S.gap[i+1])
line = (ticker+','+S.Date[i]+','+S.Open[i]+','+S.High[i]+','+S.Low[i]+','+
S.Close[i]+','+S.Vol[i]+','+S.oc[i]+','+S.gap[i]+','+S.cc[i]+','+S.cc2[i]+
','+S.cc3[i]+',,'+S.Open[i+1]+','+S.High[i+1]+','+S.Low[i+1]+','+S.Close[i+1]+
','+S.Vol[i+1]+',,'+S.oc[i+1]+','+oo2+','+oc2+','+S.cc[i+1]+','+
S.cc[i+2]+','+S.gap[i+1]+'\n')
f.write(line)
def analysis(col):
total = len(col)
win_count = 0
sums = 0
sumdown = 0
sumup = 0
for i in range(total):
sums+=makeD(col[i])
if makeD(col[i])<0:
sumdown+=makeD(col[i])
if makeD(col[i])>0:
win_count+=1
sumup+=makeD(col[i])
win_percent = str('%.2f' %(win_count/total*100))+'%'
avgdown = str('%.2f' %(sumdown/total))+'%'
avgup = str('%.2f' %(sumup/total))+'%'
avg = str('%.2f' %(sums/total))
return (win_percent, avgup, str(sumup)+'%', avgdown, str(sumdown)+'%', avg, str(sums)+
'%', (','.join([win_percent, avgup, str(sumup)+'%', avgdown, str(sumdown)+'%', avg, str(sums)+
'%\n'])))
def screener_analysis():
C.analysis_file = C.screen_folder+'analysis.csv'
f = open(C.analysis_file,'w')
file = ('Strats,Win%,Avg Up,Sum Up,Avg Down,Sum Down,Avg,Sum,'+str(len(S.ocar))+' Passed'+'\n'+
'O/C,'+analysis(S.ocar)[7]+'O/O2,'+analysis(S.oo2ar)[7]+'O/C2,'+analysis(S.oc2ar)[7]+'C/C,'+analysis(S.ccar)[7]+
'C/C2,'+analysis(S.cc2ar)[7]+'Gap,'+analysis(S.gapar)[7])
f.write(file)
f.close()
def screener(gr,rg):
S.oo2ar,S.oc2ar,S.ocar,S.ccar,S.cc2ar,S.gapar =([] for i in range(6))
C.screener_csv = C.screen_folder+'screener.csv'
with open(C.pickle_file,'rb') as f:
tickers = pickle.load(f)
f = open(C.screener_csv,'w')
headers = ('Ticker,Date,Open,High,Low,Close,Volume,O/C,Gap,C/C,2Days,3Days,Day2'+
               ',Open,High,Low,Close,Volume,Strats,O1/C1,O/O2,O1/C2,C/C1,C1/C2,Gap\n')
f.write(headers)
for ticker in tickers:
try:
(S.Date,S.Open,S.High,S.Low,S.Close,S.Vol,S.oc,
S.gap,S.cc,S.cc2,S.cc3) = np.loadtxt(C.csv_folder+ticker+'.csv',
delimiter=',',
unpack=True,
dtype='str')
if gr==1 and rg==0:
firstred(f,ticker)
if gr==0 and rg==1:
firstgreen(f,ticker)
if gr==1 and rg==1:
pass
if gr==0 and rg==0:
comb_screens(f,ticker)
except InvalidOperation:
print(ticker+' was banned for being empty')
tickers.remove(ticker)
f.close()
screener_analysis()
with open(C.pickle_file,'wb') as f:
pickle.dump(tickers,f)
return C.analysis_file
'''
This is the screener of every combination of screeners. A screen will be ran
for each input for the screener function. The user can identify stocks needed
to pass the screen, winning percent, and the average needed for the screen
strategy to appear. The user close on the first, second and third day. User may
specify the previous day as a positive or negative. User can decide whether or
not gap ups are involved in checking the strategies as well.
'''
class ms:
stocks_passed = 15
win_needed = 80
loss_needed = 20
avg_needed = 5
numbers = [0,5,10,15,20]
line_1day = []
tickers = []
volume = 10000
def set_ms_crit(passed,win,loss,avg):
ms.line_1day = []
ms.stocks_passed = passed
ms.win_needed = win
ms.loss_needed = loss
ms.avg_needed = avg
def single_day_screen(d1min):
(S.oo2ar,S.oc2ar,S.ocar,S.ccar,S.cc2ar,S.gapar) =([] for i in range(6))
for ticker in ms.tickers:
try:
(S.Date,S.Open,S.High,S.Low,S.Close,S.Vol,S.oc,
S.gap,S.cc,S.cc2,S.cc3) = np.loadtxt(C.csv_folder+ticker+'.csv',
delimiter=',',
unpack=True,
dtype='str')
for i in range(1,len(S.Date)-2):
if small_nan(S.cc[i])>d1min:
oc2 = str('%.2f' %(100*(makeD(S.Close[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
oo2 = str('%.2f' %(100*(makeD(S.Open[i+2])-makeD(S.Open[i+1]))/makeD(S.Open[i+1])))+'%'
S.oo2ar.append(oo2)
S.oc2ar.append(oc2)
S.ocar.append(S.oc[i+1])
S.ccar.append(S.cc[i+1])
S.cc2ar.append(S.cc[i+2])
S.gapar.append(S.gap[i+1])
strats = ['O/O2','O/C2','O/C','C/C1','C/C2','Gap']
count = 0
for x in [S.oo2ar,S.oc2ar,S.ocar,S.ccar,S.cc2ar,S.gapar]:
temp = analysis(x)
if (makeD(temp[0])>ms.win_needed and makeD(temp[5])>ms.avg_needed and len(S.ocar)>ms.stocks_passed or
makeD(temp[0])<ms.loss_needed and makeD(temp[5])<-ms.avg_needed and len(S.ocar)>ms.stocks_passed):
ms.line_1day.append(','.join([strats[count], str(d1min)+'%', temp[0], temp[5], temp[6], str(len(S.ocar))]))
count+=1
except (InvalidOperation, FileNotFoundError):
print(ticker+' was banned for being empty')
ms.tickers.remove(ticker)
def print_strats():
with open(C.pickle_file,'rb') as f:
ms.tickers = pickle.load(f)
C.best_screens_file = C.screen_folder+'bestscreens.csv'
f = open(C.best_screens_file,'w')
f.write('Strat,1Day min,Win%,Avg,Sum,Passed\n')
for d1min in ms.numbers:
single_day_screen(d1min)
file = '\n'.join(ms.line_1day)
f.write(file)
f.close()
with open(C.pickle_file,'wb') as f:
pickle.dump(ms.tickers,f)
return C.best_screens_file
|
[
"ilanetheboss@debianlane.lane.com"
] |
ilanetheboss@debianlane.lane.com
|
8e9117700b1d68aac821093d352d3191e6d3d04b
|
77296ca8e8e7d52fdf5b7a51edfd81b4d2a6411e
|
/{{cookiecutter.project_slug}}/tests/params.py
|
28a95129ea1c626d96bc858e9f7aefc608dc0884
|
[] |
no_license
|
ann-cooper/cookiecutter-estimator
|
686bd73805e8305adda4a04024a3460583c61c42
|
f68bdf6dc041128b071b9ff85e1405746567b553
|
refs/heads/main
| 2023-03-24T01:38:47.668365
| 2021-03-23T11:12:24
| 2021-03-23T11:12:24
| 349,731,710
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,008
|
py
|
from collections import Counter
rules_params = [
("1. 1, 1 returns 3", 1, 1, 3, False),
("2. 0, 1 returns 3", 0, 1, 3, False),
("3. 1,0 returns 1", 1, 0, 1, False),
("4. 3, 0 returns 3", 3, 0, 1, False),
("5. 0, 2 returns 3", 0, 2, 3, False),
("6. 3, 2 returns 5", 3, 2, 5, False),
("7. 3, 1 returns 5", 3, 1, 3, False),
("8. 3, 4 returns too_many_points", 3, 4, False, True),
("9. 0, 2 returns 3", 0, 2, 3, False),
("10. 4, 0 returns 3", 4, 0, 3, False),
("11. 6, 0 returns 5", 6, 0, 5, False),
("12. 0, 3 returns 5", 0, 3, 5, False),
]
rules_ids = [x[0] for x in rules_params]
rules_invalid = [
("1. 0,0 raises exception", 0, 0, False),
("2. None input raises exception", None, None, False),
("3. Negative numbers raises exception", -1, -2, False),
("4. Non-integer input raises exception", "a", "b", False),
]
invalid_ids = [x[0] for x in rules_invalid]
profile_params = [
(
"1. Valid options and work factors",
["type1"],
{
"simple": 2,
"complex": 1,
"counts": Counter({"simple": 2, "A": 1, "B": 1, "complex": 1, "E": 1}),
},
)
]
profile_ids = [x[0] for x in profile_params]
estimate_points = [
(
"1. Valid profile input succeeds",
(2, 1, Counter({"simple": 2, "A": 1, "B": 1, "complex": 1, "E": 1})),
3,
{"E", "A", "B"},
),
(
"2. Invalid options and work factors",
(
4,
4,
Counter(
{
"simple": 4,
"A": 1,
"B": 1,
"C": 1,
"H": 1,
"complex": 4,
"E": 1,
"D": 1,
"F": 1,
"G": 1,
}
),
),
False,
{"A", "B", "C", "H", "E", "D", "F", "G"},
),
]
estimate_points_ids = [x[0] for x in estimate_points]
|
[
"cooperannc@gmail.com"
] |
cooperannc@gmail.com
|
013654ac9a924e69f4700208a6c896d4f852b00f
|
3afab94a839e6a3b3c393adc489d49d3f95e325f
|
/core/api/viewsets.py
|
ff0065ac63d86c96d4b140574d35b45b14660695
|
[] |
no_license
|
gustavolio/DjangoREST-APIPontosTuristicos
|
ec9b88560bd4b619cdb0ae7e53b11a5c97e0aa36
|
f1bae32a5979eefea14c4ee02dc647fa8041aa37
|
refs/heads/master
| 2023-01-23T10:49:25.542990
| 2020-12-03T20:02:44
| 2020-12-03T20:02:44
| 284,767,780
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,969
|
py
|
from rest_framework import viewsets, filters
from .serializers import PontoTuristicoSerializer
from core.models import PontoTuristico
from rest_framework.permissions import IsAuthenticated
# from rest_framework.permissions import IsAdminUser
from rest_framework.authentication import TokenAuthentication
class PontoTuristicoViewSet(viewsets.ModelViewSet):
serializer_class = PontoTuristicoSerializer
filter_backends = [filters.SearchFilter]
permission_classes = [IsAuthenticated]
authentication_classes = [TokenAuthentication]
search_fields = ['nome', 'descricao', 'endereco__linha1']
# lookup_field = 'nome'
def get_queryset(self):
id = self.request.query_params.get('id', None)
nome = self.request.query_params.get('nome', None)
descricao = self.request.query_params.get('descricao', None)
query_set = PontoTuristico.objects.all()
if id:
query_set = query_set.filter(id=id)
if nome:
query_set = query_set.filter(nome__iexact=nome)
if descricao:
query_set = query_set.filter(descricao__iexact=descricao)
return query_set
def list(self, request, *args, **kwargs):
return super(PontoTuristicoViewSet, self).list(request, *args, **kwargs)
def create(self, request, *args, **kwargs):
return super(PontoTuristicoViewSet, self).create(request, *args, **kwargs)
def destroy(self, request, *args, **kwargs):
return super(PontoTuristicoViewSet, self). destroy(request, *args, **kwargs)
def retrieve(self, request, *args, **kwargs):
return super(PontoTuristicoViewSet, self).retrieve(request, *args, **kwargs)
def update(self, request, *args, **kwargs):
return super(PontoTuristicoViewSet, self).update(request, *args, **kwargs)
    def partial_update(self, request, *args, **kwargs):
        return super(PontoTuristicoViewSet, self).partial_update(request, *args, **kwargs)
|
[
"g.cdcomp@gmail.com"
] |
g.cdcomp@gmail.com
|
7a557dc7b524ac47a104f6c74e8d7e1557c61e19
|
93b6ec0a0b7d350e42a96f662bd8c19038915373
|
/scripts/gen_release.py
|
dc62bf7ab977f1199b48f0e2e110064bddfc86ea
|
[
"Apache-2.0"
] |
permissive
|
thought-machine/please-servers
|
10c2fc20207249d18c116f2baa5d1c76a5a95c5f
|
d31c2a2ade0b80aac9067b00a43d7d0a073489d7
|
refs/heads/master
| 2023-08-22T13:58:36.814065
| 2023-08-15T14:43:24
| 2023-08-15T14:43:24
| 232,153,786
| 7
| 12
|
Apache-2.0
| 2023-08-24T08:39:55
| 2020-01-06T17:47:53
|
Go
|
UTF-8
|
Python
| false
| false
| 3,661
|
py
|
#!/usr/bin/env python3
"""Script to create Github releases."""
import hashlib
import json
import logging
import os
import subprocess
import sys
import zipfile
from third_party.python import colorlog, requests
from third_party.python.absl import app, flags
logging.root.handlers[0].setFormatter(colorlog.ColoredFormatter('%(log_color)s%(levelname)s: %(message)s'))
flags.DEFINE_string('github_token', os.environ.get('GITHUB_TOKEN'), 'Github API token')
flags.DEFINE_bool('dry_run', False, "Don't actually do the release, just print it.")
flags.mark_flag_as_required('github_token')
FLAGS = flags.FLAGS
class ReleaseGen:
def __init__(self, github_token:str, dry_run:bool=False):
self.url = 'https://api.github.com'
self.releases_url = self.url + '/repos/thought-machine/please-servers/releases'
self.upload_url = self.releases_url.replace('api.', 'uploads.') + '/<id>/assets?name='
self.session = requests.Session()
self.session.verify = '/etc/ssl/certs/ca-certificates.crt'
if not dry_run:
self.session.headers.update({
'Accept': 'application/vnd.github.v3+json',
'Authorization': 'token ' + github_token,
})
self.version = self.read_file('VERSION').strip()
self.version_name = 'Version ' + self.version
def needs_release(self):
"""Returns true if the current version is not yet released to Github."""
url = self.releases_url + '/tags/v' + self.version
logging.info('Checking %s for release...', url)
response = self.session.get(url)
return response.status_code == 404
def release(self):
"""Submits a new release to Github."""
data = {
'tag_name': 'v' + self.version,
'target_commitish': os.environ.get('CIRCLE_SHA1'),
'name': 'v' + self.version,
'body': '',
'prerelease': False,
'draft': False,
}
if FLAGS.dry_run:
logging.info('Would post the following to Github: %s', json.dumps(data, indent=4))
return
logging.info('Creating release: %s', json.dumps(data, indent=4))
response = self.session.post(self.releases_url, json=data)
response.raise_for_status()
data = response.json()
self.upload_url = data['upload_url'].replace('{?name,label}', '?name=')
logging.info('Release id %s created', data['id'])
def upload(self, artifact:str):
"""Uploads the given artifact to the new release."""
filename = os.path.basename(artifact)
content_type = 'application/octet-stream'
url = self.upload_url + filename
if FLAGS.dry_run:
logging.info('Would upload %s to %s as %s', filename, url, content_type)
return
logging.info('Uploading %s to %s as %s', filename, url, content_type)
with open(artifact, 'rb') as f:
self.session.headers.update({'Content-Type': content_type})
response = self.session.post(url, data=f)
response.raise_for_status()
print('%s uploaded' % filename)
def read_file(self, filename):
"""Read a file from the .pex."""
with zipfile.ZipFile(sys.argv[0]) as zf:
return zf.read(filename).decode('utf-8')
def main(argv):
r = ReleaseGen(FLAGS.github_token, dry_run=FLAGS.dry_run)
if not r.needs_release():
logging.info('Current version has already been released, nothing to be done!')
return
r.release()
for artifact in argv[1:]:
r.upload(artifact)
if __name__ == '__main__':
app.run(main)
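# Hedged usage sketch, not part of the original script: when this file is built
# into a .pex with a VERSION file embedded (read_file() above reads it from the
# archive), a release run might look roughly like the following; the artifact
# name is a placeholder.
#
#   GITHUB_TOKEN=<token> ./gen_release.pex dist/server_linux_amd64.zip
#
# needs_release() is consulted first, so re-running for an already tagged
# version is a no-op; pass --dry_run to only print the payloads instead of
# posting them to GitHub.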
|
[
"peter.ebden@gmail.com"
] |
peter.ebden@gmail.com
|
2c876b1f1e1c38c15823f76a07a89de077f6621b
|
b6ba70e8535ccd4df1d14dc7c07a093642e281a0
|
/examples/function_and_class/knn.py
|
f7a495bf0ea5c9b8a18579b30927ac40d3e3b7a3
|
[
"MIT"
] |
permissive
|
Obarads/torchpcp
|
99b233c086617697dfc7e5f0c04ae16aff247daf
|
86e19cc5c1196f22f609f2d98504b913272cbba8
|
refs/heads/master
| 2023-07-05T20:44:20.485218
| 2021-08-16T01:53:47
| 2021-08-16T01:53:47
| 293,244,333
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 51,451
|
py
|
import numpy as np
import torch
from torch.utils.data import DataLoader
# local package
from libs import tpcpath
from libs.dataset import SimpleSceneDataset
from libs.three_nn import three_nn # PointRCNN
# torch-points-kernels
import torch_points_kernels as tpk
# torchpcp package
from torchpcp.modules.functional.other import index2points
from torchpcp.modules.functional.sampling import furthest_point_sampling
from torchpcp.modules.functional import nns
from torchpcp.utils.monitor import timecheck
from torchpcp.utils import pytorch_tools
# pytorch_tools.set_seed(0)
device = pytorch_tools.select_device("cuda")
def speed_test(method, loader):
for i, data in enumerate(loader): pass # warm-up pass so loader start-up cost does not skew the timing
# print name
if method == 0:
t_name = "original c++ impl. time"
elif method == 1:
t_name = "original py impl. time"
elif method == 2:
t_name = "other c++ impl. time"
elif method == 3:
t_name = "tpk impl. time"
else:
raise NotImplementedError()
# timer start
t = timecheck()
for _ in range(100):
for i, data in enumerate(loader):
point_clouds, sem_labels, ins_labels = data
point_clouds = torch.transpose(point_clouds[:, :, :3].to(device), 1, 2)
center_idxs = furthest_point_sampling(point_clouds, 1024)
center_pc = index2points(point_clouds, center_idxs)
if method == 0:
pred, _ = nns.k_nearest_neighbors(center_pc, point_clouds, k=3)
elif method == 1:
pred, _ = nns.py_k_nearest_neighbors(center_pc, point_clouds, k=3, memory_saving=False)
elif method == 2:
pred = three_nn(center_pc.transpose(1,2).contiguous(), point_clouds.transpose(1,2).contiguous())
elif method == 3:
pred, _ = tpk.knn(point_clouds.transpose(1,2).contiguous(), center_pc.transpose(1,2).contiguous(), 3)
else:
raise NotImplementedError()
# timer end
timecheck(t, t_name)
gt, _ = nns.py_k_nearest_neighbors(center_pc, point_clouds, k=3, memory_saving=False)
print(False in (pred == gt))
np.set_printoptions(threshold=np.inf)
torch.set_printoptions(threshold=np.inf)
# get dataset
dataset = SimpleSceneDataset()
points, sem_label, ins_label = dataset[0]
pc = torch.tensor([points[:, :3]], device="cuda").transpose(1,2)
# compare knn
k = 20
idx, dist = nns.k_nearest_neighbors(pc, pc, k)
idx2, dist2 = nns.py_k_nearest_neighbors(pc, pc, k)
check_idx = idx == idx2
if True:
for ib in range(len(check_idx)):
b_idxs = check_idx[ib]
for i_n in range(len(b_idxs)):
n_idxs = b_idxs[i_n]
if False in n_idxs:
for i_p in range(len(n_idxs)):
k_idxs = n_idxs[i_p]
if False == k_idxs:
print("pyknn ib {}, in {}, ip {} dist {} idx {}".format(ib, i_n, i_p, dist[ib, i_n, i_p], idx[ib, i_n, i_p]))
# print("pybq and dist2 {}".format(dist[ib, i_n, i_p]))
print("dist {} idx {}".format(dist2[ib, i_n, i_p], idx2[ib, i_n, i_p]))
# else:
# print("pyknn ib {}, in {}, ip {} dist {} idx {}".format(ib, i_n, i_p, dist[ib, i_n, i_p], idx[ib, i_n, i_p]))
print("CHECK1:", False in (check_idx))
# ↑ result (a part)
"""
pyknn ib 0, in 3397, ip 5 dist 0.0005849996232427657 idx 2927
dist 0.0005850791931152344 idx 542
pyknn ib 0, in 3398, ip 10 dist 0.009334004484117031 idx 3892
dist 0.00933384895324707 idx 1082
pyknn ib 0, in 3398, ip 11 dist 0.009334005415439606 idx 1082
dist 0.00933384895324707 idx 3892
pyknn ib 0, in 3402, ip 6 dist 0.01157099287956953 idx 1313
dist 0.011570453643798828 idx 2092
pyknn ib 0, in 3402, ip 7 dist 0.01157099287956953 idx 2092
dist 0.011570453643798828 idx 1313
pyknn ib 0, in 3402, ip 9 dist 0.013051005080342293 idx 2169
dist 0.013050079345703125 idx 2962
pyknn ib 0, in 3402, ip 10 dist 0.013051005080342293 idx 2962
dist 0.013050079345703125 idx 2169
pyknn ib 0, in 3402, ip 14 dist 0.019140997901558876 idx 128
dist 0.019140243530273438 idx 1118
pyknn ib 0, in 3402, ip 15 dist 0.019140997901558876 idx 1118
dist 0.019140243530273438 idx 128
pyknn ib 0, in 3403, ip 2 dist 0.0013459994224831462 idx 3251
dist 0.001345992088317871 idx 3264
pyknn ib 0, in 3403, ip 3 dist 0.0013459994224831462 idx 3264
dist 0.001345992088317871 idx 3251
pyknn ib 0, in 3404, ip 13 dist 0.005812999792397022 idx 2701
dist 0.0058135986328125 idx 3754
pyknn ib 0, in 3404, ip 14 dist 0.005812999792397022 idx 3754
dist 0.0058135986328125 idx 2701
pyknn ib 0, in 3407, ip 4 dist 0.002997000701725483 idx 1947
dist 0.0029970109462738037 idx 3656
pyknn ib 0, in 3407, ip 5 dist 0.002997000701725483 idx 3656
dist 0.0029970109462738037 idx 1947
pyknn ib 0, in 3409, ip 0 dist 0.0 idx 1269
dist -2.384185791015625e-07 idx 3409
pyknn ib 0, in 3409, ip 1 dist 0.0 idx 3409
dist -2.384185791015625e-07 idx 1269
pyknn ib 0, in 3409, ip 17 dist 0.008043000474572182 idx 1042
dist 0.00804293155670166 idx 1482
pyknn ib 0, in 3409, ip 18 dist 0.008043000474572182 idx 1482
dist 0.00804293155670166 idx 1042
pyknn ib 0, in 3410, ip 11 dist 0.005334001034498215 idx 1024
dist 0.00533401221036911 idx 2834
pyknn ib 0, in 3410, ip 12 dist 0.005334001034498215 idx 2834
dist 0.00533401221036911 idx 1024
pyknn ib 0, in 3413, ip 0 dist 0.0 idx 152
dist 0.0 idx 3413
pyknn ib 0, in 3413, ip 1 dist 0.0 idx 3413
dist 0.0 idx 152
pyknn ib 0, in 3413, ip 14 dist 0.010057998821139336 idx 2104
dist 0.010058045387268066 idx 2583
pyknn ib 0, in 3413, ip 15 dist 0.010057998821139336 idx 2583
dist 0.010058045387268066 idx 2104
pyknn ib 0, in 3414, ip 9 dist 0.00410101655870676 idx 2055
dist 0.004100799560546875 idx 2936
pyknn ib 0, in 3414, ip 10 dist 0.00410101655870676 idx 2936
dist 0.004100799560546875 idx 2055
pyknn ib 0, in 3415, ip 1 dist 0.00021200145420152694 idx 1176
dist 0.0002117156982421875 idx 3866
pyknn ib 0, in 3415, ip 2 dist 0.00021200145420152694 idx 3866
dist 0.0002117156982421875 idx 1176
pyknn ib 0, in 3415, ip 11 dist 0.007836993783712387 idx 3519
dist 0.007837295532226562 idx 4066
pyknn ib 0, in 3415, ip 12 dist 0.007836993783712387 idx 4066
dist 0.007837295532226562 idx 3519
pyknn ib 0, in 3416, ip 4 dist 0.001897998503409326 idx 3325
dist 0.001898050308227539 idx 3729
pyknn ib 0, in 3416, ip 5 dist 0.001897998503409326 idx 3729
dist 0.001898050308227539 idx 3325
pyknn ib 0, in 3421, ip 7 dist 0.004901004955172539 idx 1018
dist 0.00490117073059082 idx 3982
pyknn ib 0, in 3421, ip 8 dist 0.004901004955172539 idx 3982
dist 0.00490117073059082 idx 1018
pyknn ib 0, in 3429, ip 3 dist 0.00023299954773392528 idx 1024
dist 0.00023300200700759888 idx 2834
pyknn ib 0, in 3429, ip 4 dist 0.00023299954773392528 idx 2834
dist 0.00023300200700759888 idx 1024
pyknn ib 0, in 3430, ip 13 dist 0.00894500408321619 idx 810
dist 0.008943557739257812 idx 3766
pyknn ib 0, in 3430, ip 14 dist 0.00894500408321619 idx 3766
dist 0.008943557739257812 idx 810
pyknn ib 0, in 3440, ip 4 dist 0.003592999652028084 idx 2143
dist 0.0035930201411247253 idx 4072
pyknn ib 0, in 3440, ip 5 dist 0.003592999652028084 idx 4072
dist 0.0035930201411247253 idx 2143
pyknn ib 0, in 3440, ip 11 dist 0.006083999294787645 idx 2987
dist 0.006083987653255463 idx 868
pyknn ib 0, in 3440, ip 12 dist 0.006084000691771507 idx 868
dist 0.00608399510383606 idx 2987
pyknn ib 0, in 3440, ip 18 dist 0.010029001161456108 idx 0
dist 0.010029010474681854 idx 1032
pyknn ib 0, in 3440, ip 19 dist 0.010029001161456108 idx 1032
dist 0.010029010474681854 idx 0
pyknn ib 0, in 3441, ip 16 dist 0.007686000782996416 idx 0
dist 0.007686004042625427 idx 1032
pyknn ib 0, in 3441, ip 17 dist 0.007686000782996416 idx 1032
dist 0.007686004042625427 idx 1850
pyknn ib 0, in 3441, ip 18 dist 0.007686000782996416 idx 1850
dist 0.007686004042625427 idx 0
pyknn ib 0, in 3442, ip 11 dist 0.004209999926388264 idx 1947
dist 0.004209995269775391 idx 3656
pyknn ib 0, in 3442, ip 12 dist 0.004209999926388264 idx 3656
dist 0.004209995269775391 idx 1947
pyknn ib 0, in 3445, ip 10 dist 0.004147999454289675 idx 488
dist 0.004148006439208984 idx 2434
pyknn ib 0, in 3445, ip 11 dist 0.004147999454289675 idx 2434
dist 0.004148006439208984 idx 488
pyknn ib 0, in 3456, ip 7 dist 0.0031460002064704895 idx 1024
dist 0.0031460076570510864 idx 2834
pyknn ib 0, in 3456, ip 8 dist 0.0031460002064704895 idx 2834
dist 0.0031460076570510864 idx 1024
pyknn ib 0, in 3456, ip 9 dist 0.004821000155061483 idx 363
dist 0.004821024835109711 idx 694
pyknn ib 0, in 3456, ip 10 dist 0.004821000155061483 idx 694
dist 0.004821024835109711 idx 363
pyknn ib 0, in 3456, ip 11 dist 0.004900998901575804 idx 2953
dist 0.004901014268398285 idx 3429
pyknn ib 0, in 3456, ip 12 dist 0.004900998901575804 idx 3429
dist 0.004901014268398285 idx 2953
pyknn ib 0, in 3461, ip 13 dist 0.013138998299837112 idx 1212
dist 0.013138949871063232 idx 3522
pyknn ib 0, in 3461, ip 14 dist 0.013138998299837112 idx 3522
dist 0.013138949871063232 idx 1212
pyknn ib 0, in 3462, ip 0 dist 0.0 idx 3462
dist 0.0 idx 3465
pyknn ib 0, in 3462, ip 1 dist 0.0 idx 3465
dist 0.0 idx 3462
pyknn ib 0, in 3465, ip 0 dist 0.0 idx 3462
dist 0.0 idx 3465
pyknn ib 0, in 3465, ip 1 dist 0.0 idx 3465
dist 0.0 idx 3462
pyknn ib 0, in 3470, ip 9 dist 0.0076049999333918095 idx 281
dist 0.007604971528053284 idx 2437
pyknn ib 0, in 3470, ip 10 dist 0.0076049999333918095 idx 2437
dist 0.007604971528053284 idx 281
pyknn ib 0, in 3472, ip 18 dist 0.014760998077690601 idx 158
dist 0.014760971069335938 idx 1404
pyknn ib 0, in 3472, ip 19 dist 0.014760998077690601 idx 1404
dist 0.014760971069335938 idx 158
pyknn ib 0, in 3474, ip 12 dist 0.010493999347090721 idx 666
dist 0.010493874549865723 idx 2543
pyknn ib 0, in 3474, ip 13 dist 0.010493999347090721 idx 2543
dist 0.010493874549865723 idx 666
pyknn ib 0, in 3474, ip 15 dist 0.011125998571515083 idx 1590
dist 0.011126160621643066 idx 2927
pyknn ib 0, in 3474, ip 16 dist 0.011125998571515083 idx 2927
dist 0.011126160621643066 idx 1590
pyknn ib 0, in 3480, ip 4 dist 0.012818001210689545 idx 1951
dist 0.012817949056625366 idx 2710
pyknn ib 0, in 3480, ip 5 dist 0.012818001210689545 idx 2710
dist 0.012817949056625366 idx 1951
pyknn ib 0, in 3481, ip 2 dist 0.000298000784823671 idx 128
dist 0.0002970695495605469 idx 1118
pyknn ib 0, in 3481, ip 3 dist 0.000298000784823671 idx 1118
dist 0.0002970695495605469 idx 128
pyknn ib 0, in 3481, ip 9 dist 0.004514000378549099 idx 1313
dist 0.004513740539550781 idx 2092
pyknn ib 0, in 3481, ip 10 dist 0.004514000378549099 idx 2092
dist 0.004513740539550781 idx 1313
pyknn ib 0, in 3488, ip 6 dist 0.005204994697123766 idx 1304
dist 0.005205094814300537 idx 2654
pyknn ib 0, in 3488, ip 7 dist 0.005204994697123766 idx 2654
dist 0.005205094814300537 idx 1304
pyknn ib 0, in 3488, ip 11 dist 0.00811500009149313 idx 2397
dist 0.008115053176879883 idx 4050
pyknn ib 0, in 3488, ip 12 dist 0.00811500009149313 idx 4050
dist 0.008115053176879883 idx 2397
pyknn ib 0, in 3491, ip 18 dist 0.008860995061695576 idx 1304
dist 0.008860945701599121 idx 2654
pyknn ib 0, in 3491, ip 19 dist 0.008860995061695576 idx 2654
dist 0.008860945701599121 idx 1304
pyknn ib 0, in 3495, ip 0 dist 0.0 idx 1425
dist 0.0 idx 3495
pyknn ib 0, in 3495, ip 1 dist 0.0 idx 3495
dist 0.0 idx 1425
pyknn ib 0, in 3500, ip 18 dist 0.013730003498494625 idx 1487
dist 0.01372992992401123 idx 3004
pyknn ib 0, in 3500, ip 19 dist 0.013730003498494625 idx 3004
dist 0.01372992992401123 idx 1487
pyknn ib 0, in 3501, ip 2 dist 0.0018849981715902686 idx 1225
dist 0.0018854141235351562 idx 1637
pyknn ib 0, in 3501, ip 3 dist 0.0018849981715902686 idx 1637
dist 0.0018854141235351562 idx 1225
pyknn ib 0, in 3504, ip 3 dist 0.0021460067946463823 idx 869
dist 0.0021452903747558594 idx 2454
pyknn ib 0, in 3504, ip 4 dist 0.0021460067946463823 idx 2454
dist 0.0021452903747558594 idx 869
pyknn ib 0, in 3507, ip 10 dist 0.006412987597286701 idx 1487
dist 0.0064127445220947266 idx 3004
pyknn ib 0, in 3507, ip 11 dist 0.006412987597286701 idx 3004
dist 0.0064127445220947266 idx 1487
pyknn ib 0, in 3508, ip 1 dist 0.001189997885376215 idx 1052
dist 0.0011899471282958984 idx 2093
pyknn ib 0, in 3508, ip 2 dist 0.001189997885376215 idx 2093
dist 0.0011899471282958984 idx 1052
pyknn ib 0, in 3509, ip 1 dist 0.0012090002419427037 idx 1304
dist 0.001208961009979248 idx 2654
pyknn ib 0, in 3509, ip 2 dist 0.0012090002419427037 idx 2654
dist 0.001208961009979248 idx 1304
pyknn ib 0, in 3511, ip 8 dist 0.002437000395730138 idx 2701
dist 0.002437591552734375 idx 3754
pyknn ib 0, in 3511, ip 9 dist 0.002437000395730138 idx 3754
dist 0.002437591552734375 idx 2701
pyknn ib 0, in 3512, ip 10 dist 0.006725003011524677 idx 937
dist 0.006725311279296875 idx 2228
pyknn ib 0, in 3512, ip 11 dist 0.006725003011524677 idx 2228
dist 0.006725311279296875 idx 937
pyknn ib 0, in 3512, ip 18 dist 0.014550035819411278 idx 1400
dist 0.014549732208251953 idx 3069
pyknn ib 0, in 3512, ip 19 dist 0.014550035819411278 idx 3069
dist 0.014549732208251953 idx 1400
pyknn ib 0, in 3519, ip 0 dist 0.0 idx 3519
dist 0.0 idx 4066
pyknn ib 0, in 3519, ip 1 dist 0.0 idx 4066
dist 0.0 idx 3519
pyknn ib 0, in 3519, ip 2 dist 0.0004419961478561163 idx 829
dist 0.00044155120849609375 idx 3173
pyknn ib 0, in 3519, ip 3 dist 0.0004419961478561163 idx 3173
dist 0.00044155120849609375 idx 829
pyknn ib 0, in 3520, ip 1 dist 0.00022599961084779352 idx 818
dist 0.00022605061531066895 idx 3318
pyknn ib 0, in 3520, ip 2 dist 0.00022599961084779352 idx 3318
dist 0.00022605061531066895 idx 818
pyknn ib 0, in 3524, ip 10 dist 0.006727982312440872 idx 810
dist 0.006728172302246094 idx 3766
pyknn ib 0, in 3524, ip 11 dist 0.006727982312440872 idx 3766
dist 0.006728172302246094 idx 810
pyknn ib 0, in 3525, ip 4 dist 0.001409997814334929 idx 1649
dist 0.0014100074768066406 idx 3471
pyknn ib 0, in 3525, ip 5 dist 0.001409997814334929 idx 3471
dist 0.0014100074768066406 idx 1649
pyknn ib 0, in 3526, ip 6 dist 0.00230899965390563 idx 1123
dist 0.0023088455200195312 idx 2937
pyknn ib 0, in 3526, ip 7 dist 0.00230899965390563 idx 2937
dist 0.0023088455200195312 idx 1123
pyknn ib 0, in 3527, ip 10 dist 0.009864001534879208 idx 1212
dist 0.009863987565040588 idx 3522
pyknn ib 0, in 3527, ip 11 dist 0.009864001534879208 idx 3522
dist 0.009863987565040588 idx 1212
pyknn ib 0, in 3528, ip 3 dist 0.0019439997849985957 idx 1947
dist 0.0019440054893493652 idx 3656
pyknn ib 0, in 3528, ip 4 dist 0.0019439997849985957 idx 3656
dist 0.0019440054893493652 idx 1947
pyknn ib 0, in 3530, ip 11 dist 0.004930997267365456 idx 1871
dist 0.0049304962158203125 idx 3541
pyknn ib 0, in 3530, ip 12 dist 0.004930997267365456 idx 3541
dist 0.0049304962158203125 idx 1871
pyknn ib 0, in 3535, ip 2 dist 0.00040199910290539265 idx 2176
dist 0.00040340423583984375 idx 2815
pyknn ib 0, in 3535, ip 3 dist 0.00040199910290539265 idx 2815
dist 0.00040340423583984375 idx 2176
pyknn ib 0, in 3541, ip 0 dist 0.0 idx 1871
dist -1.9073486328125e-06 idx 3541
pyknn ib 0, in 3541, ip 1 dist 0.0 idx 3541
dist -1.9073486328125e-06 idx 1871
pyknn ib 0, in 3546, ip 11 dist 0.005651996936649084 idx 2316
dist 0.005651950836181641 idx 2761
pyknn ib 0, in 3546, ip 12 dist 0.005651996936649084 idx 2761
dist 0.005651950836181641 idx 2316
pyknn ib 0, in 3553, ip 16 dist 0.003945999778807163 idx 2502
dist 0.003945887088775635 idx 3168
pyknn ib 0, in 3553, ip 17 dist 0.003945999778807163 idx 3168
dist 0.003945887088775635 idx 2502
pyknn ib 0, in 3560, ip 11 dist 0.00826500728726387 idx 2745
dist 0.008264780044555664 idx 3933
pyknn ib 0, in 3560, ip 12 dist 0.00826500728726387 idx 3933
dist 0.008264780044555664 idx 2745
pyknn ib 0, in 3567, ip 7 dist 0.004524988122284412 idx 1297
dist 0.00452423095703125 idx 2984
pyknn ib 0, in 3567, ip 8 dist 0.004524995107203722 idx 2984
dist 0.00452423095703125 idx 1297
pyknn ib 0, in 3575, ip 2 dist 0.0017249988159164786 idx 2854
dist 0.0017247200012207031 idx 3996
pyknn ib 0, in 3575, ip 3 dist 0.0017249988159164786 idx 3996
dist 0.0017247200012207031 idx 2854
pyknn ib 0, in 3579, ip 1 dist 0.00023399594647344202 idx 810
dist 0.000232696533203125 idx 3766
pyknn ib 0, in 3579, ip 2 dist 0.00023399594647344202 idx 3766
dist 0.000232696533203125 idx 810
pyknn ib 0, in 3579, ip 10 dist 0.008099023252725601 idx 1093
dist 0.008098602294921875 idx 2320
pyknn ib 0, in 3579, ip 11 dist 0.008099023252725601 idx 2320
dist 0.008098602294921875 idx 1093
pyknn ib 0, in 3581, ip 9 dist 0.006384999491274357 idx 2176
dist 0.00638580322265625 idx 2815
pyknn ib 0, in 3581, ip 10 dist 0.006384999491274357 idx 2815
dist 0.00638580322265625 idx 2176
pyknn ib 0, in 3582, ip 11 dist 0.006272999569773674 idx 1952
dist 0.00627291202545166 idx 2823
pyknn ib 0, in 3582, ip 12 dist 0.006272999569773674 idx 2823
dist 0.00627291202545166 idx 1952
pyknn ib 0, in 3583, ip 1 dist 0.0015939960721880198 idx 1313
dist 0.0015935897827148438 idx 2092
pyknn ib 0, in 3583, ip 2 dist 0.0015939960721880198 idx 2092
dist 0.0015935897827148438 idx 1313
pyknn ib 0, in 3594, ip 12 dist 0.006574000231921673 idx 849
dist 0.006572723388671875 idx 1128
pyknn ib 0, in 3594, ip 13 dist 0.006574000231921673 idx 1128
dist 0.006572723388671875 idx 849
pyknn ib 0, in 3598, ip 18 dist 0.008245003409683704 idx 193
dist 0.008244991302490234 idx 231
pyknn ib 0, in 3598, ip 19 dist 0.008245003409683704 idx 231
dist 0.008244991302490234 idx 193
pyknn ib 0, in 3600, ip 11 dist 0.0064890035428106785 idx 1059
dist 0.006489008665084839 idx 3232
pyknn ib 0, in 3600, ip 12 dist 0.0064890035428106785 idx 3232
dist 0.006489008665084839 idx 1059
pyknn ib 0, in 3610, ip 14 dist 0.007984996773302555 idx 1925
dist 0.00798499584197998 idx 2416
pyknn ib 0, in 3610, ip 15 dist 0.007984996773302555 idx 2416
dist 0.00798499584197998 idx 1925
pyknn ib 0, in 3612, ip 5 dist 0.0035329984966665506 idx 2316
dist 0.003532886505126953 idx 2761
pyknn ib 0, in 3612, ip 6 dist 0.0035329984966665506 idx 2761
dist 0.003532886505126953 idx 2316
pyknn ib 0, in 3621, ip 10 dist 0.006896963343024254 idx 937
dist 0.006896495819091797 idx 2228
pyknn ib 0, in 3621, ip 11 dist 0.006896963343024254 idx 2228
dist 0.006896495819091797 idx 937
pyknn ib 0, in 3627, ip 11 dist 0.008366001769900322 idx 767
dist 0.008365988731384277 idx 1375
pyknn ib 0, in 3627, ip 13 dist 0.008366001769900322 idx 1375
dist 0.008365988731384277 idx 767
pyknn ib 0, in 3629, ip 12 dist 0.0073130009695887566 idx 403
dist 0.007312774658203125 idx 2045
pyknn ib 0, in 3629, ip 13 dist 0.0073130009695887566 idx 2045
dist 0.007312774658203125 idx 403
pyknn ib 0, in 3631, ip 0 dist 0.0 idx 3631
dist 0.0 idx 4089
pyknn ib 0, in 3631, ip 1 dist 0.0 idx 4089
dist 0.0 idx 3631
pyknn ib 0, in 3634, ip 18 dist 0.009790007025003433 idx 2397
dist 0.00978994369506836 idx 4050
pyknn ib 0, in 3634, ip 19 dist 0.009790007025003433 idx 4050
dist 0.00978994369506836 idx 2397
pyknn ib 0, in 3635, ip 0 dist 0.0 idx 1906
dist 9.5367431640625e-07 idx 3635
pyknn ib 0, in 3635, ip 1 dist 0.0 idx 3635
dist 9.5367431640625e-07 idx 1906
pyknn ib 0, in 3635, ip 7 dist 0.006624998524785042 idx 2790
dist 0.006625652313232422 idx 3722
pyknn ib 0, in 3635, ip 8 dist 0.006624998524785042 idx 3722
dist 0.006625652313232422 idx 2790
pyknn ib 0, in 3637, ip 18 dist 0.010817998088896275 idx 1123
dist 0.010816574096679688 idx 2937
pyknn ib 0, in 3637, ip 19 dist 0.010817998088896275 idx 2937
dist 0.010816574096679688 idx 1123
pyknn ib 0, in 3638, ip 0 dist 0.0 idx 2780
dist -4.76837158203125e-07 idx 3638
pyknn ib 0, in 3638, ip 1 dist 0.0 idx 3638
dist -4.76837158203125e-07 idx 2780
pyknn ib 0, in 3642, ip 4 dist 0.0014510012697428465 idx 363
dist 0.0014509856700897217 idx 694
pyknn ib 0, in 3642, ip 5 dist 0.0014510012697428465 idx 694
dist 0.0014509856700897217 idx 363
pyknn ib 0, in 3645, ip 8 dist 0.0035089822486042976 idx 615
dist 0.0035085678100585938 idx 2297
pyknn ib 0, in 3645, ip 9 dist 0.0035089822486042976 idx 2297
dist 0.0035085678100585938 idx 615
pyknn ib 0, in 3653, ip 8 dist 0.005330007988959551 idx 236
dist 0.0053310394287109375 idx 1959
pyknn ib 0, in 3653, ip 9 dist 0.005330007988959551 idx 1959
dist 0.0053310394287109375 idx 236
pyknn ib 0, in 3653, ip 12 dist 0.007883005775511265 idx 483
dist 0.007884025573730469 idx 2764
pyknn ib 0, in 3653, ip 13 dist 0.007883005775511265 idx 2764
dist 0.007884025573730469 idx 483
pyknn ib 0, in 3655, ip 10 dist 0.005937003064900637 idx 962
dist 0.005936622619628906 idx 3213
pyknn ib 0, in 3655, ip 11 dist 0.005937003064900637 idx 3213
dist 0.005936622619628906 idx 962
pyknn ib 0, in 3656, ip 0 dist 0.0 idx 1947
dist 5.960464477539063e-08 idx 3656
pyknn ib 0, in 3656, ip 1 dist 0.0 idx 3656
dist 5.960464477539063e-08 idx 1947
pyknn ib 0, in 3668, ip 4 dist 0.002050999319180846 idx 829
dist 0.0020503997802734375 idx 3173
pyknn ib 0, in 3668, ip 5 dist 0.002050999319180846 idx 3173
dist 0.0020503997802734375 idx 829
pyknn ib 0, in 3668, ip 9 dist 0.0033809910528361797 idx 3519
dist 0.0033788681030273438 idx 4066
pyknn ib 0, in 3668, ip 10 dist 0.0033809910528361797 idx 4066
dist 0.0033788681030273438 idx 3519
pyknn ib 0, in 3673, ip 11 dist 0.005558010656386614 idx 1052
dist 0.0055577754974365234 idx 2093
pyknn ib 0, in 3673, ip 12 dist 0.005558010656386614 idx 2093
dist 0.0055577754974365234 idx 1052
pyknn ib 0, in 3674, ip 2 dist 0.0007700005662627518 idx 344
dist 0.0007699877023696899 idx 1751
pyknn ib 0, in 3674, ip 3 dist 0.0007700005662627518 idx 1751
dist 0.0007699877023696899 idx 344
pyknn ib 0, in 3675, ip 10 dist 0.006197004113346338 idx 3140
dist 0.006197214126586914 idx 2736
pyknn ib 0, in 3675, ip 11 dist 0.006197008304297924 idx 2736
dist 0.006197214126586914 idx 3140
pyknn ib 0, in 3682, ip 2 dist 0.000953999871853739 idx 1031
dist 0.0009539127349853516 idx 1100
pyknn ib 0, in 3682, ip 3 dist 0.000953999871853739 idx 1100
dist 0.0009539127349853516 idx 2711
pyknn ib 0, in 3682, ip 4 dist 0.000953999871853739 idx 2711
dist 0.0009539127349853516 idx 1031
pyknn ib 0, in 3682, ip 5 dist 0.002277006395161152 idx 744
dist 0.002276897430419922 idx 3909
pyknn ib 0, in 3682, ip 6 dist 0.002277006395161152 idx 3909
dist 0.002276897430419922 idx 744
pyknn ib 0, in 3685, ip 6 dist 0.0022769994102418423 idx 1032
dist 0.002277001738548279 idx 1850
pyknn ib 0, in 3685, ip 7 dist 0.0022769994102418423 idx 1850
dist 0.002277001738548279 idx 1032
pyknn ib 0, in 3686, ip 18 dist 0.012762016616761684 idx 962
dist 0.012761116027832031 idx 3213
pyknn ib 0, in 3686, ip 19 dist 0.012762016616761684 idx 3213
dist 0.012761116027832031 idx 962
pyknn ib 0, in 3699, ip 9 dist 0.006161997560411692 idx 158
dist 0.006161689758300781 idx 1404
pyknn ib 0, in 3699, ip 10 dist 0.006161997560411692 idx 1404
dist 0.006161689758300781 idx 158
pyknn ib 0, in 3703, ip 0 dist 0.0 idx 963
dist 0.0 idx 3703
pyknn ib 0, in 3703, ip 1 dist 0.0 idx 3703
dist 0.0 idx 963
pyknn ib 0, in 3705, ip 7 dist 0.005197002552449703 idx 869
dist 0.005197048187255859 idx 2454
pyknn ib 0, in 3705, ip 8 dist 0.005197002552449703 idx 2454
dist 0.005197048187255859 idx 869
pyknn ib 0, in 3706, ip 1 dist 0.0005649998784065247 idx 497
dist 0.0005649328231811523 idx 2785
pyknn ib 0, in 3706, ip 2 dist 0.0005649998784065247 idx 2785
dist 0.0005649328231811523 idx 497
pyknn ib 0, in 3707, ip 0 dist 0.0 idx 719
dist -4.76837158203125e-07 idx 3707
pyknn ib 0, in 3707, ip 1 dist 0.0 idx 3707
dist -4.76837158203125e-07 idx 719
pyknn ib 0, in 3714, ip 5 dist 0.0016370017547160387 idx 2328
dist 0.0016369819641113281 idx 3774
pyknn ib 0, in 3714, ip 6 dist 0.0016370017547160387 idx 3774
dist 0.0016369819641113281 idx 2328
pyknn ib 0, in 3715, ip 3 dist 0.000400999968405813 idx 636
dist 0.0004010051488876343 idx 3362
pyknn ib 0, in 3715, ip 4 dist 0.000400999968405813 idx 3362
dist 0.0004010051488876343 idx 636
pyknn ib 0, in 3717, ip 10 dist 0.0069059995003044605 idx 767
dist 0.0069060176610946655 idx 1375
pyknn ib 0, in 3717, ip 12 dist 0.0069059995003044605 idx 1375
dist 0.0069060176610946655 idx 767
pyknn ib 0, in 3720, ip 5 dist 0.0029769993852823973 idx 1024
dist 0.0029769912362098694 idx 2834
pyknn ib 0, in 3720, ip 6 dist 0.0029769993852823973 idx 2834
dist 0.0029769912362098694 idx 1024
pyknn ib 0, in 3720, ip 8 dist 0.0040999967604875565 idx 2953
dist 0.004099994897842407 idx 3429
pyknn ib 0, in 3720, ip 9 dist 0.0040999967604875565 idx 3429
dist 0.004099994897842407 idx 2953
pyknn ib 0, in 3721, ip 1 dist 0.0002810000441968441 idx 1647
dist 0.0002810955047607422 idx 2457
pyknn ib 0, in 3721, ip 2 dist 0.0002810000441968441 idx 2457
dist 0.0002810955047607422 idx 1647
pyknn ib 0, in 3721, ip 12 dist 0.006165006663650274 idx 282
dist 0.0061647891998291016 idx 2323
pyknn ib 0, in 3721, ip 13 dist 0.006165006663650274 idx 2323
dist 0.0061647891998291016 idx 282
pyknn ib 0, in 3722, ip 0 dist 0.0 idx 2790
dist 0.0 idx 3722
pyknn ib 0, in 3722, ip 1 dist 0.0 idx 3722
dist 0.0 idx 2790
pyknn ib 0, in 3722, ip 14 dist 0.006624998524785042 idx 1906
dist 0.006625652313232422 idx 3635
pyknn ib 0, in 3722, ip 15 dist 0.006624998524785042 idx 3635
dist 0.006625652313232422 idx 1906
pyknn ib 0, in 3723, ip 11 dist 0.006090002600103617 idx 497
dist 0.006090044975280762 idx 2785
pyknn ib 0, in 3723, ip 12 dist 0.006090002600103617 idx 2785
dist 0.006090044975280762 idx 497
pyknn ib 0, in 3725, ip 9 dist 0.004778000060468912 idx 683
dist 0.004777997732162476 idx 3377
pyknn ib 0, in 3725, ip 10 dist 0.004778000060468912 idx 2705
dist 0.004777997732162476 idx 683
pyknn ib 0, in 3725, ip 11 dist 0.004778000060468912 idx 3377
dist 0.004777997732162476 idx 2705
pyknn ib 0, in 3728, ip 5 dist 0.003074999898672104 idx 722
dist 0.0030749994330108166 idx 3776
pyknn ib 0, in 3728, ip 6 dist 0.003074999898672104 idx 3776
dist 0.0030749994330108166 idx 722
pyknn ib 0, in 3731, ip 16 dist 0.01155802421271801 idx 2854
dist 0.011558055877685547 idx 3996
pyknn ib 0, in 3731, ip 17 dist 0.01155802421271801 idx 3996
dist 0.011558055877685547 idx 2854
pyknn ib 0, in 3734, ip 2 dist 0.0009229975985363126 idx 1649
dist 0.0009236335754394531 idx 3471
pyknn ib 0, in 3734, ip 3 dist 0.0009229975985363126 idx 3471
dist 0.0009236335754394531 idx 1649
pyknn ib 0, in 3737, ip 0 dist 0.0 idx 3261
dist 0.0 idx 3737
pyknn ib 0, in 3737, ip 1 dist 0.0 idx 3737
dist 0.0 idx 3261
pyknn ib 0, in 3738, ip 3 dist 0.0022369935177266598 idx 959
dist 0.0022363662719726562 idx 2733
pyknn ib 0, in 3738, ip 4 dist 0.0022370037622749805 idx 2733
dist 0.0022363662719726562 idx 959
pyknn ib 0, in 3740, ip 1 dist 0.0019520005444064736 idx 1647
dist 0.0019516944885253906 idx 2457
pyknn ib 0, in 3740, ip 2 dist 0.0019520005444064736 idx 2457
dist 0.0019516944885253906 idx 1647
pyknn ib 0, in 3740, ip 4 dist 0.0031559993512928486 idx 282
dist 0.0031557083129882812 idx 2323
pyknn ib 0, in 3740, ip 5 dist 0.0031559993512928486 idx 2323
dist 0.0031557083129882812 idx 282
pyknn ib 0, in 3742, ip 11 dist 0.007034010253846645 idx 937
dist 0.007033348083496094 idx 2228
pyknn ib 0, in 3742, ip 12 dist 0.007034010253846645 idx 2228
dist 0.007033348083496094 idx 937
pyknn ib 0, in 3746, ip 4 dist 0.0036910013295710087 idx 1342
dist 0.003690958023071289 idx 2235
pyknn ib 0, in 3746, ip 5 dist 0.0036910013295710087 idx 2235
dist 0.003690958023071289 idx 1342
pyknn ib 0, in 3746, ip 6 dist 0.003737997729331255 idx 847
dist 0.003737926483154297 idx 2932
pyknn ib 0, in 3746, ip 7 dist 0.003737997729331255 idx 2932
dist 0.003737926483154297 idx 847
pyknn ib 0, in 3749, ip 11 dist 0.004900988657027483 idx 2055
dist 0.004900932312011719 idx 2936
pyknn ib 0, in 3749, ip 12 dist 0.004900988657027483 idx 2936
dist 0.004900932312011719 idx 2055
pyknn ib 0, in 3752, ip 6 dist 0.0036260022316128016 idx 1248
dist 0.0036258697509765625 idx 3939
pyknn ib 0, in 3752, ip 7 dist 0.0036260022316128016 idx 3939
dist 0.0036258697509765625 idx 1248
pyknn ib 0, in 3753, ip 10 dist 0.005917004309594631 idx 1881
dist 0.0059168338775634766 idx 2188
pyknn ib 0, in 3753, ip 11 dist 0.005917004309594631 idx 2188
dist 0.0059168338775634766 idx 1881
pyknn ib 0, in 3754, ip 0 dist 0.0 idx 2701
dist 0.0 idx 3754
pyknn ib 0, in 3754, ip 1 dist 0.0 idx 3754
dist 0.0 idx 2701
pyknn ib 0, in 3758, ip 18 dist 0.01014699973165989 idx 127
dist 0.010146856307983398 idx 2848
pyknn ib 0, in 3758, ip 19 dist 0.01014699973165989 idx 2848
dist 0.010146856307983398 idx 127
pyknn ib 0, in 3765, ip 3 dist 0.001645983662456274 idx 2854
dist 0.0016455650329589844 idx 3996
pyknn ib 0, in 3765, ip 4 dist 0.001645983662456274 idx 3996
dist 0.0016455650329589844 idx 2854
pyknn ib 0, in 3766, ip 0 dist 0.0 idx 810
dist -1.9073486328125e-06 idx 3766
pyknn ib 0, in 3766, ip 1 dist 0.0 idx 3766
dist -1.9073486328125e-06 idx 810
pyknn ib 0, in 3767, ip 5 dist 0.004002002067863941 idx 3251
dist 0.0040018558502197266 idx 3264
pyknn ib 0, in 3767, ip 6 dist 0.004002002067863941 idx 3264
dist 0.0040018558502197266 idx 3251
pyknn ib 0, in 3768, ip 9 dist 0.005507001653313637 idx 1167
dist 0.005507469177246094 idx 2174
pyknn ib 0, in 3768, ip 10 dist 0.005507001653313637 idx 2174
dist 0.005507469177246094 idx 1167
pyknn ib 0, in 3774, ip 0 dist 0.0 idx 2328
dist 0.0 idx 3774
pyknn ib 0, in 3774, ip 1 dist 0.0 idx 3774
dist 0.0 idx 2328
pyknn ib 0, in 3776, ip 0 dist 0.0 idx 722
dist 0.0 idx 3776
pyknn ib 0, in 3776, ip 1 dist 0.0 idx 3776
dist 0.0 idx 722
pyknn ib 0, in 3777, ip 0 dist 0.0 idx 1062
dist 5.960464477539063e-08 idx 3777
pyknn ib 0, in 3777, ip 2 dist 0.0 idx 3777
dist 5.960464477539063e-08 idx 1062
pyknn ib 0, in 3778, ip 14 dist 0.006602999288588762 idx 722
dist 0.006602998822927475 idx 3776
pyknn ib 0, in 3778, ip 15 dist 0.006602999288588762 idx 3776
dist 0.006602998822927475 idx 722
pyknn ib 0, in 3785, ip 17 dist 0.010439997538924217 idx 363
dist 0.01043999195098877 idx 694
pyknn ib 0, in 3785, ip 18 dist 0.010439997538924217 idx 694
dist 0.01043999195098877 idx 363
pyknn ib 0, in 3791, ip 16 dist 0.010437000542879105 idx 722
dist 0.01043699961155653 idx 3776
pyknn ib 0, in 3791, ip 17 dist 0.010437000542879105 idx 3776
dist 0.01043699961155653 idx 722
pyknn ib 0, in 3792, ip 3 dist 0.001552001223899424 idx 1871
dist 0.0015506744384765625 idx 3541
pyknn ib 0, in 3792, ip 4 dist 0.001552001223899424 idx 3541
dist 0.0015506744384765625 idx 1871
pyknn ib 0, in 3792, ip 18 dist 0.013074001297354698 idx 1167
dist 0.013072013854980469 idx 2174
pyknn ib 0, in 3792, ip 19 dist 0.013074001297354698 idx 2174
dist 0.013072013854980469 idx 1167
pyknn ib 0, in 3796, ip 18 dist 0.011904004961252213 idx 963
dist 0.011904239654541016 idx 3703
pyknn ib 0, in 3796, ip 19 dist 0.011904004961252213 idx 3703
dist 0.011904239654541016 idx 963
pyknn ib 0, in 3797, ip 1 dist 0.00010100007784785703 idx 3325
dist 0.00010102987289428711 idx 3729
pyknn ib 0, in 3797, ip 2 dist 0.00010100007784785703 idx 3729
dist 0.00010102987289428711 idx 3325
pyknn ib 0, in 3799, ip 3 dist 0.00041699971188791096 idx 2266
dist 0.0004177093505859375 idx 3388
pyknn ib 0, in 3799, ip 4 dist 0.00041699971188791096 idx 3388
dist 0.0004177093505859375 idx 2266
pyknn ib 0, in 3802, ip 14 dist 0.011641998775303364 idx 3383
dist 0.011641979217529297 idx 3439
pyknn ib 0, in 3802, ip 15 dist 0.011641998775303364 idx 3439
dist 0.011641979217529297 idx 3383
pyknn ib 0, in 3805, ip 7 dist 0.0040910011157393456 idx 792
dist 0.004091024398803711 idx 974
pyknn ib 0, in 3805, ip 8 dist 0.0040910011157393456 idx 974
dist 0.004091024398803711 idx 792
pyknn ib 0, in 3807, ip 14 dist 0.008724001236259937 idx 744
dist 0.008723974227905273 idx 3909
pyknn ib 0, in 3807, ip 15 dist 0.008724001236259937 idx 3909
dist 0.008723974227905273 idx 744
pyknn ib 0, in 3818, ip 2 dist 0.000551996985450387 idx 2458
dist 0.0005519390106201172 idx 2645
pyknn ib 0, in 3818, ip 3 dist 0.000551996985450387 idx 2645
dist 0.0005519390106201172 idx 2458
pyknn ib 0, in 3821, ip 14 dist 0.004901000298559666 idx 2701
dist 0.004901885986328125 idx 3754
pyknn ib 0, in 3821, ip 15 dist 0.004901000298559666 idx 3754
dist 0.004901885986328125 idx 2701
pyknn ib 0, in 3826, ip 12 dist 0.010700996033847332 idx 1031
dist 0.01070094108581543 idx 1100
pyknn ib 0, in 3826, ip 13 dist 0.010700996033847332 idx 1100
dist 0.01070094108581543 idx 1031
pyknn ib 0, in 3826, ip 16 dist 0.012152014300227165 idx 744
dist 0.01215219497680664 idx 3909
pyknn ib 0, in 3826, ip 17 dist 0.012152014300227165 idx 3909
dist 0.01215219497680664 idx 744
pyknn ib 0, in 3833, ip 9 dist 0.003604000201448798 idx 281
dist 0.003603994846343994 idx 2437
pyknn ib 0, in 3833, ip 10 dist 0.003604000201448798 idx 2437
dist 0.003603994846343994 idx 281
pyknn ib 0, in 3835, ip 7 dist 0.004132998175919056 idx 1093
dist 0.004132270812988281 idx 2320
pyknn ib 0, in 3835, ip 8 dist 0.004132998175919056 idx 2320
dist 0.004132270812988281 idx 1093
pyknn ib 0, in 3835, ip 18 dist 0.010659990832209587 idx 810
dist 0.010659217834472656 idx 3766
pyknn ib 0, in 3835, ip 19 dist 0.010659990832209587 idx 3766
dist 0.010659217834472656 idx 810
pyknn ib 0, in 3842, ip 8 dist 0.005413996987044811 idx 2767
dist 0.005414038896560669 idx 3503
pyknn ib 0, in 3842, ip 9 dist 0.005413996987044811 idx 3503
dist 0.005414038896560669 idx 2767
pyknn ib 0, in 3843, ip 12 dist 0.005192999728024006 idx 847
dist 0.005192995071411133 idx 2932
pyknn ib 0, in 3843, ip 13 dist 0.005192999728024006 idx 2932
dist 0.005192995071411133 idx 847
pyknn ib 0, in 3851, ip 7 dist 0.0037380007561296225 idx 1333
dist 0.003738000988960266 idx 1388
pyknn ib 0, in 3851, ip 8 dist 0.0037380007561296225 idx 1388
dist 0.003738000988960266 idx 1333
pyknn ib 0, in 3853, ip 2 dist 0.0003970006946474314 idx 818
dist 0.00039702653884887695 idx 3318
pyknn ib 0, in 3853, ip 3 dist 0.0003970006946474314 idx 3318
dist 0.00039702653884887695 idx 818
pyknn ib 0, in 3853, ip 18 dist 0.025717997923493385 idx 3462
dist 0.025718003511428833 idx 3465
pyknn ib 0, in 3853, ip 19 dist 0.025717997923493385 idx 3465
dist 0.025718003511428833 idx 3462
pyknn ib 0, in 3858, ip 3 dist 0.004686000291258097 idx 636
dist 0.004686005413532257 idx 3362
pyknn ib 0, in 3858, ip 4 dist 0.004686000291258097 idx 3362
dist 0.004686005413532257 idx 636
pyknn ib 0, in 3859, ip 8 dist 0.0045179990120232105 idx 2502
dist 0.004517912864685059 idx 3168
pyknn ib 0, in 3859, ip 9 dist 0.0045179990120232105 idx 3168
dist 0.004517912864685059 idx 2502
pyknn ib 0, in 3859, ip 17 dist 0.0062250057235360146 idx 2316
dist 0.006224989891052246 idx 2761
pyknn ib 0, in 3859, ip 18 dist 0.0062250057235360146 idx 2761
dist 0.006224989891052246 idx 2316
pyknn ib 0, in 3860, ip 1 dist 0.001565000507980585 idx 768
dist 0.00156499445438385 idx 2765
pyknn ib 0, in 3860, ip 2 dist 0.001565000507980585 idx 2765
dist 0.00156499445438385 idx 768
pyknn ib 0, in 3864, ip 8 dist 0.0025200005620718002 idx 1219
dist 0.0025200843811035156 idx 3352
pyknn ib 0, in 3864, ip 9 dist 0.0025200005620718002 idx 3352
dist 0.0025200843811035156 idx 1219
pyknn ib 0, in 3866, ip 9 dist 0.008149027824401855 idx 3519
dist 0.008148193359375 idx 4066
pyknn ib 0, in 3866, ip 10 dist 0.008149027824401855 idx 4066
dist 0.008148193359375 idx 3519
pyknn ib 0, in 3866, ip 18 dist 0.011526023969054222 idx 2377
dist 0.011525154113769531 idx 3132
pyknn ib 0, in 3866, ip 19 dist 0.011526023969054222 idx 3132
dist 0.011525154113769531 idx 2377
pyknn ib 0, in 3867, ip 9 dist 0.004133999813348055 idx 857
dist 0.004133939743041992 idx 2424
pyknn ib 0, in 3867, ip 10 dist 0.004133999813348055 idx 2424
dist 0.004133939743041992 idx 857
pyknn ib 0, in 3868, ip 18 dist 0.012170001864433289 idx 1333
dist 0.012169986963272095 idx 1388
pyknn ib 0, in 3868, ip 19 dist 0.012170001864433289 idx 1388
dist 0.012169986963272095 idx 1333
pyknn ib 0, in 3869, ip 3 dist 0.002033001510426402 idx 938
dist 0.0020329952239990234 idx 1350
pyknn ib 0, in 3869, ip 4 dist 0.002033001510426402 idx 1350
dist 0.0020329952239990234 idx 938
pyknn ib 0, in 3872, ip 4 dist 0.0024029994383454323 idx 683
dist 0.0024029985070228577 idx 2705
pyknn ib 0, in 3872, ip 5 dist 0.0024029994383454323 idx 2705
dist 0.0024029985070228577 idx 3377
pyknn ib 0, in 3872, ip 6 dist 0.0024029994383454323 idx 3377
dist 0.0024029985070228577 idx 683
pyknn ib 0, in 3873, ip 4 dist 0.0017179968999698758 idx 1304
dist 0.001717984676361084 idx 2654
pyknn ib 0, in 3873, ip 5 dist 0.0017179968999698758 idx 2654
dist 0.001717984676361084 idx 1304
pyknn ib 0, in 3874, ip 10 dist 0.007155999541282654 idx 366
dist 0.0071561336517333984 idx 3555
pyknn ib 0, in 3874, ip 11 dist 0.007155999541282654 idx 3555
dist 0.0071561336517333984 idx 366
pyknn ib 0, in 3883, ip 2 dist 0.0005449995514936745 idx 158
dist 0.0005464553833007812 idx 1404
pyknn ib 0, in 3883, ip 3 dist 0.0005449995514936745 idx 1404
dist 0.0005464553833007812 idx 158
pyknn ib 0, in 3884, ip 7 dist 0.0038480001967400312 idx 1018
dist 0.003847837448120117 idx 3982
pyknn ib 0, in 3884, ip 8 dist 0.0038480001967400312 idx 3982
dist 0.003847837448120117 idx 1018
pyknn ib 0, in 3887, ip 10 dist 0.006790000945329666 idx 3107
dist 0.006789207458496094 idx 1966
pyknn ib 0, in 3887, ip 11 dist 0.006790002807974815 idx 1966
dist 0.006789207458496094 idx 3107
pyknn ib 0, in 3890, ip 0 dist 0.0 idx 1426
dist 0.0 idx 3890
pyknn ib 0, in 3890, ip 1 dist 0.0 idx 3890
dist 0.0 idx 1426
pyknn ib 0, in 3890, ip 6 dist 0.0032750004902482033 idx 2328
dist 0.0032750368118286133 idx 3774
pyknn ib 0, in 3890, ip 7 dist 0.0032750004902482033 idx 3774
dist 0.0032750368118286133 idx 2328
pyknn ib 0, in 3894, ip 4 dist 0.002228998579084873 idx 1254
dist 0.0022287368774414062 idx 2552
pyknn ib 0, in 3894, ip 5 dist 0.002228998579084873 idx 2552
dist 0.0022287368774414062 idx 1254
pyknn ib 0, in 3899, ip 11 dist 0.006374003365635872 idx 497
dist 0.006373763084411621 idx 2785
pyknn ib 0, in 3899, ip 12 dist 0.006374003365635872 idx 2785
dist 0.006373763084411621 idx 497
pyknn ib 0, in 3902, ip 2 dist 0.000996999442577362 idx 1333
dist 0.0009969770908355713 idx 1388
pyknn ib 0, in 3902, ip 3 dist 0.000996999442577362 idx 1388
dist 0.0009969770908355713 idx 1333
pyknn ib 0, in 3906, ip 11 dist 0.008240998722612858 idx 768
dist 0.00824098289012909 idx 2765
pyknn ib 0, in 3906, ip 12 dist 0.008240998722612858 idx 2765
dist 0.00824098289012909 idx 768
pyknn ib 0, in 3909, ip 0 dist 0.0 idx 744
dist 0.0 idx 3909
pyknn ib 0, in 3909, ip 1 dist 0.0 idx 3909
dist 0.0 idx 744
pyknn ib 0, in 3909, ip 3 dist 0.0003130019176751375 idx 1031
dist 0.00031256675720214844 idx 2711
pyknn ib 0, in 3909, ip 4 dist 0.0003130019176751375 idx 1100
dist 0.00031256675720214844 idx 1031
pyknn ib 0, in 3909, ip 5 dist 0.0003130019176751375 idx 2711
dist 0.00031256675720214844 idx 1100
pyknn ib 0, in 3911, ip 3 dist 0.0003949997771997005 idx 2143
dist 0.00039499253034591675 idx 4072
pyknn ib 0, in 3911, ip 4 dist 0.0003949997771997005 idx 4072
dist 0.00039499253034591675 idx 2143
pyknn ib 0, in 3912, ip 18 dist 0.007466008421033621 idx 476
dist 0.007465362548828125 idx 784
pyknn ib 0, in 3912, ip 19 dist 0.007466008421033621 idx 784
dist 0.007465362548828125 idx 476
pyknn ib 0, in 3916, ip 12 dist 0.00708600040525198 idx 488
dist 0.007086008787155151 idx 2434
pyknn ib 0, in 3916, ip 13 dist 0.00708600040525198 idx 2434
dist 0.007086008787155151 idx 488
pyknn ib 0, in 3918, ip 13 dist 0.009893002919852734 idx 3325
dist 0.009893059730529785 idx 3729
pyknn ib 0, in 3918, ip 14 dist 0.009893002919852734 idx 3729
dist 0.009893059730529785 idx 3325
pyknn ib 0, in 3918, ip 18 dist 0.012113000266253948 idx 1426
dist 0.012112975120544434 idx 3890
pyknn ib 0, in 3918, ip 19 dist 0.012113000266253948 idx 3890
dist 0.012112975120544434 idx 1426
pyknn ib 0, in 3921, ip 3 dist 0.003403998911380768 idx 281
dist 0.003403991460800171 idx 2437
pyknn ib 0, in 3921, ip 4 dist 0.003403998911380768 idx 2437
dist 0.003403991460800171 idx 281
pyknn ib 0, in 3931, ip 5 dist 0.0017810005228966475 idx 2701
dist 0.001781463623046875 idx 3754
pyknn ib 0, in 3931, ip 6 dist 0.0017810005228966475 idx 3754
dist 0.001781463623046875 idx 2701
pyknn ib 0, in 3933, ip 0 dist 0.0 idx 2745
dist -4.76837158203125e-07 idx 3933
pyknn ib 0, in 3933, ip 1 dist 0.0 idx 3933
dist -4.76837158203125e-07 idx 2745
pyknn ib 0, in 3936, ip 13 dist 0.013471986167132854 idx 1031
dist 0.013471603393554688 idx 1100
pyknn ib 0, in 3936, ip 14 dist 0.013471986167132854 idx 1100
dist 0.013471603393554688 idx 2711
pyknn ib 0, in 3936, ip 15 dist 0.013471986167132854 idx 2711
dist 0.013471603393554688 idx 1031
pyknn ib 0, in 3936, ip 16 dist 0.013521000742912292 idx 744
dist 0.013520956039428711 idx 3909
pyknn ib 0, in 3936, ip 17 dist 0.013521000742912292 idx 3909
dist 0.013520956039428711 idx 744
pyknn ib 0, in 3938, ip 4 dist 0.004649000242352486 idx 1555
dist 0.004649162292480469 idx 2866
pyknn ib 0, in 3938, ip 5 dist 0.004649000242352486 idx 2866
dist 0.004649162292480469 idx 1555
pyknn ib 0, in 3939, ip 0 dist 0.0 idx 1248
dist 0.0 idx 3939
pyknn ib 0, in 3939, ip 1 dist 0.0 idx 3939
dist 0.0 idx 1248
pyknn ib 0, in 3940, ip 2 dist 0.002899995306506753 idx 1231
dist 0.002899169921875 idx 533
pyknn ib 0, in 3940, ip 3 dist 0.002900000661611557 idx 533
dist 0.002899646759033203 idx 1231
pyknn ib 0, in 3940, ip 14 dist 0.00899400096386671 idx 1021
dist 0.008993148803710938 idx 1857
pyknn ib 0, in 3940, ip 15 dist 0.00899400096386671 idx 1857
dist 0.008993148803710938 idx 1021
pyknn ib 0, in 3942, ip 1 dist 0.0005389998550526798 idx 2767
dist 0.0005390048027038574 idx 3503
pyknn ib 0, in 3942, ip 2 dist 0.0005389998550526798 idx 3503
dist 0.0005390048027038574 idx 2767
pyknn ib 0, in 3943, ip 5 dist 0.002406000392511487 idx 1426
dist 0.002406001091003418 idx 3890
pyknn ib 0, in 3943, ip 6 dist 0.002406000392511487 idx 3890
dist 0.002406001091003418 idx 1426
pyknn ib 0, in 3943, ip 9 dist 0.004201000090688467 idx 2328
dist 0.004200935363769531 idx 3774
pyknn ib 0, in 3943, ip 10 dist 0.004201000090688467 idx 3774
dist 0.004200935363769531 idx 2328
pyknn ib 0, in 3950, ip 8 dist 0.003989000804722309 idx 1718
dist 0.003989040851593018 idx 2775
pyknn ib 0, in 3950, ip 9 dist 0.003989000804722309 idx 2775
dist 0.003989040851593018 idx 1718
pyknn ib 0, in 3951, ip 8 dist 0.004125999752432108 idx 2266
dist 0.004126548767089844 idx 3388
pyknn ib 0, in 3951, ip 9 dist 0.004125999752432108 idx 3388
dist 0.004126548767089844 idx 2266
pyknn ib 0, in 3952, ip 5 dist 0.003297999268397689 idx 744
dist 0.0032978057861328125 idx 3909
pyknn ib 0, in 3952, ip 6 dist 0.003297999268397689 idx 3909
dist 0.0032978057861328125 idx 744
pyknn ib 0, in 3952, ip 9 dist 0.005625006277114153 idx 1031
dist 0.005624532699584961 idx 2711
pyknn ib 0, in 3952, ip 11 dist 0.005625006277114153 idx 2711
dist 0.005624532699584961 idx 1031
pyknn ib 0, in 3953, ip 9 dist 0.009367999620735645 idx 542
dist 0.009368062019348145 idx 1590
pyknn ib 0, in 3953, ip 10 dist 0.009367999620735645 idx 1590
dist 0.009368062019348145 idx 542
pyknn ib 0, in 3953, ip 16 dist 0.014188993722200394 idx 798
dist 0.014188885688781738 idx 3397
pyknn ib 0, in 3953, ip 17 dist 0.014188996516168118 idx 3397
dist 0.014189004898071289 idx 798
pyknn ib 0, in 3955, ip 6 dist 0.0023769985418766737 idx 3325
dist 0.002377033233642578 idx 3729
pyknn ib 0, in 3955, ip 7 dist 0.0023769985418766737 idx 3729
dist 0.002377033233642578 idx 3325
pyknn ib 0, in 3959, ip 6 dist 0.0041410005651414394 idx 2780
dist 0.004140615463256836 idx 3638
pyknn ib 0, in 3959, ip 7 dist 0.0041410005651414394 idx 3638
dist 0.004140615463256836 idx 2780
pyknn ib 0, in 3962, ip 4 dist 0.001824999344535172 idx 768
dist 0.0018250048160552979 idx 2765
pyknn ib 0, in 3962, ip 5 dist 0.001824999344535172 idx 2765
dist 0.0018250048160552979 idx 768
pyknn ib 0, in 3962, ip 11 dist 0.0032780019100755453 idx 281
dist 0.003278002142906189 idx 2437
pyknn ib 0, in 3962, ip 12 dist 0.0032780019100755453 idx 2437
dist 0.003278002142906189 idx 281
pyknn ib 0, in 3971, ip 12 dist 0.007450995501130819 idx 1157
dist 0.007451057434082031 idx 2023
pyknn ib 0, in 3971, ip 13 dist 0.007450995501130819 idx 2023
dist 0.007451057434082031 idx 1157
pyknn ib 0, in 3976, ip 1 dist 0.0003900023002643138 idx 963
dist 0.00039005279541015625 idx 3703
pyknn ib 0, in 3976, ip 2 dist 0.0003900023002643138 idx 3703
dist 0.00039005279541015625 idx 963
pyknn ib 0, in 3977, ip 5 dist 0.003753000171855092 idx 767
dist 0.0037530213594436646 idx 1375
pyknn ib 0, in 3977, ip 6 dist 0.003753000171855092 idx 954
dist 0.0037530213594436646 idx 767
pyknn ib 0, in 3977, ip 7 dist 0.003753000171855092 idx 1375
dist 0.0037530213594436646 idx 954
pyknn ib 0, in 3982, ip 0 dist 0.0 idx 1018
dist 0.0 idx 3982
pyknn ib 0, in 3982, ip 1 dist 0.0 idx 3982
dist 0.0 idx 1018
pyknn ib 0, in 3987, ip 3 dist 0.0009689986472949386 idx 282
dist 0.0009691715240478516 idx 2323
pyknn ib 0, in 3987, ip 4 dist 0.0009689986472949386 idx 2323
dist 0.0009691715240478516 idx 282
pyknn ib 0, in 3988, ip 8 dist 0.004057002253830433 idx 2745
dist 0.004056692123413086 idx 3933
pyknn ib 0, in 3988, ip 9 dist 0.004057002253830433 idx 3933
dist 0.004056692123413086 idx 2745
pyknn ib 0, in 3989, ip 7 dist 0.007250993978232145 idx 1031
dist 0.007250308990478516 idx 1100
pyknn ib 0, in 3989, ip 8 dist 0.007250993978232145 idx 1100
dist 0.007250308990478516 idx 1031
pyknn ib 0, in 3989, ip 14 dist 0.010538001544773579 idx 744
dist 0.010537862777709961 idx 3909
pyknn ib 0, in 3989, ip 15 dist 0.010538001544773579 idx 3909
dist 0.010537862777709961 idx 744
pyknn ib 0, in 3996, ip 0 dist 0.0 idx 2854
dist 0.0 idx 3996
pyknn ib 0, in 3996, ip 1 dist 0.0 idx 3996
dist 0.0 idx 2854
pyknn ib 0, in 3999, ip 12 dist 0.010126007720828056 idx 1018
dist 0.010125875473022461 idx 3982
pyknn ib 0, in 3999, ip 13 dist 0.010126007720828056 idx 3982
dist 0.010125875473022461 idx 1018
pyknn ib 0, in 4005, ip 11 dist 0.004081999883055687 idx 193
dist 0.004082083702087402 idx 231
pyknn ib 0, in 4005, ip 12 dist 0.004081999883055687 idx 231
dist 0.004082083702087402 idx 193
pyknn ib 0, in 4007, ip 7 dist 0.0034770015627145767 idx 1800
dist 0.003475189208984375 idx 2901
pyknn ib 0, in 4007, ip 8 dist 0.0034770015627145767 idx 2901
dist 0.003475189208984375 idx 1800
pyknn ib 0, in 4011, ip 4 dist 0.0019210001919418573 idx 281
dist 0.0019209980964660645 idx 2437
pyknn ib 0, in 4011, ip 5 dist 0.0019210001919418573 idx 2437
dist 0.0019209980964660645 idx 281
pyknn ib 0, in 4011, ip 8 dist 0.0032220007851719856 idx 768
dist 0.0032220035791397095 idx 2765
pyknn ib 0, in 4011, ip 9 dist 0.0032220007851719856 idx 2765
dist 0.0032220035791397095 idx 768
pyknn ib 0, in 4015, ip 6 dist 0.0012199964839965105 idx 1925
dist 0.0012197494506835938 idx 2416
pyknn ib 0, in 4015, ip 7 dist 0.0012199964839965105 idx 2416
dist 0.0012197494506835938 idx 1925
pyknn ib 0, in 4017, ip 9 dist 0.0036890103947371244 idx 366
dist 0.003688812255859375 idx 3555
pyknn ib 0, in 4017, ip 10 dist 0.0036890103947371244 idx 3555
dist 0.003688812255859375 idx 366
pyknn ib 0, in 4019, ip 8 dist 0.003205000888556242 idx 1225
dist 0.0032033920288085938 idx 1637
pyknn ib 0, in 4019, ip 9 dist 0.003205000888556242 idx 1637
dist 0.0032033920288085938 idx 1225
pyknn ib 0, in 4026, ip 7 dist 0.002222006907686591 idx 595
dist 0.0022215843200683594 idx 2077
pyknn ib 0, in 4026, ip 8 dist 0.002222006907686591 idx 2077
dist 0.0022215843200683594 idx 595
pyknn ib 0, in 4032, ip 12 dist 0.007041999604552984 idx 792
dist 0.0070416927337646484 idx 974
pyknn ib 0, in 4032, ip 13 dist 0.007041999604552984 idx 974
dist 0.0070416927337646484 idx 792
pyknn ib 0, in 4038, ip 2 dist 0.0010400002356618643 idx 636
dist 0.0010400041937828064 idx 3362
pyknn ib 0, in 4038, ip 3 dist 0.0010400002356618643 idx 3362
dist 0.0010400041937828064 idx 636
pyknn ib 0, in 4047, ip 5 dist 0.0014030011370778084 idx 719
dist 0.0014026165008544922 idx 3707
pyknn ib 0, in 4047, ip 6 dist 0.0014030011370778084 idx 3707
dist 0.0014026165008544922 idx 719
pyknn ib 0, in 4050, ip 0 dist 0.0 idx 2397
dist 0.0 idx 4050
pyknn ib 0, in 4050, ip 1 dist 0.0 idx 4050
dist 0.0 idx 2397
pyknn ib 0, in 4058, ip 6 dist 0.0026439952198415995 idx 1225
dist 0.0026445388793945312 idx 1637
pyknn ib 0, in 4058, ip 7 dist 0.0026439952198415995 idx 1637
dist 0.0026445388793945312 idx 1225
pyknn ib 0, in 4058, ip 8 dist 0.0026499878149479628 idx 29
dist 0.0026502609252929688 idx 1466
pyknn ib 0, in 4058, ip 9 dist 0.0026499878149479628 idx 1466
dist 0.0026502609252929688 idx 29
pyknn ib 0, in 4058, ip 19 dist 0.009620998986065388 idx 1176
dist 0.00962066650390625 idx 2555
pyknn ib 0, in 4061, ip 10 dist 0.006673999130725861 idx 3462
dist 0.006673991680145264 idx 3465
pyknn ib 0, in 4061, ip 11 dist 0.006673999130725861 idx 3465
dist 0.006673991680145264 idx 3462
pyknn ib 0, in 4062, ip 3 dist 0.001948998891748488 idx 315
dist 0.0019488334655761719 idx 3219
pyknn ib 0, in 4062, ip 4 dist 0.001948998891748488 idx 3219
dist 0.0019488334655761719 idx 315
pyknn ib 0, in 4063, ip 1 dist 6.1000100686214864e-05 idx 1951
dist 6.097555160522461e-05 idx 2710
pyknn ib 0, in 4063, ip 2 dist 6.1000100686214864e-05 idx 2710
dist 6.097555160522461e-05 idx 1951
pyknn ib 0, in 4063, ip 9 dist 0.006617994979023933 idx 1161
dist 0.006617993116378784 idx 2641
pyknn ib 0, in 4063, ip 10 dist 0.006617994979023933 idx 2641
dist 0.006617993116378784 idx 1161
pyknn ib 0, in 4065, ip 5 dist 0.0018000002019107342 idx 2790
dist 0.001800537109375 idx 3722
pyknn ib 0, in 4065, ip 6 dist 0.0018000002019107342 idx 3722
dist 0.001800537109375 idx 2790
pyknn ib 0, in 4065, ip 8 dist 0.0027250039856880903 idx 1906
dist 0.0027251243591308594 idx 3635
pyknn ib 0, in 4065, ip 9 dist 0.0027250039856880903 idx 3635
dist 0.0027251243591308594 idx 1906
pyknn ib 0, in 4066, ip 0 dist 0.0 idx 3519
dist 0.0 idx 4066
pyknn ib 0, in 4066, ip 1 dist 0.0 idx 4066
dist 0.0 idx 3519
pyknn ib 0, in 4066, ip 2 dist 0.0004419961478561163 idx 829
dist 0.00044155120849609375 idx 3173
pyknn ib 0, in 4066, ip 3 dist 0.0004419961478561163 idx 3173
dist 0.00044155120849609375 idx 829
pyknn ib 0, in 4070, ip 9 dist 0.004648996517062187 idx 1248
dist 0.004649162292480469 idx 3939
pyknn ib 0, in 4070, ip 10 dist 0.004648996517062187 idx 3939
dist 0.004649162292480469 idx 1248
pyknn ib 0, in 4072, ip 0 dist 0.0 idx 2143
dist 0.0 idx 4072
pyknn ib 0, in 4072, ip 1 dist 0.0 idx 4072
dist 0.0 idx 2143
pyknn ib 0, in 4074, ip 1 dist 0.0005799981881864369 idx 1259
dist 0.0005799531936645508 idx 310
pyknn ib 0, in 4074, ip 2 dist 0.0005799999344162643 idx 310
dist 0.0005799531936645508 idx 1259
pyknn ib 0, in 4083, ip 3 dist 0.0021730000153183937 idx 965
dist 0.0021734237670898438 idx 1552
pyknn ib 0, in 4083, ip 4 dist 0.0021730000153183937 idx 1552
dist 0.0021734237670898438 idx 965
pyknn ib 0, in 4087, ip 2 dist 0.00047799956519156694 idx 991
dist 0.000476837158203125 idx 2895
pyknn ib 0, in 4087, ip 3 dist 0.00047799956519156694 idx 2895
dist 0.000476837158203125 idx 991
pyknn ib 0, in 4088, ip 15 dist 0.012462000362575054 idx 965
dist 0.012462615966796875 idx 1552
pyknn ib 0, in 4088, ip 16 dist 0.012462000362575054 idx 1552
dist 0.012462615966796875 idx 965
pyknn ib 0, in 4089, ip 0 dist 0.0 idx 3631
dist 0.0 idx 4089
pyknn ib 0, in 4089, ip 1 dist 0.0 idx 4089
dist 0.0 idx 3631
CHECK1: True
"""
# print(idx)
# speed test
# loader = DataLoader(
# dataset,
# batch_size=2,
# num_workers=8,
# pin_memory=True,
# shuffle=False
# )
# speed_test(0, loader)
# speed_test(1, loader)
# speed_test(2, loader)
# speed_test(3, loader)
|
[
"id115561@gmail.com"
] |
id115561@gmail.com
|
20ac97c759b78886592cf435a9a22569d62392bd
|
4d9422c56a606432eaa2d0a45c16ee0df390b639
|
/model/callbacks.py
|
df0cbd8ef1c382663eba693f5150bbe52ed9a5b4
|
[
"MIT"
] |
permissive
|
svpeeters/performance_prediction
|
f3f139d93d2527752bba41ab1c305e74925c1073
|
713e78441d59a5dafccaa43858a0478a29b43e2f
|
refs/heads/main
| 2023-04-11T05:20:43.247242
| 2021-04-22T19:43:44
| 2021-04-22T19:43:44
| 314,288,762
| 0
| 0
| null | 2020-11-20T14:50:38
| 2020-11-19T15:26:25
|
Python
|
UTF-8
|
Python
| false
| false
| 4,409
|
py
|
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
tf.get_logger().setLevel('ERROR')
class MonotonicCallback(tf.keras.callbacks.Callback):
"""
Callback that organizes the monotonic penalty computation for the training and the validation set. If the model
is in training mode, the penalty term incorporates the training set; otherwise it incorporates the validation set.
This callback is needed because TensorFlow 2 does not allow separate loss functions for the training set and the
validation set (the set on which early stopping is performed). The callback essentially toggles a TensorFlow
variable that indicates whether the model is in training mode. Based on that variable, the monotonic loss
function uses either the training set or the validation set to compute the penalty term.
"""
def __init__(self, train_indicator: tf.Variable, last_penalty: tf.Variable):
super().__init__()
self.train_indicator = train_indicator
self.last_penalty = last_penalty
self.train_penalty = 0.0
self.val_penalty = 0.0
self.num_train_batches = 0
self.num_val_batches = 0
def on_train_begin(self, logs=None):
self.train_indicator.assign(1)
def on_train_batch_begin(self, batch, logs=None):
self.num_train_batches += 1
def on_train_batch_end(self, batch, logs=None):
self.train_penalty += self.last_penalty.numpy()
def on_test_begin(self, logs=None):
self.train_indicator.assign(0)
def on_test_batch_begin(self, batch, logs=None):
self.num_val_batches += 1
def on_test_end(self, logs=None):
self.train_indicator.assign(1)
def on_test_batch_end(self, batch, logs=None):
self.val_penalty += self.last_penalty.numpy()
def on_epoch_end(self, epoch, logs=None):
logs['mon_penalty'] = self.train_penalty / self.num_train_batches
logs['val_mon_penalty'] = self.val_penalty / self.num_val_batches
self.train_penalty = 0.0
self.val_penalty = 0.0
self.num_train_batches = 0
self.num_val_batches = 0
class MonotonicBatchCallback(tf.keras.callbacks.Callback):
"""
Callback that organizes the monotonic penalty computation for the training and the validation set. If the model
is in training mode, the penalty term incorporates the training set; otherwise it incorporates the validation set.
This callback is needed because TensorFlow 2 does not allow separate loss functions for the training set and the
validation set (the set on which early stopping is performed). The callback essentially toggles a TensorFlow
variable that indicates whether the model is in training mode and tracks which batch is currently being used in
training or validation. Based on these variables, the monotonic loss function uses the corresponding batch from
the training or validation set to compute the penalty term.
"""
def __init__(self, train_indicator: tf.Variable, last_penalty: tf.Variable, current_step):
super().__init__()
self.train_indicator = train_indicator
self.last_penalty = last_penalty
self.train_penalty = 0.0
self.current_step = current_step
self.val_penalty = 0.0
self.num_train_batches = 0
self.num_val_batches = 0
def on_train_begin(self, logs=None):
self.train_indicator.assign(1)
def on_train_batch_begin(self, batch, logs=None):
self.current_step.assign(self.num_train_batches)
self.num_train_batches += 1
def on_train_batch_end(self, batch, logs=None):
self.train_penalty += self.last_penalty.numpy()
def on_test_begin(self, logs=None):
self.train_indicator.assign(0)
def on_test_batch_begin(self, batch, logs=None):
self.current_step.assign(self.num_val_batches)
self.num_val_batches += 1
def on_test_end(self, logs=None):
self.train_indicator.assign(1)
def on_test_batch_end(self, batch, logs=None):
self.val_penalty += self.last_penalty.numpy()
def on_epoch_end(self, epoch, logs=None):
logs['mon_penalty'] = self.train_penalty / self.num_train_batches
logs['val_mon_penalty'] = self.val_penalty / self.num_val_batches
self.train_penalty = 0.0
self.val_penalty = 0.0
self.num_train_batches = 0
self.num_val_batches = 0
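# Hedged usage sketch, not part of the original module: one way these callbacks
# could be wired into training. The loss factory name `make_monotonic_loss` and
# the data variables are hypothetical; the callbacks themselves only require the
# shared tf.Variable objects (plus the step counter for the batch-wise variant).
#
#   train_indicator = tf.Variable(1, trainable=False, dtype=tf.int32)
#   last_penalty = tf.Variable(0.0, trainable=False)
#   current_step = tf.Variable(0, trainable=False, dtype=tf.int32)
#   monotonic_cb = MonotonicBatchCallback(train_indicator, last_penalty, current_step)
#   model.compile(optimizer='adam',
#                 loss=make_monotonic_loss(train_indicator, last_penalty, current_step))
#   model.fit(x_train, y_train,
#             validation_data=(x_val, y_val),
#             callbacks=[monotonic_cb,
#                        tf.keras.callbacks.EarlyStopping(monitor='val_mon_penalty', patience=5)])
#
# Note that monotonic_cb must precede EarlyStopping in the callback list so the
# 'val_mon_penalty' entry is written to `logs` before EarlyStopping reads it.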
|
[
"speeters@mail.upb.de"
] |
speeters@mail.upb.de
|
0d788a1849f7407ed0c963a2f1ff2282a44211eb
|
a8599b7cb0f1deac1b8a62a35f3f1c95c6d0e7ba
|
/lookerapi/models/prefetch_access_filter_value.py
|
57b4a9ca5a801c241765ae52b290b512cbe56f01
|
[
"MIT"
] |
permissive
|
llooker/python_sdk
|
b82b1dbe30a734b1cc1e1bcafd3d2ac7ce9fa705
|
8364839b1de0519771f2f749e45b4e6cb1c75577
|
refs/heads/master
| 2020-03-30T08:40:42.562469
| 2020-01-16T00:08:31
| 2020-01-16T00:08:31
| 151,030,473
| 13
| 10
|
MIT
| 2020-01-16T00:08:32
| 2018-10-01T03:07:09
|
Python
|
UTF-8
|
Python
| false
| false
| 6,563
|
py
|
# coding: utf-8
"""
Looker API 3.0 Reference
### Authorization The Looker API uses Looker **API3** credentials for authorization and access control. Looker admins can create API3 credentials on Looker's **Admin/Users** page. Pass API3 credentials to the **/login** endpoint to obtain a temporary access_token. Include that access_token in the Authorization header of Looker API requests. For details, see [Looker API Authorization](https://looker.com/docs/r/api/authorization) ### Client SDKs The Looker API is a RESTful system that should be usable by any programming language capable of making HTTPS requests. Client SDKs for a variety of programming languages can be generated from the Looker API's Swagger JSON metadata to streamline use of the Looker API in your applications. A client SDK for Ruby is available as an example. For more information, see [Looker API Client SDKs](https://looker.com/docs/r/api/client_sdks) ### Try It Out! The 'api-docs' page served by the Looker instance includes 'Try It Out!' buttons for each API method. After logging in with API3 credentials, you can use the \"Try It Out!\" buttons to call the API directly from the documentation page to interactively explore API features and responses. ### Versioning Future releases of Looker will expand this API release-by-release to securely expose more and more of the core power of Looker to API client applications. API endpoints marked as \"beta\" may receive breaking changes without warning. Stable (non-beta) API endpoints should not receive breaking changes in future releases. For more information, see [Looker API Versioning](https://looker.com/docs/r/api/versioning)
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class PrefetchAccessFilterValue(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, model=None, field=None, value=None, can=None):
"""
PrefetchAccessFilterValue - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'model': 'str',
'field': 'str',
'value': 'str',
'can': 'dict(str, bool)'
}
self.attribute_map = {
'model': 'model',
'field': 'field',
'value': 'value',
'can': 'can'
}
self._model = model
self._field = field
self._value = value
self._can = can
@property
def model(self):
"""
Gets the model of this PrefetchAccessFilterValue.
Access filter model name.
:return: The model of this PrefetchAccessFilterValue.
:rtype: str
"""
return self._model
@model.setter
def model(self, model):
"""
Sets the model of this PrefetchAccessFilterValue.
Access filter model name.
:param model: The model of this PrefetchAccessFilterValue.
:type: str
"""
self._model = model
@property
def field(self):
"""
Gets the field of this PrefetchAccessFilterValue.
Access filter field name.
:return: The field of this PrefetchAccessFilterValue.
:rtype: str
"""
return self._field
@field.setter
def field(self, field):
"""
Sets the field of this PrefetchAccessFilterValue.
Access filter field name.
:param field: The field of this PrefetchAccessFilterValue.
:type: str
"""
self._field = field
@property
def value(self):
"""
Gets the value of this PrefetchAccessFilterValue.
Access filter value
:return: The value of this PrefetchAccessFilterValue.
:rtype: str
"""
return self._value
@value.setter
def value(self, value):
"""
Sets the value of this PrefetchAccessFilterValue.
Access filter value
:param value: The value of this PrefetchAccessFilterValue.
:type: str
"""
self._value = value
@property
def can(self):
"""
Gets the can of this PrefetchAccessFilterValue.
Operations the current user is able to perform on this object
:return: The can of this PrefetchAccessFilterValue.
:rtype: dict(str, bool)
"""
return self._can
@can.setter
def can(self, can):
"""
Sets the can of this PrefetchAccessFilterValue.
Operations the current user is able to perform on this object
:param can: The can of this PrefetchAccessFilterValue.
:type: dict(str, bool)
"""
self._can = can
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, PrefetchAccessFilterValue):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
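# ---------------------------------------------------------------------------
# Hedged usage example (not part of the generated file): build a value and
# serialize it. The model/field/value strings below are made-up placeholders,
# not real LookML names.
if __name__ == "__main__":
    afv = PrefetchAccessFilterValue(model="thelook", field="products.brand", value="Calvin Klein")
    print(afv.to_dict())
    print(afv == PrefetchAccessFilterValue(model="thelook", field="products.brand", value="Calvin Klein"))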
|
[
"russell@looker.com"
] |
russell@looker.com
|
524b08c17f3b308214909ee6b437d2bfceb346b4
|
6c8d702b5956aab847f5783d96965a227e649eff
|
/find_coordinates_germany.py
|
d752a4b2b2c7684dabd77aa075ccb048cae9e6dd
|
[] |
no_license
|
ew-shopp/weather_import
|
2155d83476f7106348bae086ec97ad21cc5caa8a
|
c1880223714f1ce6d369feeed7ed7907eeb3639f
|
refs/heads/master
| 2021-04-30T01:44:05.197464
| 2019-09-10T13:50:44
| 2019-09-10T13:50:44
| 121,489,170
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,100
|
py
|
import pandas as pd
import geopy.distance
jot_cities = pd.read_json("sf/citiesInJotDataset_germany.json", typ="series")
geonames = pd.read_csv('sf/download.geonames.org-export-dump/DE.txt', sep='\t', header=None)
geoname_cities = []
for i in range(len(geonames)):
if "P" == geonames.loc[i,6]:
entry = {"name": geonames.loc[i,2].lower(), "lat": geonames.loc[i,4], "lon":geonames.loc[i,5], "geonameid":geonames.loc[i,0]}
geoname_cities.append(entry)
one_equal_match = 0
several_equal_match = 0
one_in_match = 0
several_in_match = 0
no_match = 0
f_out= open("match.csv","w+")
#f_out.write('City,Latitude,Longitude\n')
print len(geoname_cities)
print"Matches\tSearch_type\tjot_city\tgeo_name_city\tlat\tlon\tdist_from_first\tgeonameid"
for jot in range(len(jot_cities)):
jot_city = jot_cities.loc[jot]
found_match = 0
matches = []
for geo in range(len(geoname_cities)):
if jot_city == geoname_cities[geo]["name"]:
found_match += 1
match_coord = (geoname_cities[geo]["lat"], geoname_cities[geo]["lon"])
if found_match == 1:
first_coord = match_coord
first_line = "%s,%f,%f\n" % (jot_city, geoname_cities[geo]["lat"], geoname_cities[geo]["lon"])
distance_km = geopy.distance.vincenty(first_coord, match_coord).km
s = "EQ:\t%s\t%s\t%f\t%f\t%fkm\t%d" % (jot_city, geoname_cities[geo]["name"], geoname_cities[geo]["lat"], geoname_cities[geo]["lon"], distance_km, geoname_cities[geo]["geonameid"])
matches.append(s)
if found_match == 1:
one_equal_match += 1
f_out.write(first_line)
if found_match > 1: several_equal_match += 1
if found_match == 0:
for geo in range(len(geoname_cities)):
if jot_city in geoname_cities[geo]["name"]:
found_match += 1
match_coord = (geoname_cities[geo]["lat"], geoname_cities[geo]["lon"])
if found_match == 1:
first_coord = match_coord
first_line = "%s,%f,%f\n" % (jot_city, geoname_cities[geo]["lat"], geoname_cities[geo]["lon"])
distance_km = geopy.distance.vincenty(first_coord, match_coord).km
s = "IN:\t%s\t%s\t%f\t%f\t%fkm\t%d" % (jot_city, geoname_cities[geo]["name"], geoname_cities[geo]["lat"], geoname_cities[geo]["lon"], distance_km, geoname_cities[geo]["geonameid"])
matches.append(s)
if found_match == 1:
one_in_match += 1
f_out.write(first_line)
if found_match > 1: several_in_match += 1
if found_match == 0:
s = "No match: %s" % (jot_city)
matches.append(s)
no_match += 1
for s in matches:
print "(%d)\t%s" % (found_match, s)
print "one_equal_match %d" % one_equal_match
print "several_equal_match %d" % several_equal_match
print "one_in_match %d" % one_in_match
print "several_in_match %d" % several_in_match
print "no_match %d" % no_match
f_out.close()
print 'Ending program'
|
[
"steffen.dalgard@sintef.no"
] |
steffen.dalgard@sintef.no
|
bbe71ab825436d8c21b377fc1eec83eefb6a5520
|
dda122e6ba6d3b591dd7c0e808a4c42e8a93d714
|
/data_structures/stack/test_stack.py
|
0a9367dda1e9bd14aaa55bd37da9e38df6adfe73
|
[] |
no_license
|
tobyatgithub/data_structure_and_algorithms
|
4c8e1d8a756ba8950446ffd00568a237204f5684
|
bd2f46bf9bffb846dc1262093619b7da395a2203
|
refs/heads/master
| 2020-04-08T13:42:56.661842
| 2019-02-13T08:38:16
| 2019-02-13T08:38:16
| 159,403,405
| 0
| 0
| null | 2019-02-13T08:37:51
| 2018-11-27T21:44:50
|
Python
|
UTF-8
|
Python
| false
| false
| 1,516
|
py
|
from .stack import Stack
from node import Node
import pytest
@pytest.fixture()
def empty_stack():
return Stack()
@pytest.fixture()
def single_stack():
tmp = Stack()
tmp.push(2)
return tmp
@pytest.fixture()
def mid_stack():
tmp = Stack()
tmp.push(2)
tmp.push("4")
tmp.push(['6'])
return tmp
@pytest.fixture()
def long_stack():
return Stack([5,4,3,2,1])
def test_empty_stack(empty_stack):
    # an empty Stack is falsy, so this assertion should pass
assert not empty_stack
def test_input_raise():
# tmp = Stack(2)
with pytest.raises(TypeError):
Stack(2)
# assert TypeError("Stack only take <None> or a <list> as input init value")
def test_singly_stack(single_stack):
assert len(single_stack) == 1
single_stack.push(22)
assert len(single_stack) == 2
def test_singly_stack2(single_stack):
assert single_stack.peek().val == 2
assert len(single_stack) == 1
def test_stack_length(empty_stack):
assert len(empty_stack) == 0
def test_iterable_input_stack(long_stack):
assert long_stack
assert long_stack.peek().val == 1
def test_stack_pop_empty(empty_stack):
assert empty_stack.pop() == None
def test_stack_pop1(single_stack):
# before
assert len(single_stack) == 1
assert single_stack.pop().val == 2
#after
assert len(single_stack) == 0
def test_stack_pop3(mid_stack):
assert mid_stack.pop().val ==['6']
def test_stack_pop2(long_stack):
assert long_stack.pop().val == 1
|
[
"toby.fangyuan@gmail.com"
] |
toby.fangyuan@gmail.com
|
19cb350fe6ef91ef6239c5d4681aab2a4ccd5ce6
|
397e4863f1d123c9a7d5e0dac74d8747c3408b83
|
/netmikouse.py
|
3bab56ba7739b8337568ca73d5c97d9d6fb6fe16
|
[] |
no_license
|
javincraig/Homelab
|
91686d858dff55c6f3eaf8a02d121e3dc8179af4
|
c0513378c20bd3928d875b3006dff16421d0696d
|
refs/heads/master
| 2020-04-27T08:16:56.346167
| 2019-08-18T16:37:54
| 2019-08-18T16:37:54
| 174,164,649
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,467
|
py
|
from netmiko import ConnectHandler
#---------------Gather information---------------------
ip = input('What is the management IP?: ')
username = input('What is the username? [cisco]: ') or 'cisco'
password = input('What is the password? [cisco]: ') or 'cisco'
cisco_switch = {'device_type': 'cisco_ios',
'ip': ip,
'username': username,
'password': password}
connection = ConnectHandler(**cisco_switch)
interface_output = connection.send_command('show ip interface brief')
#print(interface_output)
# send_command returns a single string, so iterate over its lines rather than its characters
for line in interface_output.splitlines():
    if 'up' in line:
        print(line)
print('--------adding AAA commands to the device ------')
commands = ['aaa authentication login default group radius local-case',
'aaa authentication login vty group radius local-case',
'aaa authorization exec default group radius local if-authenticated',
'aaa accounting system default start-stop group radius',
'ip radius source-interface Vlan1301',
'radius-server host 192.168.1.10 auth-port 1645 acct-port 1646',
'radius-server key cisco',
'line vty 0 4',
'login authentication vty']
configoutput = connection.config_mode()
if 'Invalid input' in configoutput:
print(configoutput)
configoutput = connection.send_config_set(commands)
if 'Invalid input' in configoutput:
print(configoutput)
print('------------------done------------------')
|
[
"noreply@github.com"
] |
javincraig.noreply@github.com
|
6a7d74b421446916cc56287fe801c65bbd719a47
|
fce9aa2870a749dd9eb86c4123360467f524fb7b
|
/Team-Notes/Python-Competitive-Programming-Team-Notes/Number Theory/is_prime_number.py
|
d7547f3caf17e834eb73568817631687fb352b02
|
[] |
no_license
|
KOYOEN/Coding-Test
|
82ae0f5cfe6fb02b9b8ab4322e94c554fb64cda4
|
6080cfae3d7c317b9ae0c5d097046431b857954f
|
refs/heads/master
| 2022-12-28T19:28:07.070033
| 2020-10-08T04:28:27
| 2020-10-08T04:28:27
| 298,543,539
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 867
|
py
|
import math
# Prime sieve (Sieve of Eratosthenes)
def is_prime_number(n):
    # Check primality for every number from 2 up to n
    array = [True for i in range(n+1)]  # start with every number marked as prime (True); 0 and 1 are ignored
    # Sieve of Eratosthenes
    for i in range(2, int(math.sqrt(n)) + 1):  # check every number from 2 up to the square root of n
        if array[i] == True:  # i is still marked as prime (has not been crossed out)
            # cross out every multiple of i, except i itself
            j = 2
            while i * j <= n:
                array[i * j] = False
                j += 1
    return [ i for i in range(2, n+1) if array[i] ]
# Use when N is at most 1,000,000 => in theory about 4 million operations, and memory is sufficient
print(is_prime_number(26))
|
[
"happy2473@me.com"
] |
happy2473@me.com
|
3e351c8c1dc090583cc67990266dda903cd032c1
|
bf2608e054216fdacfd81ace22080b85b373d08d
|
/lib/python/usdNodeGraph/ui/graph/pipe.py
|
4f3e46bddb8fee82a2787221ffb305947313909c
|
[] |
no_license
|
yoann01/usdNodeGraph
|
89d31dcf6d25ad9da557ee035483ac6e4972361f
|
30909589bc546d24e55b8eeed4a61b17bef65d97
|
refs/heads/master
| 2020-09-08T09:35:20.244013
| 2019-09-26T15:42:23
| 2019-09-26T15:42:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,724
|
py
|
# -*- coding: utf-8 -*-
# __author__ = 'XingHuan'
# 8/29/2018
from usdNodeGraph.module.sqt import *
import math
PIPE_NORMAL_COLOR = QColor(130, 130, 130)
PIPE_HIGHTLIGHT_COLOR = QColor(250, 250, 100)
class Pipe(QGraphicsPathItem):
"""A connection between two versions"""
def __init__(self, orientation=0, **kwargs):
super(Pipe, self).__init__(**kwargs)
self.setFlag(QGraphicsItem.ItemIsSelectable, False)
self.setAcceptHoverEvents(True)
self.lineColor = PIPE_NORMAL_COLOR
self.thickness = 1.5
self.pointAtLength = 7
self.orientation = orientation
self.source = None
self.target = None
self.isFloat = False
self.foundPort = None
self.curv1 = 0.5
self.curv3 = 0.5
self.curv2 = 0.0
self.curv4 = 1.0
def setLineColor(self, highlight=False, color=None):
if color is not None:
self.lineColor = color
return
if highlight:
self.lineColor = PIPE_HIGHTLIGHT_COLOR
else:
self.lineColor = PIPE_NORMAL_COLOR
def updatePath(self, sourcePos=None, targetPos=None):
orientation = self.orientation
if self.source:
sourcePos = self.source.mapToScene(self.source.boundingRect().center())
if self.target:
targetPos = self.target.mapToScene(self.target.boundingRect().center())
path = QPainterPath()
path.moveTo(sourcePos)
dx = targetPos.x() - sourcePos.x()
dy = targetPos.y() - sourcePos.y()
if orientation in [1, 3]:
if (dx < 0 and orientation == 1) or (dx >= 0 and orientation == 3):
self.curv1 = -0.5
self.curv3 = 1.5
self.curv2 = 0.2
self.curv4 = 0.8
else:
self.curv1 = 0.5
self.curv3 = 0.5
self.curv2 = 0.0
self.curv4 = 1.0
elif orientation in [0, 2]:
if (dy < 0 and orientation == 0) or (dy >= 0 and orientation == 2):
self.curv1 = 0.2
self.curv3 = 0.8
self.curv2 = -0.5
self.curv4 = 1.5
else:
self.curv1 = 0.0
self.curv3 = 1.0
self.curv2 = 0.5
self.curv4 = 0.5
ctrl1 = QPointF(
sourcePos.x() + dx * self.curv1,
sourcePos.y() + dy * self.curv2)
ctrl2 = QPointF(
sourcePos.x() + dx * self.curv3,
sourcePos.y() + dy * self.curv4)
path.cubicTo(ctrl1, ctrl2, targetPos)
self.setPath(path)
def breakConnection(self):
if self.source is not None:
self.source.removePipe(self)
if self.target is not None:
self.target.removePipe(self)
def paint(self, painter, option, widget):
zoom = self.scene().views()[0].currentZoom
thickness = self.thickness / math.sqrt(zoom)
pointAtLength = self.pointAtLength / math.sqrt(zoom)
if self.isSelected():
pen = QPen(PIPE_HIGHTLIGHT_COLOR, thickness)
else:
pen = QPen(self.lineColor, thickness)
self.setPen(pen)
self.setZValue(-1)
super(Pipe, self).paint(painter, option, widget)
center_pos = self.path().pointAtPercent(0.5)
center_angle = self.path().angleAtPercent(0.5)
painter.setPen(pen)
painter.translate(center_pos)
painter.rotate(180 - (center_angle + 30))
painter.drawLine(QPointF(0, 0), QPointF(pointAtLength, 0))
painter.rotate(60)
painter.drawLine(QPointF(0, 0), QPointF(pointAtLength, 0))
def _getDistance(self, currentPos):
sourcePos = self.path().pointAtPercent(0)
targetPos = self.path().pointAtPercent(1)
dis1 = math.hypot(sourcePos.x() - currentPos.x(), sourcePos.y() - currentPos.y())
dis2 = math.hypot(targetPos.x() - currentPos.x(), targetPos.y() - currentPos.y())
minDis = 30
if dis1 < minDis:
return True, self.source
if dis2 < minDis:
return True, self.target
return False, None
def hoverEnterEvent(self, event):
currentPos = event.pos()
aroundPort, port = self._getDistance(currentPos)
if aroundPort:
self.setLineColor(True)
self.update()
def hoverLeaveEvent(self, event):
self.setLineColor(False)
self.update()
def mousePressEvent(self, event):
if event.button() == Qt.LeftButton:
currentPos = event.pos()
self.startPos = currentPos
aroundPort, port = self._getDistance(currentPos)
if aroundPort:
port.removePipe(self)
if port == self.source:
self.source = None
if port == self.target:
self.target = None
self.isFloat = True
else:
super(Pipe, self).mousePressEvent(event)
def mouseMoveEvent(self, event):
super(Pipe, self).mouseMoveEvent(event)
from usdNodeGraph.ui.graph.node.port import Port
currentPos = event.pos()
if self.isFloat:
if self.source is not None:
self.updatePath(targetPos=currentPos)
elif self.target is not None:
self.updatePath(sourcePos=currentPos)
findPort = self.scene().itemAt(currentPos, QTransform())
if findPort is not None and isinstance(findPort, Port):
self.foundPort = findPort
self.foundPort.setHighlight(True)
else:
if self.foundPort is not None:
self.foundPort.setHighlight(False)
self.foundPort = None
def mouseReleaseEvent(self, event):
super(Pipe, self).mouseReleaseEvent(event)
if self.isFloat:
from .node.port import Port, InputPort, OutputPort
scenePos = event.pos()
findPort = self.scene().itemAt(scenePos, QTransform())
if findPort is not None and isinstance(findPort, Port):
if (isinstance(findPort, InputPort) and self.source is not None):
self.source.connectTo(findPort)
elif (isinstance(findPort, OutputPort) and self.target is not None):
self.target.connectTo(findPort)
self.breakConnection()
self.scene().removeItem(self)
self.isFloat = False
if self.foundPort is not None:
self.foundPort.setHighlight(False)
self.foundPort = None
|
[
"xinghuan951230@gmail.com"
] |
xinghuan951230@gmail.com
|
f8072d166c27ede45c6eea1cc9f07f9f1b317f63
|
3ec8a082b67f01ec09e0fc70d13551dc8785c935
|
/src/Network.py
|
edecac642235a0f5120c0ec227e66adef4baa76d
|
[] |
no_license
|
SiWALLE/ML-Exercises
|
efbddd0794c67bd429c5620a0c362c1b9cbc6c89
|
ea316b56ce06000882eefeb6c12fd69bbca3b531
|
refs/heads/master
| 2021-01-10T15:45:13.825332
| 2015-11-23T11:56:18
| 2015-11-23T11:56:18
| 43,604,271
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,106
|
py
|
import numpy as np
import random
class Network(object):
def __init__(self,sizes):
self.num_layers = len(sizes)
self.sizes = sizes
self.biases = [np.random.randn(y,1) for y in sizes[1:]]
self.weights = [np.random.randn(y,x)
for x,y in zip(sizes[:-1],sizes[1:])]
def feedforward(self,a):
""" return the ouput of a network if "a" is input"""
for b,w in zip(self.biases,self.weights):
a = sigmoid(np.dot(w,a)+b)
return a
def SGD(self,training_data,epoches,mini_batch_size,eta,
test_data=None):
if test_data:n_test = len(test_data)
n = len(training_data)
for j in xrange(epoches):
random.shuffle(training_data)
mini_batches = [
training_data[k:k+mini_batch_size]
for k in xrange(0,n,mini_batch_size)]
for mini_batch in mini_batches:
self.update_mini_batch(mini_batch,eta)
if(test_data):
print "Epoch {0}:{1}/{2}".format(
j, self.evaluate(test_data),n_test)
else:
print "Epoch {0} complete".format(j)
def update_mini_batch(self,mini_batch,eta):
nabla_b = [np.zeros(b.shape) for b in self.biases]
        nabla_w = [np.zeros(w.shape) for w in self.weights]
for x,y in mini_batch:
delta_nabla_b,delta_nabla_w = self.backprop(x,y)
nabla_b = [nb+dnb for nb,dnb in zip(nabla_b,delta_nabla_b)]
nabla_w = [nw+dnw for nw,dnw in zip(nabla_w,delta_nabla_w)]
self.weights = [w-(eta/len(mini_batch))*nw for w,nw in zip(self.weights,nabla_w)]
        self.biases = [b-(eta/len(mini_batch))*nb for b,nb in zip(self.biases,nabla_b)]
def backprop(self,x,y):
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
activation = x
activations = [x]
zs = []
for b,w in zip(self.biases,self.weights):
z = np.dot(w,activation)+b
zs.append(z)
activation = sigmoid(z)
activations.append(activation)
        delta = self.cost_derivative(activations[-1],y)*\
sigmoid_prime(zs[-1])
nabla_b[-1] = delta
nabla_w[-1] = np.dot(delta,activations[-2].transpose())
for l in xrange(2,self.num_layers):
            z = zs[-l]
            sp = sigmoid_prime(z)
            delta = np.dot(self.weights[-l+1].transpose(),delta)*sp
            nabla_b[-l] = delta
            nabla_w[-l] = np.dot(delta,activations[-l-1].transpose())
return (nabla_b,nabla_w)
def evaluate(self,test_data):
test_results = [(np.argmax(self.feedforward(x)),y) for x,y in test_data]
return sum(int(x==y) for (x,y) in test_results)
def cost_derivative(self,output_activations,y):
return (output_activations-y)
def sigmoid(z):
return 1.0/(1.0+np.exp(-z))
def sigmoid_prime(z):
temp = sigmoid(z)
return temp*(1-temp)
|
[
"liangsibiao@live.com"
] |
liangsibiao@live.com
|
1d0f1708b2047d74b522e65d8dec492d48924d31
|
b01fa826686cd72a1d132e9c719c3341a104e837
|
/backend/vacancies/urls.py
|
9a41152607890096d8b98d94c6b3af765254d9e6
|
[] |
no_license
|
V-ampire/kmsjob
|
d52a265eb79c778098294778d8d1730e05ef1b05
|
6c73f296cb74090c165dfee102ea99d46f299038
|
refs/heads/master
| 2023-08-10T19:57:42.693168
| 2021-09-29T11:24:17
| 2021-09-29T11:24:17
| 372,736,939
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
py
|
from django.urls import path
from . import views
app_name = 'vacancies'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('by_date/', views.ByDateView.as_view(), name='by_date'),
path('search/', views.SearchView.as_view(), name='search'),
path('detail/<int:pk>/', views.VacancyDetailView.as_view(), name='vacancy_detail'),
]
|
[
"webjob010@gmail.com"
] |
webjob010@gmail.com
|
8d9ed80c405b51c9ccd69aa03cda41513299e041
|
2bca0f3f0eb3d0560cf5953a1bcc6bfb4f1e8262
|
/python_test.py
|
7c16d4fd6d0e9d6f991ed8f53af9d22c21b7f837
|
[] |
no_license
|
apdjustino/UrbanSimDataExplorer
|
2254f1bd437e92695948409ebfd518ba2d314768
|
f5fdd4848d2efc5350ef19c1cc428808fb6555fa
|
refs/heads/master
| 2021-01-24T20:40:20.156171
| 2018-01-10T15:56:37
| 2018-01-10T15:56:37
| 51,838,158
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 78
|
py
|
import datetime
print "Current time is: {0}".format(datetime.datetime.now())
|
[
"justinmartinez14@gmail.com"
] |
justinmartinez14@gmail.com
|
ce8c9ecc1ead17d79aae5400b88c34aa2d8b7af9
|
9a78e2a176f93fce034b16d67dd47ed80e875dfa
|
/diploma/settings.py
|
d5b620694ca73d45ea871d164e8ca7c2d1fff431
|
[] |
no_license
|
sinchuk140995/courseon
|
f431a4071321b7a7e7d9315ad508e0feddd1204a
|
0e955db4f35cc6cdbde6b919095dd5885e45094c
|
refs/heads/master
| 2021-06-18T11:32:26.261228
| 2017-06-10T12:26:24
| 2017-06-10T12:26:24
| 94,334,435
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,071
|
py
|
"""
Django settings for diploma project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import dj_database_url
from decouple import config
import cloudinary
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third party stuff
'pagedown',
'crispy_forms',
'cloudinary',
# local app
'accounts',
'cabinets',
'comments',
'moderator',
'blog',
]
# Cloudinary
cloudinary.config(
cloud_name=config('cloud_name'),
api_key=config('api_key'),
api_secret=config('api_secret'),
)
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'diploma.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'diploma.wsgi.application'
DATABASES = {
'default': dj_database_url.config(
default=config('DATABASE_URL')
)
}
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Kiev'
USE_I18N = True
USE_L10N = True
USE_TZ = True
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/login/'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = config('email')
EMAIL_HOST_PASSWORD = config('email_password')
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
SERVER_EMAIL = EMAIL_HOST_USER
|
[
"sinchuk140995@gmail.com"
] |
sinchuk140995@gmail.com
|
6d448fccf0934c608839d970b48032d28903b3bb
|
a6b543cd11b2d5e66a87c7240b80050b34616aef
|
/code-implementation/dstree_random_forest.py
|
04e383c0f82a77d7e84864129767f516606a9d09
|
[
"MIT"
] |
permissive
|
GamjaPower/kaggle
|
e7814de293d5eee58653eccf1700fd6033548726
|
8ebedc73769c3c04d9c80f9e8877a10164c9c5e8
|
refs/heads/master
| 2020-03-30T05:56:26.882863
| 2018-09-29T09:13:57
| 2018-09-29T09:13:57
| 150,828,879
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,176
|
py
|
# -*- coding: utf-8 -*-
'''
Created on 2018. 9. 24.
@author: jason96
Apply Gini Impurity
'''
import pandas as pd
from graphviz import Digraph
import os
import operator
import numpy as np
raw_data = {
'name': ["Kang", "Kim", "Choi", "Park", "Yoon"],
'짱절미': [True, False, False, False, False],
'셀스타그램': [False, False, True, False, False],
'like': [True, False, True, True, False]
}
pd_data = pd.DataFrame(raw_data)
pd_data = pd_data.set_index("name")
label_name = "like"
feature_names = pd_data.columns.difference([label_name])
def display_node(dot, key, node):
if node["leaf"] is True:
proba = node['proba']
proba = round(proba, 4)
proba = str(proba)
dot.node(key, proba)
else:
desc = node['desc']
dot.node(key, desc)
if "left" in node:
left_key = key + "L"
display_node(dot, left_key, node['left'])
dot.edge(key, left_key)
if "right" in node:
right_key = key + "R"
display_node(dot, right_key, node['right'])
dot.edge(key, right_key)
dot.render('graphviz-files/dstree.gv', view=True)
def display_tree(tree):
dot = Digraph(comment='Decision Tree')
display_node(dot, "Root", tree)
def predict(data, node):
if node['leaf']:
proba = node["proba"]
result = dict(zip(data.index, len(data) * [proba]))
else:
rule = node['rule']
left_data = data[rule(data)]
left_result = predict(left_data, node['left'])
right_data = data[~rule(data)]
right_result = predict(right_data, node['right'])
return {**left_result, **right_result}
return result
def binary_rule(data, feature_name, value):
return data[feature_name] == value
def make_rule(method, feature_name, value):
def call_condition(data):
return method(data, feature_name, value)
return call_condition
def make_rules(feature_names):
rules = {}
feature_names = list(feature_names)
for feature_name in feature_names:
rules[feature_name] = make_rule(binary_rule, feature_name, True)
return rules
def get_best_rule(data, rules):
gini_indexes = {}
for feature_name, rule in rules.items():
true_data = data[rule(data)]
true_proba = true_data[label_name].mean()
false_proba = 1 - true_proba
gini_index = true_proba*(1-false_proba) - false_proba*(1-true_proba)
gini_indexes[feature_name] = gini_index
sorted_x = sorted(gini_indexes.items(), key=operator.itemgetter(1))
for k, v in sorted_x: # @UnusedVariable
return k, rules[k]
def make_node(data, rules):
if len(rules) > 0:
feature_name, rule = get_best_rule(data, rules)
left_data = data[rule(data)]
right_data = data[~rule(data)]
if len(left_data) > 0 and len(right_data) > 0:
del rules[feature_name]
node = {'leaf': False, 'desc': feature_name, 'rule': rule}
node['left'] = make_tree(left_data, rules.copy())
node['right'] = make_tree(right_data, rules.copy())
return node
proba = data[label_name].mean()
node = {'leaf': True, 'proba': proba}
return node
def make_tree(data, feature_names):
rules = make_rules(feature_names)
return make_node(data, rules)
def display_predict(predict):
for k, v in predict.items():
print(k, v)
def bootstrap(data, feature_names, label_name):
feature_data = data[feature_names]
num_rows, num_cols = feature_data.shape
index = np.random.choice(feature_data.index, size=num_rows, replace=True)
if max_feature == None: # @IgnorePep8
num_cols = np.sqrt(num_cols)
else:
num_cols = num_cols * max_feature
num_cols = int(num_cols)
columns = np.random.choice(feature_data.columns, size=num_cols,
replace=False)
# If index and columns are specified,
# a new table is created based on the values.
result = feature_data.loc[index, columns]
result[label_name] = data[label_name]
return result
def make_forest(data):
forest = []
for _ in range(n_estimators):
bootstrapped_data = bootstrap(data, feature_names, label_name)
bs_feature_names = bootstrapped_data.columns.difference([label_name])
tree = make_tree(bootstrapped_data, bs_feature_names)
forest.append(tree)
return forest
def predict_forest(data, forest):
prediction_total = []
for tree in forest:
prediction = predict(data, tree)
prediction = pd.Series(prediction)
prediction_total.append(prediction)
prediction_total = pd.concat(prediction_total, axis=1, sort=False)
prediction_total = prediction_total.mean(axis=1)
return prediction_total
if __name__ == '__main__':
max_feature = None
n_estimators = 10
os.environ["PATH"] += os.pathsep + '/usr/local/bin'
forest = make_forest(pd_data)
display_predict(predict_forest(pd_data, forest))
# tree = make_tree(pd_data, rules)
# display_tree(tree)
# display_predict(predict(pd_data, tree))
|
[
"jason96@localhost"
] |
jason96@localhost
|
7dbdef860fb395b2ba8fcbbcdc96fb4cfb5f506f
|
c742b2449118a93dbd3dc02bd4c2c0f64aada70d
|
/config/urls.py
|
28f590cbf43e5437ca9e146844c888203f13795a
|
[] |
no_license
|
XerxesDGreat/lego-django
|
28cf927a5d887a60df0744bb8f545a938b99581c
|
413bd783e4591b1659b369f539214d546fcfefbd
|
refs/heads/master
| 2022-12-09T19:22:02.409304
| 2018-07-28T14:50:29
| 2018-07-28T14:50:29
| 134,971,037
| 0
| 0
| null | 2022-12-08T02:17:43
| 2018-05-26T15:03:52
|
Python
|
UTF-8
|
Python
| false
| false
| 798
|
py
|
"""lego URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('project.api.urls')),
]
|
[
"josh@life360.com"
] |
josh@life360.com
|
f28354d557aa4187afb4bed39f29e95ef56adc1a
|
bbe62b72f111dbff6d4640b4466e6fc9f750ee06
|
/Ryker.py
|
c32dae111b1de922bd51eb0c94b15341d09f62a3
|
[] |
no_license
|
Get2dacode/python_projects
|
210f54e8f9af394d82d4423c474e4d2e46d28270
|
e3e54d0d403486eabefbda4ec3e7eb997fd4e984
|
refs/heads/main
| 2023-09-01T02:14:43.762971
| 2021-10-04T14:38:36
| 2021-10-04T14:38:36
| 411,525,952
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,467
|
py
|
from gtts import gTTS
import speech_recognition as sr
import os
import re
import webbrowser
import smtplib
import json
import datetime
import wikipedia
import requests  # needed by the joke command below; this import was missing
def talkToMe(audio):
"speaks audio passed as argument"
print(audio)
for line in audio.splitlines():
os.system("say " + audio)
# use the system's inbuilt say command instead of mpg123
# text_to_speech = gTTS(text=audio, lang='en')
# text_to_speech.save('audio.mp3')
# os.system('mpg123 audio.mp3')
def ok():
talkToMe('ok here we go')
def myCommand():
"listens for commands"
r = sr.Recognizer()
with sr.Microphone() as source:
print('Ready...')
r.pause_threshold = 1
r.adjust_for_ambient_noise(source, duration=1)
audio = r.listen(source)
try:
command = r.recognize_google(audio).lower()
print('You said: ' + command + '\n')
# loop back to continue to listen for commands if unrecognizable speech is received
except sr.UnknownValueError:
print('Your last command couldn\'t be heard')
command = myCommand()
return command
def time():
hour = int(datetime.datetime.now().hour)
if hour >= 0 and hour < 12:
talkToMe("Good Morning Sir !")
elif hour >= 12 and hour < 18:
talkToMe("Good Afternoon Sir !")
else:
talkToMe("Good Evening Sir !")
def nameMe():
talkToMe("I am your Assistant")
assname = "Ryker"
talkToMe(assname)
def assistant(command):
"if statements for executing commands"
if 'song' in command:
reg_ex = re.search('what is (.*)', command)
url = 'https://www.youtube.com/watch?v=dQw4w9WgXcQ'
if reg_ex:
subreddit = reg_ex.group(1)
url = url + 'r/' + subreddit
ok()
webbrowser.open(url)
print('Done!')
elif 'open website' in command:
reg_ex = re.search('open website (.+)', command)
if reg_ex:
domain = reg_ex.group(1)
url = 'https://www.' + domain + '.com'
ok()
webbrowser.open(url)
print('Done!')
else:
pass
elif "what's up" in command:
talkToMe('what it do baby')
talkToMe('what can i help you with ')
elif 'search' in command:
talkToMe('Searching Wikipedia...')
results = wikipedia.summary(command, sentences=1)
talkToMe("According to Wikipedia")
print(results)
talkToMe(results)
elif 'joke' in command:
talkToMe('ok')
res = requests.get(
'https://icanhazdadjoke.com/',
headers={"Accept": "application/json"}
)
if res.status_code == requests.codes.ok:
talkToMe(str(res.json()['joke']))
else:
talkToMe('oops!I ran out of jokes')
elif "where is" in command:
reg_ex = re.search('where is (.*)', command)
location = reg_ex.group(1)
ok()
webbrowser.open(
'https://www.google.com/maps/place/' + location + "")
elif 'the time' in command:
time = datetime.datetime.now().strftime("%I,%M,%p")
talkToMe("the time is")
talkToMe(time)
elif 'close up' in command:
talkToMe("Ok make sure to shut down your computer")
exit()
time()
nameMe()
talkToMe('I am ready for your command')
# loop to continue executing multiple commands
while True:
assistant(myCommand())
|
[
"noreply@github.com"
] |
Get2dacode.noreply@github.com
|
5d72ac65b8e839c4bc6c03dba165452e798e2782
|
033f74cb79f13ac11fdd350988ade17d9a574512
|
/pages/urls.py
|
b548e1bb25545969789f2e0e907aecaafd97c33b
|
[] |
no_license
|
LevitKanner/LK
|
0c8f9371c02cf9b505b4c410133fff9351dae87d
|
06299b4cf9254b02c248a402463151eda6e987af
|
refs/heads/master
| 2023-01-30T18:54:24.429389
| 2020-12-15T16:57:19
| 2020-12-15T16:57:19
| 320,998,763
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
from django.urls import path
from . import views
app_name = 'pages'
urlpatterns = [
path('contact', views.contact, name='contact'),
path('<str:page_name>', views.index, name='index'),
path('', views.index, {'page_name': ''}, name='home')
]
|
[
"lkanner21@gmail.com"
] |
lkanner21@gmail.com
|
ff232665d973cfa366f41832e709d9e67bf27184
|
8666bba3cef8dabf3fbe129eb7f32c46f4715c35
|
/src/db/MainLandMarket/GetStockHistory_wind.py
|
a670f2053c031b7895fce66c841936e097e60994
|
[] |
no_license
|
Wangxian111/nsnqt
|
0c357130f9923097b03eb43eaf1317d044027fc7
|
b1536d857be526501578a7c7da6137e0a51c9cf0
|
refs/heads/master
| 2020-05-23T08:18:51.809583
| 2016-10-07T06:55:30
| 2016-10-07T06:55:30
| 70,222,925
| 0
| 0
| null | 2016-10-07T07:00:58
| 2016-10-07T07:00:57
| null |
UTF-8
|
Python
| false
| false
| 1,391
|
py
|
from WindPy import *
from datetime import *
par_list = "pre_close","open","high","low","close","volume","amt","dealnum"
#full value set
#w.wsd("600518.SH", "pre_close,open,high,low,close,volume,amt,dealnum,chg,pct_chg,swing,vwap,adjfactor,close2,turn,free_turn,oi,oi_chg,trade_status,susp_reason,mf_amt,mf_vol,mf_amt_ratio,mf_vol_ratio,mf_amt_close,mf_amt_open,ev,pe_ttm,val_pe_deducted_ttm,pb_lf,ps_ttm", "2009-03-19", "2016-09-24", "adjDate=0")
#only update key values for demo only
#win_data=w.wsd(stock_name, "pre_close,open,high,low,close,volume,amt,dealnum", "2009-03-19", datetime.today(), "adjDate=0")
def get_par_string():
par_string = par_list[0]
if len(par_list)<2:
return par_string
for i in range(len(par_list)-1):
par_string = par_string+","+ par_list[i+1]
return par_string
def get_history_data(stock_code,start_day,end_day=datetime.today()):
print ("start to query data from wind")
par_string = get_par_string()
print(par_string)
wind_data = w.wsd(stock_code, par_string, start_day, end_day, "adjDate=0")
print("Data query finished, %d record be found" %len(wind_data.Data[0]))
return wind_data
def connect_to_wind():
w.start()
print("wind connection is:", end="")
print(w.isconnected())
def disconnect_to_wind():
#w.disconnect()
print("wind connection has been closed")
|
[
"harry0519@gmail.com"
] |
harry0519@gmail.com
|
cd6ec6391a155ab9683f22c18d3cb4dcde470e84
|
aab2ad13d6023544fbe546d8ca90bb28259a6fc1
|
/image_rc.py
|
8f670933f38a45cfdfee90bb8a9b0eebbe0b71de
|
[] |
no_license
|
HanzoZY/DBCD
|
c141384ac85226f5dfbe16cdbe478251d5bfc3ee
|
c3818bb6738f64d3a6a1ccdbc40124b05307a200
|
refs/heads/master
| 2020-03-28T20:55:59.939875
| 2018-09-17T11:32:09
| 2018-09-17T11:36:53
| 149,114,828
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 198
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'image_rc.qrc'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
|
[
"zhangyuhan@zyhdeMacBook-Pro.local"
] |
zhangyuhan@zyhdeMacBook-Pro.local
|
3d456ff2fdf7f69d9519317f0a9a47b44322d273
|
f4b75e06e456dbd065dc57f07d55a2f5ec4ad688
|
/openstates/data/migrations/0012_person_current_role.py
|
d9e866c1e3a313f007b32336097bd875c571590a
|
[
"MIT"
] |
permissive
|
openstates/openstates-core
|
19bf927a2e72c8808a5601f4454846acaf32218a
|
3055632ea7ddab6432cc009989ffb437aed6e530
|
refs/heads/main
| 2023-09-05T10:30:58.866474
| 2023-09-01T15:43:59
| 2023-09-01T15:43:59
| 251,511,904
| 19
| 27
|
MIT
| 2023-09-06T19:30:03
| 2020-03-31T05:47:28
|
Python
|
UTF-8
|
Python
| false
| false
| 477
|
py
|
# Generated by Django 3.0.5 on 2020-08-04 15:24
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("data", "0011_auto_20200804_1108")]
operations = [
migrations.AddField(
model_name="person",
name="current_role",
field=django.contrib.postgres.fields.jsonb.JSONField(
default=None, null=True
),
)
]
|
[
"dev@jamesturk.net"
] |
dev@jamesturk.net
|
a3a06aca09ad8ed5aa53a1d92bd793ea85813cff
|
fb8bc02a724a3cb907920f4bff8d26b27b42683f
|
/Day 4/My_Package/setup.py
|
59d31a59cfd3aec65a8fd6b7cc3576aa05384883
|
[] |
no_license
|
timelyanswer/python-hafb_KHK
|
24c507f71d7381194e573ece6b7892de7921fe86
|
5f0e9d306eea28aa0886071c5e1bead44ede8025
|
refs/heads/main
| 2023-07-08T02:16:39.793601
| 2021-08-12T22:12:09
| 2021-08-12T22:12:09
| 394,317,069
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 664
|
py
|
#!/usr/bin/env python3
"""
Author : t11 <me@wsu.com>
Date : 8/12/2021
Purpose:
"""
from distutils.core import setup
setup(
name='class_decorator',
version='0.1',
py_modules=['my_class_decorator'], # need my_xxx so it does not confuse with the other
#Metadata
author='Waldo Weber',
author_email='waldo@weber.edu',
description='A module that shows how to use Class Decorators',
license='Public domain',
keywords='decorators',
)
# --------------------------------------------------
def main():
"""Make your noise here"""
# --------------------------------------------------
if __name__ == '__main__':
main()
|
[
"kimjeong@hotmail.com"
] |
kimjeong@hotmail.com
|
67feadc4b9b3d489f7627d697e4e124f5a290f02
|
2869a088aa01d8ca6349f855d882630ff1431a7b
|
/PythonBasics/Chapter9/chapter9.py
|
6af27ec025a3bd33981637074e7cb29311d1db5f
|
[] |
no_license
|
hectordelahoz/PythonExercises
|
56b2081da1db85d916aafc7b046b94245892bf38
|
d21a704a76a7e7f2ecac6a774ef74f3595cc2d2c
|
refs/heads/master
| 2022-09-24T08:51:12.402383
| 2022-08-30T00:56:54
| 2022-08-30T00:56:54
| 235,253,926
| 0
| 0
| null | 2022-08-30T00:56:55
| 2020-01-21T04:13:12
|
Python
|
UTF-8
|
Python
| false
| false
| 2,188
|
py
|
class Person:
def __init__(self, **kwargs):
self.__myName = kwargs['name'] if 'name' in kwargs else 'John'
self.__myAge = kwargs['age'] if 'age' in kwargs else '21'
self.__myNationality = kwargs['nationality'] if 'nationality' in kwargs else 'American'
def name(self,myName = None):
if myName:
self.__myName = myName
return self.__myName
def age(self, myAge = None):
if myAge:
self.__myAge = myAge
return self.__myAge
    def nationality(self, myNationality = None):
        if myNationality:
            self.__myNationality = myNationality
        return self.__myNationality
def __str__(self):
return f'My Name is {self.name()}, I am {self.nationality()}, I am {str(self.age())} years old!'
class GeEmployee(Person):
def __init__(self, **kwargs):
self.__myCompany = 'GE'
self.__mySSO = kwargs['sso'] if 'sso' in kwargs else 'TBD'
super().__init__(**kwargs)
def sso(self, mySSO = None):
if mySSO:
self.__mySSO = mySSO
return self.__mySSO
def __str__(self):
return f'{super().__str__()} and I am a GE Employee {self.sso()}'
def addPerson(p,reg):
if not isinstance(p,Person):
print('it is not a Person')
return False
reg.append(p)
return True
def generateRegister(reg):
allData = []
for person in reg:
allData.append({'Name':person.name(),'Age':person.age(),'Nationality':person.nationality()})
else:
return allData
def printRegister(reg):
for entry in reg:
for key in entry.keys():
print(f'{key}: {entry[key]}', end=' ; ', flush=True)
else:
print()
def main():
hector = Person(name='Hector', age=35, nationality='Colombian')
john = Person()
luis = GeEmployee(name = 'Luis', age = '32', nationality='Brazilian', sso='212518053')
register = []
addPerson(hector,register)
addPerson(john,register)
addPerson(luis,register)
entries = generateRegister(register)
printRegister(entries)
print(luis)
if __name__ == '__main__':
main()
|
[
"212518053@ge.com"
] |
212518053@ge.com
|
e6310bc1faa76aed5031790a2ab7dd92ca71f610
|
c50ca4ccf0fba21386368a18b5db3bb13544a9ec
|
/todo/models.py
|
9b4b45d740194da4464b7dd3de81fe3ba91e6bc8
|
[] |
no_license
|
Nairmangit/django
|
380e578a192791e9775952ad242d40e767bb1763
|
2d52f10bce52ea9043a434e21ebdd7d1dc2974d1
|
refs/heads/master
| 2020-09-21T00:20:21.882923
| 2019-12-03T09:57:52
| 2019-12-03T09:57:52
| 224,626,808
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 201
|
py
|
from django.db import models
class Todo(models.Model):
text = models.CharField(max_length=30)
complete = models.BooleanField(default=False)
def __str__(self):
return self.text
|
[
"pavelshuralyov@yandex.ru"
] |
pavelshuralyov@yandex.ru
|
9856f239043013457366bdb2394c01a64ad38b85
|
564642b3ba5ec3a240f816f3dac7f8f95aa7520e
|
/classification/lossfun.py
|
e92d09f561657ef89001666381916e2403fd3512
|
[] |
no_license
|
JinZT-Git/HS-Codes
|
933f7c097b7e0111561c43336cd84486484267dc
|
a3705b4db5323502a20d3e4cb363d57e6946e713
|
refs/heads/main
| 2023-08-23T17:12:20.277977
| 2021-10-15T12:05:32
| 2021-10-15T12:05:32
| 417,377,870
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,732
|
py
|
import torch
import os
import torch.nn.functional as F
from config import Config as cfg
class_weight = cfg.class_weight
os.environ["CUDA_VISIBLE_DEVICES"] = cfg.gpu_device
def cross_entropy_loss(preds, target, reduction):
logp = F.log_softmax(preds, dim=1)
loss = torch.sum(-logp * target, dim=1)
if reduction == 'none':
return loss
elif reduction == 'mean':
return loss.mean()
elif reduction == 'sum':
return loss.sum()
else:
raise ValueError(
'`reduction` must be one of \'none\', \'mean\', or \'sum\'.')
def onehot_encoding(labels, n_classes):
return torch.zeros(labels.size(0), n_classes).to(labels.device).scatter_(
dim=1, index=labels.view(-1, 1), value=1)
def label_smoothing(preds, targets, epsilon=0.1):
    # preds: the logits produced by the network's final layer
    # targets: ground-truth labels that are not yet one-hot encoded
n_classes = preds.size(1)
device = preds.device
onehot = onehot_encoding(targets, n_classes).float().to(device)
targets = onehot * (1 - epsilon) + torch.ones_like(onehot).to(
device) * epsilon / n_classes
loss = cross_entropy_loss(preds, targets, reduction="mean")
return loss
class ContrastiveLoss(torch.nn.Module):
def __init__(self, margin=2.0):
super(ContrastiveLoss, self).__init__()
self.margin = margin
def forward(self, output, label):
criterion = []
for i in range(8):
w = torch.FloatTensor(class_weight[i]).cuda()
criterion.append(torch.nn.CrossEntropyLoss(weight=w))
loss = 0
        for _ in (0, len(output)):  # note: iterates over the tuple (0, len(output)), i.e. exactly two passes
for i in range(0, 8):
loss += criterion[i](output[i], label[i])
return loss
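# ---------------------------------------------------------------------------
# Hedged demo (not part of the original file): label_smoothing() takes raw logits
# of shape (batch, n_classes) and integer class labels of shape (batch,). Note
# that importing this module already requires the project's `config` module.
if __name__ == "__main__":
    demo_preds = torch.randn(4, 10)
    demo_targets = torch.tensor([1, 3, 0, 7])
    print(label_smoothing(demo_preds, demo_targets, epsilon=0.1))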
|
[
"915681919@qq.com"
] |
915681919@qq.com
|
c9db921904acd0462c0ff0fb4a4c844a1efe7c31
|
4b9fb3080a86e036536471a6c10697ee375be6b9
|
/image_handlers/split_image_text.py
|
72b9f7b9477f1fa825a71d3caf38a61dc64edfae
|
[] |
no_license
|
Lai-smile/decompress_extract_file
|
f2a4ec5582cd7d7490eb4e26f621bcfada9262e7
|
eb736bc138727240a45e7e88696023549029990c
|
refs/heads/master
| 2022-05-30T03:30:58.465864
| 2020-05-01T03:33:10
| 2020-05-01T03:33:10
| 260,361,821
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,895
|
py
|
# Created by lixingxing at 2018/11/14
"""
Feature: #Enter feature name here
# Enter feature description here
Scenario: #Enter scenario name here
# Enter steps here
Test File LocationL: # Enter
"""
import os
import time
import cv2
import numpy as np
from constants import path_manager
from image_handlers.image_utilities import get_iso_content_highlight, tencent_ocr, my_ocr
def pre_process(img):
gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    sobel = cv2.Sobel(gray_img, cv2.CV_8U, 1, 0, ksize=7)  # (1, 0) takes the gradient along x; (0, 1) would take it along y
ret, binary = cv2.threshold(sobel, 0, 255, cv2.THRESH_BINARY) # cv2.THRESH_OTSU +
element1 = cv2.getStructuringElement(cv2.MORPH_RECT, (24, 6))
element2 = cv2.getStructuringElement(cv2.MORPH_RECT, (30, 15)) # 15
element3 = cv2.getStructuringElement(cv2.MORPH_RECT, (40, 10))
dilation = cv2.dilate(binary, element1, iterations=1)
erosion = cv2.erode(dilation, element2, iterations=1)
dilation2 = cv2.dilate(erosion, element3, iterations=2)
# cv2.imwrite("binary.png", binary)
# cv2.imwrite("dilation.png", dilation)
# cv2.imwrite("erosion.png", erosion)
# cv2.imwrite("dilation2.png", dilation2)
return dilation2
def find_text_region(img):
region = []
img, contours, hierarchy = cv2.findContours(img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
for i in range(len(contours)):
cnt = contours[i]
area = cv2.contourArea(cnt)
if area < 3000:
continue
rect = cv2.minAreaRect(cnt)
# print("rect is: ", rect)
box = cv2.boxPoints(rect)
box = np.int0(box)
# point = box.tolist()
# min_point = min(point)
# max_point = max(point)
# print(min_point, max_point)
region.append(box)
return region
def text_ocr(region, o_img):
text_dicts_group = []
iso_table_dict = {}
box_num = 0
img_c = o_img.copy()
for box in region:
box_num += 1
point = box.tolist()
min_point = min(point)
max_point = max(point)
max_y = max(max_point[0], min_point[0])
max_x = max(max_point[1], min_point[1])
min_y = min(max_point[0], min_point[0])
min_x = min(max_point[1], min_point[1])
represent_point = min_point[:]
width = max_point[0] - min_point[0]
height = max_point[1] - min_point[1]
represent_point.append(width)
represent_point.append(height)
# print(o_img.shape)
# print(min_point, max_point)
text_region = o_img[min_x:max_x, min_y:max_y]
cv2.drawContours(img_c, [box], 0, (0, 255, 0), 2)
# cv2.imwrite('text_region_RAM.png', text_region)
# iso_table_dict = tencent_ocr('text_region_RAM.png', blank, 10, [0, 0])
text = my_ocr(text_region, blank, represent_point, 10)
if text:
iso_table_dict[tuple(represent_point)] = text
text_dicts_group.append(iso_table_dict)
# cv2.imwrite('contours.png', img_c)
return text_dicts_group
def pure_text_region(no_table_image, background_color, blank_image):
global blank
# img = cv2.imread(no_table_image)
blank = blank_image
max_x, max_y, dim = no_table_image.shape
max_area = max_x * max_y
# print(img.shape)
gray_img = cv2.cvtColor(no_table_image, cv2.COLOR_BGR2GRAY)
res, binary_img = cv2.threshold(gray_img, 45, 255, cv2.THRESH_BINARY_INV)
# canny_img = cv2.Canny(img, 50, 150)
# dilate_kernel = cv2.getStructuringElement(cv2.MORPH_OPEN, (3, 3))
# dilate_image = cv2.dilate(canny_img, dilate_kernel, iterations=1)
b_img, contours, h = cv2.findContours(binary_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# cv2.drawContours(no_table_image, contours, -1, (0, 255, 0), thickness=4)
for contour_num in range(len(contours)):
contour = contours[contour_num]
area = cv2.contourArea(contour)
if 3000 < area < 2*max_area/3:
cv2.drawContours(no_table_image, contours, contour_num, background_color, thickness=-1)
# cv2.imwrite('test_text.png', no_table_image)
dilation = pre_process(no_table_image)
region = find_text_region(dilation)
if region:
dict_group = text_ocr(region, no_table_image)
return dict_group
else:
return []
if __name__ == '__main__':
# input_file = 'pure_table.png'
input_file = os.path.join(path_manager.root, path_manager.IMAGE_TEXT_DATA_PATH, 'su_4.png')
start_time = time.time()
img = cv2.imread(input_file)
print(img.shape)
blank = np.zeros(list(img.shape), img.dtype)
dict = pure_text_region(img, (0, 0, 0), blank)
print('time spend is: {}'.format(time.time() - start_time))
print(dict)
# print(len(dict[0]))
|
[
"noreply@github.com"
] |
Lai-smile.noreply@github.com
|
497e345288a9d28536fdbaf5f67a2102b003849e
|
7652b3d21519771aa073c4f4a9d66f4f4d5db013
|
/creating-project/project/project_app/urls.py
|
de7fffc7ea068fde214f0d92d79c134b3e945a32
|
[] |
no_license
|
pavkozlov/NETOLOGY-Django-homeworks
|
9c64cde294590c8a85c5f89fd2190fe989720c84
|
c331fa10906470c974802932e9d7d7526841f6f1
|
refs/heads/master
| 2022-11-27T22:36:12.537296
| 2019-07-17T16:19:11
| 2019-07-17T16:19:11
| 189,250,824
| 0
| 1
| null | 2022-11-22T03:14:37
| 2019-05-29T15:20:09
|
Python
|
UTF-8
|
Python
| false
| false
| 282
|
py
|
from django.urls import path
from .views import stations_view
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('stations/', stations_view, name='stations_view')
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
[
"it.pavelkozlov@gmail.com"
] |
it.pavelkozlov@gmail.com
|
f30f5d1496cb56926ce73d581deca33497224893
|
4c446b67986260b37978e663bcef040e4e925a33
|
/project2/part b/reddit_model_v2.py
|
108ea81f02317afae0ff5ff679171106797716d9
|
[] |
no_license
|
xukaiyuan/Database-Systems
|
1ec815533c5c61765a48d00f3d7133ca1453232e
|
bd3fa8275b117f221431b37cc9b067387b9c9089
|
refs/heads/master
| 2020-03-11T10:34:14.252258
| 2018-06-07T03:25:32
| 2018-06-07T03:25:32
| 129,946,450
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,573
|
py
|
from __future__ import print_function
from pyspark import SparkConf, SparkContext
from pyspark.sql import SQLContext
# IMPORT OTHER MODULES HERE
import cleantext  # sanitizer module used by makeNgrams(); assumed to be provided alongside this script
flag = 0
save = 0
predict = 1
from pyspark.sql.functions import *
from pyspark.sql.types import *
from pyspark.ml.feature import CountVectorizer, CountVectorizerModel
from pyspark.ml.classification import LogisticRegression
from pyspark.ml.tuning import CrossValidator, ParamGridBuilder, CrossValidatorModel
from pyspark.ml.evaluation import BinaryClassificationEvaluator
from pyspark.mllib.linalg import DenseVector
def join(comments, label):
# task 2
associated = comments.join(label, comments.id == label.Input_id, 'inner')
return associated.select("id","body","labeldem","labelgop","labeldjt")
def makeNgrams(text):
ngrams = cleantext.sanitize(text)
concat = ngrams[1]+' '+ngrams[2]+' '+ngrams[3]
return concat.split(' ')
def pLabel(raw):
if(raw==1):
return 1
else:
return 0
def nLabel(raw):
if(raw==-1):
return 1
else:
return 0
def getLinkid(text):
if(len(text)>3):
return text[3:]
else:
return text
def posTh(p):
if(p[1]>0.2):
return 1
else:
return 0
def negTh(p):
if(p[1]>0.25):
return 1
else:
return 0
makeNgrams_udf = udf(makeNgrams, ArrayType(StringType()))
pLabel_udf = udf(pLabel, IntegerType())
nLabel_udf = udf(nLabel, IntegerType())
getLinkid_udf = udf(getLinkid, StringType())
posTh_udf = udf(posTh, IntegerType())
negTh_udf = udf(negTh, IntegerType())
states = ['Alabama', 'Alaska', 'Arizona', 'Arkansas', 'California', 'Colorado', 'Connecticut', 'Delaware', 'District of Columbia', 'Florida', 'Georgia', 'Hawaii', 'Idaho', 'Illinois', 'Indiana', 'Iowa', 'Kansas', 'Kentucky', 'Louisiana', 'Maine', 'Maryland', 'Massachusetts', 'Michigan', 'Minnesota', 'Mississippi', 'Missouri', 'Montana', 'Nebraska', 'Nevada', 'New Hampshire', 'New Jersey', 'New Mexico', 'New York', 'North Carolina', 'North Dakota', 'Ohio', 'Oklahoma', 'Oregon', 'Pennsylvania', 'Rhode Island', 'South Carolina', 'South Dakota', 'Tennessee', 'Texas', 'Utah', 'Vermont', 'Virginia', 'Washington', 'West Virginia', 'Wisconsin', 'Wyoming']
def main(sqlContext):
"""Main function takes a Spark SQL context."""
# YOUR CODE HERE
# YOU MAY ADD OTHER FUNCTIONS AS NEEDED
# load files
label = sqlContext.read.load("labeled_data.csv",format="csv", sep=",", inferSchema="true", header="true")
if(flag):
comments = sqlContext.read.json("comments-minimal.json.bz2")
submissions = sqlContext.read.json("submissions.json.bz2")
print("loading done")
comments.write.parquet("comments_data")
submissions.write.parquet("submissions_data")
print("writing done")
else:
comments = sqlContext.read.parquet("comments_data")
submissions = sqlContext.read.parquet("submissions_data")
print("loading done")
if(save):
# task 7 starts here
associated = join(comments, label)
withngrams = associated.withColumn("ngrams", makeNgrams_udf(associated['body']))
withplabels = withngrams.withColumn("poslabel", pLabel_udf(withngrams['labeldjt']))
withpnlabels = withplabels.withColumn("neglabel", nLabel_udf(withplabels['labeldjt'])).select("id","ngrams","poslabel","neglabel")
# withpnlabels.show()
cv = CountVectorizer(binary=True, inputCol="ngrams", outputCol="features")
model = cv.fit(withpnlabels)
model.save("cv.model")
# model.transform(withpnlabels).show()
pos = model.transform(withpnlabels).select("id", col("poslabel").alias("label"), "features")
neg = model.transform(withpnlabels).select("id", col("neglabel").alias("label"), "features")
# pos.show()
# neg.show()
poslr = LogisticRegression(labelCol="label", featuresCol="features", maxIter=10)
neglr = LogisticRegression(labelCol="label", featuresCol="features", maxIter=10)
posEvaluator = BinaryClassificationEvaluator()
negEvaluator = BinaryClassificationEvaluator()
posParamGrid = ParamGridBuilder().addGrid(poslr.regParam, [1.0]).build()
negParamGrid = ParamGridBuilder().addGrid(neglr.regParam, [1.0]).build()
posCrossval = CrossValidator(
estimator=poslr,
evaluator=posEvaluator,
estimatorParamMaps=posParamGrid,
numFolds=2) # for test
negCrossval = CrossValidator(
estimator=neglr,
evaluator=negEvaluator,
estimatorParamMaps=negParamGrid,
numFolds=2) # for test
posTrain, posTest = pos.randomSplit([0.5, 0.5])
negTrain, negTest = neg.randomSplit([0.5, 0.5])
print("Training positive classifier...")
posModel = posCrossval.fit(posTrain)
print("Training negative classifier...")
negModel = negCrossval.fit(negTrain)
posModel.save("pos.model")
negModel.save("neg.model")
print("trained")
else:
# comments.show()
# submissions.show()
posModel = CrossValidatorModel.load("pos.model")
negModel = CrossValidatorModel.load("neg.model")
model = CountVectorizerModel.load("cv.model")
# withngrams = comments.withColumn("ngrams", makeNgrams_udf(comments['body']))
# cv = CountVectorizer(binary=True, inputCol="ngrams", outputCol="features")
# model = cv.fit(withngrams)
print("model loaded")
if(predict==0):
# task 8 starts here
temp_comments = comments.select("id", "link_id", "author_flair_text", "created_utc", "body")
clean_comments = temp_comments.withColumn("true_id", getLinkid_udf(temp_comments['link_id']))
# print(clean_comments.count())
clean_submissions = submissions.select(col("id").alias("sub_id"), "title")
# clean_comments.show()
# clean_submissions.show()
com_sub = clean_comments.join(clean_submissions, clean_comments.true_id==clean_submissions.sub_id, "inner")
com_sub.write.parquet("com_sub")
else:
# task 9 starts here
com_sub = sqlContext.read.parquet("com_sub")
com_sub = com_sub.sample(False, 0.0001, None)
filtered = com_sub.filter("body NOT LIKE '%/s%' and body NOT LIKE '>%'")
# print(filtered.count())
filtered_ngrams = filtered.withColumn("ngrams", makeNgrams_udf(filtered['body']))
# filtered_ngrams = filtered_ngrams.sample(False, 0.01, None)
print("prepared")
featuredata = model.transform(filtered_ngrams).select("id","author_flair_text","created_utc","sub_id","title","features")
posResult = posModel.transform(featuredata)
negResult = negModel.transform(featuredata)
# posResult.show()
# negResult.show()
poslabel = posResult.withColumn("positive",posTh_udf(posResult['probability']))# .select("id", "author_flair_text", "created_utc", "title", "positive")
neglabel = negResult.withColumn("negative",negTh_udf(negResult['probability']))# .select(col("id").alias("nid"), "author_flair_text", "created_utc", "title", "negative")
print("predict done")
# poslabel.show()
# neglabel.show()
# how to combine these 2 tables???
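# (One possible approach, not from the original author: both prediction tables are
#  derived from the same featuredata rows, so they could be joined on the comment id,
#  keeping the positive label from one table and the negative label from the other.)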
# task 10 starts here
# c_all = poslabel.count()
all_day = poslabel.withColumn("date",from_unixtime('created_utc').cast(DateType())).groupby("date").count()
pos_posts = poslabel.filter("positive = 1")
# c_pos_posts = pos_posts.count()
# p_pos_posts = c_pos_posts/c_all
# print(p_pos_posts)
# neg_posts = neglabel.filter("negative = 1")
# c_neg_posts = neg_posts.count()
# p_neg_posts = c_neg_posts/c_all
# print(p_neg_posts)
pos_day = pos_posts.withColumn("pos_date",from_unixtime('created_utc').cast(DateType())).groupby("pos_date").count().withColumnRenamed("count","pos_count")
p_pos_day = all_day.join(pos_day, all_day.date==pos_day.pos_date, "left").withColumn("pos_per", col("pos_count")/col("count"))
p_pos_day.show()
print("end")
if __name__ == "__main__":
conf = SparkConf().setAppName("CS143 Project 2B")
conf = conf.setMaster("local[*]")
sc = SparkContext(conf=conf)
sqlContext = SQLContext(sc)
sc.addPyFile("cleantext.py")
import cleantext
# load csv file
main(sqlContext)
|
[
"kyxu95@gmail.com"
] |
kyxu95@gmail.com
|
f427572dcc294f2f278b1dc156e4b0e0c130a115
|
a4c5a56ed6d3c4299213ff8fd0e4f37719e063ff
|
/tests/test_override.py
|
ec0ad1f2bfab03914d5df5c21408b1e52fcbb993
|
[
"BSD-3-Clause"
] |
permissive
|
pyecore/motra
|
76add183cf2777bef5916b88e30dd2b3eef8cb06
|
c0b3e8e54b46572c3bc10bb2b719102e267c371b
|
refs/heads/main
| 2023-09-02T12:44:37.688979
| 2021-10-27T05:53:01
| 2021-10-27T05:53:01
| 395,357,398
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 965
|
py
|
import pytest
import inspect
import pyecore.ecore as ecore
from motra import m2m
@pytest.fixture(scope='module')
def t1():
# Define a transformation meta-data
t = m2m.Transformation('t1', inputs=['in_model'], outputs=['in_model'])
@t.mapping(when=lambda self: self.name.startswith('Egg'))
def r1(self: ecore.EClass):
self.name = self.name + '_egg'
@t.mapping(when=lambda self: self.name.startswith('Spam'))
def r1(self: ecore.EClass):
self.name = self.name + '_spam'
return t, r1
def test__override_with_when(t1):
t, r1 = t1
# Fake main for the mapping execution
result1 = None
result2 = None
def fake_main(in_model):
nonlocal result1
nonlocal result2
result1 = r1(ecore.EClass('Spam'))
result2 = r1(ecore.EClass('Egg'))
t._main = fake_main
t.run(in_model=ecore.EPackage())
assert result1.name == "Spam_spam"
assert result2.name == "Egg_egg"
|
[
"vincent.aranega@gmail.com"
] |
vincent.aranega@gmail.com
|
2eb64c8805a832356c2c1af85b8173d8303d1b14
|
09cb91bdfc1569f12f73648e390943602c3790c5
|
/voters/viewsets.py
|
df2daf53f9207631ddd90cd37d8e58afe475f2af
|
[] |
no_license
|
Chukwunazaekpere/ceo-bincom-test
|
0a916bff6be82df47684550399ee77de285106e9
|
69dd413c20fe79ece0b732b8b161d951d598be56
|
refs/heads/master
| 2023-07-01T04:36:51.568102
| 2021-08-05T19:57:10
| 2021-08-05T19:57:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 250
|
py
|
from rest_framework import viewsets
from .models import (
Voters
)
from .serializers import (
VotersSerializer
)
class VotersViewSet(viewsets.ModelViewSet):
queryset = Voters.objects.all()
serializer_class = VotersSerializer
|
[
"emmanuelchinaza5777@gmail.com"
] |
emmanuelchinaza5777@gmail.com
|
1285ed4012292a12041f5aaf57a58b6db329bf76
|
aa06db0771f92f9c21a16ace4f91196b4b607e58
|
/store/store/migrations/0015_auto_20170913_1636.py
|
ea3064dc31e49639a8c8add2ea524d66fe32f85b
|
[] |
no_license
|
cami-project/cami-project
|
7b51430b2e97565d58fbab98f3a68108821e5c6d
|
386b6ea158f3849dd302e9c15d3c7acc1ac2eac5
|
refs/heads/master
| 2020-04-12T03:05:38.970568
| 2018-09-21T14:04:00
| 2018-09-21T14:04:00
| 59,824,119
| 2
| 1
| null | 2018-03-09T13:18:37
| 2016-05-27T09:49:17
|
C
|
UTF-8
|
Python
| false
| false
| 505
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-09-13 13:36
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('store', '0014_auto_20170825_1309'),
]
operations = [
migrations.AlterField(
model_name='device',
name='device_identifier',
field=models.CharField(default=uuid.uuid4, max_length=128, unique=True),
),
]
|
[
"alex.sorici@gmail.com"
] |
alex.sorici@gmail.com
|
2d906722068461d7ff7b6331b9cc36ebbc8e6d9a
|
f2f362267e48662445eeac3bb309a61353b275b5
|
/leetcode/closest_binary_search_tree_value.py
|
410727a8b18cac3b64d2ea0bef91100999e47834
|
[] |
no_license
|
karthikpalavalli/Puzzles
|
78dfe87edfaacc72782f6391e225841768e3638d
|
e4e02635d7b5cf001201b4ab05f70836beb1b8ff
|
refs/heads/master
| 2020-03-30T12:20:07.064549
| 2019-12-21T01:49:55
| 2019-12-21T01:49:55
| 151,219,634
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 596
|
py
|
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def closestValue(self, root: TreeNode, target: float) -> int:
def dfs_inorder(root):
if root is None:
return
nonlocal closest_value
dfs_inorder(root.left)
if (root.val - target) ** 2 < (closest_value - target) ** 2:
closest_value = root.val
dfs_inorder(root.right)
closest_value = 10 ** 5
dfs_inorder(root)
return closest_value
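# Illustrative usage (not part of the original solution):
# root = TreeNode(4); root.left = TreeNode(2); root.right = TreeNode(5)
# root.left.left = TreeNode(1); root.left.right = TreeNode(3)
# Solution().closestValue(root, 3.7) # -> 4, since |4 - 3.7| < |3 - 3.7|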
|
[
"karthik00p@gmail.com"
] |
karthik00p@gmail.com
|
2332d5c21dfd47be0eab2e6439fbacef32c5aeb3
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/data/p3BR/R1/benchmark/startPyquil199.py
|
b8ba1c63c355402f38a256e26772b3f9cb67ca75
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,775
|
py
|
# qubit number=2
# total number=33
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += RX(-0.09738937226128368,2) # number=2
prog += H(1) # number=30
prog += CZ(2,1) # number=31
prog += H(1) # number=32
prog += H(1) # number=3
prog += CNOT(1,0) # number=4
prog += Y(1) # number=15
prog += CNOT(1,0) # number=10
prog += H(1) # number=19
prog += CZ(0,1) # number=20
prog += RX(-0.6000441968356504,1) # number=28
prog += H(1) # number=21
prog += CNOT(0,1) # number=22
prog += X(1) # number=23
prog += H(2) # number=29
prog += CNOT(0,1) # number=24
prog += CNOT(0,1) # number=18
prog += Z(1) # number=11
prog += CNOT(1,0) # number=12
prog += CNOT(2,1) # number=26
prog += Y(1) # number=14
prog += CNOT(1,0) # number=5
prog += X(1) # number=6
prog += Z(1) # number=8
prog += X(1) # number=7
prog += RX(-2.42845112122491,1) # number=25
# circuit end
return prog
def summarise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('1q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil199.csv","w")
print(summarise_results(bitstrings),file=writefile)
writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
5b3e342ade56e396a3dfad0237f974e5082e1bc9
|
114b61513733083555924fc8ab347335e10471ae
|
/stackone/stackone/viewModel/MultipartPostHandler.py
|
df8c4aa526fbfe18ae6a303322624a6199dcffe3
|
[] |
no_license
|
smarkm/ovm
|
6e3bea19816affdf919cbd0aa81688e6c56e7565
|
cd30ad5926f933e6723805d380e57c638ee46bac
|
refs/heads/master
| 2021-01-21T04:04:28.637901
| 2015-08-31T03:05:03
| 2015-08-31T03:05:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,033
|
py
|
#!/usr/bin/python
####
# 02/2006 Will Holcomb <wholcomb@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# 7/26/07 Slightly modified by Brian Schneider
# in order to support unicode files ( multipart_encode function )
"""
Usage:
Enables the use of multipart/form-data for posting forms
Inspirations:
Upload files in python:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
urllib2_file:
Fabien Seisen: <fabien@seisen.org>
Example:
import MultipartPostHandler, urllib2, cookielib
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
MultipartPostHandler.MultipartPostHandler)
params = { "username" : "bob", "password" : "riviera",
"file" : open("filename", "rb") }
opener.open("http://wwww.bobsite.com/upload/", params)
Further Example:
The main function of this file is a sample which downloads a page and
then uploads it to the W3C validator.
"""
import urllib
import urllib2
import mimetools, mimetypes
import os, stat
from cStringIO import StringIO
import sys
class Callable:
def __init__(self, anycallable):
self.__call__ = anycallable
# Controls how sequences are uncoded. If true, elements may be given multiple values by
# assigning a sequence.
doseq = 1
class MultipartPostHandler(urllib2.BaseHandler):
handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
def http_request(self, request):
data = request.get_data()
if data is not None and type(data) != str:
v_files = []
v_vars = []
try:
for(key, value) in data.items():
if type(value) == file:
v_files.append((key, value))
else:
v_vars.append((key, value))
except TypeError:
systype, value, traceback = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", traceback
if len(v_files) == 0:
data = urllib.urlencode(v_vars, doseq)
else:
boundary, data = self.multipart_encode(v_vars, v_files)
contenttype = 'multipart/form-data; boundary=%s' % boundary
if(request.has_header('Content-Type')
and request.get_header('Content-Type').find('multipart/form-data') != 0):
print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
request.add_unredirected_header('Content-Type', contenttype)
request.add_data(data)
return request
def multipart_encode(vars, files, boundary = None, buf = None):
if boundary is None:
boundary = mimetools.choose_boundary()
if buf is None:
buf = StringIO()
for(key, value) in vars:
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"' % key)
buf.write('\r\n\r\n' + value + '\r\n')
for(key, fd) in files:
file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
filename = fd.name.split('/')[-1]
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename))
buf.write('Content-Type: %s\r\n' % contenttype)
# buffer += 'Content-Length: %s\r\n' % file_size
fd.seek(0)
buf.write('\r\n' + fd.read() + '\r\n')
buf.write('--' + boundary + '--\r\n\r\n')
buf = buf.getvalue()
return boundary, buf
multipart_encode = Callable(multipart_encode)
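# For reference (illustrative, with a made-up boundary value), the body built above looks like:
#   --BOUNDARY
#   Content-Disposition: form-data; name="username"
#
#   bob
#   --BOUNDARY
#   Content-Disposition: form-data; name="file"; filename="report.txt"
#   Content-Type: text/plain
#
#   <file bytes>
#   --BOUNDARY--
# with CRLF line endings throughout.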
https_request = http_request
def main():
import tempfile, sys
validatorURL = "http://localhost"
opener = urllib2.build_opener(MultipartPostHandler)
def validateFile(url):
temp = tempfile.mkstemp(suffix=".html")
os.write(temp[0], opener.open(url).read())
params = { "ss" : "0", # show source
"doctype" : "Inline",
"uploaded_file" : open(temp[1], "rb") }
print opener.open(validatorURL, params).read()
os.remove(temp[1])
if len(sys.argv[1:]) > 0:
for arg in sys.argv[1:]:
validateFile(arg)
else:
validateFile("http://www.google.com")
if __name__=="__main__":
main()
|
[
"18614072558@163.com"
] |
18614072558@163.com
|
2827c9aa71286f778f7010ddcf378a307b7e403f
|
1df8bb2fafac39dbea2f0c92831933a479508c0b
|
/defoe/nzpp/setup.py
|
9ae583e7449c74cc9844499d3372251b36cecb09
|
[
"MIT",
"CC0-1.0",
"CC-BY-4.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain"
] |
permissive
|
alan-turing-institute/defoe
|
b816aee7de0510dbc71322dd156015fd429d4e8f
|
d7d2a22c8976fb0b0016cb0a231d4822424f8e88
|
refs/heads/master
| 2022-03-08T09:05:48.148085
| 2022-02-11T10:59:37
| 2022-02-11T10:59:37
| 165,081,527
| 16
| 6
|
MIT
| 2022-02-01T09:39:53
| 2019-01-10T15:14:53
|
Lex
|
UTF-8
|
Python
| false
| false
| 725
|
py
|
"""
Given a filename create a defoe.nzpp.articles.Articles.
"""
from defoe.nzpp.articles import Articles
def filename_to_object(filename):
"""
Given a filename create a defoe.nzpp.articles.Articles. If an
error arises during its creation this is caught and returned as a
string.
:param filename: filename
:type filename: str or unicode
:return: tuple of form (Articles, None) or (filename, error message),
if there was an error creating Articles
:rtype: tuple(defoe.nzpp.articles.Articles | str or unicode, str
or unicode)
"""
try:
result = (Articles(filename), None)
except Exception as exception:
result = (filename, str(exception))
return result
|
[
"michaelj@epcc.ed.ac.uk"
] |
michaelj@epcc.ed.ac.uk
|
5dcc386e96726fe2001888a8096c2940980aae92
|
e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f
|
/indices/aphidivor.py
|
488d7159a51c32e35a6a3d61bdf217023f68d3e4
|
[] |
no_license
|
psdh/WhatsintheVector
|
e8aabacc054a88b4cb25303548980af9a10c12a8
|
a24168d068d9c69dc7a0fd13f606c080ae82e2a6
|
refs/heads/master
| 2021-01-25T10:34:22.651619
| 2015-09-23T11:54:06
| 2015-09-23T11:54:06
| 42,749,205
| 2
| 3
| null | 2015-09-23T11:54:07
| 2015-09-18T22:06:38
|
Python
|
UTF-8
|
Python
| false
| false
| 63
|
py
|
ii = [('RennJIT.py', 2), ('WestJIT2.py', 3), ('WestJIT.py', 2)]
|
[
"prabhjyotsingh95@gmail.com"
] |
prabhjyotsingh95@gmail.com
|
cacff14e0b1b2678353ba2d462a5de00a04555a7
|
55ab4d0aecc49078e7a0f47a05457c9602327ed7
|
/egs/madcat_arabic/v1/local/create_mask_from_page_image.py
|
b4147dcd3851a52f5a1a9319a6986519f66ac00b
|
[
"Apache-2.0"
] |
permissive
|
aarora8/waldo
|
56a171f0b2048d980023173ab38f5248db936eeb
|
ad08a05fa9e9890ad986f11d4bca3c773b228d87
|
refs/heads/master
| 2020-03-14T04:43:47.513263
| 2018-06-07T05:09:47
| 2018-06-07T05:09:47
| 131,447,076
| 0
| 0
|
Apache-2.0
| 2018-04-28T22:00:19
| 2018-04-28T22:00:19
| null |
UTF-8
|
Python
| false
| false
| 5,535
|
py
|
#!/usr/bin/env python3
# Copyright 2018 Johns Hopkins University (author: Ashish Arora)
# Apache 2.0
""" This module will be used for creating text localization mask on page image.
Given the word segmentation (bounding box around a word) for every word, it will
extract line segmentation. To extract line segmentation, it will take word bounding
boxes of a line as input, will create a minimum area bounding box that will contain
all corner points of word bounding boxes. The obtained bounding box (will not necessarily
be vertically or horizontally aligned).
"""
import xml.dom.minidom as minidom
from waldo.data_manipulation import *
from waldo.core_config import CoreConfig
from waldo.mar_utils import compute_hull
from scipy.spatial import ConvexHull
from waldo.data_transformation import scale_down_image_with_objects, \
make_square_image_with_padding
def get_mask_from_page_image(madcat_file_path, image_file_name, max_size):
""" Given a page image, extracts the page image mask from it.
Input
-----
image_file_name (string): complete path and name of the page image.
madcat_file_path (string): complete path and name of the madcat xml file
corresponding to the page image.
"""
objects = _get_bounding_box(madcat_file_path)
img = Image.open(image_file_name).convert("RGB")
im_arr = np.array(img)
config = CoreConfig()
config.num_colors = 3
image_with_objects = {
'img': im_arr,
'objects': objects
}
im_height = im_arr.shape[0]
im_width = im_arr.shape[1]
validated_objects = []
for original_object in image_with_objects['objects']:
ordered_polygon_points = original_object['polygon']
object = {}
resized_pp = []
for point in ordered_polygon_points:
new_point = _validate_and_update_point(point, im_width, im_height)
resized_pp.append(new_point)
object['polygon'] = resized_pp
validated_objects.append(object)
validated_image_with_objects = {
'img': im_arr,
'objects': validated_objects
}
scaled_image_with_objects = scale_down_image_with_objects(validated_image_with_objects, config,
max_size)
img_padded = make_square_image_with_padding(scaled_image_with_objects['img'], 3, 255)
padded_image_with_objects = {
'img': img_padded,
'objects': scaled_image_with_objects['objects']
}
y = convert_to_mask(padded_image_with_objects, config)
return y
def _get_bounding_box(madcat_file_path):
""" Given word boxes of each line, return bounding box for each
line in sorted order
Input
-----
image_file_name (string): complete path and name of the page image.
madcat_file_path (string): complete path and name of the madcat xml file
corresponding to the page image.
"""
objects = []
doc = minidom.parse(madcat_file_path)
zone = doc.getElementsByTagName('zone')
for node in zone:
object = {}
token_image = node.getElementsByTagName('token-image')
mbb_input = []
for token_node in token_image:
word_point = token_node.getElementsByTagName('point')
for word_node in word_point:
word_coordinate = (int(word_node.getAttribute('x')), int(word_node.getAttribute('y')))
mbb_input.append(word_coordinate)
points = get_minimum_bounding_box(mbb_input)
points = tuple(points)
points_ordered = [points[index] for index in ConvexHull(points).vertices]
object['polygon'] = points_ordered
objects.append(object)
return objects
def _validate_and_update_point(pt0, im_width, im_height, pt1=(0, 0)):
new_point = pt0
if pt0[0] < 0:
new_point = _get_pointx_inside_origin(pt0, pt1)
if pt0[0] > im_width:
new_point = _get_pointx_inside_width(pt0, pt1, im_width)
if pt0[1] < 0:
new_point = _get_pointy_inside_origin(pt0, pt1)
if pt0[1] > im_height:
new_point = _get_pointy_inside_height(pt0, pt1, im_height)
return new_point
def _get_pointx_inside_origin(pt0, pt1):
""" Given a point pt0, return an updated point that is
inside origin. It finds line equation and uses it to
get updated point x value inside origin
Returns
-------
(float, float): updated point
"""
return (0, pt0[1])
# TODO
def _get_pointx_inside_width(pt0, pt1, im_width):
""" Given a point pt0, return an updated point that is
inside image width. It finds line equation and uses it to
get updated point x value inside image width
Returns
-------
(float, float): updated point
"""
return (im_width, pt0[1])
# TODO
def _get_pointy_inside_origin(pt0, pt1):
""" Given a point pt0, return an updated point that is
inside origin. It finds line equation and uses it to
get updated point y value inside origin
Returns
-------
(float, float): updated point
"""
return (pt0[0], 0)
# TODO
def _get_pointy_inside_height(pt0, pt1, im_height):
""" Given a point pt0, return an updated point that is
inside image height. It finds line equation and uses it to
get updated point y value inside image height
Returns
-------
(float, float): updated point
"""
return (pt0[0], im_height)
# TODO
|
[
"dpovey@gmail.com"
] |
dpovey@gmail.com
|
afbe370c3f2219dd8bd99964dbbbedd97b09d9be
|
a65e0a3afebbd3c033f56d198103704d8f8092ff
|
/Text-Categorization/kfoldprep2.py
|
327532ab398312956c4f05c730b041ebe6335dfd
|
[] |
no_license
|
layth79/ece467-NLP
|
a44087085b247f5279ec897f6b0c73ee543655ac
|
796195a67d5f9634d56dc53d946618818ba05948
|
refs/heads/main
| 2023-07-13T20:32:20.317460
| 2021-08-12T00:28:36
| 2021-08-12T00:28:36
| 395,126,850
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 660
|
py
|
import random
testDoc = open("corpus2_train.labels")
#Cross Validation
testSet = open("corpus2_test.list", 'w')
validation = open("corpus2_test.labels", 'w')
trainSet = open("corpus3_train1.labels", 'w')
#shuffle data
lines = testDoc.readlines()
random.shuffle(lines)
open('corpus3_shuffled.labels', 'w').writelines(lines)
shuffled = open('corpus3_shuffled.labels')
i = 0
for line in shuffled:
i+=1
if (i == 716):
break
trainSet.write(line)
trainSet.close()
for line in shuffled:
x = line.split()
testSet.write(x[0] + '\n')
validation.write(line)
validation.close()
testSet.close()
|
[
"layth.yassin@cooper.edu"
] |
layth.yassin@cooper.edu
|
dcf60d425a75a5583dc890529bb1f1fffe42a262
|
428ee863e50fecfaedbbf64f3da95e9acb746ae4
|
/src/tamsin/main.py
|
a9ea83fa097c8b5749742963afb74886d3b5d15a
|
[
"BSD-3-Clause",
"Unlicense",
"LicenseRef-scancode-public-domain"
] |
permissive
|
catseye/Tamsin
|
ba53a0ee4ac882486a958e6ba7225f19eea763ef
|
1c9e7ade052d734fa1753d612f2426ac067d5252
|
refs/heads/master
| 2021-01-17T09:21:25.202969
| 2016-03-31T15:00:14
| 2016-03-31T15:00:14
| 19,212,331
| 12
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,766
|
py
|
# encoding: UTF-8
# Copyright (c)2014 Chris Pressey, Cat's Eye Technologies.
# Distributed under a BSD-style license; see LICENSE for more information.
import os
import subprocess
import sys
from tamsin.buffer import FileBuffer, StringBuffer
from tamsin.event import DebugEventListener
from tamsin.term import Atom
from tamsin.scanner import (
Scanner, EOF, UTF8ScannerEngine, TamsinScannerEngine
)
from tamsin.parser import Parser
from tamsin.interpreter import Interpreter
from tamsin.desugarer import Desugarer
from tamsin.analyzer import Analyzer
from tamsin.compiler import Compiler # to be replaced by...
from tamsin.codegen import CodeGen
from tamsin.backends.c import Emitter
def parse(filename):
with open(filename, 'r') as f:
scanner = Scanner(
FileBuffer(f, filename=filename),
#StringBuffer(f.read(), filename=filename),
engines=(TamsinScannerEngine(),)
)
parser = Parser(scanner)
ast = parser.grammar()
desugarer = Desugarer(ast)
ast = desugarer.desugar(ast)
return ast
def parse_and_check_args(args):
ast = None
for arg in args:
next_ast = parse(arg)
if ast is None:
ast = next_ast
else:
ast.incorporate(next_ast)
analyzer = Analyzer(ast)
ast = analyzer.analyze(ast)
return ast
def run(ast, listeners=None):
scanner = Scanner(
FileBuffer(sys.stdin, filename='<stdin>'),
#StringBuffer(sys.stdin.read(), filename='<stdin>'),
engines=(UTF8ScannerEngine(),),
listeners=listeners
)
interpreter = Interpreter(
ast, scanner, listeners=listeners
)
(succeeded, result) = interpreter.interpret_program(ast)
if not succeeded:
sys.stderr.write(str(result) + "\n")
sys.exit(1)
print str(result)
def main(args, tamsin_dir='.'):
listeners = []
if args[0] == '--debug':
listeners.append(DebugEventListener())
args = args[1:]
if args[0] == 'scan':
with open(args[1], 'r') as f:
scanner = Scanner(
FileBuffer(f, filename=args[1]),
engines=(TamsinScannerEngine(),),
listeners=listeners
)
tok = None
while tok is not EOF:
tok = scanner.scan()
if tok is not EOF:
print Atom(tok).repr()
print
elif args[0] == 'parse':
parser = Parser.for_file(args[1])
ast = parser.grammar()
print str(ast)
elif args[0] == 'desugar':
parser = Parser.for_file(args[1])
ast = parser.grammar()
desugarer = Desugarer(ast)
ast = desugarer.desugar(ast)
print str(ast)
elif args[0] == 'analyze':
ast = parse_and_check_args(args[1:])
print str(ast)
elif args[0] == 'compile':
ast = parse_and_check_args(args[1:])
compiler = Compiler(ast, sys.stdout)
compiler.compile()
elif args[0] == 'codegen':
ast = parse_and_check_args(args[1:])
generator = CodeGen(ast)
result = generator.generate()
emitter = Emitter(result, sys.stdout)
emitter.go()
elif args[0] == 'doublecompile':
# http://www.youtube.com/watch?v=6WxJECOFg8w
ast = parse_and_check_args(args[1:])
c_filename = 'foo.c'
exe_filename = './foo'
with open(c_filename, 'w') as f:
compiler = Compiler(ast, f)
compiler.compile()
c_src_dir = os.path.join(tamsin_dir, 'c_src')
command = ("gcc", "-g", "-I%s" % c_src_dir, "-L%s" % c_src_dir,
c_filename, "-o", exe_filename, "-ltamsin")
try:
subprocess.check_call(command)
exit_code = 0
except subprocess.CalledProcessError:
exit_code = 1
#subprocess.call(('rm', '-f', c_filename))
sys.exit(exit_code)
elif args[0] == 'loadngo':
ast = parse_and_check_args(args[1:])
c_filename = 'foo.c'
exe_filename = './foo'
with open(c_filename, 'w') as f:
compiler = Compiler(ast, f)
compiler.compile()
c_src_dir = os.path.join(tamsin_dir, 'c_src')
command = ("gcc", "-g", "-I%s" % c_src_dir, "-L%s" % c_src_dir,
c_filename, "-o", exe_filename, "-ltamsin")
try:
subprocess.check_call(command)
subprocess.check_call((exe_filename,))
exit_code = 0
except subprocess.CalledProcessError:
exit_code = 1
subprocess.call(('rm', '-f', c_filename, exe_filename))
sys.exit(exit_code)
else:
ast = parse_and_check_args(args)
run(ast, listeners=listeners)
|
[
"cpressey@catseye.tc"
] |
cpressey@catseye.tc
|
4708a37bf64d1775f833ff0ec1f2ed6eef44407f
|
4c6927247a3a5fcc73d6101a3cb905f85f03c7a8
|
/Data/process_data.py
|
e646ea023c62ce4f62a93f0262a0836bc44cd355
|
[
"Unlicense"
] |
permissive
|
PZebarth/Python-ML-Pipeline
|
2c0ca9ea4b44b388b0ba83f44a9653f5967fa32e
|
b12b32db850c95298b225638f7a32e54e5d1221f
|
refs/heads/main
| 2023-02-10T05:34:53.559108
| 2021-01-05T19:18:28
| 2021-01-05T19:18:28
| 325,661,300
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,317
|
py
|
import sys
import pandas as pd
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
'''
Loads csv data from messages_filepath and categories_filepath filepaths and
merges into a single dataframe.
Input:
messages_filepath - filepath string
categories_filepath - filepath string
Output:
df - merged dataframe
'''
# reading csv data into dataframes
messages = pd.read_csv(messages_filepath)
categories = pd.read_csv(categories_filepath)
# merging data frames on id key
df = messages.merge(categories, on=['id'])
return df
def clean_data(df):
'''
Inserts correct column names and cleans the data of duplicate and NaN values
Input:
df - dataframe
Output:
df - dataframe
'''
# splitting data in categories column
categories = df.categories.str.split(pat =';', expand=True)
# obtaining list of categories
row = categories.iloc[0]
# obtaining a clean list of category names
category_colnames = row.apply(lambda x: x[:-2]).tolist()
# renaming columns based on list of category names
categories.columns = category_colnames
# converting categories to numbers 0 and 1
for column in categories:
# selects last digit
categories[column] = categories[column].apply(lambda x: x[-1:])
# encodes data as integer
categories[column] = categories[column].astype(int)
# removes old categories column
df = df.drop(columns = 'categories')
# concatenates cleaned categories
df = pd.concat([df,categories], axis=1)
# drops duplicates
df.drop_duplicates(inplace=True)
# drops NaN values
df.dropna(how = 'any', subset = category_colnames, inplace=True)
# replaces 2.0 with 1.0 in related column
df.related.replace(to_replace = 2.0, value = 1.0, inplace=True)
# the child alone column only has zeros and can't train our model
df = df.drop(['child_alone'],axis=1)
return df
def save_data(df, database_filename):
'''
Saves dataframe in sql table
Input:
df - dataframe
database_filename - filename string
'''
# create a sqlite engine and write the dataframe to the 'messages_disaster' table
engine = create_engine('sqlite:///' + database_filename)
df.to_sql('messages_disaster', engine, if_exists='replace', index=False)
def main():
if len(sys.argv) == 4:
messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
print('Loading data...\n MESSAGES: {}\n CATEGORIES: {}'
.format(messages_filepath, categories_filepath))
df = load_data(messages_filepath, categories_filepath)
print('Cleaning data...')
df = clean_data(df)
print('Saving data...\n DATABASE: {}'.format(database_filepath))
save_data(df, database_filepath)
print('Cleaned data saved to database!')
else:
print('Please provide the filepaths of the messages and categories '\
'datasets as the first and second argument respectively, as '\
'well as the filepath of the database to save the cleaned data '\
'to as the third argument. \n\nExample: python process_data.py '\
'disaster_messages.csv disaster_categories.csv '\
'DisasterResponse.db')
if __name__ == '__main__':
main()
|
[
"noreply@github.com"
] |
PZebarth.noreply@github.com
|
ac09433b7ea92f104f6fa94e0dd0943bbfee0442
|
2d0f5d0e9b134dd43747f9a324c7c1a5d60d20da
|
/working_with_timestamps.py
|
8368b09f6ca896a81ca88ffccbf7d68ca5769a21
|
[] |
no_license
|
sushrut7898/complete_python_mastery
|
4521254d5ea8d9f1c2a80a82239f977b4bf5eb3e
|
cd10c6fe3546893aa58b9a5fc41141cbe18e3293
|
refs/heads/master
| 2022-12-08T08:41:05.971723
| 2020-08-14T06:54:32
| 2020-08-14T06:54:32
| 285,747,101
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 215
|
py
|
import time
print(time.time()) # seconds elapsed since the epoch (Unix time)
def send_emails():
for i in range(10000):
pass
start = time.time()
send_emails()
end = time.time()
duration = end - start
print(duration)
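# (Side note, not in the original file: for timing short code paths, time.perf_counter()
#  is preferable to time.time(), e.g.
#    start = time.perf_counter(); send_emails(); print(time.perf_counter() - start))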
|
[
"sushrut7898@gmail.com"
] |
sushrut7898@gmail.com
|
861dff28f9a037a7efe6412727882352ecd7643e
|
adbc8062ea7620eae45a94b9882dfb2b6bb8d103
|
/server/trips/urls.py
|
8839aef735bdea7bc4375f5e56aac81570f83a20
|
[
"MIT"
] |
permissive
|
el-Joft/taxi-app
|
d50063de5969f2cd07c97aa79b07693bd3862acd
|
570cc6879aaab2ec944e29f2f5f8e2a710a97716
|
refs/heads/develop
| 2023-02-04T17:41:28.977300
| 2020-12-30T15:13:18
| 2020-12-30T15:13:18
| 323,642,400
| 0
| 0
|
MIT
| 2020-12-30T15:13:19
| 2020-12-22T14:03:06
|
Python
|
UTF-8
|
Python
| false
| false
| 297
|
py
|
from django.contrib import admin
from django.urls import path
from trips.views import TripView
app_name = 'trips'
urlpatterns = [
path('', TripView.as_view({'get': 'list'}), name='trip_list'),
path('<uuid:trip_id>/', TripView.as_view({'get': 'retrieve'}), name='trip_detail'), # new
]
|
[
"ottimothy@gmail.com"
] |
ottimothy@gmail.com
|
8d1d223014ed6a6fd3dedf230f0818e8b775fd28
|
3c564b05a236f3ea3940f8354a85cffb8a502c5f
|
/run.py
|
6595710fc72673428344099ef7e59562672859ba
|
[] |
no_license
|
shalevy1/tesseractDataGenerator
|
ee0b88c2e780bd2617cf7ac60fc413b87e660191
|
57d35646f9ac7b32394fbacf7a04a1124ad84596
|
refs/heads/master
| 2022-11-14T09:38:47.232596
| 2019-07-03T13:53:39
| 2019-07-03T13:53:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 255
|
py
|
from Utils import genData
codesfile = 'codesKOR.txt'
trainingAmt = 1000
trainTestSplit = 0.7
outputdir = 'data'
# chars = getCharList('charsFixed2.txt')
# genCodes(chars, 'codesKOR.txt', 10000)
genData(trainingAmt,codesfile, trainTestSplit, outputdir)
|
[
"rafayk7@yahoo.com"
] |
rafayk7@yahoo.com
|
4fe20784e210003df990201f226915a4f8702cd0
|
d7016f69993570a1c55974582cda899ff70907ec
|
/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2021_10_01_dataplanepreview/operations/_code_containers_operations.py
|
39304e6ffb704920562356f3609fa0b63f3eb4b9
|
[
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-python-cwi",
"LGPL-2.1-or-later",
"PSF-2.0",
"LGPL-2.0-or-later",
"GPL-3.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"Python-2.0",
"MPL-2.0",
"LicenseRef-scancode-other-copyleft",
"HPND",
"ODbL-1.0",
"GPL-3.0-only",
"ZPL-2.1",
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
kurtzeborn/azure-sdk-for-python
|
51ca636ad26ca51bc0c9e6865332781787e6f882
|
b23e71b289c71f179b9cf9b8c75b1922833a542a
|
refs/heads/main
| 2023-03-21T14:19:50.299852
| 2023-02-15T13:30:47
| 2023-02-15T13:30:47
| 157,927,277
| 0
| 0
|
MIT
| 2022-07-19T08:05:23
| 2018-11-16T22:15:30
|
Python
|
UTF-8
|
Python
| false
| false
| 19,903
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
# fmt: off
def build_list_request(
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
skiptoken = kwargs.pop('skiptoken', None) # type: Optional[str]
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
if skiptoken is not None:
query_parameters['$skiptoken'] = _SERIALIZER.query("skiptoken", skiptoken, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_request(
name, # type: str
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}')
path_format_arguments = {
"name": _SERIALIZER.url("name", name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_request(
name, # type: str
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}')
path_format_arguments = {
"name": _SERIALIZER.url("name", name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_or_update_request(
name, # type: str
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}')
path_format_arguments = {
"name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
# fmt: on
class CodeContainersOperations(object):
"""CodeContainersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.machinelearningservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name, # type: str
registry_name, # type: str
skiptoken=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.CodeContainerResourceArmPaginatedResult"]
"""List containers.
List containers.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:param skiptoken: Continuation token for pagination.
:type skiptoken: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the
result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerResourceArmPaginatedResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
skiptoken=skiptoken,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
skiptoken=skiptoken,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes'} # type: ignore
@distributed_trace
def delete(
self,
name, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Delete container.
Delete container.
:param name: Container name.
:type name: str
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
name=name,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}'} # type: ignore
@distributed_trace
def get(
self,
name, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.CodeContainerData"
"""Get container.
Get container.
:param name: Container name.
:type name: str
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CodeContainerData, or the result of cls(response)
:rtype: ~azure.mgmt.machinelearningservices.models.CodeContainerData
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerData"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
name=name,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('CodeContainerData', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}'} # type: ignore
@distributed_trace
def create_or_update(
self,
name, # type: str
resource_group_name, # type: str
registry_name, # type: str
body, # type: "_models.CodeContainerData"
**kwargs # type: Any
):
# type: (...) -> "_models.CodeContainerData"
"""Create or update container.
Create or update container.
:param name: Container name.
:type name: str
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:param body: Container entity to create or update.
:type body: ~azure.mgmt.machinelearningservices.models.CodeContainerData
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CodeContainerData, or the result of cls(response)
:rtype: ~azure.mgmt.machinelearningservices.models.CodeContainerData
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerData"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(body, 'CodeContainerData')
request = build_create_or_update_request(
name=name,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('CodeContainerData', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}'} # type: ignore
|
[
"noreply@github.com"
] |
kurtzeborn.noreply@github.com
|
358e0825a1854b062e87d35611e52cd3c239266d
|
21540ab033e180a3d94b270b7faffac7fe4af68f
|
/wordshop2/Project_01_10_page62-63/Project_05.py
|
e45ba58fc5058ea1e533a49592edf98b0103a792
|
[] |
no_license
|
tuan102081/wordshop1.2.3.5
|
eaa344bdb04f565d1354b9476b4d4ecafc5cc7f3
|
70e75b56f48a2e5b1622d956f33831f80e64d368
|
refs/heads/master
| 2023-07-14T23:26:31.089484
| 2021-08-30T18:53:24
| 2021-08-30T18:53:24
| 401,411,439
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 607
|
py
|
"""
Author: Nguyen Duy Tuan
Date: 29/08/2021
Program: project_05_page_62.py
Problem:
An object’s momentum is its mass multiplied by its velocity. Write a program
that accepts an object’s mass (in kilograms) and velocity (in meters per second) as
inputs and then outputs its momentum.
Solution:
Display:
Enter of mass(kg): 51
Enter of velocity(m/s): 60
Object’s momentum = 3060.0 (kgm/s)
"""
mass = float(input("Enter of mass(kg): "))
V = float(input("Enter of velocity(m/s): "))
M = mass * V
print("\nObject’s momentum = " + str(round(M, 2)) + " (kgm/s)")
|
[
"you@example.com"
] |
you@example.com
|
c536a4122ad57a3aa5fa4992da64a4835318a3c1
|
b5dcba412c6f84aed7eef05d18ba96faf997c311
|
/KdV-Python/MZKdV.py
|
9b5036d3571becc603abcffb984e5787b8c0f632
|
[
"MIT"
] |
permissive
|
jrpriceUPS/Renormalized_Mori_Zwanzig
|
d2794d4c7c25a1b95ca73ab75fcab56a84845a9a
|
f818a63291497da88cf42e99408e2bb3a0452986
|
refs/heads/master
| 2023-04-15T03:12:30.294887
| 2021-05-03T16:05:34
| 2021-05-03T16:05:34
| 99,272,655
| 1
| 3
| null | 2021-02-01T19:39:03
| 2017-08-03T20:32:33
|
MATLAB
|
UTF-8
|
Python
| false
| false
| 116,988
|
py
|
# Author: Jake Price
# Date: February 8, 2021
# Purpose: Collection of custom functions for running CMA simulations of KdV,
# analysis functions, and renormalization scripts. Generates some images
# from past papers, but not all (didn't want / need to duplicate everything)
# Translation of code from UW PhD in Matlab.
# import libraries
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from matplotlib import animation
import glob
import re
def fftnorm(u_full):
"""Computes normalized FFT (such that FFT and IFFT are symmetrically normalized)
Parameters
----------
u_full : 1D Numpy Array (N,)
The vector whose discrete FFT is to be computed
Returns
-------
normalizedFFT : 1D Numpy Array (N,)
The transformed version of that vector
"""
N = u_full.shape[0]
normalizedFFT = np.fft.fft(u_full)*1/N
return normalizedFFT
def ifftnorm(u_full):
"""Computes normalized IFFT (such that FFT and IFFT are symmetrically normalized)
Parameters
----------
u_full : 1D Numpy Array (N,)
The vector whose discrete IFFT is to be computed
Returns
-------
normalizedIFFT : 1D Numpy Array (N,)
The transformed version of that vector
"""
N = u_full.shape[0]
normalizedIFFT = np.real(np.fft.ifft(u_full)*N)
return normalizedIFFT
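# Quick sanity check (illustrative, not part of the original file): with this symmetric
# normalization, a round trip recovers the signal up to round-off, e.g.
#   x = np.sin(np.linspace(0, 2*np.pi, 16, endpoint=False))
#   np.allclose(ifftnorm(fftnorm(x)), x) # -> True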
def convolutionSumKdV(u,v,alpha):
"""Computes convolution sum associated with RHS of KdV ODE
C_k(u,v) = -(alpha * 1i * k) / 2 * sum_{i+j = k} u_i v_j
Computed in real space to avoid loops and then converted back
to Fourier space.
Parameters
----------
u : 1D Numpy Array (N,)
One of the two vectors being convolved
v : 1D Numpy Array (N,)
One of the two vectors being convolved
alpha : float
Degree of nonlinearity in KdV
Returns
-------
convo : 1D Numpy Array (N,)
Convolution of the two vectors
"""
# generate array of wavenumbers
L = u.shape[0]
k = np.concatenate([np.arange(0,L/2),np.arange(-L/2,0)])
if v.shape[0]!=L:
raise NameError('u and v must be the same length.')
# compute double sum in real space, then apply scalar multiplier
convo = fftnorm(ifftnorm(u)*ifftnorm(v))
convo = -alpha/2*1j*k*convo
return convo
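# Illustrative check (not part of the original file): the pseudospectral product above
# reproduces the quadratic sum directly, e.g. for a two-mode signal
#   u = np.zeros(8, dtype=complex); u[1] = 0.5; u[-1] = 0.5 # u(x) = cos(x)
#   c = convolutionSumKdV(u, u, alpha=1.0)
# the only nonzero entries are c[2] and c[-2] (from the i+j = +/-2 interactions),
# since the k = 0 contribution is killed by the factor of k.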
# RHS: Right hand side functions for CMA and non-renormalized KdV
def markovKdV(u,M,alpha):
"""Computes nonlinear part of Markov term in KdV
C_k(u,v) = -(alpha * 1i * k) / 2 * sum_{i+j = k} u_i v_j
where the sum of i and j is over a "full" system with M positive modes (user specified)
Computed in real space to avoid loops and then converted back
to Fourier space.
Parameters
----------
u : 1D Numpy Array (N,)
Positive modes of state vector whose RHS is being computed
M : int
Number of positive modes in "full" model for intermediary calculations
alpha : float
Degree of nonlinearity in KdV
Returns
-------
nonlin0 : 1D Numpy Array (2*M,)
Nonlinear part of Markov term for given state vector
u_full : 1D Numpy array (2*M,)
"full" state vector for use in later computations
"""
# construct full Fourier vector from only the positive modes
u_full = np.zeros(2*M) +1j*np.zeros(2*M)
u_full[0:u.shape[0]] = u
u_full[2*M-u.shape[0]+1:] = np.conj(np.flip(u[1:]))
# compute the convolution sum
nonlin0 = convolutionSumKdV(u_full,u_full,alpha)
return nonlin0,u_full
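# Illustrative note (not part of the original file): with u = [u0, u1, u2] and M = 4,
# u_full above becomes [u0, u1, u2, 0, 0, 0, conj(u2), conj(u1)], i.e. the negative
# wavenumbers are filled with complex conjugates so the physical-space field is real.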
def tModelKdV(u_full,nonlin0,alpha,F_modes):
"""Computes t-model term in KdV
C_k(u,v) = -(alpha * 1i * k) / 2 * sum_{i+j = k, i and j in F} u_i v_j
where the sum of i and j is over a "full" system with M positive modes (user specified)
Computed in real space to avoid loops and then converted back
to Fourier space.
Parameters
----------
u_full : Numpy array (2M,1)
Current state of u in full form
nonlin0 : Numpy array (2M,1)
Markov term (for convolving)
alpha : float
Degree of nonlinearity in KdV
F_modes : Numpy array
Set of resolved modes (and aliasing modes) to zero out
Returns
-------
nonlin1 : 1D Numpy Array (2*M,)
t-model term
uuStar : 1D Numpy array (2*M,)
unresolved modes of state vector convolved with itself
"""
uuStar = np.copy(nonlin0)
uuStar[F_modes] = 0
nonlin1 = 2*convolutionSumKdV(u_full, uuStar, alpha)
return nonlin1,uuStar
def t2ModelKdV(u_full,nonlin0,uuStar,alpha,F_modes,G_modes,k,epsilon):
"""Computes second order ROM term in KdV
*see paper / symbolic notebook for expression*
Computed in real space to avoid loops and then converted back
to Fourier space.
Parameters
----------
u_full : Numpy array (2M,1)
Current state of u in full form
nonlin0 : Numpy array (2M,1)
Markov term (for convolving)
uuStar : 1D Numpy array (2*M,)
Unresolved modes of state vector convolved with itself
alpha : float
Degree of nonlinearity in KdV
F_modes : Numpy array
Set of resolved modes (and aliasing modes) to zero out
G_modes : Numpy array
Set of unresolved modes (and aliasing modes) to zero out
k : Numpy array (2M,1)
Array of wavenumbers
epsilon : float
Size of linear term (stiffness)
Returns
-------
nonlin2 : 1D Numpy Array (2*M,)
t2-model term
uk3 : 1D Numpy array (2*M,)
Resolved modes of state vector multiplied by k^3
uu : 1D Numpy array (2*M,)
Resolved modes of state vector convolved with itself
A, AStar, B, BStar, C, CStar, D, DStar : 1D Numpy arrays (2*M,)
Specific convolutions used as inner terms in future terms
"""
# compute inner convolutions
uu = np.copy(nonlin0)
uu[G_modes] = 0
uk3 = k**3*u_full
A = k**3*uu
AStar = k**3*uuStar
B = convolutionSumKdV(1j*epsilon**2*uk3+uu,u_full,alpha)
BStar = np.copy(B)
B[G_modes] = 0
BStar[F_modes] = 0
C = convolutionSumKdV(uuStar,u_full,alpha)
CStar = np.copy(C)
C[G_modes] = 0
CStar[F_modes] = 0
D = convolutionSumKdV(uuStar,uuStar,alpha)
DStar = np.copy(D)
D[G_modes] = 0
DStar[F_modes] = 0
# compute actual term
nonlin2 = -2*convolutionSumKdV(u_full,1j*epsilon**2*AStar - 2*BStar + 2*CStar,alpha) - 2*D
return nonlin2,uk3,uu,A,AStar,B,BStar,C,CStar,D,DStar
def t3ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,A,AStar,B,BStar,C,CStar,DStar):
"""Computes third order ROM term in KdV
*see paper / symbolic notebook for expression*
Computed in real space to avoid loops and then converted back
to Fourier space.
Parameters
----------
alpha : float
Degree of nonlinearity in KdV
F_modes : Numpy array
Set of resolved modes (and aliasing modes) to zero out
G_modes : Numpy array
Set of unresolved modes (and aliasing modes) to zero out
k : Numpy array (2M,1)
Array of wavenumbers
epsilon : float
Size of linear term (stiffness)
u_full : Numpy array (2M,1)
Current state of u in full form
uu : 1D Numpy array (2*M,)
Resolved modes of state vector convolved with itself
uuStar : 1D Numpy array (2*M,)
Unresolved modes of state vector convolved with itself
uk3 : 1D Numpy array (2*M,)
Resolved modes of state vector multiplied by k^3
A, AStar, B, BStar, C, CStar, DStar : 1D Numpy arrays (2*M,)
Specific convolutions used as inner terms in future terms
Returns
-------
nonlin3 : 1D Numpy Array (2*M,)
t3-model term
uk6 : 1D Numpy array (2*M,)
Resolved modes of state vector multiplied by k^6
E, EStar, F, FStar : 1D Numpy arrays (2*M,)
Specific convolutions used as inner terms in future terms
"""
# compute internal convolutions
uk6 = k**3*uk3
E = convolutionSumKdV(1j*epsilon**2*uk3+uu,1j*epsilon**2*uk3+uu,alpha)
EStar = np.copy(E)
E[G_modes] = 0
EStar[F_modes] = 0
F = convolutionSumKdV(uuStar,1j*epsilon**2*uk3+uu,alpha)
FStar = np.copy(F)
F[G_modes] = 0
FStar[F_modes] = 0
int1 = -2*BStar+CStar
int2 = (convolutionSumKdV(u_full,
-epsilon**4*uk6
+1j*epsilon**2*(A+AStar)
+2*(B-2*C)
+2*(CStar-2*BStar),
alpha))
int2[F_modes] = 0
int3 = EStar-FStar
int4 = np.copy(DStar)
int5 = CStar-BStar
# compute actual 3rd order term
nonlin3 = (2*convolutionSumKdV(u_full,-k**3*epsilon**4*AStar
+2*1j*epsilon**2*k**3*int1
+2*int2+2*int3+2*int4,alpha)
+6*convolutionSumKdV(uuStar,1j*epsilon**2*AStar + 2*int5,alpha))
return nonlin3,uk6,E,EStar,F,FStar
def t4ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,uk6,A,AStar,B,BStar,C,CStar,D,DStar,E,EStar,F,FStar):
"""Computes fourth order ROM term in KdV
*see paper / symbolic notebook for expression*
Computed in real space to avoid loops and then converted back
to Fourier space.
Parameters
----------
alpha : float
Degree of nonlinearity in KdV
F_modes : Numpy array
Set of resolved modes (and aliasing modes) to zero out
G_modes : Numpy array
Set of unresolved modes (and aliasing modes) to zero out
k : Numpy array (2M,1)
Array of wavenumbers
epsilon : float
Size of linear term (stiffness)
u_full : Numpy array (2M,1)
Current state of u in full form
uu : 1D Numpy array (2*M,)
Resolved modes of state vector convolved with itself
uuStar : 1D Numpy array (2*M,)
Unresolved modes of state vector convolved with itself
uk3 : 1D Numpy array (2*M,)
Resolved modes of state vector multiplied by k^3
uk6 : 1D Numpy array (2*M,)
Resolved modes of state vector multiplied by k^6
A, AStar, B, BStar, C, CStar, DStar, E, EStar, F, FStar : 1D Numpy arrays (2*M,)
Specific convolutions used as inner terms in future terms
Returns
-------
nonlin4 : 1D Numpy Array (2*M,)
t4-model term
"""
# compute internal convolutions
internal1 = (convolutionSumKdV(u_full,-epsilon**4*uk6+1j*epsilon**2*(A+AStar)
+2*B-4*C-4*BStar+2*CStar,alpha))
internal1[F_modes] = 0
internal2 = (1j*epsilon**2*k**3*convolutionSumKdV(u_full,-3*epsilon**4*uk6
+1j*epsilon**2*(3*A+AStar)
-2*(-3*B+5*C)
+2*(-3*BStar+CStar),alpha))
internal2[F_modes] = 0
auxiliary1 = 2*convolutionSumKdV(u_full,epsilon**4*uk6-1j*epsilon**2*(A+3*AStar)
+2*(3*C-B)+2*(5*BStar-3*CStar),alpha)
auxiliary1[G_modes] = 0
auxiliary2 = 2*convolutionSumKdV(u_full,-3*epsilon**4*uk6+1j*epsilon**2*(3*A+AStar)
+2*(3*B-5*C)+2*(-3*BStar+CStar),alpha)
auxiliary2[F_modes] = 0
internal3 = convolutionSumKdV(u_full,1j*k**3*uk6*epsilon**6
+k**3*epsilon**4*(A-AStar)
+2*1j*epsilon**2*k**3*(3*C-B)
+2*1j*epsilon**2*k**3*(-3*BStar+CStar)
+auxiliary1+auxiliary2
-2*(E-2*F)
+2*(3*EStar-2*FStar)
-6*D+2*DStar,alpha)
internal3[F_modes]= 0
internal4 = convolutionSumKdV(1j*epsilon**2*uk3+uu,3*epsilon**4*uk6-1j*epsilon**2*(3*A+AStar)
+2*(-3*B+5*C)+2*(3*BStar-CStar),alpha)
internal4[F_modes] = 0
internal5 = convolutionSumKdV(uuStar,-epsilon**4*uk6+1j*epsilon**2*(A+3*AStar)
+2*B-6*C-10*BStar+6*CStar,alpha)
internal5[F_modes] = 0
# compute actual fourth order term
nonlin4 = (2*convolutionSumKdV(u_full,-1j*epsilon**6*k**6*AStar
+2*k**6*epsilon**4*(3*BStar-CStar)
+2*internal2
+2*internal3
+2*internal4
-2*k**3*1j*epsilon**2*(2*FStar-3*EStar)
+2*k**3*1j*epsilon**2*DStar
+2*internal5,alpha)
+8*convolutionSumKdV(uuStar,-k**3*epsilon**4*AStar
+2*1j*epsilon**2*k**3*(-2*BStar+CStar)
+2*internal1
+2*(EStar-FStar)
+2*DStar,alpha)
-48*convolutionSumKdV(BStar,1j*epsilon**2*AStar+2*CStar,alpha)
+6*convolutionSumKdV(1j*epsilon**2*AStar+2*(BStar+CStar),
1j*epsilon**2*AStar+2*(BStar+CStar),alpha)
)
nonlin4 = -nonlin4
return nonlin4
def RHSKdV(t,u,params):
"""
Computes the RHS for a full KdV or ROM simulation. For use in solver.
Parameters
----------
t : float
Current time
u : Numpy array (N,)
Current state vector
params : Dictionary
Dictionary of relevant parameters (see below)
N : int, number of positive modes in simulation
M : int, number of positive modes in "full" intermediate computation
alpha : float, degree of nonlinearity in KdV
epsilon : float, size of linear term (stiffness)
tau : float, time decay modifier
coeffs : Numpy array, renormalization coefficients for ROM (None if no ROM)
Returns
-------
RHS : 1D Numpy array (N,)
Derivative of each positive mode in state vector
"""
# extract parameters from dictionary
N = params['N']
M = params['M']
alpha = params['alpha']
epsilon = params['epsilon']
tau = params['tau']
coeffs = params['coeffs']
# construct wavenumber array
k = np.concatenate([np.arange(0,M),np.arange(-M,0)])
# Linear and Markov term
nonlin0,u_full = markovKdV(u,M,alpha)
RHS = 1j*k[0:N]**3*epsilon**2*u + nonlin0[0:N]
if (coeffs is None):
order = 0
else:
order = coeffs.shape[0]
if (order >= 1):
# compute t-model term
# define which modes are resolved / unresolved in full array
F_modes = np.concatenate([np.arange(0,N),np.arange(2*N-1,M+N+2),np.arange(2*M-N+1,2*M)])
G_modes = np.arange(N,2*M-N+1)
# compute t-model term
nonlin1,uuStar = tModelKdV(u_full,nonlin0,alpha,F_modes)
RHS = RHS + coeffs[0]*nonlin1[0:N]*t**(1-tau)
if (order >= 2):
# compute t2-model term
nonlin2,uk3,uu,A,AStar,B,BStar,C,CStar,D,DStar = t2ModelKdV(u_full,nonlin0,uuStar,alpha,F_modes,G_modes,k,epsilon)
RHS = RHS + coeffs[1]*nonlin2[0:N]*t**(2*(1-tau))
if (order >= 3):
# compute t3-model term
nonlin3,uk6,E,EStar,F,FStar = t3ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,A,AStar,B,BStar,C,CStar,DStar)
RHS = RHS + coeffs[2]*nonlin3[0:N]*t**(3*(1-tau))
if (order == 4):
# compute t4-model term
nonlin4 = t4ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,uk6,A,AStar,B,BStar,C,CStar,D,DStar,E,EStar,F,FStar)
RHS = RHS + coeffs[3]*nonlin4[0:N]*t**(4*(1-tau))
return RHS
def getMass(u,N):
"""Computes mass in first N modes for all timesteps from solution array u
Parameters
----------
u : 2D Numpy Array (M,tList)
Positive modes of state vector for all timesteps
N : int
Number of positive modes to include in mass measurement
Returns
-------
mass : 1D Numpy Array (tList,)
Energy in first N modes at all timesteps
"""
mass = np.sum(2*(abs(u[0:N,]))**2,0)
return mass
def runSim(params):
"""
Runs an actual ROM or non-ROM simulation of KdV
Parameters
----------
params : Dictionary
Dictionary of relevant parameters (see below)
N : int, number of positive modes in simulation
M : int, number of positive modes in "full" intermediate computation
alpha : float, degree of nonlinearity in KdV
epsilon : float, size of linear term (stiffness)
tau : float, time decay modifier
coeffs : Numpy array, renormalization coefficients for ROM (None if no ROM)
IC : function handle, initial condition of simulation
endtime : float, final time to simulate to
timesteps: Numpy array, specific timesteps for which to save solution
Returns
-------
uSim : ODE solver output
Output solution from sp.integrate.solve_ivp (includes state vector at all timesteps, time vector, etc.)
"""
# unpack parameters from dictionary
N = params['N']
IC = params['IC']
endtime = params['endtime']
timesteps = params['timesteps']
# generate initial condition
x = np.linspace(0,2*np.pi-2*np.pi/(2*N),2*N)
y = IC(x)
uFull = fftnorm(y)
u = uFull[0:N]
# define RHS in form appropriate for solve_ivp
def myRHS(t,y):
out = RHSKdV(t,y,params)
return out
# solve the IVP
uSim = sp.integrate.solve_ivp(fun = myRHS, t_span = [0,endtime], y0 = u,method = "BDF", t_eval = timesteps)
return uSim
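# Example usage (illustrative sketch): run a Markov-only simulation (no ROM
# correction terms) with N = 16 resolved modes. Every parameter value below is
# an assumption chosen for demonstration, not a value prescribed elsewhere.
def exampleRunSim():
    params = {
        'N': 16,                              # resolved positive modes
        'M': 24,                              # intermediate "full" size (3/2 * N)
        'alpha': 1.0,                         # degree of nonlinearity
        'epsilon': 0.1,                       # size of linear (dispersive) term
        'tau': 1.0,                           # time decay modifier (unused when coeffs is None)
        'coeffs': None,                       # no renormalized ROM terms
        'IC': np.sin,                         # initial condition u(x,0) = sin(x)
        'endtime': 1.0,
        'timesteps': np.linspace(0, 1, 101)
    }
    sol = runSim(params)
    # sol.t holds the saved times and sol.y the positive Fourier modes at those times
    return sol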
def makeRealSpace(u,N):
"""Takes a completed simulation and finds the real space solution at all timesteps for a chosen subset of modes
Parameters
----------
u : Numpy array (M,t)
Output of simulation giving energy in first M positive modes for all timesteps t
N : int
Number of positive modes to use in real space
Returns
-------
x : Numpy vector (2xN,1)
x-grid for plotting purposes
uReal : Numpy array (2xN,t)
Real space solution at all times
"""
# identify shapes of arrays
uShape = u.shape
numTimes = uShape[1]
# drop modes we don't wish to keep
uNew = u[0:N,:]
# generate full vector (with negative modes)
uFull = np.zeros((2*N,numTimes)) + 1j*0
uFull[0:N,:] = uNew
uFull[2*N-N+1:,:] = np.conj(np.flip(uNew[1:,:],0))
# initialize output
uReal = np.zeros(uFull.shape)
# take inverse transform for each timestep
# NOTE: is there a vectorized way to do this?
for i in np.arange(0,numTimes):
uReal[:,i] = ifftnorm(uFull[:,i])
return uReal
def makeAnimations(uList,t,legendList):
"""
Creates an animation from a list of simulations
Parameters
----------
uList : List of Numpy arrays of size (N,T)
Set of state vector evolutions to animate
t : Numpy array (T,)
Timesteps associated with simulations (must all be the same)
legendList : List of strings
Labels for each simulation
Returns
-------
anim : animation object
output from animation.FuncAnimation
"""
# identify the resolution to use for plots and generate x grid
N = min([x.shape[0] for x in uList])
xgrid = np.linspace(0,2*np.pi*(2*N-1)/(2*N),2*N)
# generate real space solutions
realSols = [makeRealSpace(x,N) for x in uList]
# initialize figure
myFig = plt.figure()
ax = plt.subplot()
ax.axis(xmin = 0,xmax = 2*np.pi-np.pi/N,ymin = -2, ymax = 4)
# create empty list of lines to populate each iteration
lineList = [ax.plot([],[]) for i in range(len(uList))]
# define function to draw each frame
def makeFrame(n):
for i in range(len(uList)):
lineList[i][0].set_data(xgrid,realSols[i][:,n])
plt.title('t = '+str(round(t[n],1)))
plt.legend(legendList, loc = "upper right")
return lineList
# generate animation
anim = animation.FuncAnimation(fig = myFig,func = makeFrame,frames = t.shape[0])
return anim
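# Example usage (illustrative sketch): animate a ROM run next to a full run.
# romSol and fullSol are assumed to be runSim outputs saved on the same
# timesteps; saving to a gif assumes a pillow writer is available locally.
def exampleAnimation(romSol, fullSol):
    anim = makeAnimations([romSol.y, fullSol.y], romSol.t, ["ROM", "Full"])
    anim.save("kdvComparison.gif", writer="pillow")
    return anim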
def renormalize(fullM, endtime, Nlist, Mlist, epsilon, alpha, tau, timesteps, IC = np.sin, plots = False):
"""
Finds renormalization coefficients based on a single simulation. If the
simulation doesn't yet exist, it creates it
Parameters
----------
fullM : int
Size of full simulation to base fits on
endtime : int
Endtime of full simulation
Nlist : list of ints
List of resolutions for which to find coefficients
Mlist : list of ints
List of intermediary "full" simulations to use for ROMs
epsilon : float
size of linear term (stiffness)
alpha : float
degree of nonlinearity in KdV
tau : float
time decay modifier
timesteps : Numpy array
specific timesteps for which to save solution
IC : function handle
initial condition of simulation (default np.sin)
plots : boolean
Indicates whether to generate plots (default: False)
Returns
-------
coeffsArray1 : Numpy array (length(Nlist),1)
Renormalization coefficients for t-model only
coeffsArray2 : Numpy array (length(Nlist),2)
Renormalization coefficients for t-model and t2-model only
coeffsArray3 : Numpy array (length(Nlist),3)
Renormalization coefficients for t1-t3-models
coeffsArray4 : Numpy array (length(Nlist),4)
Renormalization coefficients for t1-t4-models
coeffsArray2only : Numpy array (length(Nlist),1)
Renormalization coefficients for t2-model only
coeffsArray24only : Numpy array (length(Nlist),2)
Renormalization coefficients for t2-model and t4-model only
fitLines : Dict
Contains scaling law fits for each ROM coefficient
of form c = -b * N^a
Terms given are a, b, and r (correlation coefficient of fit)
err : Dict
Contains least-squares error for each fit for each model and resolution
"""
# Check if full simulation has already been constructed
# if so, load it, if not, generate it
try:
uFull = np.load("u" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+".npy")
tFull = np.load("t" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+".npy")
except FileNotFoundError:
fullParams = {
'N': fullM,
'M': int(3/2*fullM),
'alpha': 1,
'epsilon': epsilon,
'tau': 1,
'coeffs': None,
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
uSimFull = runSim(fullParams)
uFull = uSimFull.y
tFull = uSimFull.t
np.save( "u" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p'),uFull)
np.save( "t" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p'),tFull)
# initialize output arrays
coeffsArray1 = np.zeros((Nlist.shape[0],1))
coeffsArray2 = np.zeros((Nlist.shape[0],2))
coeffsArray3 = np.zeros((Nlist.shape[0],3))
coeffsArray4 = np.zeros((Nlist.shape[0],4))
coeffsArray2only = np.zeros((Nlist.shape[0],1))
coeffsArray24only = np.zeros((Nlist.shape[0],2))
# recover number of timesteps
numSteps = tFull.shape[0]
# initialize least squares error output
err = {"t-model" : np.zeros((Nlist.shape[0],1)),
"t2-model" : np.zeros((Nlist.shape[0],1)),
"t3-model" : np.zeros((Nlist.shape[0],1)),
"t4-model" : np.zeros((Nlist.shape[0],1)),
"t2-model only" : np.zeros((Nlist.shape[0],1)),
"t2- and t4-models" : np.zeros((Nlist.shape[0],1))}
# loop through all resolutions
for j in np.arange(0,Nlist.shape[0]):
# Find number of positive terms in ROM, in intermediate calculations, and wavenumber array
N = Nlist[j]
M = Mlist[j]
k = np.concatenate([np.arange(0,M),np.arange(-M,0)])
# Gather first derivative data for fitting purposes
exactEnergy = np.zeros((N,numSteps))
R0Energy = np.zeros((N,numSteps))
R1Energy = np.zeros((N,numSteps))
R2Energy = np.zeros((N,numSteps))
R3Energy = np.zeros((N,numSteps))
R4Energy = np.zeros((N,numSteps))
# plug exact solution into exact RHS and all ROM terms and find energy contribution of each
for i in np.arange(0,numSteps):
# exact RHS
exactRHS,dummyU = markovKdV(uFull[:,i],int(fullM*3/2),alpha)
exactEnergy[:,i] = np.real(exactRHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(exactRHS[0:N])*uFull[0:N,i])
# Markov RHS
nonlin0,u_full = markovKdV(uFull[0:N,i],M,alpha)
R0RHS = nonlin0
R0Energy[:,i] = np.real(R0RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R0RHS[0:N])*uFull[0:N,i])
# First order RHS term
F_modes = np.concatenate([np.arange(0,N),np.arange(2*N-1,M+N+2),np.arange(2*M-N+1,2*M)])
G_modes = np.arange(N,2*M-N+1)
nonlin1,uuStar = tModelKdV(u_full,nonlin0,alpha,F_modes)
R1RHS = nonlin1*tFull[i]**(1-tau)
R1Energy[:,i] = np.real(R1RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R1RHS[0:N])*uFull[0:N,i])
# Second order RHS term
nonlin2,uk3,uu,A,AStar,B,BStar,C,CStar,D,DStar = t2ModelKdV(u_full,nonlin0,uuStar,alpha,F_modes,G_modes,k,epsilon)
R2RHS = nonlin2*tFull[i]**(2*(1-tau))
R2Energy[:,i] = np.real(R2RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R2RHS[0:N])*uFull[0:N,i])
# Third order RHS term
nonlin3,uk6,E,EStar,F,FStar = t3ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,A,AStar,B,BStar,C,CStar,DStar)
R3RHS = nonlin3*tFull[i]**(3*(1-tau))
R3Energy[:,i] = np.real(R3RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R3RHS[0:N])*uFull[0:N,i])
# Fourth order RHS term
nonlin4 = t4ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,uk6,A,AStar,B,BStar,C,CStar,D,DStar,E,EStar,F,FStar)
R4RHS = nonlin4*tFull[i]**(4*(1-tau))
R4Energy[:,i] = np.real(R4RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R4RHS[0:N])*uFull[0:N,i])
if j == 0:
R0Energy0 = np.copy(R0Energy)
R1Energy0 = np.copy(R1Energy)
R2Energy0 = np.copy(R2Energy)
R3Energy0 = np.copy(R3Energy)
R4Energy0 = np.copy(R4Energy)
##################################################
# Use least-squares fit to identify coefficients #
##################################################
# t-model coefficient
coeffsArray1[j,:] = np.sum((exactEnergy - R0Energy)*R1Energy)/np.sum(R1Energy*R1Energy)
err["t-model"][j] = np.sum((exactEnergy - R0Energy - coeffsArray1[j,0]*R1Energy)**2)
# t2-model coefficient
LSMatrix = (np.array([[np.sum(R1Energy*R1Energy),np.sum(R1Energy*R2Energy)],
[np.sum(R2Energy*R1Energy),np.sum(R2Energy*R2Energy)]]))
LSb = (np.array([np.sum(R1Energy*(exactEnergy-R0Energy)),np.sum(R2Energy*(exactEnergy-R0Energy))]))
coeffsArray2[j,:] = np.linalg.solve(LSMatrix,LSb)
err["t2-model"][j] = np.sum((exactEnergy - R0Energy - coeffsArray2[j,0]*R1Energy - coeffsArray2[j,1]*R2Energy)**2)
# t3-model coefficient
LSMatrix = (np.array([[np.sum(R1Energy*R1Energy),np.sum(R1Energy*R2Energy),np.sum(R1Energy*R3Energy)],
[np.sum(R2Energy*R1Energy),np.sum(R2Energy*R2Energy),np.sum(R2Energy*R3Energy)],
[np.sum(R3Energy*R1Energy),np.sum(R3Energy*R2Energy),np.sum(R3Energy*R3Energy)]]))
LSb = (np.array([np.sum(R1Energy*(exactEnergy-R0Energy)),np.sum(R2Energy*(exactEnergy-R0Energy)),np.sum(R3Energy*(exactEnergy-R0Energy))]))
coeffsArray3[j,:] = np.linalg.solve(LSMatrix,LSb)
err["t3-model"][j] = np.sum((exactEnergy - R0Energy - coeffsArray3[j,0]*R1Energy - coeffsArray3[j,1]*R2Energy - coeffsArray3[j,2]*R3Energy)**2)
# t4-model coefficient
LSMatrix = (np.array([[np.sum(R1Energy*R1Energy),np.sum(R1Energy*R2Energy),np.sum(R1Energy*R3Energy),np.sum(R1Energy*R4Energy)],
[np.sum(R2Energy*R1Energy),np.sum(R2Energy*R2Energy),np.sum(R2Energy*R3Energy),np.sum(R2Energy*R4Energy)],
[np.sum(R3Energy*R1Energy),np.sum(R3Energy*R2Energy),np.sum(R3Energy*R3Energy),np.sum(R3Energy*R4Energy)],
[np.sum(R4Energy*R1Energy),np.sum(R4Energy*R2Energy),np.sum(R4Energy*R3Energy),np.sum(R4Energy*R4Energy)]]))
LSb = (np.array([np.sum(R1Energy*(exactEnergy-R0Energy)),np.sum(R2Energy*(exactEnergy-R0Energy)),np.sum(R3Energy*(exactEnergy-R0Energy)),np.sum(R4Energy*(exactEnergy-R0Energy))]))
coeffsArray4[j,:] = np.linalg.solve(LSMatrix,LSb)
err["t4-model"][j] = np.sum((exactEnergy - R0Energy - coeffsArray4[j,0]*R1Energy - coeffsArray4[j,1]*R2Energy - coeffsArray4[j,2]*R3Energy - coeffsArray4[j,3]*R4Energy)**2)
# t2-model with *no* t-model
coeffsArray2only[j,:] = np.sum((exactEnergy - R0Energy)*R2Energy)/np.sum(R2Energy*R2Energy)
err["t2-model only"][j] = np.sum((exactEnergy - R0Energy - coeffsArray2only[j,0]*R2Energy)**2)
# t2-model and t4-model with *no* t-model or t3-model
LSMatrix = (np.array([[np.sum(R2Energy*R2Energy),np.sum(R2Energy*R4Energy)],
[np.sum(R4Energy*R2Energy),np.sum(R4Energy*R4Energy)]]))
LSb = (np.array([np.sum(R2Energy*(exactEnergy-R0Energy)),np.sum(R4Energy*(exactEnergy-R0Energy))]))
coeffsArray24only[j,:] = np.linalg.solve(LSMatrix,LSb)
err["t2- and t4-models"][j] = np.sum((exactEnergy - R0Energy - coeffsArray24only[j,0]*R2Energy - coeffsArray24only[j,1]*R4Energy)**2)
# Generate plots if desired
if plots:
# Plot 1: Qualitative comparison of each term contributing to energy movement
N = Nlist[0]
fig1, ax1 = plt.subplots(3,2)
ax1[0,0].plot(tFull,np.sum(exactEnergy[0:N,:],0))
ax1[0,0].set_title("Exact Energy Decay")
ax1[0,1].plot(tFull,np.sum(R0Energy0[0:N,:],0))
ax1[0,1].set_title("Markov Energy Decay")
ax1[1,0].plot(tFull,np.sum(R2Energy0[0:N,:],0))
ax1[1,0].set_title("R2 Energy Decay")
ax1[1,1].plot(tFull,np.sum(R1Energy0[0:N,:],0))
ax1[1,1].set_title("R1 Energy Decay")
ax1[2,0].plot(tFull,np.sum(R4Energy0[0:N,:],0))
ax1[2,0].set_title("R4 Energy Decay")
ax1[2,1].plot(tFull,np.sum(R3Energy0[0:N,:],0))
ax1[2,1].set_title("R3 Energy Decay")
fig1.suptitle("N = "+str(N)+" Energy Decays")
plt.tight_layout()
# remove axis labels to not crowd plots (since only qualitative comparisons desired)
for i in range(0,3):
for j in range(0,2):
#ax1[i,j].tick_params(labelbottom=False,labelleft=False)
ax1[i,j].tick_params(labelleft=False)
# compute best fit lines for coefficients in log-log space
fitLines = {"t-model" : np.zeros((1,3)),
"t2-model" : np.zeros((2,3)),
"t3-model" : np.zeros((3,3)),
"t4-model" : np.zeros((4,3)),
"t2-model only" : np.zeros((1,3)),
"t2- and t4-models" : np.zeros((2,3))}
fig2, ax2 = plt.subplots(2,2)
# t-model
ax2[0,0].scatter(np.log(Nlist),np.log(abs(coeffsArray1[:,0])))
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray1[:,0])))
ax2[0,0].plot(np.log(Nlist),intercept + slope*np.log(Nlist))
fitLines["t-model"][:] = np.array([slope,np.exp(intercept),r_value])
# t2-model
ax2[0,0].scatter(np.log(Nlist),np.log(abs(coeffsArray2[:,0])),color="red")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray2[:,0])))
ax2[0,0].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "red")
fitLines["t2-model"][0,:] = np.array([slope,np.exp(intercept),r_value])
ax2[0,1].scatter(np.log(Nlist),np.log(abs(coeffsArray2[:,1])),color="red")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray2[:,1])))
ax2[0,1].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "red")
fitLines["t2-model"][1,:] = np.array([slope,np.exp(intercept),r_value])
# t3-model
ax2[0,0].scatter(np.log(Nlist),np.log(abs(coeffsArray3[:,0])),color="green")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray3[:,0])))
ax2[0,0].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "green")
fitLines["t3-model"][0,:] = np.array([slope,np.exp(intercept),r_value])
ax2[0,1].scatter(np.log(Nlist),np.log(abs(coeffsArray3[:,1])),color="green")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray3[:,1])))
ax2[0,1].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "green")
fitLines["t3-model"][1,:] = np.array([slope,np.exp(intercept),r_value])
ax2[1,0].scatter(np.log(Nlist),np.log(abs(coeffsArray3[:,2])),color="green")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray3[:,2])))
ax2[1,0].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "green")
fitLines["t3-model"][2,:] = np.array([slope,np.exp(intercept),r_value])
# t4-model
ax2[0,0].scatter(np.log(Nlist),np.log(abs(coeffsArray4[:,0])),color="purple")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,0])))
ax2[0,0].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "purple")
fitLines["t4-model"][0,:] = np.array([slope,np.exp(intercept),r_value])
ax2[0,1].scatter(np.log(Nlist),np.log(abs(coeffsArray4[:,1])),color="purple")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,1])))
ax2[0,1].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "purple")
fitLines["t4-model"][1,:] = np.array([slope,np.exp(intercept),r_value])
ax2[1,0].scatter(np.log(Nlist),np.log(abs(coeffsArray4[:,2])),color="purple")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,2])))
ax2[1,0].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "purple")
fitLines["t4-model"][2,:] = np.array([slope,np.exp(intercept),r_value])
ax2[1,1].scatter(np.log(Nlist),np.log(abs(coeffsArray4[:,3])),color="purple")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,3])))
ax2[1,1].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "purple")
fitLines["t4-model"][3,:] = np.array([slope,np.exp(intercept),r_value])
# t2-model alone
ax2[0,1].scatter(np.log(Nlist),np.log(abs(coeffsArray2only[:,0])),color="cyan")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray2only[:,0])))
ax2[0,1].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "cyan")
fitLines["t2-model only"][:] = np.array([slope,np.exp(intercept),r_value])
# t2- and t4-model alone
ax2[0,1].scatter(np.log(Nlist),np.log(abs(coeffsArray24only[:,0])),color="black")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray24only[:,0])))
ax2[0,1].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "black")
fitLines["t2- and t4-models"][0,:] = np.array([slope,np.exp(intercept),r_value])
ax2[1,1].scatter(np.log(Nlist),np.log(abs(coeffsArray24only[:,1])),color="black")
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray24only[:,1])))
ax2[1,1].plot(np.log(Nlist),intercept + slope*np.log(Nlist), color = "black")
fitLines["t2- and t4-models"][1,:] = np.array([slope,np.exp(intercept),r_value])
ax2[0,0].set_title("t-model")
ax2[0,1].set_title("t2-model")
ax2[1,0].set_title("t3-model")
ax2[1,1].set_title("t4-model")
customLines = [plt.Line2D([0],[0], color = "blue"),
plt.Line2D([0],[0], color = "red"),
plt.Line2D([0],[0], color = "green"),
plt.Line2D([0],[0], color = "purple"),
plt.Line2D([0],[0], color = "cyan"),
plt.Line2D([0],[0], color = "black")]
ax2[0,1].legend(customLines,["First Order Model","Second Order Model",
"Third Order Model","Fourth Order Model",
"Only Second Order","Second and Fourth Order"],
prop = {"size":5})
fig2.suptitle("Renormalization Coefficients (log(a) vs log(N))")
plt.subplots_adjust(right=0.7)
plt.tight_layout()
# calculate best fit lines if plotting didn't occur
else:
fitLines = {"t-model" : np.zeros((1,3)),
"t2-model" : np.zeros((2,3)),
"t3-model" : np.zeros((3,3)),
"t4-model" : np.zeros((4,3)),
"t2-model only" : np.zeros((1,3)),
"t2- and t4-models" : np.zeros((2,3))}
# t-model
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray1[:,0])))
fitLines["t-model"][:] = np.array([slope,np.exp(intercept),r_value])
# second order ROM
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray2[:,0])))
fitLines["t2-model"][0,:] = np.array([slope,np.exp(intercept),r_value])
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray2[:,1])))
fitLines["t2-model"][1,:] = np.array([slope,np.exp(intercept),r_value])
# third order ROM
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray3[:,0])))
fitLines["t3-model"][0,:] = np.array([slope,np.exp(intercept),r_value])
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray3[:,1])))
fitLines["t3-model"][1,:] = np.array([slope,np.exp(intercept),r_value])
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray3[:,2])))
fitLines["t3-model"][2,:] = np.array([slope,np.exp(intercept),r_value])
# fourth order ROM
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,0])))
fitLines["t4-model"][0,:] = np.array([slope,np.exp(intercept),r_value])
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,1])))
fitLines["t4-model"][1,:] = np.array([slope,np.exp(intercept),r_value])
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,2])))
fitLines["t4-model"][2,:] = np.array([slope,np.exp(intercept),r_value])
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray4[:,3])))
fitLines["t4-model"][3,:] = np.array([slope,np.exp(intercept),r_value])
# only t2-model
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray2only[:,0])))
fitLines["t2-model only"][:] = np.array([slope,np.exp(intercept),r_value])
# only t2- and t4-models
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray24only[:,0])))
fitLines["t2- and t4-models"][0,:] = np.array([slope,np.exp(intercept),r_value])
slope,intercept,r_value,p_value,std_err = sp.stats.linregress(np.log(Nlist), np.log(abs(coeffsArray24only[:,1])))
fitLines["t2- and t4-models"][1,:] = np.array([slope,np.exp(intercept),r_value])
return coeffsArray1,coeffsArray2,coeffsArray3,coeffsArray4,coeffsArray2only,coeffsArray24only,fitLines,err
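# Example usage (illustrative sketch): fit renormalization coefficients for a
# few small ROM resolutions against one full simulation. The resolutions,
# endtime, and epsilon below are demonstration values only; the full simulation
# is generated (and cached to .npy files) if it is not already on disk.
def exampleRenormalize():
    t = np.linspace(0, 10, 1001)
    Nlist = np.array([8, 12, 16])
    Mlist = np.array([16, 24, 32])            # intermediate "full" sizes for the ROM terms
    out = renormalize(fullM=64, endtime=10, Nlist=Nlist, Mlist=Mlist,
                      epsilon=0.1, alpha=1.0, tau=1.0, timesteps=t)
    c1, c2, c3, c4, c2only, c24only, fitLines, err = out
    return fitLines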
def scalingLaws(fullM, endtime, Nlist, Mlist, epsilonList, alpha, tau, timesteps, IC = np.sin, plots = False):
"""
Finds renormalization coefficients based on a simulations with a range of
epsilon values.
Parameters
----------
fullM : int
Size of full simulation to base fits on
endtime : int
Endtime of full simulation
Nlist : list of ints
List of resolutions for which to find coefficients
Mlist : list of ints
List of intermediary "full" simulations to use for ROMs
epsilonList : list of floats
size of linear term (stiffness)
alpha : float
degree of nonlinearity in KdV
tau : float
time decay modifier
timesteps : Numpy array
specific timesteps for which to save solution
IC : function handle
initial condition of simulation (default np.sin)
plots : boolean
Indicates whether to generate plots (default: False)
Returns
-------
coefficients : Dict
Renormalization coefficient arrays for each model ("t-model", "t2-model",
"t3-model", "t4-model", "t2-model only", "t2- and t4-models"), each of shape
(length(Nlist), number of coefficients, length(epsilonList))
fitLines : Dict
Contains scaling law fits for each ROM coefficient
of form c = -b * N^a * epsilon^g
Terms given are -b, a, and g
"""
# initialize output arrays
c1 = np.zeros((len(Nlist),1,len(epsilonList)))
c2 = np.zeros((len(Nlist),2,len(epsilonList)))
c3 = np.zeros((len(Nlist),3,len(epsilonList)))
c4 = np.zeros((len(Nlist),4,len(epsilonList)))
c2only = np.zeros((len(Nlist),1,len(epsilonList)))
c24only = np.zeros((len(Nlist),2,len(epsilonList)))
# loop through all epsilon values
for i in np.arange(0,len(epsilonList)):
# renormalize for given epsilon value and save results
coeffsArray1,coeffsArray2,coeffsArray3,coeffsArray4,coeffsArray2only,coeffsArray24only,fitLines,err = renormalize(fullM = fullM, endtime = endtime, Nlist = Nlist, Mlist = Mlist, epsilon = epsilonList[i], alpha = alpha, tau = tau, timesteps = timesteps, IC = IC, plots = False)
c1[:,:,i] = coeffsArray1
c2[:,:,i] = coeffsArray2
c3[:,:,i] = coeffsArray3
c4[:,:,i] = coeffsArray4
c2only[:,:,i] = coeffsArray2only
c24only[:,:,i] = coeffsArray24only
# pack results into dictionary for output
coefficients = {"t-model" : c1,
"t2-model" : c2,
"t3-model" : c3,
"t4-model" : c4,
"t2-model only" : c2only,
"t2- and t4-models" : c24only}
# initialize output with best fit scaling laws
fitLines = {"t-model" : np.zeros((1,3)),
"t2-model" : np.zeros((2,3)),
"t3-model" : np.zeros((3,3)),
"t4-model" : np.zeros((4,3)),
"t2-model only" : np.zeros((1,3)),
"t2- and t4-models" : np.zeros((2,3))}
# find the scaling laws for each coefficient
# t-model coefficient
fitLines["t-model"][0,:] = epsilonNscalingLaw(c1[:,0,:],Nlist,epsilonList)
# Second order model coefficients
fitLines["t2-model"][0,:] = epsilonNscalingLaw(c2[:,0,:],Nlist,epsilonList)
fitLines["t2-model"][1,:] = epsilonNscalingLaw(c2[:,1,:],Nlist,epsilonList)
# Third order model coefficients
fitLines["t3-model"][0,:] = epsilonNscalingLaw(c3[:,0,:],Nlist,epsilonList)
fitLines["t3-model"][1,:] = epsilonNscalingLaw(c3[:,1,:],Nlist,epsilonList)
fitLines["t3-model"][2,:] = epsilonNscalingLaw(c3[:,2,:],Nlist,epsilonList)
# Fourth order model coefficients
fitLines["t4-model"][0,:] = epsilonNscalingLaw(c4[:,0,:],Nlist,epsilonList)
fitLines["t4-model"][1,:] = epsilonNscalingLaw(c4[:,1,:],Nlist,epsilonList)
fitLines["t4-model"][2,:] = epsilonNscalingLaw(c4[:,2,:],Nlist,epsilonList)
fitLines["t4-model"][3,:] = epsilonNscalingLaw(c4[:,3,:],Nlist,epsilonList)
# Only t2-model coefficient
fitLines["t2-model only"][0,:] = epsilonNscalingLaw(c2only[:,0,:],Nlist,epsilonList)
# Only t2- and t4-models coefficients
fitLines["t2- and t4-models"][0,:] = epsilonNscalingLaw(c24only[:,0,:],Nlist,epsilonList)
fitLines["t2- and t4-models"][1,:] = epsilonNscalingLaw(c24only[:,1,:],Nlist,epsilonList)
# make plots
fig1,ax1 = plt.subplots(1,2)
fig2,ax2 = plt.subplots(2,2)
fig3,ax3 = plt.subplots(3,2)
fig4,ax4 = plt.subplots(4,2)
fig5,ax5 = plt.subplots(1,2)
fig6,ax6 = plt.subplots(2,2)
# loop through epsilon values
for i in np.arange(len(epsilonList)):
# t-model coefficient
ax1[0].scatter(np.log(Nlist),np.log(-c1[:,0,i]))
# Second order model coefficients
ax2[0,0].scatter(np.log(Nlist),np.log(-c2[:,0,i]))
ax2[1,0].scatter(np.log(Nlist),np.log(-c2[:,1,i]))
# Third order model coefficients
ax3[0,0].scatter(np.log(Nlist),np.log(-c3[:,0,i]))
ax3[1,0].scatter(np.log(Nlist),np.log(-c3[:,1,i]))
ax3[2,0].scatter(np.log(Nlist),np.log(-c3[:,2,i]))
# Fourth order model coefficients
ax4[0,0].scatter(np.log(Nlist),np.log(-c4[:,0,i]))
ax4[1,0].scatter(np.log(Nlist),np.log(-c4[:,1,i]))
ax4[2,0].scatter(np.log(Nlist),np.log(-c4[:,2,i]))
ax4[3,0].scatter(np.log(Nlist),np.log(-c4[:,3,i]))
# Only t2-model
ax5[0].scatter(np.log(Nlist),np.log(-c2only[:,0,i]))
# Only t2- and t4-models
ax6[0,0].scatter(np.log(Nlist),np.log(-c24only[:,0,i]))
ax6[1,0].scatter(np.log(Nlist),np.log(-c24only[:,1,i]))
# plot best fit lines
myEps = epsilonList[i]
myFit = fitLines["t-model"][0,:]
ax1[0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t2-model"][0,:]
ax2[0,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t2-model"][1,:]
ax2[1,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t3-model"][0,:]
ax3[0,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t3-model"][1,:]
ax3[1,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t3-model"][2,:]
ax3[2,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t4-model"][0,:]
ax4[0,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t4-model"][1,:]
ax4[1,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t4-model"][2,:]
ax4[2,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t4-model"][3,:]
ax4[3,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t2-model only"][0,:]
ax5[0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t2- and t4-models"][0,:]
ax6[0,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
myFit = fitLines["t2- and t4-models"][1,:]
ax6[1,0].plot(np.log(Nlist),np.log(-myFit[0])+myFit[1]*np.log(Nlist)+myFit[2]*np.log(myEps))
# loop through epsilon values
for j in np.arange(len(Nlist)):
# t-model coefficient
ax1[1].scatter(np.log(epsilonList),np.log(-c1[j,0,:]))
# Second order model coefficients
ax2[0,1].scatter(np.log(epsilonList),np.log(-c2[j,0,:]))
ax2[1,1].scatter(np.log(epsilonList),np.log(-c2[j,1,:]))
# Third order model coefficients
ax3[0,1].scatter(np.log(epsilonList),np.log(-c3[j,0,:]))
ax3[1,1].scatter(np.log(epsilonList),np.log(-c3[j,1,:]))
ax3[2,1].scatter(np.log(epsilonList),np.log(-c3[j,2,:]))
# Fourth order model coefficients
ax4[0,1].scatter(np.log(epsilonList),np.log(-c4[j,0,:]))
ax4[1,1].scatter(np.log(epsilonList),np.log(-c4[j,1,:]))
ax4[2,1].scatter(np.log(epsilonList),np.log(-c4[j,2,:]))
ax4[3,1].scatter(np.log(epsilonList),np.log(-c4[j,3,:]))
# Only t2-model
ax5[1].scatter(np.log(epsilonList),np.log(-c2only[j,0,:]))
# Only t2- and t4-models
ax6[0,1].scatter(np.log(epsilonList),np.log(-c24only[j,0,:]))
ax6[1,1].scatter(np.log(epsilonList),np.log(-c24only[j,1,:]))
# plot best fit lines
myN = Nlist[j]
myFit = fitLines["t-model"][0,:]
ax1[1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t2-model"][0,:]
ax2[0,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t2-model"][1,:]
ax2[1,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t3-model"][0,:]
ax3[0,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t3-model"][1,:]
ax3[1,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t3-model"][2,:]
ax3[2,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t4-model"][0,:]
ax4[0,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t4-model"][1,:]
ax4[1,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t4-model"][2,:]
ax4[2,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t4-model"][3,:]
ax4[3,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t2-model only"][0,:]
ax5[1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t2- and t4-models"][0,:]
ax6[0,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
myFit = fitLines["t2- and t4-models"][1,:]
ax6[1,1].plot(np.log(epsilonList),np.log(-myFit[0])+myFit[1]*np.log(myN)+myFit[2]*np.log(epsilonList))
# label all plots
fig1.suptitle("t-model")
ax1[0].set_title("log(a1) vs log(N)")
ax1[0].legend(["epsilon = "+str(round(epsilonList[i],2)) for i in range(len(epsilonList))],prop = {"size":5})
ax1[1].set_title("log(a1) vs log(epsilon)")
ax1[1].legend(["N = "+str(Nlist[i]) for i in range(len(Nlist))],prop = {"size":5})
plt.tight_layout()
fig2.suptitle("Second Order Renormalization")
ax2[0,0].set_title("log(a1) vs log(N)")
ax2[1,0].set_title("log(a2) vs log(N)")
ax2[0,0].legend(["epsilon = "+str(round(epsilonList[i],2)) for i in range(len(epsilonList))],prop = {"size":5})
ax2[0,1].set_title("log(a1) vs log(epsilon)")
ax2[1,1].set_title("log(a2) vs log(epsilon)")
ax2[0,1].legend(["N = "+str(Nlist[i]) for i in range(len(Nlist))],prop = {"size":5})
plt.tight_layout()
fig3.suptitle("Third Order Renormalization")
ax3[0,0].set_title("log(a1) vs log(N)")
ax3[1,0].set_title("log(a2) vs log(N)")
ax3[2,0].set_title("log(a3) vs log(N)")
ax3[0,0].legend(["epsilon = "+str(round(epsilonList[i],2)) for i in range(len(epsilonList))],prop = {"size":5})
ax3[0,1].set_title("log(a1) vs log(epsilon)")
ax3[1,1].set_title("log(a2) vs log(epsilon)")
ax3[2,1].set_title("log(a3) vs log(epsilon)")
ax3[0,1].legend(["N = "+str(Nlist[i]) for i in range(len(Nlist))],prop = {"size":5})
plt.tight_layout()
fig4.suptitle("Fourth Order Renormalization")
ax4[0,0].set_title("log(a1) vs log(N)")
ax4[1,0].set_title("log(a2) vs log(N)")
ax4[2,0].set_title("log(a3) vs log(N)")
ax4[3,0].set_title("log(a4) vs log(N)")
ax4[0,0].legend(["epsilon = "+str(round(epsilonList[i],2)) for i in range(len(epsilonList))],prop = {"size":5})
ax4[0,1].set_title("log(a1) vs log(epsilon)")
ax4[1,1].set_title("log(a2) vs log(epsilon)")
ax4[2,1].set_title("log(a3) vs log(epsilon)")
ax4[3,1].set_title("log(a4) vs log(epsilon)")
ax4[0,1].legend(["N = "+str(Nlist[i]) for i in range(len(Nlist))],prop = {"size":5})
plt.tight_layout()
fig5.suptitle("Only t2-Model Renormalization")
ax5[0].set_title("log(a2) vs log(N)")
ax5[0].legend(["epsilon = "+str(round(epsilonList[i],2)) for i in range(len(epsilonList))],prop = {"size":5})
ax5[1].set_title("log(a2) vs log(epsilon)")
ax5[1].legend(["N = "+str(Nlist[i]) for i in range(len(Nlist))],prop = {"size":5})
plt.tight_layout()
fig6.suptitle("Second and Fourth Order Renormalization")
ax6[0,0].set_title("log(a2) vs log(N)")
ax6[1,0].set_title("log(a4) vs log(N)")
ax6[0,0].legend(["epsilon = "+str(round(epsilonList[i],2)) for i in range(len(epsilonList))],prop = {"size":5})
ax6[0,1].set_title("log(a2) vs log(epsilon)")
ax6[1,1].set_title("log(a4) vs log(epsilon)")
ax6[0,1].legend(["N = "+str(Nlist[i]) for i in range(len(Nlist))],prop = {"size":5})
plt.tight_layout()
return coefficients,fitLines
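# Helper for scalingLaws: least-squares fit of a joint scaling law in N and
# epsilon. Solves the normal equations for
#     log|c| = log(b) + a*log(N) + g*log(epsilon)
# over every (N, epsilon) pair in coeffArray (shape (len(Nlist), len(epsilonList)))
# and returns [-b, a, g], i.e. the law c = -b * N^a * epsilon^g used above.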
def epsilonNscalingLaw(coeffArray,Nlist,epsilonList):
numEps = len(epsilonList)
numN = len(Nlist)
epsilonTile = np.tile(epsilonList,(numN,1))
Ntile = np.transpose(np.tile(Nlist,(numEps,1)))
LSMatrix = (np.array([[numEps*numN,np.sum(np.log(Ntile)),np.sum(np.log(epsilonTile))],
[np.sum(np.log(Ntile)),np.sum(np.log(Ntile)**2),np.sum(np.log(Ntile)*np.log(epsilonTile))],
[np.sum(np.log(epsilonTile)),np.sum(np.log(Ntile)*np.log(epsilonTile)),np.sum(np.log(epsilonTile)**2)]])
)
LSb = np.array([np.sum(np.log(np.abs(coeffArray))),np.sum(np.log(np.abs(coeffArray))*np.log(Ntile)),np.sum(np.log(np.abs(coeffArray))*np.log(epsilonTile))])
sol = np.linalg.solve(LSMatrix,LSb)
sol[0] = -np.exp(sol[0])
return sol
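# Sanity-check example (illustrative sketch): build synthetic coefficients that
# exactly obey c = -b * N^a * epsilon^g and confirm the fit recovers [-b, a, g].
# The values of a, b, and g below are arbitrary test values, not fitted results.
def exampleScalingFit():
    Nlist = np.array([8, 12, 16, 24])
    epsilonList = np.array([0.05, 0.1, 0.2])
    a, b, g = -1.5, 2.0, 0.5
    Ntile = np.transpose(np.tile(Nlist, (len(epsilonList), 1)))
    epsTile = np.tile(epsilonList, (len(Nlist), 1))
    coeffArray = -b * Ntile**a * epsTile**g
    return epsilonNscalingLaw(coeffArray, Nlist, epsilonList)   # approximately [-2.0, -1.5, 0.5]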
def findError(compareList,exact,t):
"""
Finds the two norm of the error between a list of ROMs and an exact solution.
Parameters
----------
compareList : List of Numpy arrays of size (N,T)
Set of state vector evolutions to find errors from
exact : Numpy array of size (N,T)
Exact solution for the same timesteps
t : Numpy array (T,)
Timesteps associated with simulations (must all be the same)
Returns
-------
errList : List of Numpy arrays of size (T,)
Arrays with the squared two-norm of the error at all timesteps for each ROM
"""
# find the ROM size
N = compareList[0].shape[0]
# generate real space solutions
realSols = [makeRealSpace(x,N) for x in compareList]
exactSol = makeRealSpace(exact,N)
# compute two norm of error at all times
errList =[np.sum((i - exactSol)**2,0) for i in realSols]
return errList
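# Example usage (illustrative sketch): measure the real-space error of a ROM
# run against a full run saved on the same timesteps. romSol and fullSol are
# assumed to be runSim outputs; only the first romSol.y.shape[0] positive modes
# of the full solution are used in the comparison.
def exampleFindError(romSol, fullSol):
    errList = findError([romSol.y], fullSol.y, romSol.t)
    return errList[0]                  # squared two-norm of the error at each timestep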
def renormalizeRobust(fullM, endtime, Nlist, Mlist, epsilon, alpha, tau, timesteps, IC = np.sin, plots = False):
"""
Finds renormalization coefficients based on a single simulation, refitting over
increasingly long time windows to check robustness. If the simulation doesn't
yet exist, it creates it.
Parameters
----------
fullM : int
Size of full simulation to base fits on
endtime : int
Endtime of full simulation
Nlist : list of ints
List of resolutions for which to find coefficients
Mlist : list of ints
List of intermediary "full" simulations to use for ROMs
epsilon : float
size of linear term (stiffness)
alpha : float
degree of nonlinearity in KdV
tau : float
time decay modifier
timesteps : Numpy array
specific timesteps for which to save solution
IC : function handle
initial condition of simulation (default np.sin)
plots : boolean
Indicates whether to generate plots (default: False)
Returns
-------
coeffsArray1 : Numpy array (length(Nlist),timesteps-30,1)
Renormalization coefficients for t-model only, refit over increasingly long time windows
coeffsArray2 : Numpy array (length(Nlist),timesteps-30,2)
Renormalization coefficients for t-model and t2-model only
coeffsArray3 : Numpy array (length(Nlist),timesteps-30,3)
Renormalization coefficients for t1-t3-models
coeffsArray4 : Numpy array (length(Nlist),timesteps-30,4)
Renormalization coefficients for t1-t4-models
coeffsArray2only : Numpy array (length(Nlist),timesteps-30,1)
Renormalization coefficients for t2-model only
coeffsArray24only : Numpy array (length(Nlist),timesteps-30,2)
Renormalization coefficients for t2-model and t4-model only
"""
# Check if full simulation has already been constructed
# if so, load it, if not, generate it
try:
uFull = np.load("u" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__+".npy")
tFull = np.load("t" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__+".npy")
except FileNotFoundError:
fullParams = {
'N': fullM,
'M': int(3/2*fullM),
'alpha': 1,
'epsilon': epsilon,
'tau': 1,
'coeffs': None,
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
uSimFull = runSim(fullParams)
uFull = uSimFull.y
tFull = uSimFull.t
np.save( "u" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__,uFull)
np.save( "t" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__,tFull)
# recover number of timesteps
numSteps = tFull.shape[0]
# initialize output arrays
coeffsArray1 = np.zeros((Nlist.shape[0],numSteps - 30,1))
coeffsArray2 = np.zeros((Nlist.shape[0],numSteps - 30,2))
coeffsArray3 = np.zeros((Nlist.shape[0],numSteps - 30,3))
coeffsArray4 = np.zeros((Nlist.shape[0],numSteps - 30,4))
coeffsArray2only = np.zeros((Nlist.shape[0],numSteps - 30,1))
coeffsArray24only = np.zeros((Nlist.shape[0],numSteps - 30,2))
# loop through all resolutions
for j in np.arange(0,Nlist.shape[0]):
# Find number of positive terms in ROM, in intermediate calculations, and wavenumber array
N = Nlist[j]
M = Mlist[j]
k = np.concatenate([np.arange(0,M),np.arange(-M,0)])
# Gather first derivative data for fitting purposes
exactEnergy = np.zeros((N,numSteps))
R0Energy = np.zeros((N,numSteps))
R1Energy = np.zeros((N,numSteps))
R2Energy = np.zeros((N,numSteps))
R3Energy = np.zeros((N,numSteps))
R4Energy = np.zeros((N,numSteps))
# plug exact solution into exact RHS and all ROM terms and find energy contribution of each
for i in np.arange(0,numSteps):
# exact RHS
exactRHS,dummyU = markovKdV(uFull[:,i],int(fullM*3/2),alpha)
exactEnergy[:,i] = np.real(exactRHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(exactRHS[0:N])*uFull[0:N,i])
# Markov RHS
nonlin0,u_full = markovKdV(uFull[0:N,i],M,alpha)
R0RHS = nonlin0
R0Energy[:,i] = np.real(R0RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R0RHS[0:N])*uFull[0:N,i])
# First order RHS term
F_modes = np.concatenate([np.arange(0,N),np.arange(2*N-1,M+N+2),np.arange(2*M-N+1,2*M)])
G_modes = np.arange(N,2*M-N+1)
nonlin1,uuStar = tModelKdV(u_full,nonlin0,alpha,F_modes)
R1RHS = nonlin1*tFull[i]**(1-tau)
R1Energy[:,i] = np.real(R1RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R1RHS[0:N])*uFull[0:N,i])
# Second order RHS term
nonlin2,uk3,uu,A,AStar,B,BStar,C,CStar,D,DStar = t2ModelKdV(u_full,nonlin0,uuStar,alpha,F_modes,G_modes,k,epsilon)
R2RHS = nonlin2*tFull[i]**(2*(1-tau))
R2Energy[:,i] = np.real(R2RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R2RHS[0:N])*uFull[0:N,i])
# Third order RHS term
nonlin3,uk6,E,EStar,F,FStar = t3ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,A,AStar,B,BStar,C,CStar,DStar)
R3RHS = nonlin3*tFull[i]**(3*(1-tau))
R3Energy[:,i] = np.real(R3RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R3RHS[0:N])*uFull[0:N,i])
# Fourth order RHS term
nonlin4 = t4ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,uk6,A,AStar,B,BStar,C,CStar,D,DStar,E,EStar,F,FStar)
R4RHS = nonlin4*tFull[i]**(4*(1-tau))
R4Energy[:,i] = np.real(R4RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R4RHS[0:N])*uFull[0:N,i])
##################################################
# Use least-squares fit to identify coefficients #
##################################################
for i in np.arange(30,numSteps):
exactEnergySnip = exactEnergy[:,0:i]
R0EnergySnip = R0Energy[:,0:i]
R1EnergySnip = R1Energy[:,0:i]
R2EnergySnip = R2Energy[:,0:i]
R3EnergySnip = R3Energy[:,0:i]
R4EnergySnip = R4Energy[:,0:i]
# t-model coefficient
coeffsArray1[j,i-30,:] = np.sum((exactEnergySnip - R0EnergySnip)*R1EnergySnip)/np.sum(R1EnergySnip*R1EnergySnip)
# t2-model coefficient
LSMatrix = (np.array([[np.sum(R1EnergySnip*R1EnergySnip),np.sum(R1EnergySnip*R2EnergySnip)],
[np.sum(R2EnergySnip*R1EnergySnip),np.sum(R2EnergySnip*R2EnergySnip)]]))
LSb = (np.array([np.sum(R1EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray2[j,i-30,:] = np.linalg.solve(LSMatrix,LSb)
# t3-model coefficient
LSMatrix = (np.array([[np.sum(R1EnergySnip*R1EnergySnip),np.sum(R1EnergySnip*R2EnergySnip),np.sum(R1EnergySnip*R3EnergySnip)],
[np.sum(R2EnergySnip*R1EnergySnip),np.sum(R2EnergySnip*R2EnergySnip),np.sum(R2EnergySnip*R3EnergySnip)],
[np.sum(R3EnergySnip*R1EnergySnip),np.sum(R3EnergySnip*R2EnergySnip),np.sum(R3EnergySnip*R3EnergySnip)]]))
LSb = (np.array([np.sum(R1EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R3EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray3[j,i-30,:] = np.linalg.solve(LSMatrix,LSb)
# t4-model coefficient
LSMatrix = (np.array([[np.sum(R1EnergySnip*R1EnergySnip),np.sum(R1EnergySnip*R2EnergySnip),np.sum(R1EnergySnip*R3EnergySnip),np.sum(R1EnergySnip*R4EnergySnip)],
[np.sum(R2EnergySnip*R1EnergySnip),np.sum(R2EnergySnip*R2EnergySnip),np.sum(R2EnergySnip*R3EnergySnip),np.sum(R2EnergySnip*R4EnergySnip)],
[np.sum(R3EnergySnip*R1EnergySnip),np.sum(R3EnergySnip*R2EnergySnip),np.sum(R3EnergySnip*R3EnergySnip),np.sum(R3EnergySnip*R4EnergySnip)],
[np.sum(R4EnergySnip*R1EnergySnip),np.sum(R4EnergySnip*R2EnergySnip),np.sum(R4EnergySnip*R3EnergySnip),np.sum(R4EnergySnip*R4EnergySnip)]]))
LSb = (np.array([np.sum(R1EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R3EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R4EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray4[j,i-30,:] = np.linalg.solve(LSMatrix,LSb)
# t2-model with *no* t-model
coeffsArray2only[j,i-30,:] = np.sum((exactEnergySnip - R0EnergySnip)*R2EnergySnip)/np.sum(R2EnergySnip*R2EnergySnip)
# t2-model and t4-model with *no* t-model or t3-model
LSMatrix = (np.array([[np.sum(R2EnergySnip*R2EnergySnip),np.sum(R2EnergySnip*R4EnergySnip)],
[np.sum(R4EnergySnip*R2EnergySnip),np.sum(R4EnergySnip*R4EnergySnip)]]))
LSb = (np.array([np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R4EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray24only[j,i-30,:] = np.linalg.solve(LSMatrix,LSb)
for ind in np.arange(Nlist.shape[0]):
fig1,ax1 = plt.subplots(2,2)
fig1.suptitle("N = "+str(Nlist[ind]))
ax1[0,0].plot(timesteps[30:],coeffsArray1[ind,:,0],color = "blue")
ax1[0,0].plot(timesteps[30:],coeffsArray2[ind,:,0],color = "red")
ax1[0,0].plot(timesteps[30:],coeffsArray3[ind,:,0],color = "green")
ax1[0,0].plot(timesteps[30:],coeffsArray4[ind,:,0],color = "black")
ax1[0,0].set_title("t-model")
ax1[0,1].plot([],[],color = "blue")
ax1[0,1].plot(timesteps[30:],coeffsArray2[ind,:,1], color = "red")
ax1[0,1].plot(timesteps[30:],coeffsArray3[ind,:,1], color = "green")
ax1[0,1].plot(timesteps[30:],coeffsArray4[ind,:,1], color = "black")
ax1[0,1].plot(timesteps[30:],coeffsArray2only[ind,:,0],color = "cyan")
ax1[0,1].plot(timesteps[30:],coeffsArray24only[ind,:,0], color = "magenta")
ax1[0,1].set_title("t2-model")
ax1[0,1].legend(["First order","Second order","Third order","Fourth order","Only t2","t2 and t4"],prop = {"size":5})
ax1[1,0].plot(timesteps[30:],coeffsArray3[ind,:,2], color = "green")
ax1[1,0].plot(timesteps[30:],coeffsArray4[ind,:,2],color = "black")
ax1[1,0].set_title("t3-model")
ax1[1,1].plot(timesteps[30:],coeffsArray4[ind,:,3], color = "black")
ax1[1,1].plot(timesteps[30:],coeffsArray24only[ind,:,1], color = "magenta")
ax1[1,1].set_title("t4-model")
plt.tight_layout()
return coeffsArray1,coeffsArray2,coeffsArray3,coeffsArray4,coeffsArray2only,coeffsArray24only
def renormalizeWindow(fullM, endtime, width, Nlist, Mlist, epsilon, alpha, tau, timesteps, IC = np.sin, plots = False):
"""
Finds renormalization coefficients using sliding window least squares.
Parameters
----------
fullM : int
Size of full simulation to base fits on
endtime : int
Endtime of full simulation
width : float
Size of sliding window to use in fitting
Nlist : list of ints
List of resolutions for which to find coefficients
Mlist : list of ints
List of intermediary "full" simulations to use for ROMs
epsilon : float
size of linear term (stiffness)
alpha : float
degree of nonlinearity in KdV
tau : float
time decay modifier
timesteps : Numpy array
specific timesteps for which to save solution
IC : function handle
initial condition of simulation (default np.sin)
plots : boolean
Indicates whether to generate plots (default: False)
Returns
-------
coeffsArray1 : Numpy array (length(Nlist),1)
Renormalization coefficients for t-model only
coeffsArray2 : Numpy array (length(Nlist),2)
Renormalization coefficients for t-model and t2-model only
coeffsArray3 : Numpy array (length(Nlist),3)
Renormalization coefficients for t1-t3-models
coeffsArray4 : Numpy array (length(Nlist),4)
Renormalization coefficients for t1-t4-models
coeffsArray2only : Numpy array (length(Nlist),1)
Renormalization coefficients for t2-model only
coeffsArray24only : Numpy array (length(Nlist),2)
Renormalization coefficients for t2-model and t4-model only
fitLines : Dict
Contains scaling law fits for each ROM coefficient
of form c = -b * N^a
Terms given are a, b, and r (correlation coefficient of fit)
"""
# Check if full simulation has already been constructed
# if so, load it, if not, generate it
try:
uFull = np.load("u" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__+".npy")
tFull = np.load("t" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__+".npy")
except FileNotFoundError:
fullParams = {
'N': fullM,
'M': int(3/2*fullM),
'alpha': 1,
'epsilon': epsilon,
'tau': 1,
'coeffs': None,
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
uSimFull = runSim(fullParams)
uFull = uSimFull.y
tFull = uSimFull.t
np.save( "u" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__,uFull)
np.save( "t" + str(fullM) + "t" + str(endtime)+"e"+str(round(epsilon,2)).replace('.','p')+IC.__name__,tFull)
# recover number of timesteps
numSteps = tFull.shape[0]
widthSteps = round(width/(tFull[1]-tFull[0]))
# initialize output arrays
coeffsArray1 = np.zeros((Nlist.shape[0],numSteps - widthSteps+1,1))
coeffsArray2 = np.zeros((Nlist.shape[0],numSteps - widthSteps+1,2))
coeffsArray3 = np.zeros((Nlist.shape[0],numSteps - widthSteps+1,3))
coeffsArray4 = np.zeros((Nlist.shape[0],numSteps - widthSteps+1,4))
coeffsArray2only = np.zeros((Nlist.shape[0],numSteps - widthSteps+1,1))
coeffsArray24only = np.zeros((Nlist.shape[0],numSteps - widthSteps+1,2))
exact1 = np.zeros((Nlist.shape[0],1))
exact2 = np.zeros((Nlist.shape[0],2))
exact3 = np.zeros((Nlist.shape[0],3))
exact4 = np.zeros((Nlist.shape[0],4))
exact2o = np.zeros((Nlist.shape[0],1))
exact24o = np.zeros((Nlist.shape[0],2))
# loop through all resolutions
for j in np.arange(0,Nlist.shape[0]):
# Find number of positive terms in ROM, in intermediate calculations, and wavenumber array
N = Nlist[j]
M = Mlist[j]
k = np.concatenate([np.arange(0,M),np.arange(-M,0)])
# Gather first derivative data for fitting purposes
exactEnergy = np.zeros((N,numSteps))
R0Energy = np.zeros((N,numSteps))
R1Energy = np.zeros((N,numSteps))
R2Energy = np.zeros((N,numSteps))
R3Energy = np.zeros((N,numSteps))
R4Energy = np.zeros((N,numSteps))
# plug exact solution into exact RHS and all ROM terms and find energy contribution of each
for i in np.arange(0,numSteps):
# exact RHS
exactRHS,dummyU = markovKdV(uFull[:,i],int(fullM*3/2),alpha)
exactEnergy[:,i] = np.real(exactRHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(exactRHS[0:N])*uFull[0:N,i])
# Markov RHS
nonlin0,u_full = markovKdV(uFull[0:N,i],M,alpha)
R0RHS = nonlin0
R0Energy[:,i] = np.real(R0RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R0RHS[0:N])*uFull[0:N,i])
# First order RHS term
F_modes = np.concatenate([np.arange(0,N),np.arange(2*N-1,M+N+2),np.arange(2*M-N+1,2*M)])
G_modes = np.arange(N,2*M-N+1)
nonlin1,uuStar = tModelKdV(u_full,nonlin0,alpha,F_modes)
if tFull[i] == 0:
R1RHS = nonlin1*0
else:
R1RHS = nonlin1*tFull[i]**(1-tau)
R1Energy[:,i] = np.real(R1RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R1RHS[0:N])*uFull[0:N,i])
# Second order RHS term
nonlin2,uk3,uu,A,AStar,B,BStar,C,CStar,D,DStar = t2ModelKdV(u_full,nonlin0,uuStar,alpha,F_modes,G_modes,k,epsilon)
R2RHS = nonlin2*tFull[i]**(2*(1-tau))
R2Energy[:,i] = np.real(R2RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R2RHS[0:N])*uFull[0:N,i])
# Third order RHS term
nonlin3,uk6,E,EStar,F,FStar = t3ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,A,AStar,B,BStar,C,CStar,DStar)
R3RHS = nonlin3*tFull[i]**(3*(1-tau))
R3Energy[:,i] = np.real(R3RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R3RHS[0:N])*uFull[0:N,i])
# Fourth order RHS term
nonlin4 = t4ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,uk6,A,AStar,B,BStar,C,CStar,D,DStar,E,EStar,F,FStar)
R4RHS = nonlin4*tFull[i]**(4*(1-tau))
R4Energy[:,i] = np.real(R4RHS[0:N]*np.conj(uFull[0:N,i]) + np.conj(R4RHS[0:N])*uFull[0:N,i])
exact1[j,:] = np.sum((exactEnergy - R0Energy)*R1Energy)/np.sum(R1Energy*R1Energy)
LSMatrix = (np.array([[np.sum(R1Energy*R1Energy),np.sum(R1Energy*R2Energy)],
[np.sum(R2Energy*R1Energy),np.sum(R2Energy*R2Energy)]]))
LSb = (np.array([np.sum(R1Energy*(exactEnergy-R0Energy)),np.sum(R2Energy*(exactEnergy-R0Energy))]))
exact2[j,:] = np.linalg.solve(LSMatrix,LSb)
LSMatrix = (np.array([[np.sum(R1Energy*R1Energy),np.sum(R1Energy*R2Energy),np.sum(R1Energy*R3Energy)],
[np.sum(R2Energy*R1Energy),np.sum(R2Energy*R2Energy),np.sum(R2Energy*R3Energy)],
[np.sum(R3Energy*R1Energy),np.sum(R3Energy*R2Energy),np.sum(R3Energy*R3Energy)]]))
LSb = (np.array([np.sum(R1Energy*(exactEnergy-R0Energy)),np.sum(R2Energy*(exactEnergy-R0Energy)),np.sum(R3Energy*(exactEnergy-R0Energy))]))
exact3[j,:] = np.linalg.solve(LSMatrix,LSb)
LSMatrix = (np.array([[np.sum(R1Energy*R1Energy),np.sum(R1Energy*R2Energy),np.sum(R1Energy*R3Energy),np.sum(R1Energy*R4Energy)],
[np.sum(R2Energy*R1Energy),np.sum(R2Energy*R2Energy),np.sum(R2Energy*R3Energy),np.sum(R2Energy*R4Energy)],
[np.sum(R3Energy*R1Energy),np.sum(R3Energy*R2Energy),np.sum(R3Energy*R3Energy),np.sum(R3Energy*R4Energy)],
[np.sum(R4Energy*R1Energy),np.sum(R4Energy*R2Energy),np.sum(R4Energy*R3Energy),np.sum(R4Energy*R4Energy)]]))
LSb = (np.array([np.sum(R1Energy*(exactEnergy-R0Energy)),np.sum(R2Energy*(exactEnergy-R0Energy)),np.sum(R3Energy*(exactEnergy-R0Energy)),np.sum(R4Energy*(exactEnergy-R0Energy))]))
exact4[j,:] = np.linalg.solve(LSMatrix,LSb)
exact2o[j,:] = np.sum((exactEnergy - R0Energy)*R2Energy)/np.sum(R2Energy*R2Energy)
LSMatrix = (np.array([[np.sum(R2Energy*R2Energy),np.sum(R2Energy*R4Energy)],
[np.sum(R4Energy*R2Energy),np.sum(R4Energy*R4Energy)]]))
LSb = (np.array([np.sum(R2Energy*(exactEnergy-R0Energy)),np.sum(R4Energy*(exactEnergy-R0Energy))]))
exact24o[j,:] = np.linalg.solve(LSMatrix,LSb)
##################################################
# Use least-squares fit to identify coefficients #
##################################################
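        # For each window position we find coefficients c_k minimizing
        #   || (exactEnergy - R0Energy) - sum_k c_k * RkEnergy ||^2
        # summed over all resolved modes and all timesteps in the window.
        # The resulting normal equations are LSMatrix @ c = LSb with
        #   LSMatrix[m,n] = sum(RmEnergy * RnEnergy)
        #   LSb[m]        = sum(RmEnergy * (exactEnergy - R0Energy))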
for i in np.arange(0,numSteps-widthSteps+1):
exactEnergySnip = exactEnergy[:,i:i+widthSteps]
R0EnergySnip = R0Energy[:,i:i+widthSteps]
R1EnergySnip = R1Energy[:,i:i+widthSteps]
R2EnergySnip = R2Energy[:,i:i+widthSteps]
R3EnergySnip = R3Energy[:,i:i+widthSteps]
R4EnergySnip = R4Energy[:,i:i+widthSteps]
# t-model coefficient
coeffsArray1[j,i,:] = np.sum((exactEnergySnip - R0EnergySnip)*R1EnergySnip)/np.sum(R1EnergySnip*R1EnergySnip)
# t2-model coefficient
LSMatrix = (np.array([[np.sum(R1EnergySnip*R1EnergySnip),np.sum(R1EnergySnip*R2EnergySnip)],
[np.sum(R2EnergySnip*R1EnergySnip),np.sum(R2EnergySnip*R2EnergySnip)]]))
LSb = (np.array([np.sum(R1EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray2[j,i,:] = np.linalg.solve(LSMatrix,LSb)
# t3-model coefficient
LSMatrix = (np.array([[np.sum(R1EnergySnip*R1EnergySnip),np.sum(R1EnergySnip*R2EnergySnip),np.sum(R1EnergySnip*R3EnergySnip)],
[np.sum(R2EnergySnip*R1EnergySnip),np.sum(R2EnergySnip*R2EnergySnip),np.sum(R2EnergySnip*R3EnergySnip)],
[np.sum(R3EnergySnip*R1EnergySnip),np.sum(R3EnergySnip*R2EnergySnip),np.sum(R3EnergySnip*R3EnergySnip)]]))
LSb = (np.array([np.sum(R1EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R3EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray3[j,i,:] = np.linalg.solve(LSMatrix,LSb)
# t4-model coefficient
LSMatrix = (np.array([[np.sum(R1EnergySnip*R1EnergySnip),np.sum(R1EnergySnip*R2EnergySnip),np.sum(R1EnergySnip*R3EnergySnip),np.sum(R1EnergySnip*R4EnergySnip)],
[np.sum(R2EnergySnip*R1EnergySnip),np.sum(R2EnergySnip*R2EnergySnip),np.sum(R2EnergySnip*R3EnergySnip),np.sum(R2EnergySnip*R4EnergySnip)],
[np.sum(R3EnergySnip*R1EnergySnip),np.sum(R3EnergySnip*R2EnergySnip),np.sum(R3EnergySnip*R3EnergySnip),np.sum(R3EnergySnip*R4EnergySnip)],
[np.sum(R4EnergySnip*R1EnergySnip),np.sum(R4EnergySnip*R2EnergySnip),np.sum(R4EnergySnip*R3EnergySnip),np.sum(R4EnergySnip*R4EnergySnip)]]))
LSb = (np.array([np.sum(R1EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R3EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R4EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray4[j,i,:] = np.linalg.solve(LSMatrix,LSb)
# t2-model with *no* t-model
coeffsArray2only[j,i,:] = np.sum((exactEnergySnip - R0EnergySnip)*R2EnergySnip)/np.sum(R2EnergySnip*R2EnergySnip)
# t2-model and t4-model with *no* t-model or t3-model
LSMatrix = (np.array([[np.sum(R2EnergySnip*R2EnergySnip),np.sum(R2EnergySnip*R4EnergySnip)],
[np.sum(R4EnergySnip*R2EnergySnip),np.sum(R4EnergySnip*R4EnergySnip)]]))
LSb = (np.array([np.sum(R2EnergySnip*(exactEnergySnip-R0EnergySnip)),np.sum(R4EnergySnip*(exactEnergySnip-R0EnergySnip))]))
coeffsArray24only[j,i,:] = np.linalg.solve(LSMatrix,LSb)
for ind in np.arange(Nlist.shape[0]):
fig1,ax1 = plt.subplots(2,2)
fig1.suptitle("N = "+str(Nlist[ind]))
ax1[0,0].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray1[ind,:,0],color = "blue")
ax1[0,0].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray2[ind,:,0],color = "red")
ax1[0,0].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray3[ind,:,0],color = "green")
ax1[0,0].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray4[ind,:,0],color = "black")
ax1[0,0].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact1[ind,0],exact1[ind,0]],color="blue",linestyle=":")
ax1[0,0].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact2[ind,0],exact2[ind,0]],color="red",linestyle=":")
ax1[0,0].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact3[ind,0],exact3[ind,0]],color="green",linestyle=":")
ax1[0,0].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact4[ind,0],exact4[ind,0]],color="black",linestyle=":")
ax1[0,0].set_title("t-model")
ax1[0,1].plot([],[],color = "blue")
ax1[0,1].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray2[ind,:,1], color = "red")
ax1[0,1].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray3[ind,:,1], color = "green")
ax1[0,1].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray4[ind,:,1], color = "black")
ax1[0,1].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray2only[ind,:,0],color = "cyan")
ax1[0,1].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray24only[ind,:,0], color = "magenta")
ax1[0,1].set_title("t2-model")
ax1[0,1].legend(["First order","Second order","Third order","Fourth order","Only t2","t2 and t4"],prop = {"size":5})
ax1[0,1].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact2[ind,1],exact2[ind,1]],color="red",linestyle=":")
ax1[0,1].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact3[ind,1],exact3[ind,1]],color="green",linestyle=":")
ax1[0,1].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact4[ind,1],exact4[ind,1]],color="black",linestyle=":")
ax1[0,1].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact2o[ind,0],exact2o[ind,0]],color="cyan",linestyle=":")
ax1[0,1].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact24o[ind,0],exact24o[ind,0]],color="magenta",linestyle=":")
ax1[1,0].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray3[ind,:,2], color = "green")
ax1[1,0].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray4[ind,:,2],color = "black")
ax1[1,0].set_title("t3-model")
ax1[1,0].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact3[ind,2],exact3[ind,2]],color="green",linestyle=":")
ax1[1,0].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact4[ind,2],exact4[ind,2]],color="black",linestyle=":")
ax1[1,1].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray4[ind,:,3], color = "black")
ax1[1,1].plot(timesteps[0:numSteps-widthSteps+1],coeffsArray24only[ind,:,1], color = "magenta")
ax1[1,1].set_title("t4-model")
ax1[1,1].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact4[ind,3],exact4[ind,3]],color="black",linestyle=":")
ax1[1,1].plot([timesteps[0],timesteps[numSteps-widthSteps+1]],[exact24o[ind,1],exact24o[ind,1]],color="magenta",linestyle=":")
plt.tight_layout()
return coeffsArray1,coeffsArray2,coeffsArray3,coeffsArray4,coeffsArray2only,coeffsArray24only
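# Example usage of renormalizeWindow (kept as a comment so that importing this
# module does not launch a long full-model simulation). Every numerical value
# below is an illustrative assumption, not a value tied to any particular run:
#
#   ts = np.arange(0, 10.01, 0.1)
#   c1, c2, c3, c4, c2o, c24o = renormalizeWindow(
#       fullM=256, endtime=10, width=1.0,
#       Nlist=np.array([16, 24, 32]), Mlist=np.array([48, 72, 96]),
#       epsilon=0.1, alpha=1, tau=1, timesteps=ts)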
def renormalizeTau(fullM, endtime, Nlist, Mlist, epsilon, alpha, tauList, timesteps, IC = np.sin):
"""
Tests a range of tau values for fitting coefficients.
Parameters
----------
fullM : int
Resolution of full model to use for fitting
endtime : float
        Final time to use for fitting
    Nlist : list of ints
        List of ROM resolutions for which to find coefficients
    Mlist : list of ints
        List of intermediary "full" simulation sizes to use for the ROM terms
    epsilon : float
        size of linear term (stiffness)
    alpha : float
        degree of nonlinearity in KdV
    tauList : Numpy array
        Grid of tau values to test
    timesteps : Numpy array
        specific timesteps for which to save solution
    IC : function handle
        initial condition of simulation (default np.sin)
    Returns
    -------
    out : dict
        Contains optimal coefficients for each model for each value of tau.
        't-model' is len(tauList) x len(Nlist) x 1,
        't2-model' is len(tauList) x len(Nlist) x 2, etc.
    errList : list of dicts
        Least-squares fitting errors for each model, one entry per tau value
"""
out = {"t-model" : np.zeros((tauList.shape[0],Nlist.shape[0],1)),
"t2-model" : np.zeros((tauList.shape[0],Nlist.shape[0],2)),
"t3-model" : np.zeros((tauList.shape[0],Nlist.shape[0],3)),
"t4-model" : np.zeros((tauList.shape[0],Nlist.shape[0],4)),
"t2-model only" : np.zeros((tauList.shape[0],Nlist.shape[0],1)),
"t2- and t4-models" : np.zeros((tauList.shape[0],Nlist.shape[0],2))
}
errList = []
for i in np.arange(tauList.shape[0]):
result = renormalize(fullM, endtime, Nlist, Mlist, epsilon, alpha, tauList[i], timesteps, IC = IC, plots = False)
out["t-model"][i,:,:] = result[0]
out["t2-model"][i,:,:] = result[1]
out["t3-model"][i,:,:] = result[2]
out["t4-model"][i,:,:] = result[3]
out["t2-model only"][i,:,:] = result[4]
out["t2- and t4-models"][i,:,:] = result[5]
errList.append(result[7])
return out,errList
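# Example sweep over tau values with renormalizeTau (commented out; it needs a
# resolved full simulation, which renormalize will load or generate). The
# resolutions and grids below are illustrative assumptions only:
#
#   ts = np.arange(0, 10.01, 0.1)
#   tauGrid = np.arange(0, 1.05, 0.05)
#   coeffs, errs = renormalizeTau(fullM=256, endtime=10,
#       Nlist=np.array([16, 32]), Mlist=np.array([48, 96]),
#       epsilon=0.1, alpha=1, tauList=tauGrid, timesteps=ts)
#   # coeffs["t2-model"][i, j, :] is the fit for tauGrid[i] and Nlist[j]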
def automatedROM(N,alpha,epsilon,timesteps,fitTime = 10,tauTests = np.arange(0,1.05,0.05),IC = np.sin,tol = 1e-3):
"""
Automatically finds optimal tau and coefficients for an ROM and runs the ROM.
Also produces reference exact solution
Parameters
----------
N : int
Resolution of ROM
alpha : float
degree of nonlinearity in KdV
epsilon : float
size of linear term (stiffness)
timesteps : Numpy array
specific timesteps for which to save solution
fitTime : float
Fits are made over window of exact solution from 0 to fitTime (default 10)
    tauTests : Numpy array
Grid of tau values to test (default 0:0.05:1)
IC : function handle
initial condition of simulation (default np.sin)
tol : float
Tolerance for declaring full model "resolved"
There must be less than this relative error in the first half of the
full modes up to the end time (default 10^-3)
Returns
-------
simMarkov : SciPy integration object
Simulation up to end time of Markov model
    sim2 : SciPy integration object
        Simulation up to end time of 2nd order renormalized model
    sim4 : SciPy integration object
        Simulation up to end time of 4th order renormalized model
    sim2scale : SciPy integration object
        2nd order simulation using scaling-law coefficients
    sim4scale : SciPy integration object
        4th order simulation using scaling-law coefficients
    coefficients : dict
        Optimal renormalization coefficients for each model and each tested tau
    errors : list
        Least-squares fitting errors for each tested tau
"""
endtime = timesteps[-1]
M = 16
unresolved = True
#print("Constructing reference exact solution...")
try:
fileList = glob.glob("u" + '[0-9]*' + "t" + str(int(endtime))+"e"+str(round(epsilon,2)).replace('.','p')+".npy")
myFile = fileList[0]
uFull = np.load(myFile)
fileList = glob.glob("t" + '[0-9]*' + "t" + str(int(endtime))+"e"+str(round(epsilon,2)).replace('.','p')+".npy")
myFile = fileList[0]
tFull = np.load(myFile)
#print("Success! (it was already saved)\n")
M = int(re.findall(r'\d+', fileList[0])[0])
except:
# find resolved simulation
while unresolved:
M = 2*M
#print("\nCurrently testing M = "+str(M))
fullParams = {
'N': M,
'M': int(3/2*M),
'alpha': 1,
'epsilon': epsilon,
'tau': 1,
'coeffs': None,
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
uSimFull = runSim(fullParams)
uFull = uSimFull.y
tFull = uSimFull.t
energyCheck = getMass(uFull,int(M/2))
print("Maximum mass deviation in first "+str(M/2)+" modes: "+str(max(abs(energyCheck - energyCheck[0]))))
if max(abs(energyCheck - energyCheck[0])) < tol:
print("Success!\n")
unresolved = False
np.save("u" + str(M) + "t" + str(int(endtime))+"e"+str(round(epsilon,2)).replace('.','p')+".npy",uFull)
np.save("t" + str(M) + "t" + str(int(endtime))+"e"+str(round(epsilon,2)).replace('.','p')+".npy",tFull)
uFit = uFull[:,tFull<=fitTime]
tFit = tFull[tFull<=fitTime]
np.save("u" + str(M) + "t" + str(int(fitTime))+"e"+str(round(epsilon,2)).replace('.','p')+".npy",uFit)
np.save("t" + str(M) + "t" + str(int(fitTime))+"e"+str(round(epsilon,2)).replace('.','p')+".npy",tFit)
# Find specific fitting window
#print("Finding coefficients for range of tau values...")
coefficients,errors = renormalizeTau(fullM = M,
endtime = fitTime,
Nlist = np.array([N]),
Mlist = np.array([N*3]),
epsilon = epsilon,
alpha = alpha,
tauList = tauTests,
timesteps = tFit,
IC = IC)
err2 = [fit["t2-model only"][0][0] for fit in errors]
c2 = coefficients["t2-model only"][err2.index(min(err2))][0]
err4 = [fit["t2- and t4-models"][0][0] for fit in errors]
c4 = coefficients["t2- and t4-models"][err4.index(min(err4))][0]
#print("Coefficients found!\n")
paramsMarkov = {
'N': N,
'M': int(3*N),
'alpha': 1,
'epsilon': epsilon,
'tau': 0,
'coeffs': None,
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
params2 = {
'N': N,
'M': int(3*N),
'alpha': 1,
'epsilon': epsilon,
'tau': tauTests[err2.index(min(err2))],
'coeffs': np.array([0,c2[0]]),
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
params4 = {
'N': N,
'M': int(3*N),
'alpha': 1,
'epsilon': epsilon,
'tau': tauTests[err4.index(min(err4))],
'coeffs': np.array([0,c4[0],0,c4[1]]),
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
params2scaling = {
'N': N,
'M': int(3*N),
'alpha': 1,
'epsilon': epsilon,
'tau': 1,
'coeffs': np.array([0,-0.7615*N**-5.8081*epsilon**-3.7681]), # NOTE GET THE LATEST SCALING LAWS HERE
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
params4scaling = {
'N': N,
'M': int(3*N),
'alpha': 1,
'epsilon': epsilon,
'tau': 1,
'coeffs': np.array([0,-1.2473*N**-5.7356*epsilon**-3.6910,0,-0.3675*N**-11.4719*epsilon**-7.3881]), # NOTE GET THE LATEST SCALING LAWS HERE
'IC': IC,
'endtime': endtime,
'timesteps': timesteps
}
#print("Running Markov simulation...\n")
simMarkov = runSim(paramsMarkov)
#print("Running second order simulation with tau = "+str(tauTests[err2.index(min(err2))])+"...\n")
sim2 = runSim(params2)
#print("Running fourth order simulation with tau = "+str(tauTests[err4.index(min(err4))])+"...")
sim4 = runSim(params4)
sim2scale = runSim(params2scaling)
sim4scale = runSim(params4scaling)
return simMarkov,sim2,sim4,sim2scale,sim4scale,coefficients,errors
# coeffsArray1,coeffsArray2,coeffsArray3,coeffsArray4,coeffsArray2only,coeffsArray24only,fitLines
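# Example end-to-end use of automatedROM (commented out because it runs several
# full and reduced simulations). The resolution, stiffness, and timestep grid
# below are assumptions chosen only for illustration:
#
#   ts = np.arange(0, 10.01, 0.1)
#   simMarkov, sim2, sim4, sim2scale, sim4scale, coeffs, errs = automatedROM(
#       N=16, alpha=1, epsilon=0.1, timesteps=ts, fitTime=10)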
# old functions that should be defunct
# def RHSFullKdV(u,M,alpha,epsilon):
# """Computes RHS of KdV with no reduced order model and dealiasing
# Parameters
# ----------
# u : 1D Numpy Array (M,)
# Positive modes of state vector whose RHS is being computed
# M : int
# Number of positive modes in "full" model for intermediary calculations
# alpha : float
# Degree of nonlinearity in KdV
# epsilon : float
# Size of linear term (stiffness)
# Returns
# -------
# RHS : 1D Numpy Array (M,)
# RHS of ODE for state vector u
# """
# # compute nonlinear part of RHS using "full" model with M positive modes
# # the factor of 3/2 is for dealiasing
# a,b = markovKdV(u,int(3/2*M),alpha)
# # compute the linear part of the RHS and add it to the nonlinear part
# L = u.shape[0]
# k = np.arange(0,L)
# RHS = 1j*k**3*epsilon**2*u + a[0:L]
# return RHS
# def RHSMarkovKdV(u,N,M,alpha,epsilon):
# """Computes RHS of KdV with no reduced order model and dealiasing
# Parameters
# ----------
# u : 1D Numpy Array (N,)
# Positive modes of state vector whose RHS is being computed
# N : int
# Number of positive modes in ROM
# M : int
# Number of positive modes in "full" model for intermediary calculations
# alpha : float
# Degree of nonlinearity in KdV
# epsilon : float
# Size of linear term (stiffness)
# Returns
# -------
# RHS : 1D Numpy Array (N,)
# RHS of ODE for state vector u
# """
# # compute nonlinear part of RHS using "full" model with M positive modes
# a,b = markovKdV(u,M,alpha)
# # compute the linear part of the RHS and add it to the nonlinear part
# L = u.shape[0]
# k = np.arange(0,L)
# RHS = 1j*k**3*epsilon**2*u + a[0:L]
# return RHS
# def RHStModelKdV(u,N,M,alpha,epsilon,t):
# """Computes RHS of KdV for the t-model with coefficient one
# Parameters
# ----------
# u : 1D Numpy Array (N,)
# Positive modes of state vector whose RHS is being computed
# N : int
# Number of positive modes in ROM
# M : int
# Number of positive modes in "full" model for intermediary calculations
# alpha : float
# Degree of nonlinearity in KdV
# epsilon : float
# Size of linear term (stiffness)
# t : float
# Current timestep
# Returns
# -------
# RHS : 1D Numpy Array (N,)
# RHS of ODE for state vector u
# """
# # compute nonlinear part of RHS using "full" model with M positive modes
# nonlin0,u_full = markovKdV(u,M,alpha)
# # define which modes are resolved in full array
# F_modes = np.concatenate([np.arange(0,N),np.arange(2*N-1,M+N+2),np.arange(2*M-N+1,2*M)])
# # compute t-model term
# nonlin1,uuStar = tModelKdV(u_full,nonlin0,alpha,F_modes)
# # compute the linear part of the RHS and add it to the nonlinear part
# L = u.shape[0]
# k = np.arange(0,L)
# # combine linear, Markov, and t-model terms
# RHS = 1j*k**3*epsilon**2*u + nonlin0[0:L] + nonlin1[0:L]*t
# return RHS
# def RHSt2ModelKdV(u,N,M,alpha,epsilon,t,tau,coeffs):
# """Computes RHS of KdV for the t2-model with variable coefficients
# Parameters
# ----------
# u : 1D Numpy Array (N,)
# Positive modes of state vector whose RHS is being computed
# N : int
# Number of positive modes in ROM
# M : int
# Number of positive modes in "full" model for intermediary calculations
# alpha : float
# Degree of nonlinearity in KdV
# epsilon : float
# Size of linear term (stiffness)
# t : float
# Current timestep
# tau : float
# Time decay modifier (default 0 - time dependence of memory fully retained)
# coeffs : 1D Numpy Array (2,1)
# Renormalization coefficients for t-model and t2-model
# Returns
# -------
# RHS : 1D Numpy Array (N,)
# RHS of ODE for state vector u
# """
# # compute nonlinear part of RHS using "full" model with M positive modes
# nonlin0,u_full = markovKdV(u,M,alpha)
# # define which modes are resolved / unresolved in full array
# F_modes = np.concatenate([np.arange(0,N),np.arange(2*N-1,M+N+2),np.arange(2*M-N+1,2*M)])
# G_modes = np.arange(N,2*M-N+1)
# # compute t-model term
# nonlin1,uuStar = tModelKdV(u_full,nonlin0,alpha,F_modes)
# k = np.concatenate([np.arange(0,M),np.arange(-M,0)])
# # compute t2-model term
# nonlin2,uk3,uu,A,AStar,B,BStar,C,CStar,D,DStar = t2ModelKdV(u_full,nonlin0,uuStar,alpha,F_modes,G_modes,k,epsilon)
# # combine linear, Markov, t-model, and t2-model terms
# RHS = 1j*k[0:N]**3*epsilon**2*u + nonlin0[0:N] + coeffs[0]*nonlin1[0:N]*t**(1-tau) + coeffs[1]*nonlin2[0:N]*t**(2*(1-tau))
# return RHS
# def RHSt4ModelKdV(u,N,M,alpha,epsilon,t,tau,coeffs):
# """Computes RHS of KdV for the t4-model with variable coefficients
# Parameters
# ----------
# u : 1D Numpy Array (N,)
# Positive modes of state vector whose RHS is being computed
# N : int
# Number of positive modes in ROM
# M : int
# Number of positive modes in "full" model for intermediary calculations
# alpha : float
# Degree of nonlinearity in KdV
# epsilon : float
# Size of linear term (stiffness)
# t : float
# Current timestep
# tau : float
# Time decay modifier (default 0 - time dependence of memory fully retained)
# coeffs : 1D Numpy Array (2,1)
# Renormalization coefficients for t-model and t2-model
# Returns
# -------
# RHS : 1D Numpy Array (N,)
# RHS of ODE for state vector u
# """
# # compute nonlinear part of RHS using "full" model with M positive modes
# nonlin0,u_full = markovKdV(u,M,alpha)
# # define which modes are resolved / unresolved in full array
# F_modes = np.concatenate([np.arange(0,N),np.arange(2*N-1,M+N+2),np.arange(2*M-N+1,2*M)])
# G_modes = np.arange(N,2*M-N+1)
# # compute t-model term
# nonlin1,uuStar = tModelKdV(u_full,nonlin0,alpha,F_modes)
# k = np.concatenate([np.arange(0,M),np.arange(-M,0)])
# # compute t2-model term
# nonlin2,uk3,uu,A,AStar,B,BStar,C,CStar,D,DStar = t2ModelKdV(u_full,nonlin0,uuStar,alpha,F_modes,G_modes,k,epsilon)
# # compute t3-model term
# nonlin3,uk6,E,EStar,F,FStar = t3ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,A,AStar,B,BStar,C,CStar,DStar)
# # compute t4-model term
# nonlin4 = t4ModelKdV(alpha,F_modes,G_modes,k,epsilon,u_full,uu,uuStar,uk3,uk6,A,AStar,B,BStar,C,CStar,D,DStar,E,EStar,F,FStar)
# # combine linear, Markov, t-model, and t2-model terms
# RHS = (1j*k[0:N]**3*epsilon**2*u + nonlin0[0:N] + coeffs[0]*nonlin1[0:N]*t**(1-tau)
# + coeffs[1]*nonlin2[0:N]*t**(2*(1-tau))
# + coeffs[2]*nonlin3[0:N]*t**(3*(1-tau))
# + coeffs[3]*nonlin4[0:N]*t**(4*(1-tau)))
# return RHS
# def nonRenormSim(N, endtime = 10, alpha = 1, epsilon = 0.1, IC = np.sin, timesteps = None):
# """Runs a non-renormalized simulation of KdV - use large N so it is resolved
# Parameters
# ----------
# N : int
# Number of positive modes in simulation
# endtime : float
# Final time to run simulation to (default 10)
# alpha : float
# Degree of nonlinearity in KdV (default 1)
# epsilon : float
# Size of linear term (stiffness) (default 0.1)
# IC : function handle
# Initial condition (default sin(x))
# Returns
# -------
# uSim : ODE Solution object
# Data from completed simulation
# """
# # generate initial condition
# x = np.linspace(0,2*np.pi-2*np.pi/(2*N),2*N)
# y = IC(x)
# uFull = fftnorm(y)
# u = uFull[0:N]
# # define RHS for integration
# # (RHS FullKdV uses "full" size with 3/2*N positive modes to dealias results)
# def myRHS(t,y):
# out = RHSFullKdV(y,N,alpha,epsilon)
# return out
# # run simulation using stiff solver
# uSim = sp.integrate.solve_ivp(fun = myRHS, t_span = [0,endtime], y0 = u,method = "BDF", t_eval = timesteps)
# return uSim
# def markovSim(N, M = None, endtime = 10, alpha = 1, epsilon = 0.1, IC = np.sin, timesteps = None):
# """Runs a non-renormalized simulation of KdV - use large N so it is resolved
# Parameters
# ----------
# N : int
# Number of positive modes in simulation
# M : int
# Number of positive modes in "full" model for intermediary calculations (default 2N)
# endtime : float
# Final time to run simulation to (default 10)
# alpha : float
# Degree of nonlinearity in KdV (default 1)
# epsilon : float
# Size of linear term (stiffness) (default 0.1)
# IC : function handle
# Initial condition (default sin(x))
# Returns
# -------
# uSim : ODE Solution object
# Data from completed simulation
# """
# # default value of M is double N
# if M is None:
# M = 2*N
# # generate initial condition
# x = np.linspace(0,2*np.pi-2*np.pi/(2*N),2*N)
# y = IC(x)
# uFull = fftnorm(y)
# u = uFull[0:N]
# # define RHS for integration
# # (RHS FullKdV uses "full" size with 3/2*N positive modes to dealias results)
# def myRHS(t,y):
# out = RHSMarkovKdV(y,N,M,alpha,epsilon)
# return out
# # run simulation using stiff solver
# uSim = sp.integrate.solve_ivp(fun = myRHS, t_span = [0,endtime], y0 = u,method = "BDF", t_eval = timesteps)
# return uSim
# def tModelSim(N, M = None, endtime = 10, alpha = 1, epsilon = 0.1, IC = np.sin, timesteps = None):
# """Runs a basic t-model simulation of KdV
# Parameters
# ----------
# N : int
# Number of positive modes in simulation
# M : int
# Number of positive modes in "full" model for intermediary calculations (default 3N)
# endtime : float
# Final time to run simulation to (default 10)
# alpha : float
# Degree of nonlinearity in KdV (default 1)
# epsilon : float
# Size of linear term (stiffness) (default 0.1)
# IC : function handle
# Initial condition (default sin(x))
# timesteps : Numpy array (t,1)
# Specific timesteps to save
# Returns
# -------
# uSim : ODE Solution object
# Data from completed simulation
# """
# # default value of M is triple N
# if M is None:
# M = 3*N
# # generate initial condition
# x = np.linspace(0,2*np.pi-2*np.pi/(2*N),2*N)
# y = IC(x)
# uFull = fftnorm(y)
# u = uFull[0:N]
# # define RHS for integration
# # (RHS FullKdV uses "full" size with 3/2*N positive modes to dealias results)
# def myRHS(t,y):
# out = RHStModelKdV(y,N,M,alpha,epsilon,t)
# return out
# # run simulation using stiff solver
# uSim = sp.integrate.solve_ivp(fun = myRHS, t_span = [0,endtime], y0 = u,method = "BDF", t_eval = timesteps)
# return uSim
# def t2ModelSim(N, M = None, endtime = 10, alpha = 1, epsilon = 0.1, tau = 0, IC = np.sin, timesteps = None, coeffs = np.array([1,-0.5])):
# """Runs a t2-model simulation of KdV
# Parameters
# ----------
# N : int
# Number of positive modes in simulation
# M : int
# Number of positive modes in "full" model for intermediary calculations (default 3N)
# endtime : float
# Final time to run simulation to (default 10)
# alpha : float
# Degree of nonlinearity in KdV (default 1)
# epsilon : float
# Size of linear term (stiffness) (default 0.1)
# tau : float
# Time decay modifier (default 0 - time dependence of memory fully retained)
# IC : function handle
# Initial condition (default sin(x))
# timesteps : Numpy array (t,1)
# Specific timesteps to save
# coeffs : Numpy array (2,1)
# Renormalization coefficients on t-model and t2-model terms (default [1,-0.5])
# Returns
# -------
# uSim : ODE Solution object
# Data from completed simulation
# """
# # default value of M is triple N
# if M is None:
# M = 3*N
# # generate initial condition
# x = np.linspace(0,2*np.pi-2*np.pi/(2*N),2*N)
# y = IC(x)
# uFull = fftnorm(y)
# u = uFull[0:N]
# # define RHS for integration
# # (RHS FullKdV uses "full" size with 3/2*N positive modes to dealias results)
# def myRHS(t,y):
# out = RHSt2ModelKdV(y,N,M,alpha,epsilon,t,tau,coeffs)
# return out
# # run simulation using stiff solver
# uSim = sp.integrate.solve_ivp(fun = myRHS, t_span = [0,endtime], y0 = u,method = "BDF", t_eval = timesteps)
# return uSim
# def t4ModelSim(N, M = None, endtime = 10, alpha = 1, epsilon = 0.1, tau = 0, IC = np.sin, timesteps = None, coeffs = np.array([1,-0.5,1/6,-1/24])):
# """Runs a t2-model simulation of KdV
# Parameters
# ----------
# N : int
# Number of positive modes in simulation
# M : int
# Number of positive modes in "full" model for intermediary calculations (default 3N)
# endtime : float
# Final time to run simulation to (default 10)
# alpha : float
# Degree of nonlinearity in KdV (default 1)
# epsilon : float
# Size of linear term (stiffness) (default 0.1)
# tau : float
# Time decay modifier (default 0 - time dependence of memory fully retained)
# IC : function handle
# Initial condition (default sin(x))
# timesteps : Numpy array (t,1)
# Specific timesteps to save
# coeffs : Numpy array (4,1)
# Renormalization coefficients on ROM terms (default [1,-0.5,1/6,-1/24])
# Returns
# -------
# uSim : ODE Solution object
# Data from completed simulation
# """
# # default value of M is triple N
# if M is None:
# M = 3*N
# # generate initial condition
# x = np.linspace(0,2*np.pi-2*np.pi/(2*N),2*N)
# y = IC(x)
# uFull = fftnorm(y)
# u = uFull[0:N]
# # define RHS for integration
# # (RHS FullKdV uses "full" size with 3/2*N positive modes to dealias results)
# def myRHS(t,y):
# out = RHSt4ModelKdV(y,N,M,alpha,epsilon,t,tau,coeffs)
# return out
# # run simulation using stiff solver
# uSim = sp.integrate.solve_ivp(fun = myRHS, t_span = [0,endtime], y0 = u,method = "BDF", t_eval = timesteps)
# return uSim
# def makeAnimation(u,t,N):
# """Takes a completed simulation and creates a real space animation of a subset of modes
# Parameters
# ----------
# u : Numpy array (M,t)
# Output of simulation giving energy in first M positive modes for all timesteps t
# t : Numpy array (t,1)
# Timesteps associated with the simulation
# N : int
# Number of positive modes to use in real space
# Returns
# -------
# anim : Animation object
# Real space solution animated
# """
# # generate real space solutions
# xgrid,yarray = makeRealSpace(u,N)
# # initialize figure
# myFig = plt.figure()
# ax = plt.subplot(111)
# ax.axis(xmin = 0,xmax = 2*np.pi,ymin = -2, ymax = 3)
# myLine, = ax.plot([],[], 'b')
# # define function to draw each frame
# def makeFrame(n):
# myLine.set_data(xgrid,yarray[:,n])
# plt.title('t = '+str(round(t[n],1)))
# return myLine
# # generate animation
# anim = animation.FuncAnimation(fig = myFig,func = makeFrame,frames = t.shape[0])
# return anim
# def makeAnimationFullMarkov(uFull,uMarkov,t):
# """Takes a completed full and Markov simulation and creates a real space animation comparing them
# Parameters
# ----------
# uFull : Numpy array (M,t)
# Output of full simulation giving energy in first M positive modes for all timesteps t
# uMarkov : Numpy array (M,t)
# Output of Markov ROM simulation of size N for all timesteps t
# t : Numpy array (t,1)
# Timesteps associated with the simulation
# Returns
# -------
# anim : Animation object
# Real space solution animated
# """
# N = uMarkov.shape[0]
# # generate real space solutions
# xgrid,yarrayFull = makeRealSpace(uFull,N)
# xgrid,yarrayMarkov = makeRealSpace(uMarkov,N)
# # initialize figure
# myFig = plt.figure()
# ax = plt.subplot(111)
# ax.axis(xmin = 0,xmax = 2*np.pi-np.pi/N,ymin = -2, ymax = 3)
# myFullLine, = ax.plot([],[], 'b')
# myMarkovLine, = ax.plot([],[], 'r')
# # define function to draw each frame
# def makeFrame(n):
# myFullLine.set_data(xgrid,yarrayFull[:,n])
# myMarkovLine.set_data(xgrid,yarrayMarkov[:,n])
# plt.title('t = '+str(round(t[n],1)))
# plt.legend((myFullLine,myMarkovLine),("Exact Solution","Markov Model"))
# return myFullLine,myMarkovLine
# # generate animation
# anim = animation.FuncAnimation(fig = myFig,func = makeFrame,frames = t.shape[0])
# return anim
# def makeAnimationtModel(uFull,uMarkov,utModel,t):
# """Takes a completed full, Markov, and t-model simulation and creates a real space animation comparing them
# Parameters
# ----------
# uFull : Numpy array (M,t)
# Output of full simulation giving energy in first M positive modes for all timesteps t
# uMarkov : Numpy array (M,t)
# Output of Markov ROM simulation of size N for all timesteps t
# utmodel : Numpy array (M,t)
# Output of t-model ROM simulation of size N for all timesteps t
# t : Numpy array (t,1)
# Timesteps associated with the simulation
# Returns
# -------
# anim : Animation object
# Real space solution animated
# """
# N = uMarkov.shape[0]
# # generate real space solutions
# xgrid,yarrayFull = makeRealSpace(uFull,N)
# xgrid,yarrayMarkov = makeRealSpace(uMarkov,N)
# xgrid,yarraytModel = makeRealSpace(utModel,N)
# # initialize figure
# myFig = plt.figure()
# ax = plt.subplot(111)
# ax.axis(xmin = 0,xmax = 2*np.pi-np.pi/N,ymin = -2, ymax = 3)
# myFullLine, = ax.plot([],[], 'b')
# myMarkovLine, = ax.plot([],[], 'r')
# mytModelLine, = ax.plot([],[], 'g')
# # define function to draw each frame
# def makeFrame(n):
# myFullLine.set_data(xgrid,yarrayFull[:,n])
# myMarkovLine.set_data(xgrid,yarrayMarkov[:,n])
# mytModelLine.set_data(xgrid,yarraytModel[:,n])
# plt.title('t = '+str(round(t[n],1)))
# plt.legend((myFullLine,myMarkovLine,mytModelLine),("Exact Solution","Markov Model","t-Model"))
# return myFullLine,myMarkovLine
# # generate animation
# anim = animation.FuncAnimation(fig = myFig,func = makeFrame,frames = t.shape[0])
# return anim
# def makeAnimationROMS(uFull,uMarkov,utModel,ut2Model,ut4Model,t):
# """
# """
# N = uMarkov.shape[0]
# # generate real space solutions
# xgrid,yarrayFull = makeRealSpace(uFull,N)
# xgrid,yarrayMarkov = makeRealSpace(uMarkov,N)
# xgrid,yarraytModel = makeRealSpace(utModel,N)
# xgrid,yarrayt2Model = makeRealSpace(ut2Model,N)
# xgrid,yarrayt4Model = makeRealSpace(ut4Model,N)
# # initialize figure
# myFig = plt.figure()
# ax = plt.subplot(111)
# ax.axis(xmin = 0,xmax = 2*np.pi-np.pi/N,ymin = -2, ymax = 3)
# myFullLine, = ax.plot([],[], 'blue')
# myMarkovLine, = ax.plot([],[], 'red')
# mytModelLine, = ax.plot([],[], 'green')
# myt2ModelLine, = ax.plot([],[], 'purple')
# myt4ModelLine, = ax.plot([],[], 'black')
# # define function to draw each frame
# def makeFrame(n):
# myFullLine.set_data(xgrid,yarrayFull[:,n])
# myMarkovLine.set_data(xgrid,yarrayMarkov[:,n])
# mytModelLine.set_data(xgrid,yarraytModel[:,n])
# myt2ModelLine.set_data(xgrid,yarrayt2Model[:,n])
# myt4ModelLine.set_data(xgrid,yarrayt4Model[:,n])
# plt.title('t = '+str(round(t[n],1)))
# plt.legend((myFullLine,myMarkovLine,mytModelLine,myt2ModelLine,myt4ModelLine),("Exact Solution","Markov Model","t-Model","2nd Order CMA","4th Order CMA"))
# return myFullLine,myMarkovLine
# # generate animation
# anim = animation.FuncAnimation(fig = myFig,func = makeFrame,frames = t.shape[0])
# return anim
|
[
"jrprice@pugetsound.edu"
] |
jrprice@pugetsound.edu
|
b15cb12b89fe63134e1f752e00d8353aa705616b
|
82eff9bf5b66359786b83e9e63669bbed68135f7
|
/tools/read_yaml.py
|
d90210f36c111b41775f94942ad61003683135b0
|
[] |
no_license
|
chunshan160/requests_pytest
|
a342cd8a9c26b6ef94a35dfce8838262a22727e5
|
2d94336c475e53bd9e652ea1b9223d398b4f20ec
|
refs/heads/master
| 2023-03-14T04:31:57.244132
| 2021-02-27T07:24:47
| 2021-02-27T07:24:47
| 327,075,031
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 294
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time :2020/9/10 13:55
# @Author :春衫
# @File :read_yaml.py
import yaml
def read_yaml(file_path):
    with open(file_path, encoding="utf-8") as fs:
        data = yaml.load(fs, Loader=yaml.FullLoader)
    return data
if __name__ == '__main__':
pass
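    # Example (hypothetical path, for illustration only):
    #   data = read_yaml("config/settings.yaml")
    #   print(data)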
|
[
"1605936478@qq.com"
] |
1605936478@qq.com
|
31a4117bf7d5808eb49856299756dd6f40979207
|
a652341f459d51821135c898dbed74f235522d9b
|
/rc_hr_payroll/models/account_move.py
|
c4c3f0f4824c5523d9bf655d2946c887d5486aec
|
[] |
no_license
|
cokotracy/Rack-Centre-Limited
|
4faacb786b7970ea6aae14784f1b8f2aa41ac089
|
d99ad07828c67165e6182def5c5288fc70c964ec
|
refs/heads/main
| 2023-08-18T23:58:18.035063
| 2021-10-08T09:19:00
| 2021-10-08T09:19:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 555
|
py
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api
class AccountJournal(models.Model):
_inherit = 'account.move'
state = fields.Selection(selection=[
('draft', 'Draft'),
('submit', 'Submitted'),
('approve', 'Approved'),
('posted', 'Posted'),
('cancel', 'Cancelled'),
], string='Status', required=True, readonly=True, copy=False, tracking=True, default='draft')
def action_submit(self):
self.state = "submit"
def action_approve(self):
self.state = "approve"
|
[
"padinality@yahoo.com"
] |
padinality@yahoo.com
|
d1d4d820a0192a57ce3f08ff8119ef3b48143c2f
|
8b57fa887dd292ec65bd34de881d4f4cf6d88b61
|
/PY/longestword01.py
|
9d37a8172ec456931af005e4ee7f3c03275ac235
|
[] |
no_license
|
ggerod/Code
|
f83b8d4f15f93d61f20c467bb9ed686ffd61805f
|
61108017191bce3e3ed93d954ec795fac3666cba
|
refs/heads/master
| 2020-06-27T10:47:05.925558
| 2019-12-12T21:43:25
| 2019-12-12T21:43:25
| 199,931,500
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 544
|
py
|
#!/usr/local/bin/python3
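# Scan the system word list and report the longest word that contains no
# repeated letters, printing each new record as it is found.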
filename = "/usr/share/dict/words"
file = open(filename, "r")
longestword=''
longestwordlen=0
for line in file:
word=(line.strip()).casefold()
if (len(word) <= longestwordlen):
continue
    norepeat = True
    for char in word:
        if (word.count(char) > 1):
            norepeat = False
            break
if (norepeat):
longestword=word
longestwordlen = len(word)
print("new longest word =",word)
print("The longest word with no repeated letters in ",filename, " is: ",longestword)
|
[
"glen_gerod@comcast.com"
] |
glen_gerod@comcast.com
|
a6b1ec75c16eda1006d8b4afae13de3fc33f9b90
|
e378f1b2d57000d87ee58a981642192fe8101b1c
|
/tests/test_functional.py
|
9e59336a9639e1962bf1c3396c39d621050b2f9a
|
[
"MIT"
] |
permissive
|
gjo/pyramid_services_autowire
|
267fbed3425d89687c8c9de0c7cc85252f17be5d
|
11eeb7afe1c207c077f9440a3b0b4ceacb6fbfef
|
refs/heads/master
| 2020-03-19T15:54:45.663611
| 2018-06-09T07:50:03
| 2018-06-09T07:50:03
| 136,691,404
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,145
|
py
|
# -*- coding: utf-8 -*-
import unittest
from pyramid.config import Configurator
from pyramid.request import Request
from zope.interface import Interface, implementer
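# Functional tests for pyramid_services_autowire: each case assembles a small
# Pyramid app, registers services plus an autowired component, and checks that
# Autowire descriptors resolve their dependencies by interface, by traversal
# context, by service name, or by a name taken from an instance property.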
class FunctionalTestCase(unittest.TestCase):
def test_does_not_wired(self):
from pyramid_services_autowire import Autowire, DoesNotWired
class IDepSvc(Interface):
pass
class Svc(object):
dep_svc = Autowire(IDepSvc)
self.assertIsInstance(Svc.dep_svc, Autowire)
svc = Svc()
self.assertRaises(DoesNotWired, getattr, svc, 'dep_svc')
def test_by_iface(self):
from pyramid_services_autowire import Autowire
class ISvc(Interface):
pass
class IDepSvc(Interface):
pass
class Svc(object):
dep_svc = Autowire(IDepSvc)
config = Configurator()
config.include('pyramid_services_autowire')
dep_svc = object()
config.register_autowire(Svc, ISvc)
config.register_service(dep_svc, IDepSvc)
def aview(request):
svc = request.find_service(ISvc)
self.assertIsInstance(svc, Svc)
self.assertIs(svc.dep_svc, dep_svc)
return request.response
config.add_route('root', pattern='/')
config.add_view(aview, route_name='root')
app = config.make_wsgi_app()
resp = Request.blank('/').get_response(app)
self.assertEqual(resp.status_code, 200)
def test_by_contextual(self):
from pyramid_services_autowire import Autowire
class IRoot(Interface):
pass
class ISome(Interface):
pass
@implementer(ISome)
class Some(object):
def __init__(self, parent, name):
self.__parent__ = parent
self.__name__ = name
@implementer(IRoot)
class Root(dict):
def __init__(self, request):
super(Root, self).__init__()
self['some'] = Some(self, 'some')
class ISvc(Interface):
pass
class IDepSvc(Interface):
pass
class Svc(object):
dep_svc = Autowire(IDepSvc, is_contextual=True)
config = Configurator(root_factory=Root)
config.include('pyramid_services_autowire')
dep_svc_root = object()
dep_svc_some = object()
config.register_autowire(Svc, ISvc)
config.register_service(dep_svc_root, IDepSvc, context=IRoot)
config.register_service(dep_svc_some, IDepSvc, context=ISome)
def root_view(request):
svc = request.find_service(ISvc)
self.assertIsInstance(svc, Svc)
self.assertIs(svc.dep_svc, dep_svc_root)
return request.response
def some_view(request):
svc = request.find_service(ISvc)
self.assertIsInstance(svc, Svc)
self.assertIs(svc.dep_svc, dep_svc_some)
return request.response
config.add_view(root_view, context=IRoot)
config.add_view(some_view, context=ISome)
app = config.make_wsgi_app()
resp = Request.blank('/').get_response(app)
self.assertEqual(resp.status_code, 200)
resp = Request.blank('/some/').get_response(app)
self.assertEqual(resp.status_code, 200)
def test_by_name(self):
from pyramid_services_autowire import Autowire
class ISvc(Interface):
pass
class Svc(object):
dep_svc = Autowire(name='foo')
config = Configurator()
config.include('pyramid_services_autowire')
dep_svc = object()
config.register_autowire(Svc, ISvc)
config.register_service(dep_svc, name='foo')
def aview(request):
svc = request.find_service(ISvc)
self.assertIsInstance(svc, Svc)
self.assertIs(svc.dep_svc, dep_svc)
return request.response
config.add_route('root', pattern='/')
config.add_view(aview, route_name='root')
app = config.make_wsgi_app()
resp = Request.blank('/').get_response(app)
self.assertEqual(resp.status_code, 200)
def test_by_name_property(self):
from pyramid_services_autowire import Autowire
class ISvc(Interface):
pass
class Svc(object):
dep_svc = Autowire(name_property='foo')
def __init__(self, foo=None):
self.foo = foo
config = Configurator()
config.include('pyramid_services_autowire')
dep_svc = object()
config.register_autowire(Svc, ISvc, foo='foo')
config.register_service(dep_svc, name='foo')
def aview(request):
svc = request.find_service(ISvc)
self.assertIsInstance(svc, Svc)
self.assertIs(svc.dep_svc, dep_svc)
return request.response
config.add_route('root', pattern='/')
config.add_view(aview, route_name='root')
app = config.make_wsgi_app()
resp = Request.blank('/').get_response(app)
self.assertEqual(resp.status_code, 200)
|
[
"gjo.ext@gmail.com"
] |
gjo.ext@gmail.com
|
7d60b4ea277ff718ac71e522b814eb5897edeeef
|
265eb14569da3c97ff7d7f4ba8b73a39e41bf120
|
/atv.py
|
02c274466046a37323a7316674b7ccffd3a12e79
|
[] |
no_license
|
skandamurthy/vehicle
|
1108d2330c930864325d8b477b2279340cc61cbb
|
be59e69a95e5caca7ed2e972f1a21d443e5a399c
|
refs/heads/master
| 2020-03-24T10:54:26.343589
| 2018-08-27T20:27:49
| 2018-08-27T20:27:49
| 142,670,839
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 478
|
py
|
from bike import Bike
from car import Car
class Atv(Bike,Car):
fuel_used ='Petrol'
def __init__(self,fuel_used,handle,is_gear,ac,steering_wheel,seat_belt,audio_player,no_of_wheels,speed,weight,milage,colour):
self.fuel_used =fuel_used
Bike.__init__(self,handle,is_gear,no_of_wheels,speed,weight,milage,colour)
Car.__init__(self,ac,steering_wheel,seat_belt,audio_player,no_of_wheels,speed,weight,milage,colour)
    def features(self):
        print("It's a hybrid of a Car and a Bike")
|
[
"skandasneha@gmail.com"
] |
skandasneha@gmail.com
|
6350e7fa49e2ce61ba60225c83d5827aa6da7a6f
|
7703a8b5ebdcdf38ce296d84899392213c741236
|
/ush/hafs/ww3.py
|
ad9e7da05a45678e0037d4f5b80de4811ef8b363
|
[] |
no_license
|
hafs-community/HAFS
|
b1add5a86b18e00e6a13830dfaa770a3560fd8ab
|
cba6b3649eb7a25bb8be392db1901f47d3287c93
|
refs/heads/develop
| 2023-08-16T21:25:31.668385
| 2023-07-28T20:24:05
| 2023-07-28T20:24:05
| 176,363,600
| 22
| 48
| null | 2023-08-31T22:06:20
| 2019-03-18T20:23:54
|
Fortran
|
UTF-8
|
Python
| false
| false
| 44,649
|
py
|
#! /usr/bin/env python3
"""This module handles WW3 related scripts for HAFS system."""
__all__ = ['WW3Init', 'WW3Post']
import os, sys, re
import produtil.datastore, produtil.fileop, produtil.cd, produtil.run, produtil.log
import produtil.dbnalert
import tcutil.numerics
import hafs.hafstask, hafs.exceptions
import hafs.namelist, hafs.input
import hafs.launcher, hafs.config
from produtil.datastore import FileProduct, RUNNING, COMPLETED, FAILED, UpstreamFile
from produtil.fileop import make_symlink, deliver_file, wait_for_files
from produtil.dbnalert import DBNAlert
from produtil.cd import NamedDir, TempDir
from produtil.run import mpi, mpirun, run, runstr, checkrun, exe, bigexe, alias
from tcutil.numerics import to_datetime, to_datetime_rel, to_fraction, to_timedelta
from hafs.exceptions import WW3InputError
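# Maps each init product name to a pair of paths: where the file is written in
# the working directory and where deliver_products() places it under
# {intercom}/ww3 for downstream tasks.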
prodnames={
'mod_def': ( './mod_def.ww3', '{intercom}/ww3/mod_def.ww3' ),
'ww3_mesh': ( './ww3_mesh.nc', '{intercom}/ww3/ww3_mesh.nc' ),
'wind': ( './wind.ww3', '{intercom}/ww3/wind.ww3' ),
'current': ( './current.ww3', '{intercom}/ww3/current.ww3' ),
'restart': ( './restart.ww3', '{intercom}/ww3/restart_init.ww3' ) }
# 'ww3_shel': ( './ww3_shel.inp', '{intercom}/ww3/ww3_shel.inp' ) }
########################################################################
class WW3Init(hafs.hafstask.HAFSTask):
def __init__(self,dstore,conf,section,taskname=None,fcstlen=126,
outstep=21600, pntstep=21600, rststep=21600, **kwargs):
"""Creates a WW3Init
dstore - the produtil.datastore.Datastore to use
conf - the HAFSConfig to use
section - the section name for this task
taskname - the task name. Default: section
fcstlen - the forecast length in hours
outstep - the output step in seconds
pntstep - the pntout step in seconds
rststep - the restart output step in seconds
Other keyword arguments are passed to the superclass constructor."""
super(WW3Init,self).__init__(dstore,conf,section,taskname=taskname,**kwargs)
self._make_products()
self.fcstlen=float(fcstlen)
self.outstep=int(outstep)
self.pntstep=int(pntstep)
self.rststep=int(rststep)
def _make_products(self):
"""Creates FileProduct objects for all output files. The
outdir is the directory to which the WW3 package output its
final files."""
self._products=dict()
atime=tcutil.numerics.to_datetime(self.conf.cycle)
ww3_bdy=self.confstr('ww3_bdy','no')
if ww3_bdy == 'yes':
prodnames['nest']=( './nest.ww3', '{intercom}/ww3/nest.ww3' )
with self.dstore.transaction():
for prodname,filepaths in prodnames.items():
(localpath,compath)=filepaths
prod=produtil.datastore.FileProduct(
self.dstore,prodname,self.taskname)
prod.location=self.timestr(compath,atime,atime)
prod['localpath'] = localpath
self._products[prodname]=( prod,localpath )
def products(self,name=None,**kwargs):
"""Iterate over all products."""
for prodname,stuff in self._products.items():
(prod,localpath)=stuff
if name is None or name==prod.prodname:
yield prod
def inputiter(self):
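        # Yields one dict per required input item so the workflow's input system
        # can stage the data: GFS GRIB2 files at each input_step through the
        # forecast, plus (optionally) the prior-cycle gfswave boundary-spectra
        # tar and the gdaswave global restart file when ww3_bdy / ww3_rst are on.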
atime=to_datetime(self.conf.cycle)
etime=to_datetime_rel(3600*self.fcstlen,atime)
interval=to_fraction(self.confint('input_step',6*3600))
dataset=self.confstr('gfs_dataset','gfs')
item=self.confstr('gfs_item','gfs')
epsilon=to_timedelta(interval/10)
ende=to_datetime_rel(epsilon,etime)
when=atime
while when<ende:
yield dict(self.taskvars,dataset=dataset,item=item,ftime=when,atime=atime)
when=to_datetime_rel(interval,when)
ww3_bdy=self.confstr('ww3_bdy','no')
if ww3_bdy == 'yes':
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
dataset=self.confstr('gfswave_dataset','gfswave')
item=self.confstr('ww3bdy_item','ww3bdy_ibp')
when=wtime
yield dict(self.taskvars,dataset=dataset,item=item,ftime=when,atime=when,optional=True)
ww3_rst=self.confstr('ww3_rst','no')
print('ww3_rst=%s'%(ww3_rst))
if ww3_rst == 'yes' or ww3_rst == 'always':
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
dataset=self.confstr('gdaswave_dataset','gdaswave')
item=self.confstr('ww3rst_item','ww3rst_gnh_10m')
when=wtime
yield dict(self.taskvars,dataset=dataset,item=item,ftime=when,atime=when,optional=True)
def gfsgrib2iter(self):
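        # Yields the path of each GFS GRIB2 file needed to build the WW3 wind
        # forcing, waiting (up to max_grib_wait seconds) for every file to exist
        # and reach the configured minimum size/age before handing it back.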
logger=self.log()
atime=to_datetime(self.conf.cycle) # sim start time
etime=to_datetime_rel(self.fcstlen*3600,atime) # sim end time
interval=to_fraction(self.confint('input_step',6*3600))
dataset=self.confstr('gfs_dataset','gfs')
item=self.confstr('gfs_item','gfs')
hd=self.confstr('catalog','hafsdata')
dc=hafs.input.DataCatalog(self.conf,hd,atime)
epsilon=to_timedelta(interval/10)
ende=to_datetime_rel(epsilon,etime)
when=atime
fhour=0
maxwait=self.confint('max_grib_wait',900)
sleeptime=self.confint('grib_sleep_time',20)
min_size=self.confint('min_grib_size',1)
min_mtime_age=self.confint('min_grib_age',30)
while when<ende:
thefile=dc.locate(dataset=dataset,item=item,ftime=when,atime=atime,**self.taskvars)
waited=wait_for_files(
[thefile],logger,maxwait=maxwait,sleeptime=sleeptime,
min_size=min_size,min_mtime_age=min_mtime_age)
if not waited:
msg='FATAL ERROR: %s: did not exist or was too small after %d seconds'%(
thefile,maxwait)
self.log().error(msg)
raise hafs.exceptions.WW3InputError(msg)
sys.exit(2)
yield thefile
fhour=fhour+interval/3600
when=to_datetime_rel(interval,when)
def deliver_products(self):
logger=self.log()
for prodname,stuff in self._products.items():
(prod,localpath)=stuff
prod.deliver(frominfo=localpath,keep=True,logger=logger)
def run(self):
"""Runs the WW3 initialization"""
logger=self.log()
dummycurr=True
usegfswind=self.confstr('usegfswind','yes')
if usegfswind == 'yes':
dummywind=False
elif usegfswind == 'no':
dummywind=True
else:
# Wrong usegfswind value
logger.warning('Wrong usegfswind value: %s. Assume usegfswind=yes.'
'Set dummywind to False.'%(usegfswind,))
usegfswind='yes'
dummywind=False
ww3_bdy=self.confstr('ww3_bdy','no')
ww3_rst=self.confstr('ww3_rst','no')
try:
self.state=RUNNING
redirect=self.confbool('redirect',True)
with NamedDir(self.workdir,keep=not self.scrub,logger=logger,rm_first=True) as d:
# Run ww3_grid
def link(s,t):
make_symlink(s,t,force=True,logger=logger)
deliver_file(self.icstr('{grid_inp}'),'ww3_grid.inp',keep=True,logger=logger)
link(self.icstr('{grid_bot}'),'.')
if ww3_bdy == 'yes':
link(self.icstr('{grid_msk2}'),self.icstr('./ww3_grid_{vit[basin1lc]}.msk'))
else:
link(self.icstr('{grid_msk}'),'.')
link(self.icstr('{grid_obr}'),'.')
link(self.getexe('ww3_grid'),'ww3_grid')
#checkrun(exe(self.getexe('ww3_grid'))>='ww3_grid.log',logger=logger)
cmd=exe('./ww3_grid')
if redirect: cmd = cmd>='ww3_grid.log'
checkrun(cmd,logger=logger)
# Copy over the pre-generated grid mesh file
deliver_file(self.icstr('{grid_mesh}'),'./ww3_mesh.nc')
if usegfswind == 'yes':
# Extract gfs wind from gfs grib2 data
ncfile='gfs.uvgrd10m.nc'
produtil.fileop.remove_file(ncfile,logger=logger)
cmd=alias(bigexe(self.getexe('wgrib2','wgrib2')))
for f in self.gfsgrib2iter():
logger.info('Extracting wind at 10 m from %s'%(f))
subset=''
for line in runstr(cmd[f],logger=logger).splitlines(True):
if re.search(':[UV]GRD:10 m above ground:',line):
subset+=line
runme=cmd[f,'-i', '-append', '-netcdf', ncfile] << subset
checkrun(runme, logger=logger)
if produtil.fileop.isnonempty(ncfile):
dummywind=False
else:
dummywind=True
produtil.log.jlogger.warning(
'ww3init: will use dummy wind because %s is missing '
'or empty.'%(ncfile,))
if dummywind:
# Run ww3_prep for dummy wind
deliver_file(self.icstr('{wind_inp}'),'ww3_prep.inp',keep=True,logger=logger)
link(self.getexe('ww3_prep'),'ww3_prep')
#checkrun(exe(self.getexe('ww3_prep'))>='ww3_prep_wind.log',logger=logger)
cmd=exe('./ww3_prep')
if redirect: cmd = cmd>='ww3_prep_wind.log'
checkrun(cmd,logger=logger)
else:
# Run ww3_prnc for prep gfs wind
deliver_file(self.icstr('{prnc_inp_gfswind}'),'ww3_prnc.inp',keep=True,logger=logger)
link(self.getexe('ww3_prnc'),'ww3_prnc')
cmd=exe('./ww3_prnc')
if redirect: cmd = cmd>='ww3_prnc_wind.log'
checkrun(cmd,logger=logger)
if dummycurr:
# Run ww3_prep for dummy current
deliver_file(self.icstr('{curr_inp}'),'ww3_prep.inp',keep=True,logger=logger)
# Prepare the curdummy.dat
with open('./curdummy.dat','w') as of:
for x in range(6):
of.write('0. 0. 0.\n')
link(self.getexe('ww3_prep'),'ww3_prep')
#checkrun(exe(self.getexe('ww3_prep'))>='ww3_prep_curr.log')
cmd=exe('./ww3_prep')
if redirect: cmd = cmd>='ww3_prep_curr.log'
checkrun(cmd,logger=logger)
else:
# Extract current from global ocean model
logger.error('Not implemented yet')
have_restart=False
if os.environ.get('ww3_force_cold_start','no').lower() == 'yes':
logger.warning('ww3_force_cold_start is yes and will generate restart.ww3.')
else:
oldrst='(unknown)'
oldconffile=self.icstr('{oldcom}/{old_out_prefix}.{RUN}.conf')
if produtil.fileop.isnonempty(oldconffile):
logger.info('%s: prior cycle exists.'%(oldconffile,))
oldconf=hafs.launcher.HAFSLauncher()
oldconf.read(oldconffile)
oldrst=self.icstr('{oldcom}/{old_out_prefix}.{RUN}.ww3.restart.f006')
if not oldconf.getbool('config','run_wave'):
logger.info('restart.ww3: will generate restart.ww3 because prior cycle did not run wave.')
elif not oldconf.getstr('config','wave_model').lower() == 'ww3':
logger.info('restart.ww3: will generate restart.ww3 because prior cycle did not run WW3.')
elif produtil.fileop.isnonempty(oldrst):
produtil.fileop.deliver_file(oldrst,'restart.ww3',logger=logger)
have_restart=True
logger.info('%s: warm start from prior cycle 6-h output restart file.'%(oldrst,))
else:
logger.critical('FATAL ERROR: exiting because piror cycle %s is missing or empty.'%(oldrst,))
logger.critical('FATAL ERROR: if desired, set force_cold_start or ww3_force_cold_start = yes can bypass this failure.')
sys.exit(2)
else:
logger.info('restart.ww3: will generate restart.ww3 because prior cycle does not exist.')
if (not have_restart and ww3_rst == 'yes') or ww3_rst == 'always':
try:
with NamedDir('ww3gint',keep=True,logger=logger) as nameddir:
logger.info('ww3_grid: generating mod_def.ww3 for gnh_10m gridi from gdaswave')
make_symlink('../mod_def.ww3','mod_def.hafs_ww3',force=True,logger=logger)
make_symlink(self.getexe('ww3_grid'),'ww3_grid',force=True,logger=logger)
deliver_file(self.icstr('{grid_gnh_10m_inp}'),'ww3_grid.inp',keep=True,logger=logger)
cmd=exe('./ww3_grid')
if redirect: cmd = cmd>='ww3_grid.log'
checkrun(cmd,logger=logger)
deliver_file('./mod_def.ww3','./mod_def.gnh_10m',keep=False,logger=logger)
logger.info('ww3_gint: generating restart.ww3 by using ww3_gint with restart files from gdaswave')
make_symlink(self.getexe('ww3_gint'),'ww3_gint',force=True,logger=logger)
#Get restart.gnh_10m
self.get_ww3rst_inputs()
#Prepare the namelist
self.make_gint_inp(logger)
#run ww3_gint
cmd=exe('./ww3_gint')
if redirect: cmd = cmd>='ww3_gint.log'
checkrun(cmd,logger=logger)
deliver_file('./restart.hafs_ww3','../restart.ww3',keep=False,logger=logger)
if produtil.fileop.isnonempty('restart.ww3'):
have_restart=True
except Exception as ee:
produtil.log.jlogger.warning(
'restart.ww3: will generate dummy because ww3_gint '
'did not run successfully.',exc_info=True)
if not have_restart:
logger.info('restart.ww3: generating dummy with ww3_strt')
# Run ww3_strt
deliver_file(self.icstr('{strt_inp}'),'ww3_strt.inp',keep=True,logger=logger)
link(self.getexe('ww3_strt'),'ww3_strt')
cmd=exe('./ww3_strt')
if redirect: cmd = cmd>='ww3_strt.log'
checkrun(cmd,logger=logger)
if ww3_bdy == 'yes':
try:
logger.info('ww3_bound: generating ww3 boundary condition')
self.get_ww3bdy_inputs()
# Run ww3_bound
deliver_file(self.icstr('{bound_inp}'),'ww3_bound.inp',keep=True,logger=logger)
link(self.getexe('ww3_bound'),'ww3_bound')
cmd=exe('./ww3_bound')
if redirect: cmd = cmd>='ww3_bound.log'
checkrun(cmd,logger=logger)
except Exception as ee:
self._products.pop('nest',None)
prodnames.pop('nest',None)
produtil.log.jlogger.warning(
'ww3_bound: will run without input boundary condition because ww3_bound '
'did not run successfully.',exc_info=True)
#if redirect: self._copy_log()
## Prepare ww3_shel.inp
#ni=hafs.namelist.NamelistInserter(self.conf,self.section)
#shel_inp=self.icstr('{shel_inp}')
#atime=to_datetime(self.conf.cycle) # sim start time
#etime=to_datetime_rel(self.fcstlen*3600,atime) # sim end time
#flddt=int(self.outstep)
#pntdt=int(self.pntstep)
##flddt=self.conf.getint('forecast_products','ww3_output_step',10800)
##pntdt=self.conf.getint('forecast_products','ww3_pntout_step',10800)
#if pntdt > 0:
# # Point output requested, need to provide buoy information
# buoy_inp=self.icstr('{buoy_inp}')
# with open(buoy_inp,'r') as bf:
# #Read the file content and take out the eof character in the end.
# buoyfile=bf.read()[:-1]
#elif pntdt == 0:
# # Point output no requested, no further info needed
# buoyfile='$'
#else:
# # Wrong pntdt value
# logger.warning('Wrong ww3_pntout_step value: %d. Set ww3_pntout_step = 0'%(pntdt,))
# pntdt=0
# self.pntout=0
# buoyfile='$'
#ci=self.conf.getfloat('config','cycling_interval',6)
#retime=to_datetime_rel(ci*3600*1,atime) # restart end time
#invars=dict()
#invars.update(RUN_BEG=atime.strftime('%Y%m%d %H%M%S'),
# RUN_END=etime.strftime('%Y%m%d %H%M%S'),
# FLD_BEG=atime.strftime('%Y%m%d %H%M%S'),
# FLD_END=etime.strftime('%Y%m%d %H%M%S'),
# FLD_DT=int(flddt),
# PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
# PNT_END=etime.strftime('%Y%m%d %H%M%S'),
# PNT_DT=int(pntdt),
# BUOY_FILE=buoyfile,
# RST_BEG=atime.strftime('%Y%m%d %H%M%S'),
# RST_END=retime.strftime('%Y%m%d %H%M%S'),
# RST_DT=int(self.rststep) )
#with open(shel_inp,'rt') as nf:
# with open('ww3_shel.inp','wt') as of:
# of.write(ni.parse(nf,logger=logger,source=shel_inp,
# raise_all=True,atime=self.conf.cycle,**invars))
self.deliver_products()
self.state=COMPLETED
except Exception as e:
logger.error('Unhandled exception in wave init: %s'
%(str(e),),exc_info=True)
self.state=FAILED
#self._copy_log()
raise
def _copy_log(self):
logger=self.log()
for lf in [ 'ww3_grid.log', 'ww3_prep_wind.log', 'ww3_prep_curr.log',
'ww3_strt.log', 'ww3_untarbdy.log', 'ww3_bound.log' ]:
comloc=self.icstr('{com}/{out_prefix}.{RUN}.{lf}.ww3',lf=lf)
if os.path.exists(lf):
deliver_file(lf,comloc,keep=True,logger=logger)
def get_ww3bdy_inputs(self):
"""!Obtains WW3 input boundary condition data, links or copies to ww3init dir.
WW3 input boundary data comes from previous cycle's gfswave."""
logger=self.log()
redirect=self.confbool('redirect',True)
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
ww3catalog=self.confstr('catalog','hafsdata')
ww3dc=hafs.input.DataCatalog(self.conf,ww3catalog,wtime)
dataset=self.confstr('gfswave_dataset','gfswave')
item=self.confstr('ww3bdy_item','ww3bdy_ibp')
when=wtime
for itry in range(3):
when=to_datetime_rel(-6*3600*itry,wtime)
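# e.g. for a 2020082512 cycle, itry=0,1,2 looks for gfswave data from the 06z,
# 00z and previous-day 18z cycles, stopping at the first usable file (illustrative).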
ww3bdyfile=ww3dc.locate(dataset,item,atime=when,logger=logger)
if not ww3bdyfile:
logger.info('%s: cannot decide data location for this time.'%(
when.strftime('%Y%m%d%H'),))
continue
ok=True
(L,S) = produtil.fileop.lstat_stat(ww3bdyfile)
if S is None:
logger.info('%s: does not exist'%(ww3bdyfile,))
ok=False
elif S.st_size<10000:
logger.info('%s: too small (should be >=%d bytes)'%(
ww3bdyfile,10000))
ok=False
if not ok: continue
# We get here if the ww3bdyfile exists and is big enough.
ww3bdyspectar='gfs.t'+when.strftime('%H')+'z.ibp_tar'
make_symlink(ww3bdyfile,ww3bdyspectar,force=True,logger=logger)
ww3bdyfbase=self.icstr('./gfswave.HWRF{vit[basin1lc]}*')
#cmd=exe('tar')['-zxvf', ww3bdyspectar, '--wildcards', ww3bdyfbase]
cmd=exe('tar')['-xvf', ww3bdyspectar, '--wildcards', ww3bdyfbase]
if redirect: cmd = cmd>='ww3_untarbdy.log'
checkrun(cmd,logger=logger)
return
def get_ww3rst_inputs(self):
"""!Obtains global gdaswave restart file, links or copies to ww3init dir.
WW3 input restart file comes from current cycle's gdaswave."""
logger=self.log()
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
ww3catalog=self.confstr('catalog','hafsdata')
ww3dc=hafs.input.DataCatalog(self.conf,ww3catalog,atime)
dataset=self.confstr('gdaswave_dataset','gdaswave')
item=self.confstr('ww3rst_item','ww3rst_gnh_10m')
when=wtime
ww3rstfile=ww3dc.locate(dataset,item,atime=when,logger=logger)
if not ww3rstfile:
logger.info('%s: cannot decide data location for this time.'%(
when.strftime('%Y%m%d%H'),))
ok=True
(L,S) = produtil.fileop.lstat_stat(ww3rstfile)
if S is None:
logger.info('%s: does not exist'%(ww3rstfile,))
ok=False
elif S.st_size<10000:
logger.info('%s: too small (should be >=%d bytes)'%(
ww3rstfile,10000))
ok=False
if not ok:
logger.warning('%s: ww3rst file from gdaswave not ok for this time.'%(
when.strftime('%Y%m%d%H'),))
# We get here if the ww3rstfile exists and is big enough.
make_symlink(ww3rstfile,'restart.gnh_10m',force=True,logger=logger)
return
def make_gint_inp(self,logger):
# Prepare ww3_gint.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
gint_inp=self.confstr('gint_inp','')
if not gint_inp: gint_inp=self.icstr('{PARMww3}/ww3_gint.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
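# e.g. for a 2020082512 cycle RUN_BEG becomes '20200825 120000', which the
# NamelistInserter substitutes into the ww3_gint.inp template below (illustrative).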
with open(gint_inp,'rt') as nf:
with open('ww3_gint.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=gint_inp,
raise_all=True,atime=self.conf.cycle,**invars))
########################################################################
ww3postprodnames={
'ww3outgrd': ( './out_grd.ww3', '{com}/{out_prefix}.{RUN}.out_grd.ww3' ),
'ww3grb2': ( './gribfile', '{com}/{out_prefix}.{RUN}.ww3.grb2' ),
'ww3grb2idx': ( './gribfile.idx', '{com}/{out_prefix}.{RUN}.ww3.grb2.idx' ),
'ww3ounf': ( './ww3.%Y.nc', '{com}/{out_prefix}.{RUN}.ww3_ounf.nc' ),
'ww3outpnt': ( './out_pnt.ww3', '{com}/{out_prefix}.{RUN}.out_pnt.ww3' ),
'ww3ounpspec': ( './ww3.%Y_spec.nc', '{com}/{out_prefix}.{RUN}.ww3_ounp_spec.nc' ),
'ww3outpbull': ( './ww3_bull.tar', '{com}/{out_prefix}.{RUN}.ww3_bull.tar' ),
'ww3outpcbull': ( './ww3_cbull.tar', '{com}/{out_prefix}.{RUN}.ww3_cbull.tar' ),
'ww3outpcsbull': ( './ww3_csbull.tar', '{com}/{out_prefix}.{RUN}.ww3_csbull.tar' ),
'ww3outpspec': ( './ww3_spec.tar', '{com}/{out_prefix}.{RUN}.ww3_spec.tar' ) }
class WW3Post(hafs.hafstask.HAFSTask):
"""Run WW3 post-process."""
def __init__(self,dstore,conf,section,fcstlen=126,outstep=10800,pntstep=10800,**kwargs):
super(WW3Post,self).__init__(dstore,conf,section,**kwargs)
self.fcstlen=float(fcstlen)
self.outstep=int(outstep)
self.pntstep=int(pntstep)
self._make_products()
self._ncks_path=False
def _make_products(self):
"""Creates FileProduct objects for all WW3Post output files. The
outdir is the directory to which the WW3Post package output its
final files."""
self._products=dict()
atime=tcutil.numerics.to_datetime(self.conf.cycle)
with self.dstore.transaction():
for prodname,filepaths in ww3postprodnames.items():
(localpath,compath)=filepaths
localpath=self.conf.cycle.strftime(localpath)
prod=produtil.datastore.FileProduct(
self.dstore,prodname,self.taskname)
prod.location=self.timestr(compath,atime,atime)
prod['localpath'] = localpath
self._products[prodname]=( prod,localpath )
def products(self,name=None):
"""Iterate over all products."""
for prodname,stuff in self._products.items():
(prod,localpath)=stuff
if name is None or name==prod.prodname:
yield prod
def __copy_ncks(self,source,target,ignore):
ncks=self.ncks_path
logger=self.log()
produtil.fileop.remove_file(target,logger=logger)
checkrun(bigexe(ncks)['-4','-L','6',source,target]<'/dev/null',
logger=logger)
@property
def ncks_path(self):
"""Returns the path to ncks. Returns None if ncks cannot be
found. This function will only search for ncks once, and will
cache the result. Set self._ncks_path=False to force a
recheck."""
if self._ncks_path is False:
ncks=self.getexe('ncks','')
if not ncks:
ncks=produtil.fileop.find_exe('ncks',raise_missing=False)
assert(ncks is None or
(isinstance(ncks,str) and ncks!=''))
self._ncks_path=ncks
return self._ncks_path
def run(self):
"""Run the WW3 post."""
logger=self.log()
redirect=self.confbool('redirect',True)
self.state=RUNNING
# The line below makes a DBNAlert object, which can be reused for the later alerts.
alerter=produtil.dbnalert.DBNAlert(['MODEL','{type}','{job}','{location}'])
modelrun=self.icstr('{RUN}').upper()
try:
with NamedDir(self.workdir,keep=True,logger=logger,rm_first=True) as d:
# Prepare mod_def.ww3
ww3moddef=self.icstr('{intercom}/ww3/mod_def.ww3')
if not os.path.exists(ww3moddef):
logger.error('%s: mod_def.ww3 not yet available from forecast'%(
ww3moddef,))
deliver_file(ww3moddef,'mod_def.ww3',force=True,logger=logger)
# Prepare and deliver out_grd.ww3
if self.outstep>0:
ww3out=self.icstr('{WORKhafs}/forecast/out_grd.ww3')
if not os.path.exists(ww3out):
logger.error('%s: out_grd.ww3 not yet available from forecast'%(
ww3out,))
deliver_file(ww3out,'out_grd.ww3',force=True,logger=logger)
(prod,localpath)=self._products['ww3outgrd']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
# Prepare and deliver out_pnt.ww3
if self.pntstep>0:
ww3pnt=self.icstr('{WORKhafs}/forecast/out_pnt.ww3')
if not os.path.exists(ww3pnt):
logger.error('%s: out_pnt.ww3 not yet available from forecast'%(
ww3pnt,))
deliver_file(ww3pnt,'out_pnt.ww3',force=True,logger=logger)
(prod,localpath)=self._products['ww3outpnt']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
# For field output in grib2 format
ww3_grib_post=self.confstr('ww3_grib_post','yes',section='ww3post')
if ww3_grib_post == 'yes' and self.outstep>0:
make_symlink(self.getexe('ww3_grib'),'ww3_grib',force=True,logger=logger)
# Prepare the namelist
self.make_grib_inp(logger)
cmd=exe('./ww3_grib')
if redirect: cmd = cmd>='ww3_grib.log'
checkrun(cmd,logger=logger)
indexfile='gribfile.idx'
wgrib2=self.getexe('wgrib2')
logger.info('ww3post: Generating grib idx file for gribfile')
checkrun(bigexe(wgrib2)['-s','gribfile'] > indexfile,logger=logger)
os.system('chmod -x '+indexfile) # Remove the -x permission
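# wgrib2 -s writes one inventory line per GRIB2 record (roughly
# 'n:offset:d=YYYYMMDDHH:VAR:level:forecast'), which downstream consumers use
# for byte-range subsetting of the grib file.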
(prod,localpath)=self._products['ww3grb2']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3GB2')
(prod,localpath)=self._products['ww3grb2idx']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3GB2_WIDX')
# For point output ww3_outp
ww3_outp_bull_post=self.confstr('ww3_outp_bull_post','yes',section='ww3post')
ww3_outp_spec_post=self.confstr('ww3_outp_spec_post','yes',section='ww3post')
if self.pntstep>0:
make_symlink(self.getexe('ww3_outp'),'ww3_outp',force=True,logger=logger)
# Need to get information about the total number of buoys and their IDs
self.make_outp_info_inp(logger)
cmd=exe('./ww3_outp')
cmd = cmd>='ww3_outp_info.log'
checkrun(cmd,logger=logger)
fname='ww3_outp_info.log'
with open(fname) as f:
ww3_outp_info = f.readlines()
indices = [i for i, elem in enumerate(ww3_outp_info) if '----------' in elem]
buoys=ww3_outp_info[indices[0]+1:indices[1]-2]
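# buoys now holds the table rows ww3_outp printed between the two '----------'
# separators; a typical (illustrative) row is '  41001   -72.52   34.68  ...',
# i.e. buoy ID, longitude and latitude as the first three whitespace-separated fields.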
# For point bulletin output
if ww3_outp_bull_post == 'yes':
filebull=[]
filecbull=[]
filecsbull=[]
filelog=[]
commands=list()
for i, buoy in enumerate(buoys):
ipnt=i+1
buoyid=buoy.split()[0]
buoylon=buoy.split()[1]
buoylat=buoy.split()[2]
logger.info('ww3_outp_bull for buoy: %i, %s, %s, %s'%(ipnt,buoyid,buoylon,buoylat))
with NamedDir('ww3outpbull.%s'%(buoyid,),keep=True,logger=logger) as nameddir:
self.make_outp_bull_inp(ipnt,logger)
make_symlink('../mod_def.ww3','mod_def.ww3',force=True,logger=logger)
make_symlink('../out_pnt.ww3','out_pnt.ww3',force=True,logger=logger)
make_symlink(self.getexe('ww3_outp'),'ww3_outp',force=True,logger=logger)
buoybull=buoyid+'.bull'
buoycbull=buoyid+'.cbull'
buoycsv=buoyid+'.csv'
buoycsbull=buoyid+'.csbull'
buoylog='ww3_outp_bull_'+buoyid+'.log'
filebull.append(buoybull)
filecbull.append(buoycbull)
filecsbull.append(buoycsbull)
filelog.append(buoylog)
cmd=('cd '+nameddir.dirname+' && '+
'./ww3_outp > ../'+buoylog+' && '+
'mv '+buoybull+' ../ && '+
'mv '+buoycbull+' ../ && '+
'mv '+buoycsv+' ../'+buoycsbull+' && '+
'cd ../')
commands.append(cmd)
cmdfname='command.file.ww3outpbull'
with open(cmdfname,'wt') as cfpf:
cfpf.write('\n'.join(commands))
threads=os.environ['TOTAL_TASKS']
logger.info('ww3_outp_bull total threads: %s ',threads)
mpiserial_path=os.environ.get('MPISERIAL','*MISSING*')
if mpiserial_path=='*MISSING*':
mpiserial_path=self.getexe('mpiserial')
cmd2=mpirun(mpi(mpiserial_path)['-m',cmdfname],allranks=True)
checkrun(cmd2)
# Tar the outputs and deliver to com dir
cmd=exe('tar')['-cvf', 'ww3_bull.tar'][filebull]
checkrun(cmd,logger=logger)
cmd=exe('tar')['-cvf', 'ww3_cbull.tar'][filecbull]
checkrun(cmd,logger=logger)
cmd=exe('tar')['-cvf', 'ww3_csbull.tar'][filecsbull]
checkrun(cmd,logger=logger)
cmd=exe('cat')[filelog] >> 'ww3_outp_bull.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3outpbull']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
(prod,localpath)=self._products['ww3outpcbull']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
(prod,localpath)=self._products['ww3outpcsbull']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
# For point spec output
if ww3_outp_spec_post == 'yes':
fileout=[]
filelog=[]
commands=list()
ww3tstr=self.conf.cycle.strftime('%y%m%d%H')
for i, buoy in enumerate(buoys):
ipnt=i+1
buoyid=buoy.split()[0]
buoylon=buoy.split()[1]
buoylat=buoy.split()[2]
logger.info('ww3_outp_spec for buoy: %i, %s, %s, %s'%(ipnt,buoyid,buoylon,buoylat))
with NamedDir('ww3outpspec.%s'%(buoyid,),keep=True,logger=logger) as nameddir:
self.make_outp_spec_inp(ipnt,logger)
make_symlink('../mod_def.ww3','mod_def.ww3',force=True,logger=logger)
make_symlink('../out_pnt.ww3','out_pnt.ww3',force=True,logger=logger)
make_symlink(self.getexe('ww3_outp'),'ww3_outp',force=True,logger=logger)
buoyspc='ww3.'+ww3tstr+'.spc'
buoyout=buoyid+'.spc'
buoylog='ww3_outp_spec_'+buoyid+'.log'
fileout.append(buoyout)
filelog.append(buoylog)
cmd=('cd '+nameddir.dirname+' && '+
'./ww3_outp > ../'+buoylog+' && '+
'mv '+buoyspc+' ../'+buoyout+' && '+
'cd ../')
commands.append(cmd)
cmdfname='command.file.ww3outpspec'
with open(cmdfname,'wt') as cfpf:
cfpf.write('\n'.join(commands))
threads=os.environ['TOTAL_TASKS']
logger.info('ww3_outp_spec total threads: %s ',threads)
mpiserial_path=os.environ.get('MPISERIAL','*MISSING*')
if mpiserial_path=='*MISSING*':
mpiserial_path=self.getexe('mpiserial')
cmd2=mpirun(mpi(mpiserial_path)['-m',cmdfname],allranks=True)
checkrun(cmd2)
# Tar the outputs and deliver to com dir
cmd=exe('tar')['-cvf', 'ww3_spec.tar'][fileout]
checkrun(cmd,logger=logger)
cmd=exe('cat')[filelog] >> 'ww3_outp_spec.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3outpspec']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
# Additional ww3post products
# For field output in netcdf format
ww3_ounf_post=self.confstr('ww3_ounf_post','yes',section='ww3post')
if ww3_ounf_post == 'yes' and self.outstep>0:
make_symlink(self.getexe('ww3_ounf'),'ww3_ounf',force=True,logger=logger)
# Prepare the namelist
self.make_ounf_inp(logger)
# Run ww3_ounf
cmd=exe('./ww3_ounf')
if redirect: cmd = cmd>='ww3_ounf.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3ounf']
logger.info('Delivering ww3ounf from %s to %s'%(localpath,prod.location))
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=self.__copy_ncks)
# For point spec output in netcdf format
ww3_ounp_spec_post=self.confstr('ww3_ounp_spec_post','yes',section='ww3post')
if ww3_ounp_spec_post == 'yes' and self.pntstep>0:
make_symlink(self.getexe('ww3_ounp'),'ww3_ounp',force=True,logger=logger)
# Prepare the namelist
self.make_ounp_spec_inp(logger)
# Run ww3_ounp
cmd=exe('./ww3_ounp')
if redirect: cmd = cmd>='ww3_ounp.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3ounpspec']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=self.__copy_ncks)
self.state=COMPLETED
except Exception as e:
self.state=FAILED
logger.error("WW3 post failed: %s"%(str(e),),exc_info=True)
raise
def make_grib_inp(self,logger):
# Prepare ww3_grib.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
grib_inp=self.confstr('grib_inp','')
if not grib_inp: grib_inp=self.icstr('{PARMww3}/ww3_grib.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(FLD_BEG=atime.strftime('%Y%m%d %H%M%S'),
FLD_DT=int(self.outstep),
RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
with open(grib_inp,'rt') as nf:
with open('ww3_grib.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=grib_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_ounf_inp(self,logger):
# Prepare ww3_ounf.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
ounf_inp=self.confstr('ounf_inp','')
if not ounf_inp: ounf_inp=self.icstr('{PARMww3}/ww3_ounf.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(FLD_BEG=atime.strftime('%Y%m%d %H%M%S'),
FLD_DT=int(self.outstep))
with open(ounf_inp,'rt') as nf:
with open('ww3_ounf.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=ounf_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_ounp_spec_inp(self,logger):
# Prepare ww3_ounp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
ounp_spec_inp=self.confstr('ounp_spec_inp','')
if not ounp_spec_inp: ounp_spec_inp=self.icstr('{PARMww3}/ww3_ounp_spec.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep))
with open(ounp_spec_inp,'rt') as nf:
with open('ww3_ounp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=ounp_spec_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_outp_info_inp(self,logger):
# Prepare ww3_outp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
outp_info_inp=self.confstr('outp_info_inp','')
if not outp_info_inp: outp_info_inp=self.icstr('{PARMww3}/ww3_outp_info.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep))
with open(outp_info_inp,'rt') as nf:
with open('ww3_outp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=outp_info_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_outp_bull_inp(self,ipnt,logger):
# Prepare ww3_outp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
outp_bull_inp=self.confstr('outp_bull_inp','')
if not outp_bull_inp: outp_bull_inp=self.icstr('{PARMww3}/ww3_outp_bull.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep),
PNT_NUM=int(ipnt),
RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
with open(outp_bull_inp,'rt') as nf:
with open('ww3_outp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=outp_bull_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_outp_spec_inp(self,ipnt,logger):
# Prepare ww3_outp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
outp_spec_inp=self.confstr('outp_spec_inp','')
if not outp_spec_inp: outp_spec_inp=self.icstr('{PARMww3}/ww3_outp_spec.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep),
PNT_NUM=int(ipnt),
RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
with open(outp_spec_inp,'rt') as nf:
with open('ww3_outp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=outp_spec_inp,
raise_all=True,atime=self.conf.cycle,**invars))
|
[
"Bin.Liu@noaa.gov"
] |
Bin.Liu@noaa.gov
|
fa85f6349539cbce239586af29361b2e453947ec
|
3d8a2e25588dc4205aa5ebf02021ae055917cccd
|
/selexsubmitter.py
|
fb34291f2db38bbf19bd8d4333f1a385f7be8cbb
|
[
"MIT"
] |
permissive
|
jctoledo/abselexsubmit-freebase-writer
|
af66613f745577af9781ed1962a8c81d74b9c95c
|
1601f8b57386f69fffc2a8a4a4cf43ac256cfdb3
|
refs/heads/master
| 2021-01-23T22:11:05.694811
| 2013-10-09T15:52:44
| 2013-10-09T15:52:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 43,987
|
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Jose Cruz-Toledo
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Copyright (C) 2013 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line skeleton application for Freebase API.
Usage:
$ python selexsubmitter.py
You can also get help on all the command-line flags the program understands
by running:
$ python selexsubmitter.py --help
"""
import argparse
import pygame
import httplib2
import os
import sys
import json
import urllib
import os.path
import re
import RNA
import time
from urllib import urlencode
from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools
#a dict of target names to mids
target_dict = {}
#a reference dictionary doi, pmid, ref
reference_dict = {}
# Parser for command-line arguments.
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[tools.argparser])
parser.add_argument('-su', '--servlet_url', help='the URL of the servlet that parses the JSON files', required=True)
parser.add_argument('-if', '--input_file', help='an input JSON file', required=True)
parser.add_argument('-FW', '--write_to_freebase', help='write the topic to freebase.com not the SANDBOX', action='store_true')
# CLIENT_SECRETS is name of a file containing the OAuth 2.0 information for this
# application, including client_id and client_secret. You can see the Client ID
# and Client secret on the APIs page in the Cloud Console:
# <https://cloud.google.com/console#/project/1083819269171/apiui>
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
# Set up a Flow object to be used for authentication.
# Add one or more of the following scopes. PLEASE ONLY ADD THE SCOPES YOU
# NEED. For more information on using scopes please see
# <https://developers.google.com/+/best-practices>.
FLOW = client.flow_from_clientsecrets(CLIENT_SECRETS,
scope=[
'https://www.googleapis.com/auth/freebase',
],
message=tools.message_if_missing(CLIENT_SECRETS))
def main(argv):
# Parse the command-line flags.
flags = parser.parse_args(argv[1:])
#define some vars
service_url_write = 'https://www.googleapis.com/freebase/v1sandbox/mqlwrite'
inputFile = flags.input_file
servlet_url = flags.servlet_url
fw = flags.write_to_freebase
if fw :
service_url_write = 'https://www.googleapis.com/freebase/v1/mqlwrite'
# If the credentials don't exist or are invalid run through the native client
# flow. The Storage object will ensure that if successful the good
# credentials will get written back to the file.
storage = file.Storage('sample.dat')
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = tools.run_flow(FLOW, storage, flags)
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Construct the service object for the interacting with the Freebase API.
service = discovery.build('freebase', 'v1', http=http)
if inputFile:
ifp = os.path.dirname(inputFile)
ifn = os.path.basename(inputFile)
start = time.time()
cleanJson = getCleanJson(servlet_url, ifp ,ifn)
if cleanJson:
#now prepare a write query for the cleanJSON
se_mid = writeToFreebase(cleanJson, service_url_write, http, credentials)
print "created selex experiment topic with mid: "+se_mid["mid"]
end = time.time()
tt = end-start
print "time elapsed in seconds: "+str(tt)
def writeToFreebase(cleanJson, writeServiceUrl, anHttp, someCredentials):
#create an empty selex experiment topic and get its mid
mid_dict = createSelexExperimentTopic(writeServiceUrl, anHttp, someCredentials)
#add the reference details from this experiment
addReferenceDetails(mid_dict, cleanJson, writeServiceUrl, anHttp, someCredentials)
addSelexDetails(mid_dict, cleanJson, writeServiceUrl, anHttp, someCredentials)
addSelexConditions(mid_dict, cleanJson, writeServiceUrl, anHttp,someCredentials)
addInteractions(mid_dict["mid"], cleanJson, writeServiceUrl, anHttp, someCredentials)
return mid_dict
#Creates an empty interaction topic, returns its mid,
# and connects it to the given selex experiment mid
def createInteractionTopic(aSelexExperimentMid, aServiceUrl, anHttp, someCredentials):
q = {
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/interaction",
"b:type":"/base/aptamer/experimental_outcome",
"/base/aptamer/experimental_outcome/is_outcome_of":{
"connect":"insert",
"mid" : aSelexExperimentMid
}
}
params = makeRequestBody(someCredentials, q)
r = runWriteQuery(params, aServiceUrl, anHttp)
if r == None:
raise Exception ("Could not create interaction topic!")
sys.exit()
return r
#creates an affinity conditions topic and its corresponding binding solution
# and connects it to the given affinity experiment mid and returns a dictionary of the mids
def createAffinityConditions(anAffinityExperimentMid, aServiceUrl, anHttp, someCredentials):
rm = {}
q = {
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/affinity_conditions",
"b:type":"/base/aptamer/experimental_conditions",
"/base/aptamer/experimental_conditions/are_experimental_conditions_of":{
"connect":"insert",
"mid":anAffinityExperimentMid
}
}
params = makeRequestBody(someCredentials, q)
aff_cond_mid = runWriteQuery(params, aServiceUrl, anHttp)
if aff_cond_mid == None:
raise Exception("could not create affinity conditions!")
sys.exit()
else:
rm["mid"] = aff_cond_mid
#create a binding solution and attach it
q={
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/binding_solution",
"/base/aptamer/binding_solution/is_binding_solution_of":{
"connect":"insert",
"mid": aff_cond_mid
}
}
params = makeRequestBody(someCredentials, q)
bs_mid = runWriteQuery(params, aServiceUrl, anHttp)
rm["binding_solution"] = bs_mid
if bs_mid == None:
raise Exception("Could not create binding solution topic!")
sys.exit()
else:
return rm
#creates an aptamer topic and connects it to the passed in interaction mid.
#Uses the given aptamer type, mutational analysis and sequence
def createAptamerTopic(anInteractionMid, aType, aSequence, aServiceUrl, anHttp, someCredentials):
seq_len = len(aSequence)
at = ""
if aType.lower() == "dna":
at = "/base/aptamer/dna"
if aType.lower() == "rna":
at = "/base/aptamer/rna"
if len(at):
q = {
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/interactor",
"b:type":"/base/aptamer/aptamer",
"c:type":"/base/aptamer/linear_polymer",
"d:type":"/chemistry/chemical_compound",
"e:type":at,
"f:type":"/base/aptamer/nucleic_acid",
"/base/aptamer/interactor/is_participant_in":{
"connect":"insert",
"mid":anInteractionMid
},
"/base/aptamer/linear_polymer/sequence":{
"connect":"insert",
"value": aSequence
},
"/base/aptamer/linear_polymer/sequence_length":{
"connect":"insert",
"value":int(seq_len)
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not create aptamer topic")
sys.exit()
else:
return r
else:
raise Exception("Not a valid aptamer type was passed in")
sys.exit()
#creates an aptamer target topic and connects it to a passed in interaction mid. Uses the given name as well
def createAptamerTargetTopic(anInteractionMid, aTargetName,aTargetTypeMid,aServiceUrl,anHttp,someCredentials):
#first check if there exists a chemical compound with the given name
cc_mid = checkFreebaseForChemicalCompound(aTargetName, anHttp)
if cc_mid == None:
q = {
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/interactor",
"b:type":"/base/aptamer/aptamer_target",
"c:type" :"/chemistry/chemical_compound",
"/base/aptamer/interactor/is_participant_in":{
"connect":"insert",
"mid":anInteractionMid
},
"name":{
"connect":"insert",
"value" : str(aTargetName),
"lang":"/lang/en"
},
"/base/aptamer/aptamer_target/has_type":{
"connect":"insert",
"mid":aTargetTypeMid
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not create aptamer target topic")
sys.exit()
else:
return r
else:
q = {
"mid" : cc_mid,
"type":{
"connect":"insert",
"id" : "/base/aptamer/interactor"
},
"b:type":{
"connect":"insert",
"id":"/base/aptamer/aptamer_target"
},
"/base/aptamer/aptamer_target/has_type":{
"connect":"insert",
"mid":aTargetTypeMid
},
"/base/aptamer/interactor/is_participant_in":{
"connect":"insert",
"mid":anInteractionMid
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not create aptamer target topic 2")
sys.exit()
else:
return r
#This function returns the first mid of the chemical compound (/chemistry/chemical_compound) topic
#that has as name aName. If none is found None is returned
def checkFreebaseForChemicalCompound(aName, anHttp):
if aName:
q = [{
"mid":None,
"name": str(aName),
"type|=":[
"/chemistry/chemical_compound",
"/medicine/drug_brand",
"/biology/protein",
"/medicine/drug",
"/medicine/medical_treatment",
"/medicine/drug_ingredient"
]
}]
r = runReadQuery(q, anHttp)
if r == None :
raise Exception("Could not search for a chemical compound!")
sys.exit()
else:
if len(r) == 0:
return None
else:
return r[0]["mid"]
return None
#Creates an empty affinityExperiment topic and returns its mid
# attaches the created topic to the given interaction topic mid
def createAffinityExperimentTopic(anInteractionMid, aServiceUrl, anHttp, someCredentials):
q={
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/affinity_experiment",
"b:type":"/base/aptamer/experiment",
"/base/aptamer/affinity_experiment/confirms":{
"connect":"insert",
"mid":anInteractionMid
}
}
params = makeRequestBody(someCredentials, q)
afe_mid = runWriteQuery(params, aServiceUrl, anHttp)
if afe_mid == None:
raise Exception("Could not create affinity experiment!")
sys.exit()
else:
return afe_mid
#Create an empty floating point range topic
def createFloatingPointRangeTopic(aKdMid, aServiceUrl, anHttp, someCredentials):
q = {
"create":"unconditional",
"mid":None,
"type":"/measurement_unit/floating_point_range"
}
p = makeRequestBody(someCredentials, q)
fpr_mid = runWriteQuery(p, aServiceUrl, anHttp)
if fpr_mid == None:
raise Exception("Could not create floating point range!")
sys.exit()
else:
return fpr_mid
#creates a predicted secondary structure topic
# adds the given dbn and mfe
# assumes program used was RNAfold
def createPredictedSecondaryStructureTopic(apt_mid, dbn, mfe, aServiceUrl, anHttp, someCredentials):
q = {
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/predicted_secondary_structure",
"/base/aptamer/predicted_secondary_structure/software_used":{
"connect":"insert",
"mid":"/m/0gkkmsx"
},
"/base/aptamer/predicted_secondary_structure/dot_bracket_notation":{
"connect":"insert",
"value":str(dbn),
"lang":"/lang/en"
},
"/base/aptamer/predicted_secondary_structure/minimum_free_energy":{
"connect":"insert",
"value":float(mfe)
},
"/base/aptamer/predicted_secondary_structure/is_predicted_secondary_structure_of":{
"connect":"insert",
"mid":apt_mid
}
}
p = makeRequestBody(someCredentials, q)
pss_mid = runWriteQuery(p, aServiceUrl, anHttp)
if pss_mid == None:
raise Exception("Could not create predicted secondary structure topic!")
sys.exit()
else:
return pss_mid
#creates an empty dissociation constant topic and returns it
# attaches it to the given affinity experiment mid
def createDissociationConstantTopic(aff_exp_mid, aServiceUrl, anHttp, someCredentials):
q = {
"create":"unconditional",
"mid":None,
"type":"/base/aptamer/dissociation_constant",
"b:type":"/base/aptamer/experimental_outcome",
"/base/aptamer/experimental_outcome/is_outcome_of":{
"connect":"insert",
"mid":aff_exp_mid
}
}
params = makeRequestBody(someCredentials, q)
kd_mid = runWriteQuery(params, aServiceUrl, anHttp)
if kd_mid == None:
raise Exception("Cannot create kd topic!")
sys.exit()
else:
return kd_mid
#Creates an empty selex experiment topic
#creates the corresponding topics:
# partitioning method, recovery methods and selex conditions
#returns a dictionary with mids for all its parts
def createSelexExperimentTopic(aServiceUrl, anHttp, someCredentials):
rm = {}
#1: create a selex experiment topic
q = {
"create" :"unconditional",
"mid" : None,
"type" : "/base/aptamer/selex_experiment",
"b:type" : "/base/aptamer/experiment",
"c:type" : "/base/aptamer/interaction_experiment",
}
params = makeRequestBody(someCredentials, q)
se_mid = runWriteQuery(params, aServiceUrl, anHttp)
if se_mid:
rm["mid"]= se_mid
#now create the partitioning and recovery methods and attach them
#to the selex experiment topic created earlier
#create a partitioning method topic
q = {
"create" :"unconditional",
"mid":None,
"type":"/base/aptamer/partitioning_method",
"/base/aptamer/partitioning_method/is_partitioning_method_of":{"connect":"insert", "mid":se_mid}
}
params = makeRequestBody(someCredentials, q)
pm_mid = runWriteQuery(params, aServiceUrl, anHttp)
rm["partitioning_method"] = pm_mid
#create a recovery method topic
q = {
"create":"unconditional", "mid":None,
"type":"/base/aptamer/recovery_method_se",
"/base/aptamer/recovery_method_se/is_recovery_method_of":{"connect":"insert", "mid":se_mid}
}
params = makeRequestBody(someCredentials, q)
rm_mid = runWriteQuery(params, aServiceUrl, anHttp)
rm["recovery_method"] = rm_mid
#create an empty selex condition topic
q = {
"create":"unconditional", "mid":None,
"type":"/base/aptamer/selex_conditions",
"b:type": "/base/aptamer/experimental_conditions",
"/base/aptamer/experimental_conditions/are_experimental_conditions_of":{"connect":"insert", "mid":se_mid}
}
params = makeRequestBody(someCredentials, q)
sc_mid = runWriteQuery(params, aServiceUrl, anHttp)
rm["selex_conditions"] = sc_mid
if sc_mid:
#create a selection solution and attach it to the selex conditions topic
q = {
"create":"unconditional", "mid":None,
"type":"/base/aptamer/selection_solution",
"/base/aptamer/selection_solution/is_selection_solution_of_sc":{"connect":"insert", "mid":sc_mid}
}
params = makeRequestBody(someCredentials, q)
ss_mid = runWriteQuery(params, aServiceUrl, anHttp)
rm["selection_solution"] = ss_mid
if not ss_mid:
raise Exception ("Could not create selection solution!")
sys.exit()
else:
raise Exception("Could not create selex conditions!")
sys.exit()
return rm
else:
raise Exception("Could not create Selex experiment topic!")
sys.exit()
return None
def makeRequestBody(someCredentials, aQuery):
p ={
'oauth_token': someCredentials.access_token,
'query': json.dumps(aQuery)
}
return p
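# Illustrative use: a MQL write such as
#   q = {"create": "unconditional", "mid": None, "type": "/base/aptamer/experiment"}
#   params = makeRequestBody(credentials, q)
# yields an oauth_token/query pair that runWriteQuery URL-encodes onto the
# mqlwrite endpoint.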
def runReadQuery(aQuery, anHttp):
s = 'https://www.googleapis.com/freebase/v1/mqlread'
q = json.dumps(aQuery)
url = s+'?query='+urllib.quote_plus(q)
resp, content = anHttp.request(url)
if resp["status"] == '200':
r = json.loads(content)
return r["result"]
else:
return None
def runWriteQuery(someParams, aServiceUrl, anHttp, firstOnly=False):
url = aServiceUrl+'?'+urllib.urlencode(someParams)
resp, content = anHttp.request(url)
if resp["status"] == '200':
#everything worked
r = json.loads(content)
if not firstOnly:
return r["result"]["mid"]
else:
if len(r["result"]) == 3:
return r["result"]["mid"]
else:
return r["result"][0]["mid"]
else:
print someParams
print resp
print content
raise Exception("Could not run query!! erno:234442")
return None
def addInteractions(aSelexExperimentMid,cleanJson, aServiceUrl, anHttp, someCredentials):
#iterate over the interactions
for ai in cleanJson["interactions"]:
#create an empty interaction topic
int_mid = createInteractionTopic(aSelexExperimentMid, aServiceUrl, anHttp, someCredentials)
#now iterate over the affinity experiments in clean json
for ae in ai["affinity_experiments"]:
#create an empty affinity experiment topic
aff_mid = createAffinityExperimentTopic(int_mid, aServiceUrl, anHttp, someCredentials)
#add the reference details to the affinityExperimentTopic
addAffinityExperimentReferenceDetails(aff_mid, aServiceUrl, anHttp, someCredentials)
#create an empty kd topic
kd_mid = createDissociationConstantTopic(aff_mid, aServiceUrl, anHttp, someCredentials)
#add the value of the dissociation constant
#(add the value to temporary value as well)
try:
kd = ae["kd"]
q={
"mid":kd_mid,
"/base/aptamer/dissociation_constant/has_value":{
"connect":"insert",
"value":float(kd),
},
"/base/aptamer/dissociation_constant/has_temporary_string_value":{
"connect":"insert",
"value" :str(kd),
"lang":"/lang/en"
},
"/base/aptamer/dissociation_constant/is_dissociation_constant_of":{
"connect":"insert",
"mid":int_mid
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not create kd topic!")
sys.exit()
except KeyError:
pass
#add the range of the dissociation constant
try:
kd_range_dirty = ae["kd_range"]
#now split by " to "
kd_range = kd_range_dirty.split(" to ")
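# e.g. a reported range of '0.5 to 2.3' splits into low='0.5', high='2.3';
# anything that does not split into exactly two parts is silently skipped.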
if len(kd_range) == 2:
low = kd_range[0]
high = kd_range[1]
#create a floating_point_range topic
pfr_mid = createFloatingPointRangeTopic(kd_mid, aServiceUrl, anHttp, someCredentials)
#add the values
q = {
"mid":pfr_mid,
"/measurement_unit/floating_point_range/low_value":{
"connect":"insert",
"value":float(low)
},
"/measurement_unit/floating_point_range/high_value":{
"connect":"insert",
"value":float(high)
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not create range topic")
sys.exit()
else:
#connect the floating point range back to the kd topic
q = {
"mid":kd_mid,
"/base/aptamer/dissociation_constant/has_value_range":{
"connect":"insert",
"mid":pfr_mid
},
"/base/aptamer/dissociation_constant/has_temporary_string_value_range":{
"connect":"insert",
"value":str(kd_range),
"lang":"/lang/en"
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not connect kd to range value")
sys.exit()
except KeyError:
pass
#add the error of the dissociation constant
try:
error = ae["kd_error"]
q = {
"mid":kd_mid,
"/base/aptamer/dissociation_constant/has_error":{
"connect":"insert",
"value":float(error)
},
"/base/aptamer/dissociation_constant/temporary_error_string":{
"connect":"insert",
"value": error,
"lang": "/lang/en"
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add kd error")
sys.exit()
except KeyError:
pass
#create an affinity conditions topic
aff_cond_dict = createAffinityConditions(aff_mid, aServiceUrl, anHttp, someCredentials)
#add the affinity experiment details
#add the affinity method to the affinity experiment topic
try:
for afn in ae["affinity_methods_names"]:
#affinity method
q = {
"mid": aff_mid,
"/base/aptamer/affinity_experiment/affinity_method":{
"connect":"insert",
"name": str(afn),
"type":"/base/aptamer/affinity_method"
}
}
params = makeRequestBody(someCredentials, q)
r = runWriteQuery(params, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add affinity method "+ afn)
sys.exit()
except KeyError:
pass
#now add the affinity conditions for this experiment
#first add the buffering agent for the binding solution of the affinity conditions
try:
for aba in ae["buffering_agent_names"]:
q={
"mid": aff_cond_dict["binding_solution"],
"/base/aptamer/binding_solution/has_buffering_agent":{
"connect":"insert",
"name":str(aba),
"type":"/base/aptamer/buffering_agent"
}
}
params = makeRequestBody(someCredentials, q)
r = runWriteQuery(params, aServiceUrl, anHttp)
if r == None:
raise Exception ("Could not add buffering agent to binding solution")
sys.exit()
except KeyError:
q = {
"mid": aff_cond_dict["binding_solution"],
"/base/aptamer/binding_solution/has_buffering_agent":{
"connect":"insert",
"mid":"/m/0g5m7lm"
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add buffering agent to binding solution! errno 99")
sys.exit()
#now add the metal cation concentrations to the binding solution
try:
for amcc in ae["ae_metal_cation_concs"]:
q={
"mid":aff_cond_dict["binding_solution"],
"/base/aptamer/binding_solution/ionic_strength":{
"connect":"insert",
"value":str(amcc),
"lang": "/lang/en"
}
}
params = makeRequestBody(someCredentials, q)
r = runWriteQuery(params, aServiceUrl, anHttp)
if r == None:
raise Exception ("Could not add ionic strength to binding solution!")
sys.exit()
except KeyError:
pass
#now add the ph to the binding solution
try:
ph = ae["ph"]
q={
"mid":aff_cond_dict["binding_solution"],
"/base/aptamer/binding_solution/ph":{
"connect":"insert",
"value":float(ph)
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception ("Could not add ph")
sys.exit()
except KeyError:
pass
#now add the temperature
try:
temp = ae["temperature"]
q = {
"mid":aff_cond_dict["binding_solution"],
"/base/aptamer/binding_solution/temperature":{
"connect":"insert",
"value":float(temp)
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception ("Could not add temperature")
sys.exit()
except KeyError:
pass
#now find the aptamer target name from the input
aptamer_target_name = ai["aptamer_target"]["name"]
#ask the user to identify the type of the target
target_type_mid = promptUserForTargetType(aptamer_target_name)
#create an aptamer target topic and add the passed in name
att_mid = createAptamerTargetTopic(int_mid, aptamer_target_name, target_type_mid, aServiceUrl, anHttp, someCredentials)
#now add the aptamers to the interaction
try:
for anApt in ai["aptamers"]:
apt_mid = createAptamerTopic(int_mid, anApt["polymer_type"], anApt["sequence"], aServiceUrl, anHttp, someCredentials)
#now predict the secondary structure
fold = RNA.fold(str(anApt["sequence"]))
dbn = fold[0]
mfe = fold[1]
#create a predicted secondary structure topic
pred_ss_mid = createPredictedSecondaryStructureTopic(apt_mid, dbn, mfe, aServiceUrl, anHttp, someCredentials)
#now add the mutational analysis to the aptamer topic
try:
ma = True
if anApt["mutational_analysis"].lower() == "no":
ma = False
q={
"mid":apt_mid,
"/base/aptamer/aptamer/has_mutational_analysis":{
"connect":"insert",
"value": ma
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add mutational_analysis ")
sys.exit()
except KeyError:
pass
#now add the secondary structures
try:
for ssn in anApt["secondary_structures_names"]:
q={
"mid":apt_mid,
"/base/aptamer/nucleic_acid/secondary_structure":{
"connect":"insert",
"name": str(ssn),
"type":"/base/aptamer/nucleic_acid_secondary_structure"
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add secondary structures")
sys.exit()
except KeyError:
pass
#now add the application
try:
q = {
"mid":apt_mid,
"/base/aptamer/aptamer/application":{
"connect":"insert",
"value":str(anApt["application"]),
"lang":"/lang/en"
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add application")
sys.exit()
except KeyError:
pass
#now add the sequence pattern
try:
q = {
"mid":apt_mid,
"/base/aptamer/linear_polymer/sequence_pattern":{
"connect":"insert",
"value":str(anApt["sequence_pattern"]),
"lang":"/lang/en"
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add sequence pattern")
sys.exit()
except KeyError:
pass
#now add the collective or pairwise interaction type
int_type = "/base/aptamer/pairwise_interaction"
if len(ai["aptamers"]) > 1:
int_type = "/base/aptamer/collective_interaction"
q={
"mid":int_mid,
"type":{
"connect":"insert",
"id":int_type
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add interaction type")
sys.exit()
except KeyError:
pass
# add the following details:
# number of rounds
# template sequence
# template bias
# has template bias
# selection solution
def addSelexConditions(anMidDict, cleanJson, aServiceUrl, anHttp, someCredentials):
#add the number of rounds
try:
nor = cleanJson["se"]["selex_conditions"]["numOfSelectionRounds"]
q = {
"mid": anMidDict["selex_conditions"],
"/base/aptamer/selex_conditions/number_of_selection_rounds": {
"connect":"insert",
"value":int(nor)
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! 9984")
sys.exit()
except KeyError:
pass
#add the template sequence
try:
ts = cleanJson["se"]["selex_conditions"]["template_sequence"]
var_region_summation = computeVariableRegionSummation(ts)
if var_region_summation > 1:
q ={
"mid" : anMidDict["selex_conditions"],
"/base/aptamer/selex_conditions/has_template_sequence":{
"connect":"insert",
"value":str(ts),
},
"/base/aptamer/selex_conditions/template_variable_region_summation":{
"connect": "insert",
"value": int(var_region_summation)
}
}
else:
q = {
"mid" : anMidDict["selex_conditions"],
"/base/aptamer/selex_conditions/has_template_sequence":{
"connect":"insert",
"value":str(ts)
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! 99843234")
sys.exit()
except KeyError:
pass
#add the template bias
try:
tb = cleanJson["se"]["selex_conditions"]["template_bias"]
tb_bool = False
if tb.lower() == "yes":
tb_bool = True
q = {
"mid" : anMidDict["selex_conditions"],
"/base/aptamer/selex_conditions/has_template_bias":{
"connect":"insert",
"value": tb_bool
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! 4830943")
sys.exit()
except KeyError:
pass
#add the selection solution's ph
try:
ph = cleanJson["se"]["selex_conditions"]["ph"]
q = {
"mid":anMidDict["selection_solution"],
"/base/aptamer/selection_solution/ph":{
"connect":"insert",
"value": float(ph)
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! 4830943")
sys.exit()
except KeyError:
pass
#add the selection solution's temperature
try:
temp = cleanJson["se"]["selex_conditions"]["temperature"]
q = {
"mid":anMidDict["selection_solution"],
"/base/aptamer/selection_solution/temperature":{
"connect":"insert",
"value":float(temp)
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! 43543543")
sys.exit()
except KeyError:
pass
#add the selection solution's buffering agents
try:
ba = cleanJson["se"]["selex_conditions"]["buffering_agents"]
for aba in ba:
q = {
"mid":anMidDict["selection_solution"],
"/base/aptamer/selection_solution/has_buffering_agent":{
"connect":"insert",
"name": aba,
"type":"/base/aptamer/buffering_agent"
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! 98327492387423")
sys.exit()
except KeyError:
q = {
"mid": anMidDict["selection_solution"],
"/base/aptamer/binding_solution/has_buffering_agent":{
"connect":"insert",
"mid":"/m/0g5m7lm"
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add buffering agent to binding solution! errno 99")
sys.exit()
#add the selection solution's metal cation conc string
try:
mcc = cleanJson["se"]["selex_conditions"]["metal_cation_concentration"]
for amcc in mcc:
q = {
"mid":anMidDict["selection_solution"],
"/base/aptamer/selection_solution/ionic_strength":{
"connect":"insert",
"value":str(amcc),
"lang": "/lang/en"
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! 98327492387423")
sys.exit()
except KeyError:
pass
#add the following details:
# partitioning method
# recovery method
# selex method
def addSelexDetails(anMidDict, cleanJson, aServiceUrl, anHttp, someCredentials):
#add the selex method
try:
sm = cleanJson["se"]["selex_methods"]
for asm in sm:
q = {
"mid": anMidDict["mid"],
"/base/aptamer/selex_experiment/has_selex_method":{
"connect":"insert",
"name": str(asm),
"type":"/base/aptamer/selex_method"
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception("Could not run query! 500-3")
sys.exit()
except KeyError:
pass
#now add the partitioning method
try:
pm_names = cleanJson["se"]["partitioning_methods"]
for an in pm_names:
q = {
"mid": anMidDict["partitioning_method"],
"/base/aptamer/partitioning_method/has_separation_method":{
"connect":"insert",
"name": str(an),
"type":"/base/aptamer/separation_methods"
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception("Could not run query! 113")
sys.exit()
except KeyError:
q = {
"mid": anMidDict["partitioning_method"],
"/base/aptamer/partitioning_method/has_separation_method":{
"connect":"insert",
"mid": "/m/0g5m7lm"
}
}
p = makeRequestBody(someCredentials, q)
if runWriteQuery(p, aServiceUrl, anHttp) == None:
raise Exception ("Could not add default partitioning_method")
sys.exit()
pass
#now add the recovery methods
try:
rm_names = cleanJson["se"]["recovery_methods"]
for an in rm_names:
q ={
"mid": anMidDict["recovery_method"],
"/base/aptamer/recovery_method_se/has_recovery_method":{
"connect":"insert",
"name":an,
"type":"/base/aptamer/recovery_methods"
}
}
p = makeRequestBody(someCredentials, q)
if runWriteQuery(p, aServiceUrl, anHttp) == None:
raise Exception("Could not run query! 324")
sys.exit()
except KeyError:
q ={
"mid":anMidDict["recovery_method"],
"/base/aptamer/recovery_method_se/has_recovery_method":{
"connect":"insert",
"mid":"/m/0g5m7lm"
}
}
p = makeRequestBody(someCredentials, q)
if runWriteQuery(p, aServiceUrl, anHttp) == None:
raise Exception("Could not add default recovery method!")
sys.exit()
#deal with templates of the form 'constant - N - constant'
def computeVariableRegionSummation(aTemplateSequence):
#compute the variable region summation
pat1 = '^NO\-TEMPLATE$'
pat2 = '^[ACGTRUYKMSWBDHVNX-]+\s*-\s*(\d+)\s*-\s*[ACGTRUYKMSWBDHVNX-]+$'
pat3 = '^[ACGTRUYKMSWBDHVNX-]+\s*-\s*(\d+)\s*-\s*[ACGTRUYKMSWBDHVNX-]+\s*-\s*(\d+)\s*-\s*[ACGTRUYKMSWBDHVNX-]+\s*$'
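# Illustrative matches (assumed input formats, mirroring the patterns above):
#   'NO-TEMPLATE'                       -> -1 (no variable region)
#   'GGGAGA - 40 - TCTCCC'              -> 40
#   'GGGAGA - 20 - ACGT - 20 - TCTCCC'  -> 40 (sum of both variable regions)
#   anything else                       -> -1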
m1 = re.match(pat1, aTemplateSequence)
m2 = re.match(pat2, aTemplateSequence)
m3 = re.match(pat3, aTemplateSequence)
if m1:
return -1
elif m2:
return float(m2.group(1))
elif m3:
r = float(m3.group(1)) + float(m3.group(2))
return r
else:
return -1
#add the reference details to the anAffinityExperimentMid topic
# uses the same details as the selex experiment
def addAffinityExperimentReferenceDetails(anAffinityExperimentMid, aServiceUrl, anHttp, someCredentials):
#pmid
try:
pmid = reference_dict["pmid"]
q = {
"mid":anAffinityExperimentMid,
"/base/aptamer/experiment/pubmed_id":{
"connect":"insert",
"value":str(pmid)
}
}
p = makeRequestBody(someCredentials, q)
r = runWriteQuery(p, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not add pmid!")
sys.exit()
except KeyError:
pass
#doi
try:
doi = reference_dict["doi"]
q = {
"mid":anAffinityExperimentMid,
"/base/aptamer/experiment/digital_object_identifier":{
"connect":"insert",
"value":str(doi),
"lang":"/lang/en"
}
}
params = makeRequestBody(someCredentials, q)
r = runWriteQuery(params, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not run query! oi23442h")
sys.exit()
except KeyError:
pass
#reference
try:
reference = reference_dict["reference"]
q = {
"mid":anAffinityExperimentMid,
"/base/aptamer/experiment/has_bibliographic_reference":{
"connect":"insert",
"value":str(reference)
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception("Could not run query! #dslk33fj")
sys.exit()
except KeyError:
pass
#add the reference details to the anMid's selex experiment topic
# details to be added here are:
# pmid, doi or reference string
def addReferenceDetails(anMidDict, cleanJson, aServiceUrl, anHttp, someCredentials):
#first try the pmid
try:
pmid = cleanJson["se"]["pmid"]
reference_dict["pmid"] = pmid
q = {
"mid":anMidDict["mid"],
"/base/aptamer/experiment/pubmed_id":{
"connect":"insert",
"value":str(pmid)
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception ("Could not run query! #2433211.3")
sys.exit()
except KeyError:
pass
#now try the doi
try:
doi = cleanJson["se"]["doi"]
reference_dict["doi"] = doi
q = {
"mid":anMidDict["mid"],
"/base/aptamer/experiment/digital_object_identifier":{
"connect":"insert",
"value":str(doi),
"lang":"/lang/en"
}
}
params = makeRequestBody(someCredentials, q)
r = runWriteQuery(params, aServiceUrl, anHttp)
if r == None:
raise Exception("Could not run query! oi42h")
sys.exit()
except KeyError:
pass
#now try the reference
try:
reference = cleanJson["se"]["reference"]
reference_dict["reference"] = reference
q = {
"mid":anMidDict["mid"],
"/base/aptamer/experiment/has_bibliographic_reference":{
"connect":"insert",
"value":str(reference)
}
}
params = makeRequestBody(someCredentials, q)
if runWriteQuery(params, aServiceUrl, anHttp) == None:
raise Exception("Could not run query! #dslkfj")
sys.exit()
except KeyError:
pass
#this method prompts the user to select the correct target type
# for the passed in target name. The options are: 1. cell, 2. protein, 3. small molecule
# ask the user until they give a valid answer
# returns the Mid of the type the user picked
def promptUserForTargetType(aTargetName):
opts = "Please choose one of the following options that best describes the aptamer target : "+aTargetName+"\n"
opts += "1 : cell\n2 : protein\n3 : small molecule\n"
anMid = None
if not aTargetName in target_dict:
playsound()
x = 0
while not x:
try:
choice = int(raw_input(opts))
if choice == 1:
x =1
target_dict[aTargetName] = "/m/01cbd"
return "/m/01cbd"
elif choice == 2:
x =1
target_dict[aTargetName] = "/m/05wvs"
return "/m/05wvs"
elif choice == 3:
x= 1
target_dict[aTargetName] = "/m/043tvww"
return "/m/043tvww"
else:
print "invalid option... try again"
except ValueError, e:
print ("'%s' is not a valid integer." % e.args[0].split(": ")[1])
else:
return target_dict[aTargetName]
#This function calls the java servlet that parses the output of selexsubmit form
def getCleanJson(aServletUrl, aDirPath,aFileName):
json_raw = open(aDirPath+'/'+aFileName, 'r')
for aline in json_raw:
fchar = aline[0]
if fchar == '{':
data = json.loads(aline)
if data:
print 'processing ' + aFileName + '...'
#prepare the query
params = {
"se" : aline,
"fn" : aDirPath+'/'+aFileName
}
#now call the servlet
f = urllib.urlopen(aServletUrl, urlencode(params))
output = f.read().replace("\\\"", "")
if output:
try:
json_raw.close()
rm = json.loads(output)
return rm
except ValueError:
print "Could not get data from servlet for this file: "+aFileName
return None
else:
print "skipping file :"+aFileName
json_raw.close()
#raise Exception("Servlet found here: "+aServletUrl+" did not respond!")
return None
else:
continue
def playsound():
fn = os.getcwd()+'/b.mp3'
pygame.init()
pygame.mixer.init()
pygame.mixer.music.load(fn)
pygame.mixer.music.play()
while pygame.mixer.music.get_busy():
pygame.time.Clock().tick(10)
if __name__ == '__main__':
main(sys.argv)
|
[
"josemiguelcruztoledo@gmail.com"
] |
josemiguelcruztoledo@gmail.com
|
00a04631d405777d81b1263ac835474cf042b535
|
36118a688c058d04cc15b63c0e2e7e03a1ebf8a0
|
/custom_components/luxtronik/const.py
|
4e8579de4cb0d0539363222b592d4b455acc4a22
|
[
"MIT"
] |
permissive
|
AlmAck/luxtronik
|
79535e08d2e1a47d11faea2e87ea3950a6592565
|
c6d590390911cbee13fee30bda4e3b9f777f1310
|
refs/heads/master
| 2023-01-13T18:54:00.115713
| 2020-11-23T06:17:13
| 2020-11-23T06:17:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,823
|
py
|
"""Constants for the Luxtronik integration."""
from homeassistant.const import (
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TIMESTAMP,
ENERGY_KILO_WATT_HOUR,
PRESSURE_BAR,
TEMP_CELSIUS,
)
ATTR_PARAMETER = "parameter"
ATTR_VALUE = "value"
CONF_INVERT_STATE = "invert"
CONF_SAFE = "safe"
CONF_GROUP = "group"
CONF_PARAMETERS = "parameters"
CONF_CALCULATIONS = "calculations"
CONF_VISIBILITIES = "visibilities"
CONF_CELSIUS = "celsius"
CONF_SECONDS = "seconds"
CONF_TIMESTAMP = "timestamp"
CONF_KELVIN = "kelvin"
CONF_BAR = "bar"
CONF_PERCENT = "percent"
CONF_ENERGY = "energy"
CONF_VOLTAGE = "voltage"
CONF_HOURS = "hours"
CONF_FLOW = "flow"
ICONS = {
"celsius": "mdi:thermometer",
"seconds": "mdi:timer-sand",
"pulses": "mdi:pulse",
"ipaddress": "mdi:ip-network-outline",
"timestamp": "mdi:calendar-range",
"errorcode": "mdi:alert-circle-outline",
"kelvin": "mdi:thermometer",
"bar": "mdi:arrow-collapse-all",
"percent": "mdi:percent",
"rpm": "mdi:rotate-right",
"energy": "mdi:flash-circle",
"voltage": "mdi:flash-outline",
"hours": "mdi:clock-outline",
"flow": "mdi:chart-bell-curve",
"level": "mdi:format-list-numbered",
"count": "mdi:counter",
"version": "mdi:information-outline",
}
DEVICE_CLASSES = {
CONF_CELSIUS: DEVICE_CLASS_TEMPERATURE,
CONF_KELVIN: DEVICE_CLASS_TEMPERATURE,
CONF_BAR: DEVICE_CLASS_PRESSURE,
CONF_SECONDS: DEVICE_CLASS_TIMESTAMP,
CONF_HOURS: DEVICE_CLASS_TIMESTAMP,
CONF_TIMESTAMP: DEVICE_CLASS_TIMESTAMP,
}
UNITS = {
CONF_CELSIUS: TEMP_CELSIUS,
CONF_SECONDS: "s",
CONF_KELVIN: "K",
CONF_BAR: PRESSURE_BAR,
CONF_PERCENT: "%",
CONF_ENERGY: ENERGY_KILO_WATT_HOUR,
CONF_VOLTAGE: "V",
CONF_HOURS: "h",
CONF_FLOW: "l/min",
}
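# A minimal sketch (an assumption, not part of the integration) of how a sensor
# entity could resolve its unit, device class and icon from the maps above.
def resolve_sensor_metadata(measurement_type: str):
    """Return (unit, device_class, icon) for a measurement-type key."""
    unit = UNITS.get(measurement_type)
    device_class = DEVICE_CLASSES.get(measurement_type)
    icon = ICONS.get(measurement_type, "mdi:gauge")  # fallback icon is an assumption
    return unit, device_class, icon
# Example: resolve_sensor_metadata(CONF_CELSIUS) yields the Celsius unit,
# the temperature device class and "mdi:thermometer".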
|
[
"bouni@owee.de"
] |
bouni@owee.de
|
d003323768ea7f4519c90921a33b9eb198732852
|
69033ac834a34f10df535f102197d3af05e5ee69
|
/cmstack/codegen/tvmgen/tvm_translation.py
|
94d23726e2acc6b4c753f6d6f9920df4d7801b75
|
[
"Apache-2.0"
] |
permissive
|
he-actlab/cdstack
|
126c3699074bf6ef30f9f9246704069d27e9e614
|
38f605cfa299bf97b5875a19f9fd811a2671d56f
|
refs/heads/master
| 2023-04-10T10:42:10.199207
| 2019-10-03T02:12:49
| 2019-10-03T02:12:49
| 354,713,812
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,582
|
py
|
import tvm
import numpy as np
from hdfg import hdfgutils
from hdfg import load_store
from tvm.contrib import graph_runtime
from tvm.relay import op as _op
from hdfg.passes.flatten import flatten_graph, is_literal, is_number
from hdfg.passes.node_mapping import map_nodes
from codegen.codegen_utils import CMLANG_CAST_MAP
from tvm import relay
from tvm.relay.testing.init import Xavier
import codegen as c
from tvm.relay.testing import layers, init
from hdfg.hdfg_pb2 import Component, Program
from hdfg.visualize import *
import inspect
import json
import importlib
def benchmark_execution(mod,
params,
measure=True,
data_shape=(1, 3, 224, 224),
out_shape=(1, 1000),
dtype='float32'):
def get_tvm_output(mod, data, params, target, ctx, dtype='float32'):
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod, target, params=params)
m = graph_runtime.create(graph, lib, ctx)
# set inputs
m.set_input("data", data)
m.set_input(**params)
m.run()
out = m.get_output(0, tvm.nd.empty(out_shape, dtype))
if measure:
print("Evaluate graph runtime inference time cost...")
ftimer = m.module.time_evaluator("run", ctx, number=1, repeat=20)
            # Measure in milliseconds.
            prof_res = np.array(ftimer().results) * 1000
print("Mean inference time (std dev): %.2f ms (%.2f ms)" %
(np.mean(prof_res), np.std(prof_res)))
return out.asnumpy()
# random input
data = np.random.uniform(size=data_shape).astype(dtype)
target = "llvm"
ctx = tvm.cpu(0)
tvm_out = get_tvm_output(mod, tvm.nd.array(data.astype(dtype)), params,
target, ctx, dtype)
class TvmTranslation(object):
def __init__(self, onnx_proto, run_async=False):
self.input_proto = onnx_proto
self.output_dir, self.output_file = os.path.split(self.input_proto)
self.proto_name = self.output_file.split('.')[0]
self.program = load_store.load_program(self.input_proto)
self.graph = self.program.graph
self.templates = self.program.templates
self.components = {}
self.includes = []
self.functions = []
self.structs = []
self.signature_map = {}
self.initializer = None
self.header = []
self.exec = []
self.run_async = run_async
self.load_config()
self.tvm_passes()
self.create_tvm_graph()
self.execute_graph()
def load_config(self):
config_path = os.path.dirname(os.path.realpath(__file__)) + "/tvm_config.json"
with open(config_path, 'r') as config_file:
config_data = config_file.read()
self.translator_config = json.loads(config_data)
def execute_graph(self):
mod, params = init.create_workload(self.tvm_func)
benchmark_execution(mod, params, data_shape=(1, 3, 416, 416), out_shape=(1, 125, 14, 14))
# benchmark_execution(mod, params)
# print(f"Module: {mod}")
# #
# # for p in params.keys():
# # print(f"Key: {p}, shape: {params[p].shape}")
def tvm_passes(self):
self.tvm_graph = Component(name="tvm_graph_" + str(self.proto_name))
edge_node_ids = {'edges': {},
'nodes': {}}
self.tvm_graph.statement_graphs.extend([])
map_nodes(self.graph, self.templates, [], self.translator_config)
#
flatten_graph(self.tvm_graph, self.graph, self.templates, '', edge_node_ids, {})
flattened_graph_attr = hdfgutils.make_attribute('flattened', self.tvm_graph)
self.program.attributes['flattened_graph'].CopyFrom(flattened_graph_attr)
def create_tvm_graph(self):
self.graph_variables = {}
output_id = None
assert len(self.tvm_graph.input) == 1
input_name = self.tvm_graph.input[0]
input_shape = self.get_arg_attribute("shape", input_name)
self.graph_variables[input_name] = self.get_func('tvm.relay.var')(input_name, shape=input_shape)
for n in self.tvm_graph.sub_graph:
op_cat = hdfgutils.get_attribute_value(n.attributes['op_cat'])
if op_cat == 'mapped_node':
op_context = str(n.name).rsplit("/", 1)
if len(op_context) > 1 and op_context[0] != 'main':
scope = op_context[0] + '/'
else:
scope = ''
op_config = self.translator_config['ops'][n.op_type]
op_func = self.get_func(op_config['op_name'])
args, kwargs, output_id = self.create_op_args(n.op_type, n, self.templates[n.op_type], scope)
if len(output_id) == 1:
self.graph_variables[output_id[0]] = op_func(*args, **kwargs)
if output_id[0] in list(self.tvm_graph.edge_info):
iedge = self.tvm_graph.edge_info[output_id[0]]
if iedge.name != output_id[0]:
self.graph_variables[str(iedge.name)] = self.graph_variables[output_id[0]]
else:
temp = op_func(*args, **kwargs)
if not hasattr(temp, '__len__'):
logging.error(f"Size mismatch between output of {n.op_type} which has length 1 output"
f"Supplied config outputs: {output_id}")
exit(1)
elif len(temp) != len(output_id):
logging.error(f"Size mismatch between output of {n.op_type} which has length {len(temp)} output"
f"Supplied config outputs: {output_id}")
exit(1)
for i in range(len(temp)):
self.graph_variables[output_id[i]] = temp[i]
if output_id[i] in list(self.tvm_graph.edge_info):
iedge = self.tvm_graph.edge_info[output_id[i]]
if iedge.name != output_id[i]:
self.graph_variables[str(iedge.name)] = self.graph_variables[output_id[i]]
if not output_id:
logging.error(f"No nodes mapped for graph")
exit(1)
elif len(output_id) != 1:
logging.error(f"More than one output supplied for graph: {output_id}")
exit(1)
self.tvm_func = relay.Function(relay.analysis.free_vars(self.graph_variables[output_id[0]]), self.graph_variables[output_id[0]])
def create_op_args(self, op_name, node, node_signature, scope):
op_config = self.translator_config['ops'][op_name]
instance_args = hdfgutils.get_attribute_value(node.attributes['ordered_args'])
signature_args = hdfgutils.get_attribute_value(node_signature.attributes['ordered_args'])
default_map = self.create_default_map(self.templates[op_name])
for i in range(len(instance_args)):
instance_args[i] = scope + instance_args[i]
args = self.get_ordered_args(op_config, signature_args, instance_args, default_map, op_name, scope)
kwargs = self.get_kwargs(op_config, signature_args, instance_args,default_map, op_name, scope)
output_keys = self.get_output_keys(op_config, signature_args, instance_args, op_name, scope)
return args, kwargs, output_keys
def get_ordered_args(self, op_config, signature_args, instance_args,default_map, op, scope):
args = []
for a in op_config['positional_arguments']:
if a not in op_config['arg_map'].keys():
logging.error(f"{a} not found in argument map for op {op}. Please check config")
exit(1)
arg = op_config['arg_map'][a]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
if default_map[signature_args[idx]] is None:
logging.error(f"Error! No default argument for unspecified parameter {arg} in {op}, name: {signature_args[idx]}")
exit(1)
if op_config['arg_map'][a]['init_func']:
var = self.init_var(op_config['arg_map'][a], default_map[signature_args[idx]], literal=True)
elif op_config['arg_map'][a]['type'] in CMLANG_CAST_MAP.keys():
var = default_map[signature_args[idx]]
else:
logging.error(f"Unable to resolve argument {default_map[signature_args[idx]]} for keyword {a}={signature_args[arg]}")
var = None
exit(1)
else:
instance_arg = instance_args[idx]
if instance_arg in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_arg]
ename = edge.name
else:
ename = instance_arg
if ename in self.graph_variables.keys() and instance_arg not in self.graph_variables.keys():
var = self.graph_variables[ename]
elif instance_arg not in self.graph_variables.keys():
if op_config['arg_map'][a]['init_func']:
var = self.init_var(op_config['arg_map'][a], instance_arg)
if op_config['arg_map'][a]['arg_type'] != 'parameter':
self.graph_variables[instance_arg] = var
elif op_config['arg_map'][a]['type'] in CMLANG_CAST_MAP.keys():
var = CMLANG_CAST_MAP[op_config['arg_map'][a]['type']](instance_arg)
else:
logging.error(f"Unable to resolve argument {instance_arg} for keyword {a}={signature_args[arg]}")
var = None
exit(1)
else:
var = self.graph_variables[instance_arg]
args.append(var)
return args
def get_kwargs(self, op_config, signature_args, instance_args,default_map, op, scope):
kwargs = {}
for k in op_config['keyword_arguments'].keys():
if op_config['keyword_arguments'][k] not in op_config['arg_map'].keys():
logging.error(f"Key id {k} with value {op_config['keyword_arguments'][k]} not found in argument map for op {op}."
f" Please check config")
exit(1)
id = op_config['keyword_arguments'][k]
arg = op_config['arg_map'][id]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
if default_map[signature_args[idx]] is None:
logging.error(f"Error! No default argument for unspecified parameter {arg} in {op}, name: {signature_args[idx]}")
exit(1)
if op_config['arg_map'][id]['init_func']:
var = self.init_var(op_config['arg_map'][id], default_map[signature_args[idx]], literal=True)
elif op_config['arg_map'][id]['type'] in CMLANG_CAST_MAP.keys():
var = default_map[signature_args[idx]]
else:
logging.error(f"Unable to resolve argument {default_map[signature_args[idx]]} for keyword {id}={signature_args[arg]}")
var = None
exit(1)
else:
instance_arg = instance_args[idx]
if instance_arg in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_arg]
ename = edge.name
else:
ename = instance_arg
if ename in self.graph_variables.keys() and instance_arg not in self.graph_variables.keys():
var = self.graph_variables[ename]
elif instance_arg not in self.graph_variables.keys():
if op_config['arg_map'][id]['init_func']:
var = self.init_var(op_config['arg_map'][id], instance_arg)
if op_config['arg_map'][id]['arg_type'] != 'parameter':
self.graph_variables[instance_arg] = var
elif op_config['arg_map'][id]['type'] in CMLANG_CAST_MAP.keys():
var = CMLANG_CAST_MAP[op_config['arg_map'][id]['type']](instance_arg)
else:
logging.error(f"Unable to resolve argument {instance_arg} for keyword {id}={signature_args[arg]}")
exit(1)
else:
var = self.graph_variables[instance_arg]
kwargs[k] = var
return kwargs
def get_output_keys(self, op_config, signature_args, instance_args, op, scope):
output_keys = []
for o in op_config['op_output']:
if o not in op_config['arg_map'].keys():
logging.error(f"Key id {o} with value {op_config['keyword_arguments'][o]} not found in argument map for op {op}."
f" Please check config")
exit(1)
arg = op_config['arg_map'][o]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
logging.error(f"Error! Cannot assign output {o} to unspecified parameter {signature_args[idx]}")
exit(1)
output_keys.append(instance_args[idx])
return output_keys
def create_default_map(self, template):
default_map = {}
ordered_args = hdfgutils.get_attribute_value(template.attributes['ordered_args'])
for a in ordered_args:
if a not in list(template.edge_info):
logging.error(f"Argument {a} not found in edges for {template.op_type}")
edge = template.edge_info[a]
if 'default' in list(edge.attributes):
dtype = hdfgutils.get_attribute_value(edge.attributes['type'])
default_map[a] = CMLANG_CAST_MAP[dtype](hdfgutils.get_attribute_value(edge.attributes['default']))
else:
default_map[a] = None
return default_map
def init_var(self, var, instance_name, literal=False):
args = []
kwargs = {}
arg_type = var['arg_type']
if isinstance(instance_name, str):
id = instance_name.rsplit('/', 1)
if len(id) > 1:
id = id[-1]
else:
id = id[0]
else:
id = str(instance_name).rsplit('/', 1)
if len(id) > 1:
id = id[-1]
else:
id = id[0]
if arg_type == 'parameter' and not literal and not is_literal(id):
if instance_name not in list(self.tvm_graph.edge_info):
logging.error(f"Unable to get value for parameter {instance_name}")
exit(1)
edge = self.tvm_graph.edge_info[instance_name]
if 'value' not in list(edge.attributes):
logging.error(f"Could not find literal for parameter argument {instance_name}.\n"
f"Possible attributes: {list(edge.attributes)}")
exit(1)
value = hdfgutils.get_attribute_value(edge.attributes['value'])
elif is_literal(id) and isinstance(instance_name, str):
if id in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[id]
value = hdfgutils.get_attribute_value(edge.attributes['value'])
elif instance_name in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_name]
value = hdfgutils.get_attribute_value(edge.attributes['value'])
else:
logging.error(f"Could not find literal for parameter argument {instance_name} with id {id}.\n"
f"var: {var['key']}")
exit(1)
else:
value = instance_name
for a in var['init_func_args']:
arg_result = self.get_arg_attribute(a, value, literal=literal)
args.append(arg_result)
for k in var['init_func_kw'].keys():
kwargs[k] = self.get_arg_attribute(var['init_func_kw'][k], value, literal=literal)
if len(kwargs.keys()) == 0:
var = self.get_func(var['init_func'])(*args)
else:
var = self.get_func(var['init_func'])(*args, **kwargs)
return var
def get_arg_attribute(self, key, instance_name, literal=False):
if isinstance(key, list):
arg = []
for k in key:
arg.append(self.get_arg_attribute(k, instance_name, literal=literal))
return arg
elif key == 'name':
return instance_name
elif key == 'shape':
if literal:
logging.error(f"Cannot get shape for literal value {instance_name} as attribute")
exit(1)
edge = self.tvm_graph.edge_info[instance_name]
if 'dimensions' not in list(edge.attributes):
logging.error(f"No dimensions for edge {instance_name}")
tuple_dims = ()
else:
dimensions = hdfgutils.get_attribute_value(edge.attributes['dimensions'])
tuple_dims = tuple(int(d) if is_number(d) else d for d in dimensions)
return tuple_dims
elif key == 'type':
if literal:
return type(instance_name).__name__
edge = self.tvm_graph.edge_info[instance_name]
if 'type' not in list(edge.attributes):
logging.error(f"No type for edge {instance_name}")
dtype = 'float32'
else:
dtype = hdfgutils.get_attribute_value(edge.attributes['type'])
return dtype
elif instance_name in self.graph_variables.keys():
return self.graph_variables[instance_name]
else:
logging.error(f"Could not create attribute for {instance_name} with key {key}.")
exit(1)
def get_args(self, names, vars):
args = []
for n in names:
if n not in vars.keys():
logging.error(f"Operation argument {n} not in created variables: {vars.keys()}")
else:
args.append(vars[n])
return args
def arg_conversion(self, instance_arg, target_arg):
if isinstance(target_arg, tuple):
result = tuple(instance_arg for _ in range(len(target_arg)))
return result
else:
return instance_arg
def get_func(self, function_name):
mod_id, func_id = function_name.rsplit('.', 1)
mod = importlib.import_module(mod_id)
func = getattr(mod, func_id)
return func
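# Minimal usage sketch; the proto path below is a hypothetical placeholder.
# Constructing the class does all the work: it loads the serialized HDFG
# program, runs the node-mapping and flattening passes, builds the Relay
# function and benchmarks it via benchmark_execution().
if __name__ == "__main__":
    TvmTranslation("/path/to/program.pb")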
|
[
"sean.kinzer@gmail.com"
] |
sean.kinzer@gmail.com
|
1c0c27da1a5ffd2ada1e238f96d4179c01990331
|
2cb9d1bf6e674049dd03b04e5714d12a268425a4
|
/sariq_dev/darslar/10_dars_uy_ishi_5_.py
|
eb5ae2c36841e0c7e64652eb8400aa8e43b095c9
|
[] |
no_license
|
RuzimovJavlonbek/anvar.nazrullayevning-mohirdev.uz-platformasidagi-dasturlash.asoslari.python-kursidagi-amaliyotlar
|
02885608c40e9dd9ae0d13013619ef787240bcf6
|
f999be39d0e3c7edb990f9c3c29edbeeb0e19c2d
|
refs/heads/main
| 2023-07-02T20:16:56.146956
| 2021-08-06T03:12:49
| 2021-08-06T03:12:49
| 390,466,668
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 96
|
py
|
a = float(input("a="))
if a < 0:
    print(" Manfiy son")   # "Manfiy son" = negative number
else:
    print(" Musbat son")   # "Musbat son" = positive number
input()
|
[
"ruzimov_javlonbek_1997@mail.ru"
] |
ruzimov_javlonbek_1997@mail.ru
|
1d8b9e59de43646ea4b505eedff319fb9b974db2
|
4b449a67f2907638a563eee529073d23c346acc7
|
/Card/migrations/0001_initial.py
|
9ef256b4b11a0a383319b00003e6242222d1eb0d
|
[] |
no_license
|
yuanjie101/Ecar
|
f3adced83e154d36f3ca3bfa1b829ca62471e8ad
|
24b0789469ca2715cc01038ec8d0fe7be3a6e350
|
refs/heads/master
| 2022-10-12T06:02:12.373394
| 2020-05-23T09:49:45
| 2020-05-23T09:49:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,069
|
py
|
# Generated by Django 3.0.4 on 2020-03-16 09:13
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('Login', '0001_initial'),
('Deck', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Card',
fields=[
('card_id', models.IntegerField(primary_key=True, serialize=False)),
('q_text', models.TextField(blank=True, null=True)),
('q_img', models.ImageField(blank=True, null=True, upload_to='question')),
('ans_text', models.TextField(blank=True, null=True)),
('ans_img', models.ImageField(blank=True, null=True, upload_to='answer')),
('recall_secs', models.IntegerField(default=10)),
('c_time', models.DateTimeField(auto_now_add=True)),
('deck', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Deck.Deck')),
],
),
migrations.CreateModel(
name='MemoryInfo',
fields=[
('info_id', models.IntegerField(primary_key=True, serialize=False)),
('memory_integral', models.IntegerField(default=0)),
('last_memory_time', models.DateField(null=True)),
('review_time', models.DateField(null=True)),
('now_correct_times', models.IntegerField(default=0)),
('now_error_times', models.IntegerField(default=0)),
('need_correct_times', models.IntegerField(default=4)),
('is_memory_over', models.BooleanField(default=False)),
('memory_times', models.IntegerField(default=0)),
('card', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='InfoToCard', to='Card.Card')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='InfoToUser', to='Login.User')),
],
),
]
|
[
"re.masterzero@gmail.com"
] |
re.masterzero@gmail.com
|
66e35dbb42b295345eba7f7c190fc3f63a6c00f8
|
67423cd76b549cea780059856f149c15207fc205
|
/run1.py
|
52ca277cd729722b5679e4bc8e92adda75ea00f4
|
[] |
no_license
|
checkcheckzach/lunar-lander
|
e69435fc73fe6dd19e00422c5d9946f42e6d41e1
|
1a01acaab2106398e4043f2ff45ccec623467e0a
|
refs/heads/master
| 2020-06-16T23:48:53.399525
| 2019-07-08T04:25:37
| 2019-07-08T04:25:37
| 195,736,031
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,842
|
py
|
import lunarlander
import matplotlib.pyplot as plt
PARAMS2 = [
{'l_rate': 0.0001, 'gamma': 0.9, 'epsilon_decay': 0.998, 'min_epsilon': 0, 'max_epsilon': 1, 'buffer_zize': 5000,
'batch_size': 32},
{'l_rate': 0.0001, 'gamma': 0.94, 'epsilon_decay': 0.998, 'min_epsilon': 0, 'max_epsilon': 1, 'buffer_zize': 5000,
'batch_size': 32},
{'l_rate': 0.0001, 'gamma': 0.99, 'epsilon_decay': 0.998, 'min_epsilon': 0, 'max_epsilon': 1, 'buffer_zize': 5000,
'batch_size': 32},
{'l_rate': 0.0001, 'gamma': 0.999, 'epsilon_decay': 0.998, 'min_epsilon': 0, 'max_epsilon': 1, 'buffer_zize': 5000,
'batch_size': 32}
]
if __name__ == "__main__":
scorega = {0.9: [], 0.94: [],0.99: [], 0.999: []}
ga = [0.9, 0.94,0.99,0.999]
ep = 1500
for para in PARAMS2:
exp = lunarlander.Experiment(episode_num=ep, train_mode=True, para=para)
exp.run()
if para['gamma'] == 0.9:
scorega[0.9]= exp.rep
if para['gamma'] == 0.94:
scorega[0.94]= exp.rep
if para['gamma'] == 0.99:
scorega[0.99]= exp.rep
if para['gamma'] == 0.999:
scorega[0.999]= exp.rep
epsex = range(0, ep)
for i in ga:
if i == 0.9:
plt.plot(epsex, scorega[i], 'r-', label='gamma = {}'.format(i))
if i == 0.94:
plt.plot(epsex, scorega[i], 'b-', label='gamma= {}'.format(i))
if i == 0.99:
plt.plot(epsex, scorega[i], 'g-', label='gamma = {}'.format(i))
if i == 0.999:
plt.plot(epsex, scorega[i], 'k-', label='gamma = {}'.format(i))
#plt.xlim([0,ep+10])
plt.legend()
plt.ylabel('Score',size=20)
plt.xlabel('Episode',size=20)
plt.title('Gamma Search with other parameters fixed',size=14)
plt.show()
|
[
"noreply@github.com"
] |
checkcheckzach.noreply@github.com
|
cf0a7901e1817c03cbed6cd2351cd3b0691188c2
|
109af9bb1315554acc2266f97f777049d1449520
|
/input.py
|
ffc0a5c42986684d13ac5f6a16a11b9705fb4d2e
|
[] |
no_license
|
itsuttida/Python
|
c2ad326a58d50bea27dcd7f970cdde326f574896
|
2b034d028d292384e14e1f458ac68ed4c7ccc63e
|
refs/heads/master
| 2023-05-25T20:10:32.822410
| 2021-06-13T07:00:00
| 2021-06-13T07:00:00
| 345,884,013
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 420
|
py
|
# age_aew = input("Enter your age: ")
# print("aew : " , age_aew , '123' , 1 , 2.34)
# age_nick = input("Enter your age")
# print("nick : " , age_nick)
# f_name = input("Enter your name : ")
# l_name = input("Enter your lastname : ")
# full = f_name + l_name
# print(full)
#sum of the two ages
age_aew = int(input("Enter your age : "))
age_nick = input("Enter your age : ")
sum_age = age_aew + int(age_nick)
print(sum_age)
|
[
"suttida.s@teohong.com"
] |
suttida.s@teohong.com
|
ec662f925b59e24fde024e4243aba389f33e0432
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/pa3/benchmarks/sieve-14.py
|
52bce3a36228b57f2739edf857ed492498c0ab0c
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,587
|
py
|
# A resizable list of integers
class Vector(object):
items: [$ID] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
# Data
v:Vector = None
i:int = 0
# Crunch
v = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
3195ba4e15a300cbdd468a885fafc5e2749eb25f
|
37a05e589e90e8a265eb849bdebf8f8739fb97ac
|
/img1.py
|
b0ddf657c966dfd475d4f658d045b6156ab4dcff
|
[] |
no_license
|
Dust1N69/Jerry
|
06d1425b1495401f15f0e24b671dc06d13399e6b
|
75af9954e20f03857d2e5981c9414ae63dfbb4ed
|
refs/heads/master
| 2023-06-25T22:31:44.388203
| 2021-07-29T13:50:28
| 2021-07-29T13:50:28
| 389,825,418
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 497
|
py
|
import cv2
img1 = cv2.imread('./images/test1.jpg')
print(img1.shape)
print(img1[100,200])
print(img1.item(100,200,1))
# img1[200:400,300:700]=[255,0,0]
roi = img1[400:600,300:700]
roi[:,:,1]=255
# roi[:,:,2]=0
h,w,a =roi.shape
img1[100:300,800:1200] = roi
print(len(roi),len(roi[0]))
img1[10:10+h,10:10+w] = roi
print(img1.size)
# show the image
cv2.namedWindow('test',cv2.WINDOW_NORMAL)
cv2.imshow('test',img1)
cv2.waitKey(0)
cv2.destroyAllWindows()
cv2.imwrite('test.png',img1)
|
[
"noreply@github.com"
] |
Dust1N69.noreply@github.com
|
51da6df9e40cee8f9c3da7d7716748f4d0cefe6a
|
eb6fdd2f3e0363bf18b5819f5032c0143b0fc540
|
/dashboard/decorators.py
|
ec745dde820cda3d2349ba324b57e817698065d6
|
[] |
no_license
|
AnuraghSarkar/Halkhabar
|
61afaa9c8f884d51cdb9ebe69ea565c6fdab996d
|
f54e186fc083d5d75822756fdffae3784324bb48
|
refs/heads/master
| 2023-04-23T06:02:06.725189
| 2021-05-10T04:14:00
| 2021-05-10T04:14:00
| 361,974,054
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 974
|
py
|
from django.shortcuts import redirect
def unauthenticated_user(view_function):
def wrapper_function(request, *args, **kwargs):
if request.user.is_authenticated:
if request.user.is_staff:
return redirect('/myadmin')
elif not request.user.is_staff:
return redirect('/')
else:
return view_function(request, *args, **kwargs)
return wrapper_function
def admin_only(view_function):
def wrapper_function(request, *args, **kwargs):
if request.user.is_staff:
return view_function(request, *args, **kwargs)
else:
return redirect('posts:feed')
return wrapper_function
def user_only(view_function):
def wrapper_function(request, *args, **kwargs):
if request.user.is_staff:
return redirect('myadmin:admin-dashboard')
else:
return view_function(request, *args, **kwargs)
return wrapper_function
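# A minimal usage sketch (the views and template names below are hypothetical,
# not part of this project) showing how these decorators are typically applied
# to function-based views.
from django.shortcuts import render
@unauthenticated_user
def login_page(request):
    # Authenticated users are redirected away before this body runs.
    return render(request, 'login.html')
@admin_only
def admin_dashboard(request):
    # Only staff users reach this body; everyone else goes to the posts feed.
    return render(request, 'dashboard.html')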
|
[
"trithatimalsina@gmail.com"
] |
trithatimalsina@gmail.com
|
a1e31fcbf4ad9e98d814b560160471a3f4c219ac
|
1d36cce83aa5fd4ef09fd80361a6ea5a30237226
|
/create_batch_info.py
|
21304d2721dc6db36ce1296826d27c37f4ded8b5
|
[] |
no_license
|
HyunbinCho/cell_perturbation
|
3d994ba77fbe8e7a8ea8435d4ef069a9bf936d56
|
66686140c859383ca7d61f075e3b2649886e5e83
|
refs/heads/master
| 2022-07-23T06:33:10.448560
| 2019-08-28T14:28:15
| 2019-08-28T14:28:15
| 198,537,790
| 0
| 0
| null | 2022-06-21T22:25:08
| 2019-07-24T01:50:21
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,927
|
py
|
import os
import sys
import glob
import cv2
import numpy as np
import pandas as pd
from collections import defaultdict
from utils.load_dataset import *
sys.path.append("/home/hyunbin/utils/rxrx1-utils")
import rxrx.io as rio
import yaml
import numba
@numba.jit(nopython=True, parallel=True)
def calculate_subfunc(image_array):
mean = np.mean(image_array)
std = np.std(image_array)
return mean, std
def create_batch_info(datapath, outpath, metadata_path):
"""
calculates of mean and stddev per batch samples only using Negative Control
Examples of batch_info_dict
HEPG2-01:
- mean: (0.5, 0.5, 0.5, 0.5, 0.5, 0.5) ------> 6-channel
- std: (0.5, 0.5, 0.5, 0.5, 0.5, 0.5)
HEPG2-02:
- mean: (0.499, 0.323, 0.276, 0.301, 0.399, 0.501)
- std:: (0.501, 0.333, 0.255, 0.532, 0.444, 0.333)
...(skip)
U2OS-01:
- mean: (0.375, 0.376, 0.377, 0.378, 0.379, 0.380)
- std: (0.222, 0.223, 0.224, 0.225, 0.226, 0.227)
...(skip)
"""
#data_path = "/data2/cell_perturbation/"
traindata = load_data_cell_perturbation(base_path=os.path.join(datapath, "train"))
testdata = load_data_cell_perturbation(base_path=os.path.join(datapath, "test"))
metadata = load_metadata(from_server=True, path=metadata_path)
merged_data = merge_all_data_to_metadata([traindata, testdata], metadata)
print(merged_data)
temp_batchname_list = [i.split("_")[0] for i in merged_data.loc[:, 'id_code'].values.tolist()]
batch_nameset = set(temp_batchname_list)
batch_nameset = list(batch_nameset)
batch_info_dict = defaultdict(dict)
for batch_name in batch_nameset:
print(batch_name)
batch_info_dict[batch_name] = defaultdict(dict)
batch_info_dict[batch_name]['mean'] = list()
batch_info_dict[batch_name]['std'] = list()
temp_df = merged_data[merged_data['id_code'].str.match(batch_name)]
temp_df = temp_df[temp_df['well_type'] == 'negative_control']
#iterates channel 1 ~ 6
for channel in range(1, 7):
temp_df_per_channel = temp_df.loc[:, 'c{}'.format(channel)]
img_arr = np.array([cv2.imread(i) for i in temp_df_per_channel.values.tolist()])
#calculates mean, std each from all pixel values
#TODO: make a subfunction with numba
mean, std = calculate_subfunc(img_arr)
batch_info_dict[batch_name]['mean'].append(mean)
batch_info_dict[batch_name]['std'].append(std)
with open(outpath, 'w', encoding="utf-8") as yaml_file:
dump = yaml.dump(batch_info_dict, default_flow_style=False, allow_unicode=True, encoding=None)
yaml_file.write(dump)
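# A minimal consumption sketch (an assumption, not part of the original script):
# load the generated YAML and normalise one 6-channel image with the per-batch
# statistics. The batch name and image layout are placeholders.
def normalize_with_batch_info(image, batch_name, info_path="./batch_info.yaml"):
    with open(info_path, 'r', encoding="utf-8") as fp:
        batch_info = yaml.safe_load(fp)
    mean = np.array(batch_info[batch_name]['mean']).reshape(1, 1, -1)
    std = np.array(batch_info[batch_name]['std']).reshape(1, 1, -1)
    return (image - mean) / std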
if __name__ == "__main__":
    data_path = "/hdd/cell_perturbation"
    # create_batch_info() also expects a metadata path; the value below is a
    # placeholder assumption, not taken from the original script.
    metadata_path = os.path.join(data_path, "metadata")
    create_batch_info(data_path, "./batch_info.yaml", metadata_path)
|
[
"ehrm0rh@gmail.com"
] |
ehrm0rh@gmail.com
|
ca15f8c807dc55a92e8237c20f88a21a73b45caa
|
c29dc8e9f73876df29e98203c562958cefafd5a9
|
/env/Scripts/django-admin.py
|
6c58a702ae126bb95f44adcb508f466e20872a95
|
[] |
no_license
|
Rakshitha-masali/pro1234
|
594109f1d37b45b020d22507635811172dc2f5f6
|
d66cb43c0e0e887e802ee11b3d2e71ce0f8cb71d
|
refs/heads/main
| 2023-04-02T02:14:12.137713
| 2021-04-02T16:15:48
| 2021-04-02T16:15:48
| 354,068,119
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 690
|
py
|
#!c:\users\hp\desktop\project100\env\scripts\python.exe
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
|
[
"rakshithamasali@gmail.com"
] |
rakshithamasali@gmail.com
|
8ae1fdf9ab7bd682171cd3f8dbcf5d308883094f
|
9a8f34fe61ec7ffe014ab7e1d2a52d300c5f354e
|
/python/multithread_multiprocess/venv/bin/f2py
|
7daa6293e960fdad2ef9741534b96e440de29a97
|
[] |
no_license
|
PacYang/interview_prepare
|
d0995fd1a51793232d4b8348340c67a2ab2fb405
|
92e411cb9956fa4d20ac399d9bbfe4f64dae3769
|
refs/heads/master
| 2021-04-07T22:45:45.608083
| 2020-09-27T09:22:59
| 2020-09-27T09:22:59
| 248,714,077
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 264
|
#!/Users/larry.yang/Documents/git/multithread/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"larry.yang@yingzt.com"
] |
larry.yang@yingzt.com
|
|
dccc13354e31ce5cae3be0b081de0f4522049eea
|
be91d8ece2543ac8979519f54ca6ca2e2b156aaf
|
/filehandling_using_pandas.py
|
84fa5c5c197a9219e822dbd1df64d0c513590efb
|
[] |
no_license
|
muhammadfaraz6640/pandas-work
|
70cb3023f62a236ccc7b514c1fcb18a74242a039
|
0861f173c0cc869703fdcbcaddd5bfb56b237f83
|
refs/heads/master
| 2020-09-14T16:51:31.657516
| 2019-11-21T14:24:10
| 2019-11-21T14:24:10
| 223,190,875
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,539
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 3 13:34:30 2019
@author: Haroon Traders
"""
'''names=['name','age','dept']'''
#data loading storage and file formats
import pandas as pd
#sentinels={'2':['not found','nan']}
#ab=pd.read_csv("C:/Users/Haroon Traders/source/repos/filing/filing/data.csv",skiprows=1,header=None,na_values=sentinels,nrows=3)
#print(ab)
#s2_df=pd.read_csv("C:/Users/Haroon Traders/source/repos/filing/filing/data.csv",index_col=['name','age'])
#print(s2_df),nrows=3,na_values=sentinels
#new=pd.read_csv("C:/Users/Haroon Traders/Anaconda3/pkgs/scikit-learn-0.20.3-py37h343c172_0/Lib/site-packages/sklearn/datasets/data/wine_data.csv",skiprows=1,header=None,chunksize=50)
#print(type(new))
#print(new)
#chunklist=[]
#for chunk in new:
# print(len(chunk)) #178 record 3 chunks of 50 4th chunk will be of 28
#print(chunk.head)
# chunklist.append(chunk)
#print(chunklist)
#df=chunklist[2]
#print(df.head())
#print(df)
#ab=pd.read_csv("C:/Users/Haroon Traders/source/repos/filing/filing/data.csv",skiprows=1,header=None,na_values=sentinels,nrows=3)
#ab.to_csv("C:/Users/Haroon Traders/Anaconda3/pkgs/scikit-learn-0.20.3-py37h343c172_0/Lib/site-packages/sklearn/datasets/data/wine_data.csv")
#abc=pd.read_html("C:/Users\Haroon Traders/Downloads/[FreeTutorials.Us] Udemy - the-complete-web-developer-course-2/04 Javascript/108 Loops - Files.html")
#print(len(abc))
import numpy as np
'''
frame=pd.DataFrame({'a':np.random.randn(100),'b':np.random.randn(100)})
store=pd.HDFStore("mydata.h5")
#print(store)
#print(frame)
store['obj1']=frame
store['obj1_col']=frame['a']
print(store)
x=store.obj1
print(x.head())
#print(x)
y=store.obj1_col
#print(y.head())
store.put('obj2',frame,format='table')
store.select('obj2',where=['index>10']) # if table format i can condition here
print(store.obj2.head())
'''
import requests
import json
'''
url="http://search.twitter.com/search.json?q=python%20pandas"
resp=requests.get(url)
print(resp)
data=resp.json()
print(data)
res=json.load(resp)
'''
import sqlite3 # rdbms
#creating table
query="""CREATE TABLE test (a VARCHAR(20),b VARCHAR(20),c REAL,d INTEGER);"""
con=sqlite3.connect('mydata1.sqlite3')
#con.execute(query)
con.commit()
data=[('atlanata','georgia',1.25,6)]
smtp="INSERT INTO test Values(?,?,?,?)"
con.executemany(smtp,data)
cursor=con.execute("SELECT * FROM test")
print(cursor.description)
file=pd.read_csv("C:/Users/Haroon Traders/Desktop/Assignment-2/Euro_2012_stats_TEAM.csv")
file
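# A small follow-up sketch (assumption): the same sqlite3 connection can be read
# directly into a DataFrame instead of iterating over the raw cursor.
df_sql = pd.read_sql_query("SELECT * FROM test", con)
print(df_sql.head())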
|
[
"noreply@github.com"
] |
muhammadfaraz6640.noreply@github.com
|
6fd7363389f6b9bfc67823959049f44b95b6f19e
|
83aa59c568d0b84eee76226776da7f1d49384f10
|
/tccli/services/ape/ape_client.py
|
a2fa953563d4f64679515e3129265264be734748
|
[
"Apache-2.0"
] |
permissive
|
jjandnn/tencentcloud-cli
|
c492f5f19cacaee85fc2e7b0de0d8aa8ef5dd009
|
ac3c697fb06acdd2e3e711b11069d33754889340
|
refs/heads/master
| 2023-05-05T00:27:54.406654
| 2021-05-27T00:40:52
| 2021-05-27T00:40:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,552
|
py
|
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.ape.v20200513 import ape_client as ape_client_v20200513
from tencentcloud.ape.v20200513 import models as models_v20200513
def doDescribeAuthUsers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAuthUsersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAuthUsers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDownloadInfos(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDownloadInfosRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeDownloadInfos(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchDescribeOrderCertificate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.BatchDescribeOrderCertificateRequest()
model.from_json_string(json.dumps(args))
rsp = client.BatchDescribeOrderCertificate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeImages(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeImagesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeImages(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateOrderAndDownloads(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateOrderAndDownloadsRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateOrderAndDownloads(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeImage(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeImageRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeImage(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateOrderAndPay(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateOrderAndPayRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateOrderAndPay(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchDescribeOrderImage(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.BatchDescribeOrderImageRequest()
model.from_json_string(json.dumps(args))
rsp = client.BatchDescribeOrderImage(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
CLIENT_MAP = {
"v20200513": ape_client_v20200513,
}
MODELS_MAP = {
"v20200513": models_v20200513,
}
ACTION_MAP = {
"DescribeAuthUsers": doDescribeAuthUsers,
"DescribeDownloadInfos": doDescribeDownloadInfos,
"BatchDescribeOrderCertificate": doBatchDescribeOrderCertificate,
"DescribeImages": doDescribeImages,
"CreateOrderAndDownloads": doCreateOrderAndDownloads,
"DescribeImage": doDescribeImage,
"CreateOrderAndPay": doCreateOrderAndPay,
"BatchDescribeOrderImage": doBatchDescribeOrderImage,
}
AVAILABLE_VERSION_LIST = [
"v20200513",
]
def action_caller():
return ACTION_MAP
def parse_global_arg(parsed_globals):
g_param = parsed_globals
is_exist_profile = True
if not parsed_globals["profile"]:
is_exist_profile = False
g_param["profile"] = "default"
configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
conf = {}
cred = {}
if is_conf_exist:
conf = Utils.load_json_msg(conf_path)
if is_cred_exist:
cred = Utils.load_json_msg(cred_path)
if not (isinstance(conf, dict) and isinstance(cred, dict)):
raise ConfigurationError(
"file: %s or %s is not json format"
% (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
if OptionsDefine.Token not in cred:
cred[OptionsDefine.Token] = None
if not is_exist_profile:
if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
if os.environ.get(OptionsDefine.ENV_REGION):
conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
for param in g_param.keys():
if g_param[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
if param in cred:
g_param[param] = cred[param]
else:
raise ConfigurationError("%s is invalid" % param)
elif param in [OptionsDefine.Region, OptionsDefine.Output]:
if param in conf:
g_param[param] = conf[param]
else:
raise ConfigurationError("%s is invalid" % param)
try:
if g_param[OptionsDefine.ServiceVersion]:
g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
else:
version = conf["ape"][OptionsDefine.Version]
g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
if g_param[OptionsDefine.Endpoint] is None:
g_param[OptionsDefine.Endpoint] = conf["ape"][OptionsDefine.Endpoint]
except Exception as err:
raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
return g_param
|
[
"tencentcloudapi@tenent.com"
] |
tencentcloudapi@tenent.com
|
25955ebc5f0917edaefe7116d2150d8499521ed9
|
0198b58c6450f1bbe5f9b0c5ff60b530bafb7518
|
/webgraph_factory/src/utils/datagen.py
|
66ea70a78d68cc5c613f20b8add34e6e62c2fadd
|
[] |
no_license
|
andrei-ars/web_graph_emb
|
58d56b7ca98807a8b82d8f3a109556b77b9b65e2
|
8c2e7bf94f291130b82d746076c2f7f5e148bd4a
|
refs/heads/main
| 2023-08-16T03:24:41.310804
| 2021-09-19T23:16:17
| 2021-09-19T23:16:17
| 385,738,766
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,338
|
py
|
import json
import numpy as np
import numpy.random as random
import string
class DataGenerator():
def __init__(self,datagen_path):
with open(datagen_path) as fp:
self.fields = json.load(fp)['fields']
self.bisect_idx = 0
def infer_random_like(self,value,range_str=None):
to_type = type(value)
rng = parse_range(value,range_str)
if isinstance(value,int):
start,stop,step = rng
interval = np.arange(start,stop,step)
elif isinstance(value,float):
start,stop,step = rng
interval = np.arange(start,stop,step)
elif isinstance(value,str):
interval = rng
else:
interval = ['0']
return to_type(random.choice(interval))
def infer_out_of_range(self,value,range_str=None):
to_type = type(value)
rng = parse_range(value,range_str)
if isinstance(value,int):
start,stop,_ = rng
interval = np.arange(start-100,start).tolist() + np.arange(stop,stop+100).tolist()
elif isinstance(value,float):
start,stop,_ = rng
interval = np.arange(start-100,start).tolist() + np.arange(stop,stop+100).tolist()
elif isinstance(value,str):
interval = [''.join(random.choice([c for c in (string.ascii_letters + string.digits)], size=10))]
else:
interval = ['0']
return to_type(random.choice(interval))
def infer_bisected(self,value,range_str=None):
to_type = type(value)
rng = parse_range(value,range_str)
if isinstance(value,(int,float)):
start,stop,_ = rng
interval = get_bisected_range(start,stop)
else:
return None
if self.bisect_idx >= len(interval):
self.bisect_idx = 0
result = to_type(interval[self.bisect_idx])
self.bisect_idx +=1
return result
def infer(self, driver, element):
"""
WebDriver element
"""
url = driver.get_current_url()
data = self.filter_url(url)
data = self.get_field_like_element(driver,data,element)
if data:
return data['value']
return data
def get_field_like_element(self,driver,data,element):
result = [d for d in data if element == driver.get_element('xpath',d['xpath'])]
if len(result) == 1:
return result[0]
return None
def filter_url(self,url):
return [field for field in self.fields if field['url'] == url]
def gen_uniform(start,stop):
return random.randint(start,stop)
def get_bisected_range(start,stop, n=9):
result = []
mid = (start+stop) // 2
result.append(start)
result.append(stop)
result.append(mid)
mid_l,mid_r = mid,mid
for _ in range((n-2)//2):
mid_l = (start+mid_l) // 2
mid_r = (mid_r+stop) // 2
result.append(mid_l)
result.append(mid_r)
return sorted(result)
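# Worked example: get_bisected_range(0, 100) returns the endpoints, the midpoint
# and repeated halvings toward each end -> [0, 6, 12, 25, 50, 75, 87, 93, 100].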
def parse_range(initial,range_str):
if isinstance(initial,(int,float)):
if range_str is None:
return 0,100,None
to_type = type(initial)
splitted = range_str.split(',')
assert len(splitted) == 2,f'Cannot parse range {range_str}'
first,last = splitted
if first[0] == '[':
first = to_type(first[1:])
elif first[0] == '(':
first = to_type(first[1:]) + 1
else:
raise ValueError(f'Cannot parse range {range_str}')
step = None
last = last.split(':')
if len(last) == 2:
step = to_type(last[1][:-1])
last = last[0] + last[1][-1]
else:
last = last[0]
if last[-1] == ']':
last = to_type(last[:-1]) + 1
elif last[-1] == ')':
last = to_type(last[:-1])
else:
raise ValueError(f'Cannot parse range {range_str}')
return first, last, step
elif isinstance(initial,str):
if range_str is None:
return [''.join(random.choice([c for c in (string.ascii_letters + string.digits )], size=10))]
strings = range_str[1:-1].split(',')
return strings
else:
raise ValueError(f'Cannot parse range {range_str}')
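# Worked example: parse_range(0, "[1,10:2]") yields (first, last, step) == (1, 11, 2),
# so infer_random_like would sample from np.arange(1, 11, 2) == [1, 3, 5, 7, 9].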
|
[
"z@dell.sp"
] |
z@dell.sp
|
8a5c13bc009949c7c42ef0a619409870a73c4a61
|
e421e7c4b4e40c921a027bb863bac08c729ae764
|
/blog/main_blog/models.py
|
b0162946f1538a5236f2eff24bf5992a4c2c639e
|
[] |
no_license
|
abrhamgebremedhin/django
|
a97e30ee086eb5af0cf565dac4f4f1b88a11c9be
|
b19712b89d1012f19a102549134a47d06148ede0
|
refs/heads/master
| 2021-02-10T13:49:36.309877
| 2020-03-03T13:19:30
| 2020-03-03T13:19:30
| 244,387,190
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 291
|
py
|
from django.db import models
# Create your models here.
class Article(models.Model):
    title = models.CharField(max_length=120)
    content = models.TextField(blank=False, null=False)
    date = models.DateField()  # pub_date = date.today()
    publish = models.BooleanField(default=False, null=False)
|
[
"abrhamgebremedhin8@gmail.com"
] |
abrhamgebremedhin8@gmail.com
|
20e10dee2476cb446eac70e4873787dc387fa6a6
|
c8b535407ddf3551ca16d21bd9f2c4f991028010
|
/assesment/settings.py
|
6b2f049d4a5c29ccaef367a74a85ed6c6e8fe050
|
[] |
no_license
|
manish3135/pre-assesment
|
148355708a387ba456ce6a0c0a8a5bbfd79f1018
|
0ffc96a900e58ec06862333c7ab98d5f1cdcc049
|
refs/heads/master
| 2020-03-28T01:07:59.915836
| 2018-09-05T11:12:53
| 2018-09-05T11:12:53
| 147,479,279
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,381
|
py
|
"""
Django settings for assesment project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR = os.path.join(BASE_DIR,'templates')
STATIC_DIR = os.path.join(BASE_DIR,'static')
MEDIA_DIR = os.path.join(BASE_DIR,'media')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dvm7ar1a($q6g^j=k$3t65p!l_^5ajq1=3)c)c#8wni4=bd1l^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'login',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'assesment.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR,],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'assesment.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [STATIC_DIR,]
MEDIA_ROOT = MEDIA_DIR
MEDIA_URL = '/media/'
LOGIN_URL = '/login/user_login/'
|
[
"manishkumar@manishs-MacBook-Pro.local"
] |
manishkumar@manishs-MacBook-Pro.local
|
06be6e8732d456b755862323cca79a311dc6998f
|
4307bbf68a7716bf276bab34cacffd519294d3f1
|
/taglie-parser/parser.py
|
62457c0a3e1e398129f964fe93e01c24c12dc972
|
[] |
no_license
|
alessandro308/Aggregatore-Ordini-AGESCI
|
711b37b85436ac271b29a3ac3ab11c30e2ce24bb
|
4b4bfbc13044f93fb655ac7851f32f79203baf37
|
refs/heads/master
| 2021-01-12T22:13:17.985995
| 2017-04-16T15:44:57
| 2017-04-16T15:44:57
| 43,705,159
| 0
| 2
| null | 2016-01-02T10:09:08
| 2015-10-05T18:31:04
|
PHP
|
UTF-8
|
Python
| false
| false
| 2,300
|
py
|
#!/usr/bin/python3
# Script for importing the AGESCI uniform sizes
# into the DB used by Aggregatore Ordini AGESCI
import sqlite3
import sys
def printquery(name, price, size):
query = "INSERT INTO " + db_table + " ( "
query += db_name_field + " , "
query += db_size_field + " , "
query += db_price_field + " ) "
query += " VALUES ('"
query += name + "','"
query += size + "','"
query += price.replace(",", ".") + "');"
print(query)
return query
def execquery(cursor, query):
cursor.execute(query)
def cleartable(cursor):
execquery(cursor, "DELETE FROM " + db_table + ";")
execquery(cursor, "VACUUM;")
db_table = "prezzitaglie"
db_name_field = "nomeOggetto"
db_size_field = "taglia"
db_price_field = "prezzo"
if (len(sys.argv) != 3):
print("Usage: ./parser.py SOURCE DATABASE")
exit(1)
print("Uniformi agesci parser")
print("SOURCE FILE: " + sys.argv[1])
print("DATABASE FILE: " + sys.argv[2])
target = open(sys.argv[1], 'r')
conn = sqlite3.connect(sys.argv[2])
c = conn.cursor()
cleartable(c)
for line in target:
if line == "\n":
continue
tokens = line.split()
foundtg = False
send_name = ""
send_price = tokens[-1]
for tk in tokens:
if (tk == "tg."):
foundtg = True
continue
if (foundtg == False):
send_name += tk + " "
continue
if (foundtg == True):
if ("-" not in tk):
				# Single size
send_taglia = tk
execquery(c, printquery(send_name[:-1], send_price, tk))
elif (tk.count("-") > 1):
				# Simple range: explicit dash-separated list of sizes
for taglies in tk.split("-"):
execquery(c, printquery(send_name[:-1], send_price, taglies))
elif (tk.count("-") == 1):
				# Numeric range to interpolate
start = int(tk.split("-")[0])
stop = int(tk.split("-")[1])
step = 0
if (int(start) % 2 == 0):
step = 2
else :
step = 1
while (start <= stop):
execquery(c, printquery(send_name[:-1], send_price, str(start)))
start += step
break
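# Persist the inserted rows and release resources; sqlite3 does not commit
# automatically, so without this the INSERTs above would be rolled back
# when the connection is closed.
target.close()
conn.commit()
conn.close()

The least obvious part of the loop above is how the size token that follows "tg." is expanded into individual sizes. The self-contained sketch below restates that branch logic on a few made-up tokens; the helper name expand_sizes and the sample values are illustrative, not taken from a real price list.

def expand_sizes(tk):
    if "-" not in tk:                                # single size, e.g. "XL"
        return [tk]
    if tk.count("-") > 1:                            # explicit list, e.g. "S-M-L"
        return tk.split("-")
    start, stop = (int(s) for s in tk.split("-"))    # numeric range, e.g. "40-52"
    step = 2 if start % 2 == 0 else 1                # even starting sizes step by 2
    return [str(n) for n in range(start, stop + 1, step)]

for token in ("XL", "S-M-L", "40-52"):
    print(token, "->", expand_sizes(token))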
authors: ["corti.nico@gmail.com"]
author_id: corti.nico@gmail.com

blob_id: 39015aa1bae7e4b6b0a20766f355a20b4604d309
directory_id: 5ce61d9fc575912bc6eff17716264d1325578972
path: /A2-1003092468/kmeans.py
content_id: e7bdb6926f8319dccf3e7314e869a71ffb5ac606
detected_licenses: []
license_type: no_license
repo_name: jde10/CSC2515---Intro-to-ML
snapshot_id: 117530979df6a1eae3ca0ad7f7e154c70cc14ce5
revision_id: eb57b57bf8953732774218738403e63c511132c8
branch_name: refs/heads/master
visit_date: 2020-04-06T04:41:43.184579
revision_date: 2017-02-28T16:59:27
committer_date: 2017-02-28T16:59:27
github_id: 82,753,409
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,727
extension: py
content:
from util import *
import matplotlib.pyplot as plt
plt.ion()
def distmat(p, q):
"""Computes pair-wise L2-distance between columns of p and q."""
d, pn = p.shape
d, qn = q.shape
pmag = np.sum(p**2, axis=0).reshape(1, -1)
qmag = np.sum(q**2, axis=0).reshape(1, -1)
dist = qmag + pmag.T - 2 * np.dot(p.T, q)
dist = (dist >= 0) * dist # Avoid small negatives due to numerical errors.
return np.sqrt(dist)
def KMeans(x, K, iters):
"""Cluster x into K clusters using K-Means.
Inputs:
x: Data matrix, with one data vector per column.
K: Number of clusters.
iters: Number of iterations of K-Means to run.
Outputs:
means: Cluster centers, with one cluster center in each column.
"""
N = x.shape[1]
perm = np.arange(N)
np.random.shuffle(perm)
means = x[:, perm[:K]]
dist = np.zeros((K, N))
  for ii in range(iters):
    print('Kmeans iteration = %04d' % (ii+1))
    for k in range(K):
      dist[k, :] = distmat(means[:, k].reshape(-1, 1), x)
    assigned_class = np.argmin(dist, axis=0)
    for k in range(K):
      means[:, k] = np.mean(x[:, (assigned_class == k).nonzero()[0]], axis=1)
return means
def ShowMeans(means, number=0):
"""Show the cluster centers as images."""
plt.figure(number)
plt.clf()
  for i in range(means.shape[1]):
plt.subplot(1, means.shape[1], i+1)
plt.imshow(means[:, i].reshape(48, 48), cmap=plt.cm.gray)
plt.savefig('Graph %s' %(number))
plt.draw()
  input('Press Enter.')
def main():
K = 7
iters = 200
inputs_train, inputs_valid, inputs_test, target_train, target_valid, target_test = LoadData('../toronto_face.npz')
means = KMeans(inputs_train, K, iters)
ShowMeans(means, 0)
if __name__ == '__main__':
main()
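The core of each KMeans iteration above is the assignment step: distmat gives a K-by-N matrix of centre-to-point distances and argmin picks the nearest centre for every point. The sketch below is a self-contained restatement of that step on random data; the array sizes, seed, and variable names are illustrative only and do not come from the assignment's dataset.

import numpy as np

np.random.seed(0)
x = np.random.randn(2, 200)                        # data matrix, one vector per column
means = x[:, np.random.permutation(200)[:3]]       # 3 initial centres, chosen as KMeans does

pmag = np.sum(means ** 2, axis=0).reshape(1, -1)   # squared norms of the centres
qmag = np.sum(x ** 2, axis=0).reshape(1, -1)       # squared norms of the points
dist = np.sqrt(np.maximum(qmag + pmag.T - 2 * np.dot(means.T, x), 0))  # shape (3, 200)
assigned_class = np.argmin(dist, axis=0)           # nearest centre for each point
print(np.bincount(assigned_class, minlength=3))    # cluster sizes under this assignment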
authors: ["noreply@github.com"]
author_id: jde10.noreply@github.com

blob_id: 1a0c04adcfd792bf56fda3703659c4610f36e3cf
directory_id: e440f1e8136f79f59c472ecf450d6676395dbb05
path: /modules/tasks/led_task.py
content_id: d8a43e77d5acc43b68746fbc8291a3f336ac8b72
detected_licenses: []
license_type: no_license
repo_name: henrynester/FlightComputer-sw
snapshot_id: 5e56a3a25d8986b625229254f8ea45ed9ca204d3
revision_id: c9254e340e53022bfd9ebdaf783900124a17ebc0
branch_name: refs/heads/master
visit_date: 2023-07-02T20:45:57.408041
revision_date: 2021-08-12T14:05:38
committer_date: 2021-08-12T14:05:38
github_id: 376,691,813
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 934
extension: py
content:
from modules.tasks.task import Task
from modules.mcl.system_state import SystemState, State
# import threading
from modules.drivers.led import LEDDriver, LEDColor
# class KeyboardThread(threading.Thread):
# def __init__(self, input_cbk=None, name='keyboard-input-thread'):
# self.input_cbk = input_cbk
# super(KeyboardThread, self).__init__(name=name)
# self.start()
# def run(self):
# while True:
# try:
# self.input_cbk(input()) # waits to get input + Return
# except (EOFError):
# return
class LEDTask(Task):
def __init__(self, state: SystemState):
# self.next_phase: Phase = None
self.driver = LEDDriver()
super().__init__('LED', state)
def actuate(self):
# print(self.state.phase)
self.driver.color = LEDColor.RED
def deinitialize(self):
self.driver.deinitialize()
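LEDTask only overrides the hooks it needs: actuate(), presumably called once per control cycle, forces the LED red, and deinitialize() releases the driver. The base Task class and the real drivers are not shown in this file, so the sketch below uses stand-in stubs purely to illustrate that lifecycle; none of the stub names exist in the repository.

from enum import Enum

class LEDColor(Enum):                 # stand-in for modules.drivers.led.LEDColor
    OFF = 0
    RED = 1

class StubLEDDriver:                  # stand-in for modules.drivers.led.LEDDriver
    def __init__(self):
        self.color = LEDColor.OFF
    def deinitialize(self):
        self.color = LEDColor.OFF

class StubLEDTask:                    # mirrors LEDTask above, minus the Task base class
    def __init__(self, state):
        self.state = state
        self.driver = StubLEDDriver()
    def actuate(self):
        self.driver.color = LEDColor.RED
    def deinitialize(self):
        self.driver.deinitialize()

task = StubLEDTask(state=None)        # a real SystemState would be passed here
for _ in range(3):                    # three control cycles
    task.actuate()
print(task.driver.color)              # LEDColor.RED while running
task.deinitialize()
print(task.driver.color)              # LEDColor.OFF after shutdown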
authors: ["henrynester@gmail.com"]
author_id: henrynester@gmail.com