Dataset schema (each record below is one row of the table):
- hexsha: string, length 40
- size: int64, 5 – 2.06M
- ext: string, 10 classes
- lang: string, 1 class (Python)
- max_stars_repo_path: string, length 3 – 248
- max_stars_repo_name: string, length 5 – 125
- max_stars_repo_head_hexsha: string, length 40 – 78
- max_stars_repo_licenses: list, 1 – 10 items
- max_stars_count: int64, 1 – 191k, nullable
- max_stars_repo_stars_event_min_datetime: string, length 24, nullable
- max_stars_repo_stars_event_max_datetime: string, length 24, nullable
- max_issues_repo_path: string, length 3 – 248
- max_issues_repo_name: string, length 5 – 125
- max_issues_repo_head_hexsha: string, length 40 – 78
- max_issues_repo_licenses: list, 1 – 10 items
- max_issues_count: int64, 1 – 67k, nullable
- max_issues_repo_issues_event_min_datetime: string, length 24, nullable
- max_issues_repo_issues_event_max_datetime: string, length 24, nullable
- max_forks_repo_path: string, length 3 – 248
- max_forks_repo_name: string, length 5 – 125
- max_forks_repo_head_hexsha: string, length 40 – 78
- max_forks_repo_licenses: list, 1 – 10 items
- max_forks_count: int64, 1 – 105k, nullable
- max_forks_repo_forks_event_min_datetime: string, length 24, nullable
- max_forks_repo_forks_event_max_datetime: string, length 24, nullable
- content: string, length 5 – 2.06M
- avg_line_length: float64, 1 – 1.02M
- max_line_length: int64, 3 – 1.03M
- alphanum_fraction: float64, 0 – 1
- count_classes: int64, 0 – 1.6M
- score_classes: float64, 0 – 1
- count_generators: int64, 0 – 651k
- score_generators: float64, 0 – 1
- count_decorators: int64, 0 – 990k
- score_decorators: float64, 0 – 1
- count_async_functions: int64, 0 – 235k
- score_async_functions: float64, 0 – 1
- count_documentation: int64, 0 – 1.04M
- score_documentation: float64, 0 – 1
In every record below the stars/issues/forks path, name, head hexsha, and licenses are identical, so they are shown once per record.
hexsha: bb87fa54ef182344fda1ae0ba9713c3ff055e11e | size: 9,337 | ext: py | lang: Python
path: Tools/Builder/build.py | repo: hung0913208/Base | head: 420b4ce8e08f9624b4e884039218ffd233b88335 | licenses: ["BSD-3-Clause"]
stars: null | issues: null | forks: 2 (2020-11-04T08:00:37.000Z – 2020-11-06T08:33:33.000Z)
#!/usr/bin/python3
#
# Project: build
# Description: this is a very simple build tool which imitates Bazel
#
import subprocess
import argparse
import shutil
import glob
import sys
import os
from core import *
from languages import *
from plugins import *
class Build(Plugin):
def __init__(self, root, rebuild=False, **kwargs):
super(Build, self).__init__()
if root[0] != '/':
root = '%s/%s' % (os.getcwd(), root)
# @NOTE: load optional parameters
self._output = kwargs.get('build') or ('%s/build' % root)
self._root = root
        # @NOTE: when a rebuild is requested, remove the old output and build again
        if rebuild is True and os.path.exists(self._output):
            shutil.rmtree(self._output)
# @NOTE: load our builder's objection
self._manager = Manager(root, **kwargs)
self._manager.install([
self,
Git(**kwargs),
Http(**kwargs)
])
self._manager.support([
C(**kwargs),
D(**kwargs)
])
def prepare(self):
""" prepare everything before building this repository
"""
workspace = '%s/.workspace' % self._root
try:
if os.path.exists(workspace):
Logger.debug("found .workspace file %s -> going to parse this file now" % workspace)
if self.parse_workspace_file(workspace) is False:
return False
else:
return self._manager.perform(root=self._root, output=self._output)
else:
return False
except Exception as error:
Logger.error('Got an exception: %s -> going to teardown this project' % str(error))
Logger.exception()
            self._manager.teardown(self._root)
            return False
def derived(self):
""" list derived classes of Build
"""
result = super(Build, self).derived()
if not result is None:
result.append('Build')
return result
def define(self):
pass
@staticmethod
def run(command):
try:
cmd = subprocess.Popen(command.split(' '),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
            # @NOTE: communicate() drains stdout/stderr without deadlocking
            # and waits for the process, so its exit status can be checked
            cmd.communicate()
            return cmd.returncode == 0
except Exception as error:
            Logger.error('Error when performing %s: %s' % (command, str(error)))
return False
def analyze(self, path=None):
""" analyze a repository
"""
path = self._root if path is None else path
need_performing = False
try:
for path in glob.glob('%s/*' % path):
if os.path.isdir(path):
exclusive = '%s/.excluse' % path
build = '%s/.build' % path
if os.path.exists(exclusive):
Logger.debug("found .excluse file %s -> going to run it now" % exclusive)
if Build.run(exclusive) is False:
return False
else:
continue
elif os.path.exists(build) and not os.path.exists(exclusive):
Logger.debug("found .build file %s -> going to parse this file now" % build)
if self.parse_build_file(build) is False:
return False
elif self.analyze(path) is False:
return False
elif self.analyze(path) is False:
return False
else:
return True
except Exception as error:
# @NOTE: got an exception teardown now
Logger.error('Got an exception: %s -> going to teardown this project' % str(error))
Logger.exception()
return False
def build(self):
""" build a repository
"""
return self._manager.perform(root=self._root, output=self._output)
def release(self):
self._manager.teardown(self._root, self._output)
def parse_workspace_file(self, workspace_file):
""" parse file .workspace
"""
        # @NOTE: we must use the dir that contains the 'workspace_file' since .workspace
        # usually defines its resources relative to this dir
self._manager.set_current_dir('workspace', os.path.dirname(workspace_file))
with open(workspace_file) as fp:
source = fp.read()
for item in iter_function(source):
function = self._manager.find_function(item['function'], 'workspace')
variables = {}
if 'variables' in item:
for var in item['variables']:
if isinstance(var, dict):
variables[list(var.keys())[0]] = list(var.values())[0]
if function is None:
raise AssertionError('can\'t determine %s' % item['function'])
else:
function(**variables)
return True
def parse_build_file(self, build_file):
""" parse file .build
"""
        # @NOTE: we must use the dir that contains the 'build_file' since .build
        # usually defines its resources relative to this dir
self._manager.set_current_dir('build', os.path.dirname(build_file))
with open(build_file) as fp:
source = fp.read()
for item in iter_function(source):
function = self._manager.find_function(item['function'], 'build')
variables = {}
if 'variables' in item:
for var in item['variables']:
if isinstance(var, dict):
variables[list(var.keys())[0]] = list(var.values())[0]
if function is None:
Logger.warning('can\'t determine %s -> ignore it now' % item['function'])
continue
else:
function(**variables)
return True
class Serving(Plugin):
def __init__(self, **kwargs):
super(Serving, self).__init__()
self._error = False
@property
def error(self):
return self._error
def define(self):
pass
def check(self):
pass
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--rebuild', type=int, default=1,
help='build everything from scratch')
parser.add_argument('--silence', type=int, default=0,
help='make Builder more quieted')
parser.add_argument('--root', type=str, default=os.getcwd(),
help='where project is defined')
parser.add_argument('--debug', type=int, default=0,
help='enable debug info')
parser.add_argument('--stacktrace', type=str, default=None,
help='enable stacktrace info')
parser.add_argument('--use_package_management', type=int, default=1,
help='enable using package management')
parser.add_argument('--auto_update_packages', type=int,
default=0, help='enable auto update packages')
parser.add_argument('--on_serving', type=int, default=0,
help='use Builder on serving mode when they receive '
'tasks from afar')
parser.add_argument('--mode', type=int, default=0,
help='select mode of this process if on_serving is on')
return parser.parse_args()
if __name__ == '__main__':
flags = parse()
if flags.debug != 0 and flags.silence == 0:
# @NOTE: by default we only use showing stacktrace if flag debug is on
Logger.set_level(DEBUG)
if not flags.stacktrace is None:
if flags.stacktrace.lower() == 'debug':
Logger.set_stacktrace(DEBUG)
elif flags.stacktrace.lower() == 'warning':
Logger.set_stacktrace(WARN)
elif flags.stacktrace.lower() == 'error':
Logger.set_stacktrace(FATAL)
Logger.silence(flags.silence == 1)
if flags.on_serving == 0:
builder = Build(flags.root,
auto_update_packages=flags.auto_update_packages==1,
use_package_management=flags.use_package_management==1,
silence=(flags.silence == 1),
rebuild=(flags.rebuild == 1))
code = 255
if builder.prepare() is False:
Logger.debug('prepare fail -> exit with code 255')
elif builder.analyze() is False:
            Logger.debug('analyze fail -> exit with code 255')
elif builder.build() is False:
Logger.debug('build fail -> exit with code 255')
else:
code = 0
builder.release()
sys.exit(code)
else:
recepter = Serving(root=flags.root,
auto_update_packages=flags.auto_update_packages==1,
use_package_management=flags.use_package_management==1,
silence=(flags.silence == 1))
sys.exit(255 if recepter.error is True else 0)
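# Usage sketch (flags as defined in parse() above; the project path is
# hypothetical):
#   python3 build.py --root /path/to/project --rebuild 1 --debug 1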
avg_line_length: 34.076642 | max_line_length: 100 | alphanum_fraction: 0.543108
count_classes: 6,157 | score_classes: 0.65942 | count_generators: 0 | score_generators: 0 | count_decorators: 588 | score_decorators: 0.062975 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,938 | score_documentation: 0.207561
hexsha: bb8aaea1d863f144dd7a710dd878ed727beb22e5 | size: 414 | ext: py | lang: Python
path: label.py | repo: dotrungkien/face_recognition | head: 52c552c4f73850e62db88d0dc7271d73e4150180 | licenses: ["MIT"]
stars: null | issues: null | forks: null
from scipy.io import loadmat
def convert():
labels = loadmat('tmp/data/devkit/cars_meta.mat')
car_labels = []
for label in labels['class_names'][0]:
car_labels.append(label[0])
    with open("tmp/data/devkit/car_labels.txt", "w") as labels_file:
        labels_file.write("\n".join(car_labels))
if __name__ == '__main__':
convert()
avg_line_length: 21.789474 | max_line_length: 61 | alphanum_fraction: 0.673913
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 93 | score_documentation: 0.224638
hexsha: bb8abe691f8096772a0b83004f7d364cbc981a7d | size: 154 | ext: py | lang: Python
path: app/helper_functions.py | repo: shane-kercheval/dash-ml-explorer | head: c1664d733e037b7e00b918b5bbdbd0b7cbdc3c2d | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
import yaml
def read_yaml(file_name) -> dict:
with open(file_name, "r") as stream:
yaml_dict = yaml.safe_load(stream)
return yaml_dict
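# Minimal usage sketch (file name is hypothetical):
# config = read_yaml('settings.yaml')  # returns the parsed YAML as a dict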
avg_line_length: 17.111111 | max_line_length: 42 | alphanum_fraction: 0.675325
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 3 | score_documentation: 0.019481
hexsha: bb8bea5e83277cccd81727fc97f2634b237f783d | size: 707 | ext: py | lang: Python
path: materialApoio.py | repo: adriano-pacheco/atividades-mentorama | head: 45d3d8b65994eb12aaaa025426e3a230693e25f4 | licenses: ["MIT"]
stars: null | issues: null | forks: null
def titulo(txt):  # print a header line with the given text
    print('_'*30)
    print(txt)
    print('_'*30)
def soma(a, b):  # add two numbers
    s = a + b
    print('~~'*15)
    print(f'The sum A + B = {s}')
    print('~~'*15)
def contador(*num):  # receive several numbers as a tuple
    tam = len(num)
    print(f'Received the values {num}, {tam} numbers in total')
    # this creates a tuple
def dobra(lst):  # double the values of a list in place
    pos = 0
    while pos < len(lst):
        lst[pos] *= 2
        pos += 1
def soma2(*valores):  # add up all the values of a tuple
    s = 0
    for num in valores:
        s += num
    print(f'Adding the values {valores} gives {s}')
def lin():
    print('~'*30)
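# Quick usage sketch of the helpers above (not part of the original file):
# valores = [1, 2, 3]
# dobra(valores)    # doubles in place -> valores becomes [2, 4, 6]
# soma2(4, 5, 6)    # prints the sum of the tuple (4, 5, 6) -> 15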
avg_line_length: 23.566667 | max_line_length: 65 | alphanum_fraction: 0.577086
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 306 | score_documentation: 0.431594
hexsha: bb8d90bc55457ae6e3a765f4679f3b20738e394c | size: 581 | ext: py | lang: Python
path: leetcode/medium/single-number-ii.py | repo: rainzhop/cumulus-tank | head: 09ebc7858ea53630e30606945adfea856a80faa3 | licenses: ["MIT"]
stars: null | issues: null | forks: null
# https://leetcode.com/problems/single-number-ii/
#
# Given an array of integers, every element appears three times except for one. Find that single one.
#
# Note:
# Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
d = {}
for i in nums:
d.setdefault(i, 0)
d[i] = d[i] + 1
if d[i] == 3:
d.pop(i)
        return list(d.keys())[0]
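# The dict above uses O(n) extra memory; the note in the problem statement
# asks for a constant-memory solution. A minimal sketch of the classic
# bitwise alternative (not part of the original file): `ones` holds bits seen
# once, `twos` bits seen twice, and bits seen three times are cleared from both.
class SolutionBitwise(object):
    def singleNumber(self, nums):
        ones, twos = 0, 0
        for num in nums:
            ones = (ones ^ num) & ~twos
            twos = (twos ^ num) & ~ones
        return ones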
avg_line_length: 27.666667 | max_line_length: 108 | alphanum_fraction: 0.562823
count_classes: 306 | score_classes: 0.526678 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 332 | score_documentation: 0.571429
hexsha: bb8db06160aa8c394dde6ee5900fec9ece4ddde7 | size: 5,347 | ext: py | lang: Python
path: wiki/test/test_wikisection.py | repo: IgalMilman/DnDHelper | head: 334822a489e7dc2b5ae17230e5c068b89c6c5d10 | licenses: ["MIT"]
stars: null | issues: null | forks: null
import os
import uuid
from datetime import datetime
import mock
import pytz
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import reverse
from utils.widget import quill
from wiki.models import wikipage, wikisection
from wiki.models.wikipage import Keywords, WikiPage
from wiki.models.wikisection import WikiSection
class WikiSectionTestCase(TestCase):
def setUp(self):
self.firstUser = User(is_superuser=True, username='test1', password='test1', email='test1@example.com', first_name='testname1', last_name='testlast2')
self.secondUser = User(is_superuser=False, username='test2', password='test2', email='test2@example.com', first_name='testname2', last_name='testlast2')
self.firstUser.save()
self.secondUser.save()
self.wikiuuid1 = uuid.uuid4()
self.wikiuuid2 = uuid.uuid4()
self.wikisqtext = '{"ops":[{"insert":"123123\\n"}]}'
self.wikistext = 'text'
self.wikisuuid1 = uuid.uuid4()
self.wikisuuid2 = uuid.uuid4()
self.wikisuuid3 = uuid.uuid4()
self.wikipath = 'wiki'
self.createdtime = datetime.now(pytz.utc)
self.wikiPage1 = WikiPage(unid=self.wikiuuid1, createdon=self.createdtime, updatedon=self.createdtime, createdby=self.firstUser, updatedby=self.secondUser, title='testpage1')
self.wikiPage2 = WikiPage(unid=self.wikiuuid2, createdon=self.createdtime, updatedon=self.createdtime, createdby=self.firstUser, updatedby=self.secondUser, title='testpage2')
self.wikiPage1.save()
self.wikiPage2.save()
self.wikisection1 = WikiSection(unid=self.wikisuuid1, createdon=self.createdtime, updatedon=self.createdtime, createdby=self.firstUser, updatedby=self.secondUser, title='testsec1', pageorder=1, text=self.wikisqtext, wikipage=self.wikiPage1)
self.wikisection2 = WikiSection(unid=self.wikisuuid2, createdon=self.createdtime, updatedon=self.createdtime, createdby=None, updatedby=None, title='testsec2', pageorder=2, text=self.wikistext, wikipage=self.wikiPage1)
self.wikisection3 = WikiSection(unid=self.wikisuuid3, createdon=self.createdtime, updatedon=self.createdtime, createdby=self.firstUser, updatedby=self.secondUser, title='testsec3', pageorder=3, text=self.wikistext, wikipage=self.wikiPage1)
self.wikisection1.save()
self.wikisection1.createdon=self.createdtime
self.wikisection1.updatedon=self.createdtime
self.wikisection1.save()
self.wikisection2.save()
self.wikisection3.save()
def test_wiki_section_get_files_folder(self):
settings.WIKI_SECTION_FILES = self.wikipath
os.makedirs = mock.Mock(return_value=None, spec='os.makedirs')
os.path.exists = mock.Mock(return_value=False, spec='os.path.exists')
self.assertEqual(self.wikisection1.get_files_folder(), os.path.join(self.wikipath, str(self.wikisuuid1)))
os.path.exists.assert_called_once_with(os.path.join(self.wikipath, str(self.wikisuuid1)))
os.makedirs.assert_called_once()
def test_wiki_section_generate_link(self):
wikisection.reverse = mock.Mock(return_value=self.wikipath, spec='django.urls.reverse')
self.assertEqual(self.wikisection1.generate_link(), self.wikipath)
wikisection.reverse.assert_called_once_with('wiki_page', kwargs={'wikipageuuid': self.wikiPage1.unid})
def test_wiki_section_get_link(self):
wikisection.reverse = mock.Mock(return_value=self.wikipath, spec='django.urls.reverse')
self.assertEqual(self.wikisection1.get_link(), self.wikipath)
wikisection.reverse.assert_called_once_with('wiki_page', kwargs={'wikipageuuid': self.wikiPage1.unid})
def test_wiki_section_createtime(self):
self.assertEqual(self.wikisection1.createtime(), self.createdtime.astimezone(pytz.timezone('America/New_York')))
def test_wiki_section_updatetime(self):
self.assertEqual(self.wikisection1.updatetime(), self.createdtime.astimezone(pytz.timezone('America/New_York')))
def test_wiki_section_createuser(self):
self.assertEqual(self.wikisection1.createuser(), self.firstUser.get_full_name())
def test_wiki_section_updateuser(self):
self.assertEqual(self.wikisection1.updateuser(), self.secondUser.get_full_name())
def test_wiki_section_createuser_none(self):
self.assertIsNone(self.wikisection2.createuser())
def test_wiki_section_updateuser_none(self):
self.assertIsNone(self.wikisection2.updateuser())
def test_wiki_section_str(self):
self.assertEqual(str(self.wikisection1), 'Wiki section: testsec1. UNID: ' + str(self.wikisuuid1))
    def test_wiki_section_is_quill_content_true(self):
        self.assertTrue(self.wikisection1.is_quill_content())
    def test_wiki_section_is_quill_content_false(self):
        self.assertFalse(self.wikisection2.is_quill_content())
def test_wiki_section_get_quill_content(self):
self.assertEqual(self.wikisection1.get_quill_content(), quill.get_quill_text(self.wikisqtext))
def test_wiki_page_get_sections_number_3(self):
self.assertEqual(len(self.wikiPage1.wikisection_set.all()), 3)
def test_wiki_page_get_sections_number_0(self):
self.assertEqual(len(self.wikiPage2.wikisection_set.all()), 0)
avg_line_length: 53.47 | max_line_length: 248 | alphanum_fraction: 0.747522
count_classes: 4,948 | score_classes: 0.925379 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 397 | score_documentation: 0.074247
hexsha: bb8dd51539cdab9536825a57382500f121007993 | size: 524 | ext: py | lang: Python
path: api/applications/migrations/0046_partyonapplication_flags.py | repo: uktrade/lite-ap | head: 4e1a57956bd921992b4a6e2b8fbacbba5720960d | licenses: ["MIT"]
stars: 3 (2019-05-15T09:30:39.000Z – 2020-04-22T16:14:23.000Z) | issues: 85 (2019-04-24T10:39:35.000Z – 2022-03-21T14:52:12.000Z) | forks: 1 (2021-01-17T11:12:19.000Z – 2021-01-17T11:12:19.000Z)
# Generated by Django 2.2.17 on 2021-01-27 14:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("flags", "0010_flaggingrule_excluded_values"),
("applications", "0045_remove_denialmatchonapplication_denial"),
]
operations = [
migrations.AddField(
model_name="partyonapplication",
name="flags",
field=models.ManyToManyField(related_name="parties_on_application", to="flags.Flag"),
),
]
avg_line_length: 26.2 | max_line_length: 97 | alphanum_fraction: 0.65458
count_classes: 430 | score_classes: 0.820611 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 212 | score_documentation: 0.40458
hexsha: bb8e03933f18743e4789f0bc3df9d4b4ca88a87c | size: 2,205 | ext: py | lang: Python
path: Shivarth_Project(2).py | repo: rodincode/python | head: 5bcc53b6103e53b37a3e40635502cbca53fec43e | licenses: ["MIT"]
stars: 1 (2021-02-11T04:42:28.000Z – 2021-02-11T04:42:28.000Z) | issues: null | forks: null
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 20 13:26:46 2019
@author: LENOVO
"""
import pandas as pd
filename = r"C:\Users\LENOVO\Downloads\Tweets.csv"
df = pd.read_csv(filename,encoding="unicode_escape")
all_data = df.drop_duplicates(keep='first', inplace=False)
cleaned_data = all_data.dropna()
sentences = cleaned_data['text']
y = cleaned_data['airline_sentiment']
numerical_outcomes=y.replace(["positive","negative","neutral"],[1,0,2])
import nltk
nltk.download('stopwords')
from nltk.corpus import stopwords
eng_stops = set(stopwords.words('english'))
# Create word tokens
def removing_stop_words(sentences):
    # drop English stop words from each sentence and return the cleaned texts
    no_stops = []
    for sentence in sentences:
        words = [word for word in sentence.split() if word.lower() not in eng_stops]
        no_stops.append(' '.join(words))
    return no_stops
new_sentences = removing_stop_words(sentences)
from sklearn.model_selection import train_test_split # imports module from package
x_train, x_test, y_train, y_test = train_test_split(new_sentences, numerical_outcomes, test_size=0.25, random_state=1000)
from sklearn.feature_extraction.text import CountVectorizer
#from io import StringIO
vectorizer = CountVectorizer()
vectorizer.fit(x_train)
#docs_new_train = [ StringIO.StringIO(x) for x in x_train]
#docs_new_test = [ StringIO.StringIO(x) for x in x_test]
X_train = vectorizer.transform(x_train)
X_test = vectorizer.transform(x_test)
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression()
classifier.fit(X_train, y_train)
score = classifier.score(X_test, y_test)
print("\n Accuracy:", score) #model accuracy
#
#########################################################
###Predicting the sentiment of user input
#########################################################
txt = input("Enter expression: ")
test_sentences = [txt]
test_bag = vectorizer.transform(test_sentences)
result_label = classifier.predict(test_bag) #predicting the class
result_score = classifier.predict_proba(test_bag) #probabilities of each class
#
if result_label[0] == 1:
    print("Positive", result_score)
elif result_label[0] == 2:
    print("Neutral", result_score)
else:
    print("Negative", result_score)
avg_line_length: 22.05 | max_line_length: 105 | alphanum_fraction: 0.676644
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 714 | score_documentation: 0.32381
hexsha: bb8fa677b509d4b926b1c8e7fd1bc0528332c98d | size: 909 | ext: py | lang: Python
path: pinax/lms/activities/migrations/0007_migrate.py | repo: pinax/pinax-lms-activities | head: e73109038e1e0a8c71cc52f278e03bf645f3a16a | licenses: ["MIT"]
stars: 10 (2015-03-04T01:37:02.000Z – 2019-06-04T04:59:44.000Z) | issues: 8 (2016-01-16T14:58:16.000Z – 2020-06-22T20:30:14.000Z) | forks: 4 (2015-09-18T02:04:39.000Z – 2020-10-14T20:10:57.000Z)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
if not schema_editor.connection.alias == "default":
return
ActivityState = apps.get_model("pinax_lms_activities", "ActivityState")
ActivitySessionState = apps.get_model("pinax_lms_activities", "ActivitySessionState")
for activity_session_state in ActivitySessionState.objects.all():
activity_state = ActivityState.objects.get(
user=activity_session_state.user,
activity_key=activity_session_state.activity_key,
)
activity_session_state.activity_state = activity_state
activity_session_state.save()
class Migration(migrations.Migration):
dependencies = [
("pinax_lms_activities", "0006_auto_20160206_2029"),
]
operations = [
migrations.RunPython(forwards),
]
avg_line_length: 30.3 | max_line_length: 89 | alphanum_fraction: 0.716172
count_classes: 193 | score_classes: 0.212321 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 160 | score_documentation: 0.176018
hexsha: bb929e575f401e612607d5e41db450d40dd8a099 | size: 2,430 | ext: py | lang: Python
path: django-ingredient-field/dj_ingredient_field/widgets.py | repo: makspll/django-ingredient-field | head: 8f2067d1546a45d0c8b9c5dc551571b1f5ccf7b2 | licenses: ["MIT"]
stars: null | issues: 10 (2021-08-29T11:21:29.000Z – 2021-09-14T17:09:45.000Z) | forks: null
"""
The list of ingredient choices is quite large, so this module provides lazy loaded widgets which help
reduce loading times.
"""
from django import forms
class LazyWidget():
"""
Base class for lazy loaded, choice based widgets.
offloads option generation to the client, relies on correct endpoint setup.
"""
template_name="dj_ingredient_field/lazy_choice.html"
def __init__(self, lazy_endpoint, attrs=None):
""" Creates a new LazyWidget
Args:
lazy_endpoint (str): the relative or absolute url which retrieves json choices (corresponding to the underlying choices) in the following format: { values: [...[<db value>, <user readable value>]...]}
attrs: the attributes to pass down to the widget html. Defaults to None.
"""
# lazy endpoint should provide all of the option values as json
self.lazy_endpoint = lazy_endpoint
        if attrs is None:
attrs = {}
attrs.update({
"lazy_url": lazy_endpoint
})
super().__init__(attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['value'] = value
context['widget']['checked_attribute'] = list(self.checked_attribute.keys())[0]
context['widget']['allow_multiple_selected'] = self.allow_multiple_selected
return context
class Media:
js = ('dj_ingredient_field/js/lazy_dropdown.js',)
class LazySelectWidget(LazyWidget,forms.widgets.Select):
"""
Single option selection widget corresponding exactly to `Select` from django widgets.
Example usage (requires dj_ingredient_field.urls to be included into your urls somewhere)::
from django import forms
from dj_ingredient_field.widgets import LazySelectWidget
from django.urls import reverse_lazy
class IngredientQuantityAdminForm(forms.ModelForm):
class Meta:
model = IngredientQuantity
widgets = {
'ingredient': LazySelectWidget(reverse_lazy('dj_ingredient_field:ingredients'))
}
fields = '__all__' # required for Django 3.x
"""
pass
class LazySelectMultipleWidget(LazyWidget,forms.widgets.SelectMultiple):
"""
WIP
"""
pass
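# A minimal sketch of a matching lazy endpoint (the view name and choice
# values are hypothetical; the JSON shape follows the LazyWidget docstring
# above).
from django.http import JsonResponse
def example_ingredient_choices(request):
    """Return choices as {"values": [[db value, user readable value], ...]}."""
    return JsonResponse({"values": [["ING_1", "Flour"], ["ING_2", "Sugar"]]})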
avg_line_length: 33.287671 | max_line_length: 216 | alphanum_fraction: 0.635802
count_classes: 2,263 | score_classes: 0.931276 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,619 | score_documentation: 0.666255
hexsha: bb93be6fbacaf91fef33d78741e67ae984dd8a0a | size: 6,830 | ext: py | lang: Python
path: pycap/ethernet.py | repo: Blueswing/pycap | head: 19e579ec0c362939f1c7ebe87773e24e36ccdec2 | licenses: ["MIT"]
stars: null | issues: null | forks: null
import struct
import subprocess
from abc import ABCMeta
from functools import lru_cache
from typing import Union, Tuple, Optional
from .base import Header, Protocol
from .constants import *
ETH_TYPE_IP = 0x0800
ETH_TYPE_ARP = 0x0806
ETH_TYPE_RARP = 0x8035
ETH_TYPE_SNMP = 0x814c
ETH_TYPE_IPV6 = 0x86dd
ETH_TYPE_MPLS_UNICAST = 0x8847
ETH_TYPE_MPLS_MULTICAST = 0x8848
ETH_TYPE_PPPOE_DISCOVERY = 0x8863
ETH_TYPE_PPPOE_SESSION = 0x8864
_ETH_TYPE_MAP = {
ETH_TYPE_IP: PROTOCOL_IP,
ETH_TYPE_ARP: PROTOCOL_ARP,
ETH_TYPE_RARP: PROTOCOL_RARP,
ETH_TYPE_SNMP: PROTOCOL_SNMP,
ETH_TYPE_IPV6: PROTOCOL_IPV6,
ETH_TYPE_MPLS_UNICAST: PROTOCOL_MPLS,
ETH_TYPE_MPLS_MULTICAST: PROTOCOL_MPLS,
ETH_TYPE_PPPOE_DISCOVERY: PROTOCOL_PPPOE,
ETH_TYPE_PPPOE_SESSION: PROTOCOL_PPPOE
}
ETH_P_ALL = 0x3 # capture all ethernet types
ETH_P_NOT_SET = 0x0 # only receive
_ETH_II_FMT = '>BBBBBBBBBBBBH'
_ETH_802_3_FMT = '>BBBBBBBBBBBBHL'
"""
This packet structure describes the pseudo-header added by Linux system.
+---------------------------+
| Packet type |
| (2 Octets) |
+---------------------------+
| ARPHRD_ type |
| (2 Octets) |
+---------------------------+
| Link-layer address length |
| (2 Octets) |
+---------------------------+
| Link-layer address |
| (8 Octets) |
+---------------------------+
| Protocol type |
| (2 Octets) |
+---------------------------+
The packet type field is in network byte order (big-endian); it contains a value that is one of:
0, if the packet was specifically sent to us by somebody else;
1, if the packet was broadcast by somebody else;
2, if the packet was multicast, but not broadcast, by somebody else;
3, if the packet was sent to somebody else by somebody else;
4, if the packet was sent by us.
reference:
https://www.tcpdump.org/linktypes/LINKTYPE_LINUX_SLL.html
"""
_LINK_LAYER_PACKET_TYPE_MAP = {
0x0: 'unicast to us',
    0x1: 'broadcast to us',
0x2: 'multicast to us',
0x3: 'not sent to us',
0x4: 'sent by us'
}
_interfaces = None
def get_interface_names():
global _interfaces
if _interfaces is None:
import os
_interfaces = os.listdir('/sys/class/net')
return _interfaces
class MACAddress:
def __init__(self, mac: Union[int, bytes, str]):
if isinstance(mac, str):
self._mac_s = mac
tmp = mac.split(':')
if len(tmp) != 6:
raise Exception('invalid mac address')
mac_i = 0
for x in tmp:
mac_i <<= 8
mac_i += int(x, 16)
self._mac_i = mac_i
self._mac_b = self._mac_i.to_bytes(6, BYTE_ORDER_NET)
elif isinstance(mac, bytes):
self._mac_b = mac[:6]
self._mac_i = int.from_bytes(self._mac_b, BYTE_ORDER_NET)
self._mac_s = ':'.join('{:02x}'.format(a) for a in self._mac_b)
else:
self._mac_i = mac
self._mac_b = mac.to_bytes(6, BYTE_ORDER_NET)
self._mac_s = ':'.join('{:02x}'.format(a) for a in self._mac_b)
def as_int(self):
return self._mac_i
def as_bytes(self):
return self._mac_b
def as_str(self):
return self._mac_s
def __str__(self):
return f'MACAddress(\'{self._mac_s}\')'
def __repr__(self):
return self.__str__()
@lru_cache(10)
def get_mac_address(interface_name) -> MACAddress:
res = subprocess.getoutput(f'cat /sys/class/net/{interface_name}/address')
if len(res.split(':')) != 6:
raise Exception('MAC address not found')
return MACAddress(res)
def describe_eth_type(eth_type: int):
if eth_type in _ETH_TYPE_MAP:
return _ETH_TYPE_MAP[eth_type]
return f'Unknown {eth_type}'
def describe_packet_type(packet_type: int):
if packet_type in _LINK_LAYER_PACKET_TYPE_MAP:
return _LINK_LAYER_PACKET_TYPE_MAP[packet_type]
return f'Unknown {packet_type}'
class EthernetPacketInfo(Header):
def __init__(self):
self.net_if = ''
self.protocol = 0
self.src_mac = 0
self.packet_type = 0
self.address_type = 0
def describe(self) -> dict:
return {
'network_interface': self.net_if,
'protocol': describe_eth_type(self.protocol),
'src_mac': MACAddress(self.src_mac),
'packet_type': describe_packet_type(self.packet_type),
'address_type': self.address_type
}
def parse_ethernet_packet_info(raw_data):
net_if, proto, packet_type, address_type, mac = raw_data
obj = EthernetPacketInfo()
obj.net_if = net_if
obj.protocol = proto
obj.src_mac = int.from_bytes(mac, BYTE_ORDER_NET)
obj.packet_type = packet_type
obj.address_type = address_type
return obj
class EthernetHeader(Header, metaclass=ABCMeta):
def __init__(self, dst_mac, src_mac):
self.dst_mac = dst_mac
self.src_mac = src_mac
class EthernetIIHeader(EthernetHeader):
def __init__(self, dst_mac, src_mac):
super().__init__(dst_mac, src_mac)
self.eth_type = 0
@property
def upper_layer_protocol(self) -> Optional[str]:
return describe_eth_type(self.eth_type)
def describe(self) -> dict:
return {
'src_mac': MACAddress(self.src_mac),
'dst_mac': MACAddress(self.dst_mac),
'eth_type': describe_eth_type(self.eth_type)
}
class Ethernet802_3Header(EthernetHeader):
def __init__(self, dst_mac, src_mac):
super().__init__(dst_mac, src_mac)
self.length = 0
self.llc = 0
self.snap = 0
def describe(self) -> dict:
return {}
class Ethernet(Protocol):
def unpack_data(self, data: bytes) -> Tuple[Union[EthernetIIHeader, Ethernet802_3Header], bytes]:
"""
Ethernet II header, RFC 894
6 bytes destination MAC address
6 bytes source MAC address
2 bytes Ethernet type
46 ~ 1500 bytes payload
Ethernet 802.3 header, RFC 1042, IEEE 802
6 bytes destination MAC address
6 bytes source MAC address
2 bytes length
3 bytes LLC
5 bytes SNAP
38 ~ 1492 bytes payload
"""
header, payload = data[:14], data[14:]
res = struct.unpack(_ETH_II_FMT, header)
dst_mac = int.from_bytes(res[:6], BYTE_ORDER_NET)
src_mac = int.from_bytes(res[6:12], BYTE_ORDER_NET)
if res[12] > 1500:
hdr = EthernetIIHeader(dst_mac, src_mac)
hdr.eth_type = res[12]
else:
hdr = Ethernet802_3Header(dst_mac, src_mac)
# todo
return hdr, payload
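# A minimal, self-contained sketch (not part of the original file) of the
# Ethernet II vs. 802.3 distinction used in unpack_data() above: a 13th/14th
# header byte value above 1500 is an EtherType, at or below 1500 it is an
# 802.3 payload length. The sample frame bytes are made up for illustration.
def _demo_eth_type_vs_length():
    dst = bytes(6)
    src = bytes(6)
    frame_ii = dst + src + (0x0800).to_bytes(2, 'big')   # EtherType: IPv4
    frame_802_3 = dst + src + (46).to_bytes(2, 'big')    # length: 46 bytes
    for frame in (frame_ii, frame_802_3):
        field = int.from_bytes(frame[12:14], 'big')
        kind = 'Ethernet II' if field > 1500 else 'IEEE 802.3'
        print(kind, hex(field))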
avg_line_length: 28.22314 | max_line_length: 101 | alphanum_fraction: 0.615959
count_classes: 3,570 | score_classes: 0.522694 | count_generators: 0 | score_generators: 0 | count_decorators: 363 | score_decorators: 0.053148 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,938 | score_documentation: 0.283748
hexsha: bb949682762af25e0d9e85b1dcc96637afca8f13 | size: 60 | ext: py | lang: Python
path: doc/example.py | repo: jtpio/jupyterlab-python-bytecode | head: 23c3d4e228829743b65685d8796cf1bc95fd88ba | licenses: ["BSD-3-Clause"]
stars: 58 (2018-09-30T00:08:57.000Z – 2022-01-15T20:54:02.000Z) | issues: 7 (2018-09-30T09:55:32.000Z – 2021-04-20T09:50:36.000Z) | forks: 4 (2019-03-03T07:18:50.000Z – 2022-02-27T01:19:12.000Z)
for i in range(10):
pass
j = 0
while j < 10:
j += 1
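# A small sketch (not part of the original file): the standard-library dis
# module prints the bytecode that loops like the one above compile to.
import dis
dis.dis("j = 0\nwhile j < 10:\n    j += 1")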
avg_line_length: 10 | max_line_length: 19 | alphanum_fraction: 0.483333
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 0 | score_documentation: 0
hexsha: bb94c39fa96235061eee688defe888eb7cca4d44 | size: 238 | ext: py | lang: Python
path: utils/schedulers.py | repo: Shawn-Guo-CN/EmergentNumerals | head: ef9786e5bd6c8c456143ad305742340e510f5edb | licenses: ["MIT"]
stars: 2 (2019-08-16T21:37:55.000Z – 2019-08-18T18:11:28.000Z) | issues: null | forks: null
from utils.conf import args
def tau_scheduler(acc):
return args.tau
# if acc > 0.95:
# return 0.1
# elif acc > 0.9:
# return 0.5
# elif acc > 0.8:
# return 1.0
# else:
# return args.tau
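# A minimal sketch of the accuracy-based schedule described in the comments
# above (thresholds taken from those comments; the fallback stays args.tau):
def stepped_tau_scheduler(acc):
    if acc > 0.95:
        return 0.1
    elif acc > 0.9:
        return 0.5
    elif acc > 0.8:
        return 1.0
    return args.tau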
avg_line_length: 19.833333 | max_line_length: 27 | alphanum_fraction: 0.512605
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 126 | score_documentation: 0.529412
hexsha: bb94df94114e25069a2e0c94c2a3c7a8d9c17d5d | size: 408 | ext: py | lang: Python
path: pymongo_cursor_json_encoder/__init__.py | repo: rogervila/python-mongo-json-encoder | head: 63dfc9bbe6702c6bacca8b6aa04b17a6f6fbd918 | licenses: ["MIT"]
stars: 1 (2021-01-14T16:08:45.000Z – 2021-01-14T16:08:45.000Z) | issues: null | forks: null
from datetime import datetime, date
from isodate import datetime_isoformat
from bson import ObjectId
from json import JSONEncoder
class MongoJSONEncoder(JSONEncoder):
def default(self, output):
if isinstance(output, (datetime, date)):
return datetime_isoformat(output)
if isinstance(output, ObjectId):
return str(output)
return super().default(output)
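# Minimal usage sketch (document contents are hypothetical):
# import json
# doc = {"_id": ObjectId(), "created": datetime.now()}
# json.dumps(doc, cls=MongoJSONEncoder)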
avg_line_length: 25.5 | max_line_length: 48 | alphanum_fraction: 0.708333
count_classes: 275 | score_classes: 0.67402 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 0 | score_documentation: 0
hexsha: bb965bb20b20b11f20bb0fdf749f18c3050f9707 | size: 893 | ext: py | lang: Python
path: Intermediate+/57/notes/server.py | repo: Matthew1906/100DaysOfPython | head: 94ffff8f5535ce5d574f49c0d7971d64a4575aad | licenses: ["MIT"]
stars: 1 (2021-12-25T02:19:18.000Z – 2021-12-25T02:19:18.000Z) | issues: null | forks: 1 (2021-11-25T10:31:47.000Z – 2021-11-25T10:31:47.000Z)
from flask import Flask, render_template
import random, datetime as dt, requests
app = Flask(__name__)
# Jinja = templating language
@app.route('/')
def home():
random_number = random.randint(1,3)
return render_template("index.html", random_number = random_number, year = dt.datetime.now().year)
@app.route('/guess/<name>')
def guess(name):
gender_response = requests.get('https://api.genderize.io', params = {'name':name}).json()['gender']
age_response = requests.get('https://api.agify.io', params = {'name':name}).json()['age']
return render_template("guess.html", name = name ,gender = gender_response, age = age_response)
@app.route('/blog/<num>')
def blog(num):
blogs = requests.get('https://api.npoint.io/7bce33b15a477a7a6c81').json()
return render_template("blog.html", blogs = blogs, idx = int(num))
if __name__ == '__main__':
app.run(debug=True)
avg_line_length: 37.208333 | max_line_length: 103 | alphanum_fraction: 0.693169
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 701 | score_decorators: 0.784994 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 222 | score_documentation: 0.2486
hexsha: bb9676589fb7e0a374aed04eb4cfbe0922559c82 | size: 3,041 | ext: py | lang: Python
path: game.py | repo: MrEliptik/game_of_life | head: e0ff937ac1cf1a879e20c109a69700c77db71fcc | licenses: ["MIT"]
stars: null | issues: null | forks: null
import pygame
import random
import time
import numpy as np
WHITE = 255, 255, 255
BLACK = 0, 0, 0
size = width, height = 480, 320
row = 32
col = 48
cell_width = (width//col)
cell_height = (height//row)
font_size = 60
FPS = 30
LIVE_P_MAX = 0.5
LIVE_P_MIN = 0.01
_grid = np.full((row, col), None)
screen = None
refresh_start_time = 0
def init_screen():
pygame.init()
screen = pygame.display.set_mode(size, pygame.FULLSCREEN)
screen.fill(BLACK)
return screen
def refresh():
pygame.display.update()
def display(grid):
screen.fill(BLACK)
for i in range(row):
for j in range(col):
if grid[i][j] == 1:
# left, top, width, height
pygame.draw.rect(screen, WHITE, (j*cell_width, i*cell_height, cell_width, cell_height), False)
refresh()
def random_init_grid(grid):
for i in range(row):
for j in range(col):
p = random.random() * (LIVE_P_MAX - LIVE_P_MIN) + LIVE_P_MIN
if(random.random() < p): grid[i][j] = 1
else: grid[i][j] = None
def get_cell(grid, cell):
    # return None for out-of-bounds cells; an explicit bounds check is used
    # because negative Python indices would silently wrap around the grid
    i, j = cell
    if 0 <= i < row and 0 <= j < col:
        return grid[i][j]
    return None
def get_neighbors(grid, cell):
x, y = cell
return (get_cell(grid, (x, y-1)), get_cell(grid, (x-1, y-1)),
get_cell(grid, (x-1, y)), get_cell(grid, (x-1, y+1)),
get_cell(grid, (x, y+1)), get_cell(grid, (x+1, y+1)),
get_cell(grid, (x+1, y)), get_cell(grid, (x+1, y-1)))
def get_living_neighbors(neighbors):
living_count = 0
for neighbor in neighbors:
if neighbor == 1: living_count += 1
return living_count
def update_grid(grid):
new_grid = np.full((row, col), None)
for i in range(row):
for j in range(col):
neighbors = get_neighbors(grid, (i,j))
living = get_living_neighbors(neighbors)
            # Any live cell with two or three live neighbors survives
if ((living == 2 or living == 3) and grid[i][j] == 1): new_grid[i][j] = 1
# Any dead cell with three live neighbors becomes a live cell
if (living == 3 and grid[i][j] == None): new_grid[i][j] = 1
    # All other cells die (new_grid is initialized to None)
return new_grid
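# A small self-check sketch (not part of the original file): a horizontal
# "blinker" of three live cells should become vertical after one update step.
# Call _check_blinker() manually to verify the rules above.
def _check_blinker():
    test = np.full((row, col), None)
    test[5][4] = test[5][5] = test[5][6] = 1
    stepped = update_grid(test)
    assert stepped[4][5] == 1 and stepped[5][5] == 1 and stepped[6][5] == 1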
if __name__ == "__main__":
refresh_start_time = time.time()
running = True
inpt = "y"
screen = init_screen()
random_init_grid(_grid)
display(_grid)
while(running):
start = time.time()
if ((time.time() - refresh_start_time) > 60):
random_init_grid(_grid)
display(_grid)
refresh_start_time = time.time()
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
elif event.type == pygame.MOUSEBUTTONUP:
random_init_grid(_grid)
display(_grid)
# Copy new grid
_grid[:] = update_grid(_grid)
display(_grid)
while(time.time() - start < (1/FPS)):
pass
avg_line_length: 25.771186 | max_line_length: 110 | alphanum_fraction: 0.577442
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 219 | score_documentation: 0.072016
hexsha: bb9746ac2f24608c4e049924c7fcb26f2cfddb65 | size: 794 | ext: py | lang: Python
path: azext_iot/monitor/models/target.py | repo: lucadruda/azure-iot-cli-extension | head: 9d2f677d19580f8fbac860e079550167e743a237 | licenses: ["MIT"]
stars: 79 (2017-09-25T19:29:17.000Z – 2022-03-30T20:55:57.000Z) | issues: 305 (2018-01-17T01:12:10.000Z – 2022-03-23T22:38:11.000Z) | forks: 69 (2017-11-14T00:30:46.000Z – 2022-03-01T17:11:45.000Z)
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
class Target:
def __init__(
self,
hostname: str,
path: str,
partitions: list,
auth, # : uamqp.authentication.SASTokenAsync,
):
self.hostname = hostname
self.path = path
self.auth = auth
self.partitions = partitions
self.consumer_group = None
def add_consumer_group(self, consumer_group: str):
self.consumer_group = consumer_group
avg_line_length: 33.083333 | max_line_length: 94 | alphanum_fraction: 0.492443
count_classes: 431 | score_classes: 0.542821 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 394 | score_documentation: 0.496222
hexsha: bb9985f1334655b6b9bebcdea894cb35e74ef811 | size: 8,551 | ext: py | lang: Python
path: py2dm/_parser/_pyparser.py | repo: leonhard-s/Py2DM | head: a2c4c193dfa4494f2c9117f580f99f0dbdc579fc | licenses: ["MIT"]
stars: 6 (2021-01-28T10:59:21.000Z – 2022-03-30T08:00:06.000Z) | issues: 7 (2020-10-28T13:01:13.000Z – 2022-03-08T19:21:05.000Z) | forks: null
"""Python implementation of the 2DM card parser."""
from typing import IO, List, Optional, Tuple, Union
from ..errors import CardError, FormatError, ReadError
_MetadataArgs = Tuple[
int, # num_nodes
int, # num_elements
int, # num_node_strings
Optional[str], # name
Optional[int], # num_materials_per_elem
int, # nodes start
int, # elements start
int] # node strings start
_ELEMENT_CARDS = [
'E2L',
'E3L',
'E3T',
'E4Q',
'E6T',
'E8Q',
'E9Q'
]
def parse_element(line: str, allow_float_matid: bool = True,
allow_zero_index: bool = False
) -> Tuple[int, Tuple[int, ...], Tuple[Union[int, float], ...]]:
"""Parse a string into an element.
This converts a valid element definition string into a tuple that
can be used to instantiate the corresponding
:class:`py2dm.Element` subclass.
"""
# Parse line
chunks = line.split('#', maxsplit=1)[0].split()
# Length (generic)
if len(chunks) < 4:
raise CardError('Element definitions require at least 3 fields '
f'(id, node_1, node_2), got {len(chunks)-1}')
# 2DM card
card = chunks[0]
if not _card_is_element(card):
raise CardError(f'Invalid element card "{card}"')
# Length (card known)
num_nodes = _nodes_per_element(card)
assert num_nodes > 0
if len(chunks) < num_nodes + 2:
raise CardError(
f'{card} element definition requires at least {num_nodes-1} '
f'fields (id, node_1, ..., node_{num_nodes-1}), got {len(chunks)-1}')
# Element ID
id_ = int(chunks[1])
if id_ <= 0 and not (id_ == 0 and allow_zero_index):
raise FormatError(f'Invalid element ID: {id_}')
# Node IDs
nodes: List[int] = []
for node_str in chunks[2:num_nodes+2]:
node_id = int(node_str)
if node_id < 0 and not (node_id == 0 and allow_zero_index):
raise FormatError(f'Invalid node ID: {node_id}')
nodes.append(node_id)
# Material IDs
materials: List[Union[int, float]] = []
for mat_str in chunks[num_nodes+2:]:
mat_id: Union[int, float]
try:
mat_id = int(mat_str)
except ValueError as err:
if not allow_float_matid:
raise err from err
mat_id = float(mat_str)
materials.append(mat_id)
return id_, tuple(nodes), tuple(materials)
def parse_node(line: str, allow_zero_index: bool = False
) -> Tuple[int, float, float, float]:
"""Parse a string into a node.
This converts a valid node definition string into a tuple that can
    be used to instantiate the corresponding :class:`py2dm.Node`
object.
"""
# Parse line
chunks = line.split('#', maxsplit=1)[0].split()
# Length
if len(chunks) < 5:
raise CardError(f'Node definitions require at least 4 fields '
f'(id, x, y, z), got {len(chunks)-1}')
# 2DM card
card = chunks[0]
if card != "ND":
raise CardError(f'Invalid node card "{card}"')
# Node ID
id_ = int(chunks[1])
if id_ <= 0 and not (id_ == 0 and allow_zero_index):
raise FormatError(f'Invalid node ID: {id_}')
# Coordinates
pos_x, pos_y, pos_z = tuple((float(s) for s in chunks[2:5]))
# TODO: Warn about unused fields
return id_, pos_x, pos_y, pos_z
def parse_node_string(line: str, allow_zero_index: bool = False,
nodes: Optional[List[int]] = None
) -> Tuple[List[int], bool, str]:
"""Parse a string into a node string.
This converts a valid node string definition string into a tuple
that can be used to instantiate the corresponding
:class:`py2dm.NodeString`.
    As node strings can span multiple lines, the node string should only
be created once the `done` flag (second entry in the returned
tuple) is set to True.
"""
# Set default value
if nodes is None:
nodes = []
# Parse line
chunks = line.split('#', maxsplit=1)[0].split()
# Length
if len(chunks) < 2:
raise CardError('Node string definitions require at least 1 field '
f'(node_id), got {len(chunks)-1}')
# 2DM card
card = chunks[0]
if card != 'NS':
raise CardError(f'Invalid node string card "{card}"')
# Node IDs
is_done: bool = False
name = ''
for index, node_str in enumerate(chunks[1:]):
node_id = int(node_str)
if node_id == 0 and not allow_zero_index:
raise FormatError(f'Invalid node ID: {node_id}')
if node_id < 0:
# End of node string
is_done = True
nodes.append(abs(node_id))
# Check final identifier
if index+2 < len(chunks):
name = chunks[index+2]
break
nodes.append(node_id)
return nodes, is_done, name
def scan_metadata(file_: IO[str], filename: str,
allow_zero_index: bool = False) -> _MetadataArgs:
num_materials_per_elem: Optional[int] = None
name: Optional[str] = None
num_nodes = 0
num_elements = 0
num_node_strings = 0
mesh2d_found: bool = False
# Consecutive numbering validation
last_node = -1
last_element = -1
# File seek offsets
nodes_start = 0
elements_start = 0
node_strings_start = 0
file_.seek(0)
for index, line_raw in enumerate(iter(file_.readline, '')):
# Skip blank lines
line = line_raw.split('#', maxsplit=1)[0].strip()
if not line:
continue
if not mesh2d_found:
if line.startswith('MESH2D'):
mesh2d_found = True
else:
raise ReadError(
'File is not a 2DM mesh file', filename)
if line.startswith('ND'):
id_ = int(line.split(maxsplit=2)[1])
if id_ == 0 and not allow_zero_index:
raise FormatError(
'Zero index encountered in non-zero-indexed file',
filename, index+1)
num_nodes += 1
if last_node != -1 and last_node+1 != id_:
raise FormatError('Node IDs have holes',
filename, index+1)
last_node = id_
if nodes_start == 0:
nodes_start = file_.tell() - len(line_raw) - 1
continue
if line.split(maxsplit=1)[0] in _ELEMENT_CARDS:
id_ = int(line.split(maxsplit=2)[1])
if id_ == 0 and not allow_zero_index:
raise FormatError(
'Zero index encountered in non-zero-indexed file',
filename, index+1)
num_elements += 1
if last_element != -1 and last_element+1 != id_:
raise FormatError('Element IDs have holes',
filename, index+1)
last_element = id_
if elements_start == 0:
elements_start = file_.tell() - len(line_raw) - 1
continue
if (line.startswith('NS')
and '-' in line.split('#', maxsplit=1)[0]):
num_node_strings += 1
if node_strings_start == 0:
node_strings_start = file_.tell() - len(line_raw) - 1
elif line.startswith('MESHNAME') or line.startswith('GM'):
# NOTE: This fails for meshes with double quotes in their
# mesh name, but that is an unreasonable thing to want to
# do anyway. "We'll fix it later" (tm)
chunks = line.split('"', maxsplit=2)
if len(chunks) < 2:
chunks = line.split(maxsplit=2)
name = chunks[1]
elif line.startswith('NUM_MATERIALS_PER_ELEM'):
num_materials_per_elem = int(line.split(maxsplit=2)[1])
if not mesh2d_found:
raise ReadError('MESH2D tag not found', filename)
return (num_nodes, num_elements, num_node_strings, name,
num_materials_per_elem, nodes_start, elements_start,
node_strings_start)
def _card_is_element(card: str) -> bool:
return card in ('E2L', 'E3L', 'E3T', 'E4Q', 'E6T', 'E8Q', 'E9Q')
def _nodes_per_element(card: str) -> int:
if card == 'E2L':
return 2
if card in ('E3L', 'E3T'):
return 3
if card == 'E4Q':
return 4
if card == 'E6T':
return 6
if card == 'E8Q':
return 8
if card == 'E9Q':
return 9
return -1
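# Minimal usage sketch (the card strings are made up to match the formats
# documented in the docstrings above):
# parse_node('ND 1 0.5 1.5 0.0')            # -> (1, 0.5, 1.5, 0.0)
# parse_element('E3T 4 1 2 3 1')            # -> (4, (1, 2, 3), (1,))
# parse_node_string('NS 1 2 -3 culvert')    # -> ([1, 2, 3], True, 'culvert')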
avg_line_length: 34.479839 | max_line_length: 82 | alphanum_fraction: 0.571161
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 2,413 | score_documentation: 0.282189
hexsha: bb9b59ff879eaecfcc8190f0acec7f2068109681 | size: 1,024 | ext: py | lang: Python
path: src/commands/refactor/refactor_preview.py | repo: PranjalPansuriya/JavaScriptEnhancements | head: 14af4162e86585153cbd4614ad96dff64a0d3192 | licenses: ["MIT"]
stars: 690 (2017-04-11T06:45:01.000Z – 2022-03-21T23:20:29.000Z) | issues: 74 (2017-11-22T18:05:26.000Z – 2021-05-05T16:25:31.000Z) | forks: 42 (2017-04-13T10:22:40.000Z – 2021-05-27T19:19:04.000Z)
import sublime, sublime_plugin
from ...libs import util
class RefactorPreview():
view = None
title = None
window = None
def __init__(self, title):
self.title = title
self.window = sublime.active_window()
self.view = None
for v in self.window.views():
if v.name() == self.title:
self.view = v
self.view.run_command("javascript_enhancements_erase_text_view")
self.window.focus_view(self.view)
break
if not self.view:
self.window.focus_group(1)
self.view = self.window.new_file()
self.view.set_name(self.title)
self.view.set_syntax_file('Packages/Default/Find Results.hidden-tmLanguage')
self.view.set_scratch(True)
def append_text(self, text):
if self.view:
self.view.run_command("javascript_enhancements_append_text_view", args={"text": text})
@staticmethod
def close(title):
window = sublime.active_window()
for v in window.views():
if v.name() == title:
v.close()
break
avg_line_length: 27.675676 | max_line_length: 92 | alphanum_fraction: 0.657227
count_classes: 967 | score_classes: 0.944336 | count_generators: 0 | score_generators: 0 | count_decorators: 159 | score_decorators: 0.155273 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 138 | score_documentation: 0.134766
hexsha: bb9bcfeb2dd370b17ad9dca9fef16bbcfa8e0721 | size: 583 | ext: py | lang: Python
path: 2020/08-1.py | repo: matteodelabre/advent-of-code | head: a6f51222b52f948cec6aa94cb7c50bcfd4f53dc1 | licenses: ["CC0-1.0"]
stars: 1 (2021-12-02T05:16:11.000Z – 2021-12-02T05:16:11.000Z) | issues: null | forks: null
import re
instruction_regex = re.compile(r'(nop|acc|jmp) ([+-][0-9]+)')
program = []
try:
while True:
name, arg = instruction_regex.match(input()).groups()
program.append([name, arg, 0])
except EOFError:
pass
pc = 0
accu = 0
while program[pc][2] == 0:
program[pc][2] = 1
if program[pc][0] == 'nop':
pc += 1
elif program[pc][0] == 'acc':
accu += int(program[pc][1])
pc += 1
elif program[pc][0] == 'jmp':
pc += int(program[pc][1])
else:
print('invalid instruction:', program[pc][0])
print(accu)
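# Usage sketch: the program reads the puzzle input from stdin, e.g.
#   python3 08-1.py < input.txt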
avg_line_length: 19.433333 | max_line_length: 61 | alphanum_fraction: 0.535163
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 66 | score_documentation: 0.113208
hexsha: bb9be141e5411cc766db75440f10f9dcc54c52eb | size: 736 | ext: py | lang: Python
path: Apps/aulas/migrations/0017_alter_planejamento_data_envio_alter_resposta_data.py | repo: arthur-asilva/rc_plataforma | head: 7e6f7eb7f9a3b9089c02db98518b60d8e481ce4c | licenses: ["BSD-2-Clause"]
stars: null | issues: null | forks: null
# Generated by Django 4.0 on 2022-01-14 13:45
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('aulas', '0016_alter_planejamento_data_envio_alter_resposta_data'),
]
operations = [
migrations.AlterField(
model_name='planejamento',
name='data_envio',
field=models.DateTimeField(blank=True, default=datetime.datetime(2022, 1, 14, 10, 45, 11, 659992), null=True),
),
migrations.AlterField(
model_name='resposta',
name='data',
field=models.DateTimeField(blank=True, default=datetime.datetime(2022, 1, 14, 10, 45, 11, 660638), null=True),
),
]
avg_line_length: 29.44 | max_line_length: 122 | alphanum_fraction: 0.629076
count_classes: 629 | score_classes: 0.85462 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 150 | score_documentation: 0.203804
hexsha: bba2237b5b7c85261e503e0da56569ea8a05972a | size: 5,018 | ext: py | lang: Python
path: models/lenet.py | repo: calinbiberea/imperial-individual-project | head: 86f224f183b8348d21b4c7a4aed408cd1ca41df1 | licenses: ["MIT"]
stars: null | issues: null | forks: null
'''
A LeNet-5 in PyTorch.
Reference:
Yann LeCun et al, 1998.
Gradient-Based Learning Applied to Document Recognition.
[Paper] Available at: <http://yann.lecun.com/exdb/publis/pdf/lecun-01a.pdf>
[Accessed 26 December 2021].
'''
import torch.nn as nn
import torch.nn.functional as F
# Defining the Network (LeNet-5)
class LeNet5(nn.Module):
def __init__(self):
super(LeNet5, self).__init__()
# A convolutional layer (in LeNet-5, 32x32 images are given as input,
# so we need padding of 2 for MNIST), followed by max-pooling
self.conv1 = nn.Conv2d(
in_channels=1,
out_channels=32,
kernel_size=5,
stride=1,
padding=2,
bias=True,
)
self.max_pool_1 = nn.MaxPool2d(kernel_size=2)
# Second layer
self.conv2 = nn.Conv2d(
in_channels=32,
out_channels=64,
kernel_size=5,
stride=1,
padding=2,
bias=True,
)
self.max_pool_2 = nn.MaxPool2d(kernel_size=2)
# Third layer (fully connected layer)
self.fc1 = nn.Linear(
64 * 7 * 7, 1024
)
# Output layer
self.fc2 = nn.Linear(
1024, 10
)
def forward(self, x):
input = x
# First layer (ReLU activation), max-pool with 2x2 grid
output = self.conv1(input)
output = F.relu(output)
output = self.max_pool_1(output)
# Second Layer
output = self.conv2(output)
output = F.relu(output)
output = self.max_pool_2(output)
        # Flatten to match the first fully connected layer (64 * 7 * 7), see
        # https://stackoverflow.com/a/42482819/7551231
output = output.view(-1, 64 * 7 * 7)
# Third layer
output = self.fc1(output)
output = F.relu(output)
# Output
output = self.fc2(output)
return output
# Returns an array of mostly the shape of the features
def feature_list(self, x):
input = x
out_list = []
# First layer (ReLU activation), max-pool with 2x2 grid
output = self.conv1(input)
output = F.relu(output)
output = self.max_pool_1(output)
out_list.append(output)
# Second Layer
output = self.conv2(output)
output = F.relu(output)
output = self.max_pool_2(output)
out_list.append(output)
        # Flatten to match the first fully connected layer (64 * 7 * 7), see
        # https://stackoverflow.com/a/42482819/7551231
output = output.view(-1, 64 * 7 * 7)
# Third layer
output = self.fc1(output)
output = F.relu(output)
out_list.append(output)
# Output
output = self.fc2(output)
return output, out_list
# Returns a feature extracted at a specific layer
def intermediate_forward(self, x, layer_index):
input = x
if (layer_index == 0):
# First layer (ReLU activation), max-pool with 2x2 grid
output = self.conv1(input)
output = F.relu(output)
output = self.max_pool_1(output)
return output
elif (layer_index == 1):
# First layer (ReLU activation), max-pool with 2x2 grid
output = self.conv1(input)
output = F.relu(output)
output = self.max_pool_1(output)
# Second Layer
output = self.conv2(output)
output = F.relu(output)
output = self.max_pool_2(output)
return output
elif (layer_index == 2):
# First layer (ReLU activation), max-pool with 2x2 grid
output = self.conv1(input)
output = F.relu(output)
output = self.max_pool_1(output)
# Second Layer
output = self.conv2(output)
output = F.relu(output)
output = self.max_pool_2(output)
            # Flatten to match the first fully connected layer (64 * 7 * 7), see
            # https://stackoverflow.com/a/42482819/7551231
output = output.view(-1, 64 * 7 * 7)
# Third layer
output = self.fc1(output)
output = F.relu(output)
return output
raise Exception('Invalid layer index')
# Returns features of penultimate layer (should coincide with 3rd layer for lenet)
def penultimate_forward(self, x):
input = x
# First layer (ReLU activation), max-pool with 2x2 grid
output = self.conv1(input)
output = F.relu(output)
output = self.max_pool_1(output)
# Second Layer
output = self.conv2(output)
output = F.relu(output)
output = self.max_pool_2(output)
        # Flatten to match the first fully connected layer (64 * 7 * 7), see
        # https://stackoverflow.com/a/42482819/7551231
output = output.view(-1, 64 * 7 * 7)
# Third layer
output = self.fc1(output)
output = F.relu(output)
return output
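# A minimal shape-check sketch (not part of the original file): a padded
# 28x28 MNIST-sized input should produce 10 class logits.
if __name__ == '__main__':
    import torch
    model = LeNet5()
    logits = model(torch.randn(1, 1, 28, 28))
    assert logits.shape == (1, 10)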
avg_line_length: 28.191011 | max_line_length: 86 | alphanum_fraction: 0.559785
count_classes: 4,698 | score_classes: 0.93623 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,497 | score_documentation: 0.298326
hexsha: bba24784bd9ee9a55803728f5cef4460717a8929 | size: 7,228 | ext: py | lang: Python
path: tests/env/experiments_tools_2.py | repo: weifanjiang/CSSPy | head: 361d18d7b9c08bcff11a18524a718b3522c48786 | licenses: ["MIT"]
stars: 3 (2018-10-04T14:00:50.000Z – 2021-12-11T08:57:26.000Z) | issues: null | forks: null
import sys
sys.path.insert(0, '..')
import numpy as np
import pandas as pd
from itertools import combinations
from scipy.stats import binom
import scipy.special
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from IPython.display import display, HTML
#sys.path.append("../")
from FrameBuilder.eigenstepsbuilder import *
from decimal import *
from copy import deepcopy
import matplotlib.lines as mlines
import matplotlib.transforms as mtransforms
from env.numerical_analysis_dpp import *
def plot_results_of_multi_experiments(N,real_dim,r,T_,k_,mean,cov_,static_list_):
print(np.diag(cov_))
    lv_scores_vector = k_/real_dim*np.ones(real_dim)  # The vector of (uniform) leverage scores
T = deepcopy(T_) # The number of experiments
versions_number = 1
k = deepcopy(k_)
cov_1 = deepcopy(cov_)
    static_list = deepcopy(static_list_)
    volume_sampling_fro_list = []
    projection_dpp_fro_list = []
    #derandomized_projection_dpp_fro_list = []
    greedy_selection_fro_list = []
    effective_kernel_fro_list = []
    p_eff_list = []
    cardinal_list = []
    avoiding_proba_list = []
for t in range(T):
print("t")
print(t)
#print("real_dim")
#print(real_dim)
random_cardinal_list = list(np.random.choice(static_list, 1))
NAL_1 = Numrerical_Analysis_DPP(N,real_dim,r,k,versions_number,mean,cov_1,lv_scores_vector,random_cardinal_list)
projection_DPP_res_fro_1 = NAL_1.get_expected_error_fro_for_projection_DPP()
volume_sampling_res_fro_1 = NAL_1.get_expected_error_fro_for_volume_sampling()
#derandomized_DPP_res_fro_1 = NAL_1.get_error_fro_for_derandomized_projection_DPP_selection()
greedy_selection_res_fro_1 = NAL_1.get_error_fro_for_deterministic_selection()
effective_kernel_sampling_res_fro_1 = NAL_1.get_expected_error_fro_for_effective_kernel_sampling()
# upper_tight_bound_projection_DPP_res_fro_1 = NAL_1.get_tight_upper_bound_error_fro_for_projection_DPP()
# alpha_sum_res_1 = NAL_1.get_alpha_sum_k_leverage_scores(1)
# sum_U_res_1 = NAL_1.get_sum_k_leverage_scores()
p_eff_res_1 = NAL_1.get_p_eff_leverage_scores()
avoiding_proba_res_1 = NAL_1.get_avoiding_probability()
avoiding_proba_list.append(avoiding_proba_res_1)
greedy_selection_fro_list.append(greedy_selection_res_fro_1)
#derandomized_projection_dpp_fro_list.append(derandomized_DPP_res_fro_1)
effective_kernel_fro_list.append(list(effective_kernel_sampling_res_fro_1))
volume_sampling_fro_list.append(list(volume_sampling_res_fro_1))
projection_dpp_fro_list.append(list(projection_DPP_res_fro_1))
p_eff_list.append(list(p_eff_res_1))
cardinal_list.append(random_cardinal_list)
print("next")
    flattened_cardinal_list = [item for items in cardinal_list for item in items]
    flattened_p_eff_list = [item for items in p_eff_list for item in items]
theoretical_projection_DPP_error_bound_list = from_p_eff_to_error_bound(flattened_cardinal_list,k,real_dim)
plt.scatter(cardinal_list,projection_dpp_fro_list,label="Projection DPP Sampling",marker='_')
plt.scatter(cardinal_list,volume_sampling_fro_list,label="Volume Sampling",marker='_')
#plt.scatter(cardinal_list,derandomized_projection_dpp_fro_list,label="derandomized projection dpp", marker='_')
plt.scatter(cardinal_list,greedy_selection_fro_list,label = "greedy", marker='_',color = 'purple')
plt.scatter(cardinal_list,theoretical_projection_DPP_error_bound_list,color='red',marker='_',label="Theoretical bound for Projection DPP Sampling")
plt.xlabel(r'$p$', fontsize=12)
plt.ylabel(r'$\mathbb{E}_{S \sim \mathcal{P}}(\|X- \pi_{S}(X)\|_{Fr}^{2})$', fontsize=12)
plt.title('The case k = '+str(k)+', '+str(T)+' matrices, flat spectrum after k+1')
#plt.xticks(map(int, Y_cov[:-1]))
plt.legend(bbox_to_anchor=(1.04,1), loc="upper left")
plt.show()
theoretical_effective_kernel_error_bound_list = from_p_eff_to_error_bound_2(flattened_p_eff_list,k,real_dim)
#theoretical_effective_kernel_error_bound_list = from_p_eff_to_error_bound(flattened_p_eff_list,k,real_dim)
plt.scatter(p_eff_list,effective_kernel_fro_list,label="Effective Kernel Sampling",marker='_')
plt.scatter(p_eff_list,volume_sampling_fro_list,label="Volume Sampling",marker='_')
#plt.scatter(p_eff_list,derandomized_projection_dpp_fro_list,label="derandomized projection dpp", marker='_')
plt.scatter(p_eff_list,theoretical_effective_kernel_error_bound_list,color='red',marker='_',label="Theoretical bound for Effective Kernel Sampling")
plt.scatter(p_eff_list,greedy_selection_fro_list,label = "greedy", marker='_',color = 'purple')
plt.xlabel(r'$p_{eff}(\frac{1}{2})$', fontsize=12)
    plt.ylabel(r'$\mathbb{E}_{S \sim \mathcal{P}}(\|X- \pi_{S}(X)\|_{Fr}^{2})$', fontsize=12)
plt.title('The case k = '+str(k)+', '+str(T)+' matrices, flat spectrum after k+1')
plt.legend(bbox_to_anchor=(1.04,1), loc="upper left")
plt.show()
plt.scatter(cardinal_list,projection_dpp_fro_list,label="Projection DPP Sampling",marker='_')
plt.scatter(cardinal_list,volume_sampling_fro_list,label="Volume Sampling",marker='_')
#plt.scatter(cardinal_list,derandomized_projection_dpp_fro_list,label="derandomized projection dpp", marker='_')
plt.scatter(cardinal_list,theoretical_projection_DPP_error_bound_list,color='red',marker='_',label="Theoretical bound for Projection DPP Sampling")
plt.xlabel(r'$p$', fontsize=12)
plt.ylabel(r'$\mathbb{E}_{S \sim \mathcal{P}}(\|X- \pi_{S}(X)\|_{Fr}^{2})$', fontsize=12)
plt.title('The case k = '+str(k)+', '+str(T)+' matrices, flat spectrum after k+1')
#plt.xticks(map(int, Y_cov[:-1]))
plt.legend(bbox_to_anchor=(1.04,1), loc="upper left")
plt.show()
plt.scatter(p_eff_list,effective_kernel_fro_list,label="Effective Kernel Sampling",marker='_')
plt.scatter(p_eff_list,volume_sampling_fro_list,label="Volume Sampling",marker='_')
#plt.scatter(p_eff_list,derandomized_projection_dpp_fro_list,label="derandomized projection dpp", marker='_')
plt.scatter(p_eff_list,theoretical_effective_kernel_error_bound_list,color='red',marker='_',label="Theoretical bound for Effective Kernel Sampling")
plt.xlabel(r'$p_{eff}(\frac{1}{2})$', fontsize=12)
plt.ylabel(r'$\mathbb{E}_{S \sim \mathcal{P}}(\|X- \pi_{S}(X)\|_{Fr}^{2})$', fontsize=12)
plt.title('The case k = '+str(k)+', '+str(T)+' matrices, flat spectrum after k+1')
plt.legend(bbox_to_anchor=(1.04,1), loc="upper left")
plt.show()
plt.scatter(p_eff_list,avoiding_proba_list,label="Avoiding Probability")
plt.xlabel(r'$p_{eff}(\frac{1}{2})$', fontsize=12)
plt.ylabel(r'$\mathbb{P}(S\cap T_{eff} = \emptyset)$', fontsize=12)
plt.title('The case k = '+str(k)+', '+str(T)+' matrices, flat spectrum after k+1')
plt.legend(bbox_to_anchor=(1.04,1), loc="upper left")
plt.show()
print("N")
print(N)
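# Hedged invocation sketch (added for illustration; every value below is
# hypothetical and only meant to show the expected argument shapes).
def _demo_plot_results():
    real_dim, k = 20, 5
    plot_results_of_multi_experiments(
        N=100, real_dim=real_dim, r=10, T_=10, k_=k,
        mean=np.zeros(real_dim), cov_=np.eye(real_dim),
        static_list_=[6, 8, 10, 12])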
| 45.175
| 152
| 0.731046
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,399
| 0.331904
|
bba40b13a90f7230a2307e2b965c7e2e96ab0207
| 1,562
|
py
|
Python
|
utils/relationship_tree/population.py
|
rohern/attila
|
e876af57ee3b77144343ac3c22e798733753a23f
|
[
"MIT"
] | null | null | null |
utils/relationship_tree/population.py
|
rohern/attila
|
e876af57ee3b77144343ac3c22e798733753a23f
|
[
"MIT"
] | null | null | null |
utils/relationship_tree/population.py
|
rohern/attila
|
e876af57ee3b77144343ac3c22e798733753a23f
|
[
"MIT"
] | 1
|
2020-02-21T20:08:43.000Z
|
2020-02-21T20:08:43.000Z
|
from person import Person
class Population:
def __init__(self, family_info, null_parent_value='0'):
self.persons = {}
# Initialize the persons data structure with Person objects
for fid in family_info:
self.persons[fid] = {}
for iid in family_info[fid]:
info = family_info[fid][iid]
father_id = info['father_id']
if father_id == null_parent_value:
father_id = None
mother_id = info['mother_id']
if mother_id == null_parent_value:
mother_id = None
self.persons[fid][iid] = Person(fid, iid, father_id, mother_id, info['sex'], info['birthday'], info['datapresent'])
# Create link structure between persons based on relationship
for fid in self.persons:
family_member_ids = self.persons[fid].keys()
for iid in family_member_ids:
person = self.persons[fid][iid]
if person.father_id is not None:
if person.father_id in family_member_ids:
person.set_father(self.persons[fid][person.father_id])
self.persons[fid][person.father_id].add_child(person)
# else:
# print "%s's father %s is not in their family." % (person.iid, person.father_id)
if person.mother_id is not None:
if person.mother_id in family_member_ids:
person.set_mother(self.persons[fid][person.mother_id])
self.persons[fid][person.mother_id].add_child(person)
# else:
# print "%s's mother %s is not in their family." % (person.iid, person.mother_id)
| 38.097561
| 123
| 0.640845
| 1,534
| 0.982074
| 0
| 0
| 0
| 0
| 0
| 0
| 353
| 0.225992
|
bba4b6f4a62fce3404ab57f827ef1473938bae93
| 348
|
py
|
Python
|
cmdline.py
|
30emeciel/freshdesk-token-exchange
|
3e449735c20d53061941c77443da6dc7b8ea9e24
|
[
"MIT"
] | null | null | null |
cmdline.py
|
30emeciel/freshdesk-token-exchange
|
3e449735c20d53061941c77443da6dc7b8ea9e24
|
[
"MIT"
] | null | null | null |
cmdline.py
|
30emeciel/freshdesk-token-exchange
|
3e449735c20d53061941c77443da6dc7b8ea9e24
|
[
"MIT"
] | null | null | null |
if __name__ == "__main__":
# export GOOGLE_APPLICATION_CREDENTIALS="trentiemeciel.json"
# get the token using postman
from dotenv import load_dotenv
load_dotenv()
import main
freshdesk_token = main.convert_auth0_token_to_freshdesk_token("gtxiNvJMjuDGec7GUziM2qSupsnCu74I")
print(f"freshdesk_token: {freshdesk_token}")
| 31.636364
| 101
| 0.767241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 170
| 0.488506
|
bba5ec5ee218ef30daab10fe172b51c78e3cf3a4
| 4,040
|
py
|
Python
|
auto_encoder.py
|
kredy/Keras-Projects
|
44c9a7b27f31a8d3eaa7b3bc7a0396d2eb0bf430
|
[
"MIT"
] | 1
|
2021-06-30T13:25:35.000Z
|
2021-06-30T13:25:35.000Z
|
auto_encoder.py
|
kredy/Keras-Projects
|
44c9a7b27f31a8d3eaa7b3bc7a0396d2eb0bf430
|
[
"MIT"
] | null | null | null |
auto_encoder.py
|
kredy/Keras-Projects
|
44c9a7b27f31a8d3eaa7b3bc7a0396d2eb0bf430
|
[
"MIT"
] | 2
|
2020-08-04T01:52:55.000Z
|
2021-03-16T19:12:20.000Z
|
'''
Convolutional autoencoder on MNIST dataset using Keras functional API
'''
from keras.datasets import mnist
from keras.models import Model
from keras.layers import Activation, Input, BatchNormalization
from keras.layers import Conv2D, Conv2DTranspose
from keras.callbacks import TensorBoard
from keras.optimizers import Adam
from keras.utils import to_categorical
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
# Parameters
batch_size = 128
epochs = 3
Tboard = TensorBoard(log_dir='./autoencoder_graph')
# Load the MNIST data
def load_data():
(x_train, y_train), (x_test, y_test) = mnist.load_data()
y_train = to_categorical(y_train, num_classes=10)
y_test = to_categorical(y_test, num_classes=10)
x_train = x_train.reshape(-1, 28, 28, 1)
x_test = x_test.reshape(-1, 28, 28, 1)
x_train = x_train/255.0
x_test = x_test/255.0
return x_train, y_train, x_test, y_test
# Autoencoder
def auto_encoder():
# Encoder
inputs = Input(name='inputs', shape=[28,28,1,])
layer = Conv2D(filters=6, kernel_size=(5,5), strides=(1,1), padding='valid', name='Conv2D_1')(inputs)
layer = BatchNormalization(name='BN_1')(layer)
layer = Activation('relu', name='relu_1')(layer)
layer = Conv2D(filters=6, kernel_size=(5,5), strides=(1,1), padding='valid', name='Conv2D_2')(layer)
layer = BatchNormalization(name='BN_2')(layer)
layer = Activation('relu', name='relu_2')(layer)
layer = Conv2D(filters=6, kernel_size=(3, 3), strides=(1, 1), padding='valid', name='Conv2D_3')(layer)
layer = BatchNormalization(name='BN_3')(layer)
layer = Activation('relu', name='relu_3')(layer)
encoder = Model(inputs=inputs, outputs=layer)
# Decoder
l_inputs = Input(name='l_inputs', shape=[18,18,6,])
layer = Conv2DTranspose(filters=6, kernel_size=(3,3), strides=(1,1), padding='valid', name='deconv2d_1')(l_inputs)
layer = BatchNormalization(name='BN_4')(layer)
layer = Activation('relu', name='relu_4')(layer)
layer = Conv2DTranspose(filters=6, kernel_size=(5, 5), strides=(1, 1), padding='valid', name='deconv2d_2')(layer)
layer = BatchNormalization(name='BN_5')(layer)
layer = Activation('relu', name='relu_5')(layer)
layer = Conv2DTranspose(filters=1, kernel_size=(5, 5), strides=(1, 1), padding='valid', name='deconv2d_3')(layer)
layer = Activation('relu', name='relu_6')(layer)
decoder = Model(inputs=l_inputs, outputs=layer)
# Encoder + Decoder
model = Model(inputs=inputs, outputs=decoder(encoder(inputs)))
return encoder, decoder, model
def main():
x_train, y_train, x_test, y_test = load_data()
encoder, decoder, model = auto_encoder()
encoder.summary()
decoder.summary()
model.summary()
model.compile(optimizer=Adam(), loss='mse')
model.fit(x_train, x_train, batch_size=batch_size, epochs=epochs, callbacks=[Tboard])
gen_imgs = model.predict(x_test, batch_size=batch_size)
    # Visualise the generated images and compare them with the test images
rn_num = np.random.randint(10000)
gen_imgs = gen_imgs*255.0
gen_img = gen_imgs[rn_num]
x_test = x_test*255.0
test_img = x_test[rn_num]
test_img = test_img.reshape(28,28)
gen_img = gen_img.reshape(28,28)
# Show generated image
plt.imshow(gen_img)
plt.show()
# Show test image
plt.imshow(test_img)
plt.show()
# Save weights of encoder, decoder and the whole model
encoder.save_weights('encoder_weights.hdf5')
decoder.save_weights('decoder_weights.hdf5')
model.save_weights('autoencoder_weights.hdf5')
# Save architecture
encoder_yaml = encoder.to_yaml()
with open('encoder_string.yaml', 'w') as fo:
fo.write(encoder_yaml)
decoder_yaml = decoder.to_yaml()
with open('decoder_string.yaml', 'w') as fo:
fo.write(decoder_yaml)
model_yaml = model.to_yaml()
with open('model_string.yaml', 'w') as fo:
fo.write(model_yaml)
if __name__ == '__main__':
main()
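# Hedged reload sketch (added for illustration): rebuilds the model from the
# saved YAML architecture string and restores the HDF5 weights. Note that
# model_from_yaml belongs to classic Keras and was removed in newer releases.
def load_saved_autoencoder():
    from keras.models import model_from_yaml
    with open('model_string.yaml') as fi:
        model = model_from_yaml(fi.read())
    model.load_weights('autoencoder_weights.hdf5')
    return model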
| 35.438596
| 118
| 0.693069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 767
| 0.189851
|
bba73b50d8937afbf151ac7cc18f80271ca8fda7
| 499
|
py
|
Python
|
test/unit/tools/msvc/common.py
|
jimporter/bfg9000
|
c206646ecfed0d1a510e993b93e6a15677f45a14
|
[
"BSD-3-Clause"
] | 72
|
2015-06-23T02:35:13.000Z
|
2021-12-08T01:47:40.000Z
|
test/unit/tools/msvc/common.py
|
jimporter/bfg9000
|
c206646ecfed0d1a510e993b93e6a15677f45a14
|
[
"BSD-3-Clause"
] | 139
|
2015-03-01T18:48:17.000Z
|
2021-06-18T15:45:14.000Z
|
test/unit/tools/msvc/common.py
|
jimporter/bfg9000
|
c206646ecfed0d1a510e993b93e6a15677f45a14
|
[
"BSD-3-Clause"
] | 19
|
2015-12-23T21:24:33.000Z
|
2022-01-06T04:04:41.000Z
|
from bfg9000.languages import Languages
known_langs = Languages()
with known_langs.make('c') as x:
x.vars(compiler='CC', flags='CFLAGS')
with known_langs.make('c++') as x:
x.vars(compiler='CXX', flags='CXXFLAGS')
def mock_which(*args, **kwargs):
return ['command']
def mock_execute(args, **kwargs):
if '-?' in args:
return ('Microsoft (R) C/C++ Optimizing Compiler Version ' +
'19.12.25831 for x86')
raise OSError('unknown command: {}'.format(args))
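# Hedged usage note (added for illustration): in bfg9000's unit tests these
# helpers are typically patched over the shell layer; the patch targets below
# are indicative only, not verified against this repository.
# with mock.patch('bfg9000.shell.which', mock_which), \
#      mock.patch('bfg9000.shell.execute', mock_execute):
#     ...  # construct the MSVC builder under test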
| 26.263158
| 68
| 0.639279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 140
| 0.280561
|
bba7b25014664dee8876feca1599b36413d6ce96
| 93
|
py
|
Python
|
Gate_bases.py
|
jlfly12/qrsim
|
1f5340cdc4f6cc0ca7ecbebd49ba8c6f78afdb8c
|
[
"Apache-2.0"
] | null | null | null |
Gate_bases.py
|
jlfly12/qrsim
|
1f5340cdc4f6cc0ca7ecbebd49ba8c6f78afdb8c
|
[
"Apache-2.0"
] | null | null | null |
Gate_bases.py
|
jlfly12/qrsim
|
1f5340cdc4f6cc0ca7ecbebd49ba8c6f78afdb8c
|
[
"Apache-2.0"
] | 1
|
2019-06-21T17:55:00.000Z
|
2019-06-21T17:55:00.000Z
|
x = "X"
y = "Y"
z = "Z"
xx = "XX"
h = "H"
cnot = "CNOT"
s_phi = "S_phi"
identity = "Identity"
| 11.625
| 21
| 0.494624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 39
| 0.419355
|
bbabb955113863d40c2a80d48e3d2cd18a7d9577
| 164
|
py
|
Python
|
flake8diff/test_flake8.py
|
miki725/flake8-diff
|
bec04390ce9263fc7957ccb7fcf39d582fa8d4ad
|
[
"MIT"
] | 18
|
2015-03-10T20:05:57.000Z
|
2020-08-12T20:46:00.000Z
|
flake8diff/test_flake8.py
|
miki725/flake8-diff
|
bec04390ce9263fc7957ccb7fcf39d582fa8d4ad
|
[
"MIT"
] | 20
|
2015-03-21T19:39:12.000Z
|
2020-05-26T15:25:33.000Z
|
flake8diff/test_flake8.py
|
miki725/flake8-diff
|
bec04390ce9263fc7957ccb7fcf39d582fa8d4ad
|
[
"MIT"
] | 7
|
2015-03-21T19:12:31.000Z
|
2018-09-28T20:56:45.000Z
|
from unittest import TestCase
from flake8diff import flake8
class Flake8DiffTestCase(TestCase):
def test_flake8diff(self):
flake8.Flake8Diff("", {})
| 18.222222
| 35
| 0.737805
| 100
| 0.609756
| 0
| 0
| 0
| 0
| 0
| 0
| 2
| 0.012195
|
bbabba632a1d8ac671dc7f863d9ffae0e405f07a
| 1,266
|
py
|
Python
|
algorithm/linear-regression/gradientDescentLR.py
|
mk43/machine-learning
|
1ca1baf797fe6f593a88ad4e0d7ac7e5c24ce139
|
[
"Apache-2.0"
] | 6
|
2018-02-22T00:27:44.000Z
|
2019-11-21T18:12:48.000Z
|
algorithm/linear-regression/gradientDescentLR.py
|
mk43/machine-learning
|
1ca1baf797fe6f593a88ad4e0d7ac7e5c24ce139
|
[
"Apache-2.0"
] | null | null | null |
algorithm/linear-regression/gradientDescentLR.py
|
mk43/machine-learning
|
1ca1baf797fe6f593a88ad4e0d7ac7e5c24ce139
|
[
"Apache-2.0"
] | 4
|
2018-02-19T05:59:23.000Z
|
2020-04-08T08:53:02.000Z
|
# coding: utf-8
import matplotlib.pyplot as plt
import numpy as np
N = 200
X = np.linspace(0, 10, N * 2)
noise = np.random.normal(0, 0.5, X.shape)
Y = X * 0.5 + 3 + noise
def calcLoss(train_X, train_Y, W, b):
return np.sum(np.square(train_Y - (train_X * W + b)))
def gradientDescent(train_X, train_Y, W, b, learningrate=0.001, trainingtimes=500):
global loss
global W_trace
global b_trace
size = train_Y.size
for _ in range(trainingtimes):
prediction = W * train_X + b
tempW = W + learningrate * np.sum(train_X * (train_Y - prediction)) / size
tempb = b + learningrate * np.sum(train_Y - prediction) / size
W = tempW
b = tempb
loss.append(calcLoss(train_X, train_Y, W, b))
W_trace.append(W)
b_trace.append(b)
Training_Times = 100
Learning_Rate = 0.002
loss = []
W_trace = [-1]
b_trace = [1]
gradientDescent(X, Y, W_trace[0], b_trace[0], learningrate=Learning_Rate, trainingtimes=Training_Times)
print(W_trace[-1], b_trace[-1])
fig = plt.figure()
plt.title(r'$loss\ function\ change\ tendency$')
plt.xlabel(r'$learning\ times$')
plt.ylabel(r'$loss\ value$')
plt.plot(np.linspace(1, Training_Times, Training_Times), loss)
plt.savefig("gradientDescentLR.png")
plt.show()
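# Hedged sanity check (added for illustration): compare the learned parameters
# against the closed-form least-squares fit on the same data.
W_ls, b_ls = np.polyfit(X, Y, 1)
print("least squares:", W_ls, b_ls)  # should be close to 0.5 and 3
print("gradient descent:", W_trace[-1], b_trace[-1])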
| 26.375
| 103
| 0.661137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 111
| 0.087678
|
bbac3dea77b9a3981684ddd7952fdf41e36843fc
| 6,343
|
py
|
Python
|
lambda-email-parser/lambda_function.py
|
aws-samples/serverless-mail
|
a002dd90817c9eb2090ca0ad36114c51a0490d61
|
[
"MIT-0"
] | null | null | null |
lambda-email-parser/lambda_function.py
|
aws-samples/serverless-mail
|
a002dd90817c9eb2090ca0ad36114c51a0490d61
|
[
"MIT-0"
] | null | null | null |
lambda-email-parser/lambda_function.py
|
aws-samples/serverless-mail
|
a002dd90817c9eb2090ca0ad36114c51a0490d61
|
[
"MIT-0"
] | null | null | null |
import os
import boto3
import email
import logging
import json
import re
import uuid
s3 = boto3.client("s3")
workmail_message_flow = boto3.client('workmailmessageflow')
logger = logging.getLogger()
def lambda_handler(event, context):
logger.error(json.dumps(event))
destination_bucket = os.environ.get('destination_bucket')
key_prefix = None
if not destination_bucket:
logger.error("Environment variable missing: destination_bucket")
return
# keep track of how many MIME parts are parsed and saved to S3
saved_parts = 0
msg = None
parts = None
workmail_mutate = None
# event is from workmail
if event.get('messageId'):
message_id = event['messageId']
key_prefix = message_id
raw_msg = workmail_message_flow.get_raw_message_content(messageId=message_id)
msg = email.message_from_bytes(raw_msg['messageContent'].read())
if os.environ.get('modify_workmail_message'):
workmail_mutate = True
# event is from s3
else:
records = event.get('Records', [])
record = records[0]
# TODO: for record in records:
# get the S3 object information
s3_info = record['s3']
object_info = s3_info['object']
if s3_info['bucket']['name'] == destination_bucket:
logger.error("To prevent recursive file creation this function will not write back to the same bucket")
return {
'statusCode': 400,
'body': 'To prevent recursive file creation this function will not write back to the same bucket'
}
# get the email message stored in S3 and parse it using the python email library
# TODO: error condition - if the file isn't an email message or doesn't parse correctly
fileObj, object_key = [None] * 2
object_key = object_info['key']
key_prefix = object_key
fileObj = s3.get_object(Bucket = s3_info['bucket']['name'], Key = object_key)
msg = email.message_from_bytes(fileObj['Body'].read())
# save the headers of the message to the bucket
    # By default save all headers; set the select_headers environment variable
    # to a comma-separated list to narrow the selection
    headers_to_save = re.split(r',\s*', str(os.environ.get('select_headers', 'ALL')))
all_headers = msg.items()
if "ALL" in headers_to_save:
s3.put_object(Bucket = destination_bucket, Key = key_prefix + "/headers.json", Body = json.dumps(all_headers))
elif len(headers_to_save) > 0:
saved_headers = []
i = 0
while i < len(all_headers):
this_header = all_headers[i]
if this_header[0].upper() in (header.upper() for header in headers_to_save):
saved_headers.append(this_header)
i += 1
s3.put_object(Bucket = destination_bucket, Key = key_prefix + "/headers.json", Body = json.dumps(saved_headers))
# parse the mime parts out of the message
parts = msg.walk()
# walk through each MIME part from the email message
part_idx = 0
for part in parts:
part_idx += 1
# get information about the MIME part
content_type, content_disposition, content, charset, filename = [None] * 5
content_type = part.get_content_type()
content_disposition = str(part.get_content_disposition())
content = part.get_payload(decode=True)
charset = part.get_content_charset()
filename = part.get_filename()
logger.error(f"Part: {part_idx}. Content charset: {charset}. Content type: {content_type}. Content disposition: {content_disposition}. Filename: {filename}");
# make file name for body, and untitled text or html parts
# add additional content types that we want to support non-existent filenames
if not filename:
if content_type == 'text/plain':
if 'attachment' not in content_disposition:
filename = "body.txt"
else:
filename = "untitled.txt"
elif content_type == 'text/html':
if 'attachment' not in content_disposition:
filename = "body.html"
else:
filename = "untitled.html"
else:
filename = "untitled"
# TODO: consider overriding or sanitizing the filenames since that is tainted data and might be subject to abuse in object key names
# technically, the entire message is tainted data, so it would be the responsibility of downstream parsers to ensure protection from interpreter abuse
# skip parts that aren't attachment parts
if content_type in ["multipart/mixed", "multipart/related", "multipart/alternative"]:
continue
if content:
# decode the content based on the character set specified
# TODO: add error handling
if charset:
content = content.decode(charset)
# store the decoded MIME part in S3 with the filename appended to the object key
s3.put_object(Bucket = destination_bucket, Key = key_prefix + "/mimepart" + str(part_idx) + "_" + filename, Body = content)
saved_parts += 1
else:
logger.error(f"Part {part_idx} has no content. Content type: {content_type}. Content disposition: {content_disposition}.");
if workmail_mutate:
email_subject = event['subject']
modified_object_key = key_prefix + "/" + str(uuid.uuid4())
new_subject = f"[PROCESSED] {email_subject}"
msg.replace_header('Subject', new_subject)
msg.add_header('X-AWS-Mailsploder-Bucket-Prefix', "s3://" + destination_bucket + "/" + key_prefix)
msg.add_header('X-AWS-Mailsploder-Parts-Saved', str(saved_parts))
# Store updated email in S3
s3.put_object(Bucket = destination_bucket, Key = modified_object_key, Body = msg.as_bytes())
# Update the email in WorkMail
s3_reference = {
'bucket': destination_bucket,
'key': modified_object_key
}
content = {
's3Reference': s3_reference
}
workmail_message_flow.put_raw_message_content(messageId=message_id, content=content)
return {
'statusCode': 200,
'body': 'Number of parts saved to S3 bucket: ' + destination_bucket + ': ' + str(saved_parts)
}
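# Hedged local-invocation sketch (added for illustration; bucket and key names
# are hypothetical, and a real run still needs AWS credentials and objects).
def _demo_invoke():
    os.environ['destination_bucket'] = 'parsed-mail-bucket'  # hypothetical
    sample_event = {
        'Records': [{
            's3': {
                'bucket': {'name': 'incoming-mail-bucket'},  # hypothetical
                'object': {'key': 'raw/message.eml'},        # hypothetical
            }
        }]
    }
    return lambda_handler(sample_event, None)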
| 40.922581
| 164
| 0.653003
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,408
| 0.379631
|
bbadddc2fe8f7e7eaa30c11c13f2a1d11f2721db
| 1,757
|
py
|
Python
|
scripts/latency/filebench.py
|
huangvincent170/cyclone
|
737af617ab1472dfb16e6c20a079e88dccf85850
|
[
"Apache-2.0"
] | 2
|
2019-04-16T01:33:36.000Z
|
2021-02-23T08:34:38.000Z
|
scripts/latency/filebench.py
|
huangvincent170/cyclone
|
737af617ab1472dfb16e6c20a079e88dccf85850
|
[
"Apache-2.0"
] | null | null | null |
scripts/latency/filebench.py
|
huangvincent170/cyclone
|
737af617ab1472dfb16e6c20a079e88dccf85850
|
[
"Apache-2.0"
] | 4
|
2020-03-27T18:06:33.000Z
|
2021-03-24T09:56:17.000Z
|
#!/usr/bin/env python
import argparse
import sys
import os
import shutil
DBG = 1
__home = os.getcwd()
__fbench_home = '/home/pfernando/filebench' #binary
__empty = ''
__micro_rread = 'micro_rread'
__workload_l = []
__workload_l.append(__micro_rread)
parser = argparse.ArgumentParser(prog="runscript", description="script to run filebench")
parser.add_argument('-w', dest='workload', default=__empty, help='', choices=__workload_l)
parser.add_argument('-io', dest='iosize', default=__empty, help='')
try:
args = parser.parse_args()
except:
sys.exit(0)
def dbg(s):
if DBG == 1:
        print(s)
def msg(s):
    print('\n' + '>>>' + s + '\n')
def cd(dirt):
dbg(dirt)
if dirt == __home:
        os.chdir(__home)
else:
path = __home + '/' + dirt
dbg(path)
try:
os.chdir(path)
except:
            print('invalid directory ', path)
sys.exit(0)
def sh(cmd):
msg(cmd)
try:
os.system(cmd)
except:
        print('invalid cmd', cmd)
sys.exit(0)
def fb(wl,dataobj):
__t = wl + '.template'
__out = wl + '.f'
# generate workload file from template
cd('wrklds')
template = open(__t, "rt").read()
with open(__out, "wt") as output:
output.write(template % dataobj)
cd(__home)
cmd = __fbench_home +'/filebench'
cmd += ' -f ' + 'wrklds/' + __out
msg(cmd)
sh(cmd)
# delete the generated file
os.remove('wrklds/' + __out)
cd(__home)
if __name__ == '__main__':
w = args.workload
io = args.iosize
if w == __micro_rread:
__iosize = io
msg("io size : " + io)
dataobj = {"iosize": io}
fb(__micro_rread,dataobj)
else:
sys.exit(0)
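# Hedged usage (added for illustration): run the micro random-read workload
# with a 4k I/O size, assuming filebench is installed at __fbench_home:
#   ./filebench.py -w micro_rread -io 4k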
| 17.928571
| 90
| 0.575413
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 348
| 0.198065
|
bbaeca43c2d4bafe283a3a22b25235f71d730c45
| 12,685
|
py
|
Python
|
python_modules/dagster-airflow/dagster_airflow/operators.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster-airflow/dagster_airflow/operators.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster-airflow/dagster_airflow/operators.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | null | null | null |
'''The dagster-airflow operators.'''
import ast
import datetime
import json
import logging
import os
from contextlib import contextmanager
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator, SkipMixin
from airflow.operators.docker_operator import DockerOperator
from airflow.operators.python_operator import PythonOperator
from airflow.utils.file import TemporaryDirectory
from docker import APIClient, from_env
from dagster import check, seven, DagsterEventType
from dagster.core.events import DagsterEvent
from dagster_graphql.client.mutations import execute_start_pipeline_execution_query
from dagster_graphql.client.query import START_PIPELINE_EXECUTION_QUERY
from .util import airflow_storage_exception, construct_variables, parse_raw_res
DOCKER_TEMPDIR = '/tmp'
DEFAULT_ENVIRONMENT = {
'AWS_ACCESS_KEY_ID': os.getenv('AWS_ACCESS_KEY_ID'),
'AWS_SECRET_ACCESS_KEY': os.getenv('AWS_SECRET_ACCESS_KEY'),
}
LINE_LENGTH = 100
class DagsterSkipMixin(SkipMixin):
def skip_self_if_necessary(self, events, execution_date, task):
check.list_param(events, 'events', of_type=DagsterEvent)
check.inst_param(execution_date, 'execution_date', datetime.datetime)
check.inst_param(task, 'task', BaseOperator)
skipped = any([e.event_type_value == DagsterEventType.STEP_SKIPPED.value for e in events])
if skipped:
self.skip(None, execution_date, [task])
class ModifiedDockerOperator(DockerOperator):
"""ModifiedDockerOperator supports host temporary directories on OSX.
Incorporates https://github.com/apache/airflow/pull/4315/ and an implementation of
https://issues.apache.org/jira/browse/AIRFLOW-3825.
:param host_tmp_dir: Specify the location of the temporary directory on the host which will
be mapped to tmp_dir. If not provided defaults to using the standard system temp directory.
:type host_tmp_dir: str
"""
def __init__(self, host_tmp_dir='/tmp', **kwargs):
self.host_tmp_dir = host_tmp_dir
kwargs['xcom_push'] = True
super(ModifiedDockerOperator, self).__init__(**kwargs)
@contextmanager
def get_host_tmp_dir(self):
'''Abstracts the tempdir context manager so that this can be overridden.'''
with TemporaryDirectory(prefix='airflowtmp', dir=self.host_tmp_dir) as tmp_dir:
yield tmp_dir
def execute(self, context):
'''Modified only to use the get_host_tmp_dir helper.'''
self.log.info('Starting docker container from image %s', self.image)
tls_config = self.__get_tls_config()
if self.docker_conn_id:
self.cli = self.get_hook().get_conn()
else:
self.cli = APIClient(base_url=self.docker_url, version=self.api_version, tls=tls_config)
if self.force_pull or len(self.cli.images(name=self.image)) == 0:
self.log.info('Pulling docker image %s', self.image)
for l in self.cli.pull(self.image, stream=True):
output = json.loads(l.decode('utf-8').strip())
if 'status' in output:
self.log.info("%s", output['status'])
with self.get_host_tmp_dir() as host_tmp_dir:
self.environment['AIRFLOW_TMP_DIR'] = self.tmp_dir
self.volumes.append('{0}:{1}'.format(host_tmp_dir, self.tmp_dir))
self.container = self.cli.create_container(
command=self.get_command(),
environment=self.environment,
host_config=self.cli.create_host_config(
auto_remove=self.auto_remove,
binds=self.volumes,
network_mode=self.network_mode,
shm_size=self.shm_size,
dns=self.dns,
dns_search=self.dns_search,
cpu_shares=int(round(self.cpus * 1024)),
mem_limit=self.mem_limit,
),
image=self.image,
user=self.user,
working_dir=self.working_dir,
)
self.cli.start(self.container['Id'])
res = []
line = ''
for new_line in self.cli.logs(container=self.container['Id'], stream=True):
line = new_line.strip()
if hasattr(line, 'decode'):
line = line.decode('utf-8')
self.log.info(line)
res.append(line)
result = self.cli.wait(self.container['Id'])
if result['StatusCode'] != 0:
raise AirflowException('docker container failed: ' + repr(result))
if self.xcom_push_flag:
# Try to avoid any kind of race condition?
return '\n'.join(res) + '\n' if self.xcom_all else str(line)
# This is a class-private name on DockerOperator for no good reason --
# all that the status quo does is inhibit extension of the class.
# See https://issues.apache.org/jira/browse/AIRFLOW-3880
def __get_tls_config(self):
# pylint: disable=no-member
return super(ModifiedDockerOperator, self)._DockerOperator__get_tls_config()
class DagsterDockerOperator(ModifiedDockerOperator, DagsterSkipMixin):
'''Dagster operator for Apache Airflow.
Wraps a modified DockerOperator incorporating https://github.com/apache/airflow/pull/4315.
    Additionally, this operator supports initializing a Docker client with
    docker.from_env, so it isn't necessary to explicitly set docker_url,
    tls_config, or api_version.
'''
# py2 compat
# pylint: disable=keyword-arg-before-vararg
def __init__(
self,
task_id,
environment_dict=None,
pipeline_name=None,
mode=None,
step_keys=None,
dag=None,
*args,
**kwargs
):
check.str_param(pipeline_name, 'pipeline_name')
step_keys = check.opt_list_param(step_keys, 'step_keys', of_type=str)
environment_dict = check.opt_dict_param(environment_dict, 'environment_dict', key_type=str)
tmp_dir = kwargs.pop('tmp_dir', DOCKER_TEMPDIR)
host_tmp_dir = kwargs.pop('host_tmp_dir', seven.get_system_temp_directory())
if 'storage' not in environment_dict:
raise airflow_storage_exception(tmp_dir)
check.invariant(
'in_memory' not in environment_dict.get('storage', {}),
'Cannot use in-memory storage with Airflow, must use S3',
)
self.docker_conn_id_set = kwargs.get('docker_conn_id') is not None
self.environment_dict = environment_dict
self.pipeline_name = pipeline_name
self.mode = mode
self.step_keys = step_keys
self._run_id = None
# These shenanigans are so we can override DockerOperator.get_hook in order to configure
# a docker client using docker.from_env, rather than messing with the logic of
# DockerOperator.execute
if not self.docker_conn_id_set:
try:
from_env().version()
except Exception: # pylint: disable=broad-except
pass
else:
kwargs['docker_conn_id'] = True
# We do this because log lines won't necessarily be emitted in order (!) -- so we can't
# just check the last log line to see if it's JSON.
kwargs['xcom_all'] = True
# Store Airflow DAG run timestamp so that we can pass along via execution metadata
self.airflow_ts = kwargs.get('ts')
if 'environment' not in kwargs:
kwargs['environment'] = DEFAULT_ENVIRONMENT
super(DagsterDockerOperator, self).__init__(
task_id=task_id, dag=dag, tmp_dir=tmp_dir, host_tmp_dir=host_tmp_dir, *args, **kwargs
)
@property
def run_id(self):
if self._run_id is None:
return ''
else:
return self._run_id
@property
def query(self):
# TODO: https://github.com/dagster-io/dagster/issues/1342
redacted = construct_variables(
self.mode, 'REDACTED', self.pipeline_name, self.run_id, self.airflow_ts, self.step_keys
)
self.log.info(
'Executing GraphQL query: {query}\n'.format(query=START_PIPELINE_EXECUTION_QUERY)
+ 'with variables:\n'
+ seven.json.dumps(redacted, indent=2)
)
variables = construct_variables(
self.mode,
self.environment_dict,
self.pipeline_name,
self.run_id,
self.airflow_ts,
self.step_keys,
)
return '-v \'{variables}\' \'{query}\''.format(
variables=seven.json.dumps(variables), query=START_PIPELINE_EXECUTION_QUERY
)
def get_command(self):
if self.command is not None and self.command.strip().find('[') == 0:
commands = ast.literal_eval(self.command)
elif self.command is not None:
commands = self.command
else:
commands = self.query
return commands
def get_hook(self):
if self.docker_conn_id_set:
return super(DagsterDockerOperator, self).get_hook()
class _DummyHook(object):
def get_conn(self):
return from_env().api
return _DummyHook()
def execute(self, context):
try:
from dagster_graphql.client.mutations import (
handle_start_pipeline_execution_errors,
handle_start_pipeline_execution_result,
)
except ImportError:
raise AirflowException(
'To use the DagsterPythonOperator, dagster and dagster_graphql must be installed '
'in your Airflow environment.'
)
if 'run_id' in self.params:
self._run_id = self.params['run_id']
elif 'dag_run' in context and context['dag_run'] is not None:
self._run_id = context['dag_run'].run_id
try:
raw_res = super(DagsterDockerOperator, self).execute(context)
self.log.info('Finished executing container.')
res = parse_raw_res(raw_res)
handle_start_pipeline_execution_errors(res)
events = handle_start_pipeline_execution_result(res)
self.skip_self_if_necessary(events, context['execution_date'], context['task'])
return events
finally:
self._run_id = None
# This is a class-private name on DockerOperator for no good reason --
# all that the status quo does is inhibit extension of the class.
# See https://issues.apache.org/jira/browse/AIRFLOW-3880
def __get_tls_config(self):
# pylint:disable=no-member
return super(DagsterDockerOperator, self)._ModifiedDockerOperator__get_tls_config()
@contextmanager
def get_host_tmp_dir(self):
yield self.host_tmp_dir
class DagsterPythonOperator(PythonOperator, DagsterSkipMixin):
def __init__(
self,
task_id,
handle,
pipeline_name,
environment_dict,
mode,
step_keys,
dag,
*args,
**kwargs
):
if 'storage' not in environment_dict:
raise airflow_storage_exception('/tmp/special_place')
check.invariant(
'in_memory' not in environment_dict.get('storage', {}),
'Cannot use in-memory storage with Airflow, must use filesystem or S3',
)
def python_callable(ts, dag_run, **kwargs): # pylint: disable=unused-argument
run_id = dag_run.run_id
# TODO: https://github.com/dagster-io/dagster/issues/1342
redacted = construct_variables(mode, 'REDACTED', pipeline_name, run_id, ts, step_keys)
logging.info(
'Executing GraphQL query: {query}\n'.format(query=START_PIPELINE_EXECUTION_QUERY)
+ 'with variables:\n'
+ seven.json.dumps(redacted, indent=2)
)
events = execute_start_pipeline_execution_query(
handle,
construct_variables(mode, environment_dict, pipeline_name, run_id, ts, step_keys),
)
self.skip_self_if_necessary(events, kwargs['execution_date'], kwargs['task'])
return events
super(DagsterPythonOperator, self).__init__(
task_id=task_id,
provide_context=True,
python_callable=python_callable,
dag=dag,
*args,
**kwargs
)
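# Hedged wiring sketch (added for illustration; the DAG id, pipeline name and
# storage config are hypothetical, and `handle` is a dagster
# ExecutionTargetHandle supplied by the caller).
def build_example_dag(handle):
    from airflow import DAG
    dag = DAG('dagster_example', start_date=datetime.datetime(2019, 1, 1),
              schedule_interval=None)
    DagsterPythonOperator(
        task_id='run_pipeline',
        handle=handle,
        pipeline_name='my_pipeline',                       # hypothetical
        environment_dict={'storage': {'filesystem': {}}},  # avoids in-memory
        mode='default',
        step_keys=None,
        dag=dag,
    )
    return dag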
| 36.346705
| 100
| 0.633662
| 11,694
| 0.921876
| 284
| 0.022389
| 1,293
| 0.101931
| 0
| 0
| 3,221
| 0.253922
|
bbaf14d78f29821d3e6ffec013b3447b60e7a43a
| 293
|
py
|
Python
|
app-receiver/receiver/service/settings.py
|
makaay/devops_assignment
|
937a0877e869c3e6ca09bad35ab869901527e492
|
[
"BSD-2-Clause"
] | null | null | null |
app-receiver/receiver/service/settings.py
|
makaay/devops_assignment
|
937a0877e869c3e6ca09bad35ab869901527e492
|
[
"BSD-2-Clause"
] | null | null | null |
app-receiver/receiver/service/settings.py
|
makaay/devops_assignment
|
937a0877e869c3e6ca09bad35ab869901527e492
|
[
"BSD-2-Clause"
] | 1
|
2022-03-05T13:11:30.000Z
|
2022-03-05T13:11:30.000Z
|
import os
"""
This file contains configurations for receiver service.
The configurations can be overridden with environment variables at run time.
"""
OUTPUT_DIR = os.getenv("OUTPUT_DIR", "/usr/src/app-receiver/output")
DECRYPTION_KEY = os.getenv("DECRYPTION_KEY", "/tmp/decryption_key")
| 36.625
| 81
| 0.78157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 224
| 0.764505
|
bbaf9c88715d3ed6658c5b7cac9f3b5786ab4dad
| 908
|
py
|
Python
|
setup.py
|
patel-zeel/CGLB-1
|
6afab3631704ae4233e93c2de289b4e351f61838
|
[
"Apache-2.0"
] | 5
|
2021-07-19T09:08:15.000Z
|
2022-03-21T10:19:08.000Z
|
setup.py
|
patel-zeel/CGLB-1
|
6afab3631704ae4233e93c2de289b4e351f61838
|
[
"Apache-2.0"
] | 5
|
2021-08-30T20:24:52.000Z
|
2021-11-29T07:24:51.000Z
|
setup.py
|
patel-zeel/CGLB-1
|
6afab3631704ae4233e93c2de289b4e351f61838
|
[
"Apache-2.0"
] | 1
|
2021-11-25T22:15:27.000Z
|
2021-11-25T22:15:27.000Z
|
# Copyright 2021 The CGLB Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
pkgs = find_packages()
setup(
name="cglb",
author="Artem Artemev, David Burt",
author_email="a.artemev20@imperial.ac.uk, drb62@cam.ac.uk",
version="0.0.1",
packages=pkgs,
install_requires=["numpy", "scipy"],
dependency_links=[],
)
| 32.428571
| 74
| 0.732379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 688
| 0.757709
|
bbb379a767a5b56faec727f2a03bfb35f2d9f7df
| 5,361
|
py
|
Python
|
automation/aux_funcs.py
|
jaimiles23/hacker_rank
|
0580eac82e5d0989afabb5c2e66faf09713f891b
|
[
"Apache-2.0"
] | null | null | null |
automation/aux_funcs.py
|
jaimiles23/hacker_rank
|
0580eac82e5d0989afabb5c2e66faf09713f891b
|
[
"Apache-2.0"
] | null | null | null |
automation/aux_funcs.py
|
jaimiles23/hacker_rank
|
0580eac82e5d0989afabb5c2e66faf09713f891b
|
[
"Apache-2.0"
] | 3
|
2021-09-22T11:06:58.000Z
|
2022-01-25T09:29:24.000Z
|
"""Collection of functions related to navigating directories
"""
##########
# Imports
##########
import os
from typing import Union
from pathlib import Path
from logger.select_chall import logging
import constants
from domains import problem_domains
from git import Repo
##########
# Subdomain dir_name
##########
def get_subdomain_dirname(subdomain_num: int, total_subdomains: int, subdomain: str) -> str:
"""Returns directory name for subdirectory.
Args:
subdomain_num (int): Subdomain number
total_subdomains (int): Total number of subdomains
subdomain (str): Subdomain name.
Returns:
str: directory name
"""
logging.debug(f"Subdir info: {subdomain_num}, {total_subdomains}, {subdomain}")
subdomain_num, total_subdomains = str(subdomain_num), str(total_subdomains)
if total_subdomains == '1': # specific challenges, e.g., 10 days of stats
subdomain_num = ''
else:
while len(subdomain_num) < len(total_subdomains):
subdomain_num = '0' + subdomain_num
subdomain_num += '_'
subdir_name = subdomain_num + subdomain.strip().lower().replace(' ', '_')
logging.debug(f"subdir - {subdir_name}")
return subdir_name
##########
# Change dir
##########
def change_dir(domain_dir: str):
"""Changes the current working directory.
Creates directory if it doesn't exist.
    Also creates a pre-README.md file.
Args:
domain_dir: directory to change to.
"""
    if not os.path.exists(domain_dir):
logging.info(f"DIR - creating {domain_dir}")
os.mkdir(domain_dir)
logging.info(f"DIR - changing to {domain_dir}")
os.chdir(domain_dir)
return
##########
# get dirname
##########
def get_dirname(dir_path: Path) -> str:
"""returns directory name from windows filepath
Args:
        dir_path (Path): path object
Returns:
str: directory name
"""
dirname = str(dir_path.resolve())
dirname = dirname[dirname.rfind('\\') + 1:]
logging.debug(f"Dirname {dirname} from {dir_path}")
return dirname
##########
# get_domain_dirs
##########
def get_domain_dirs(home_dir: object) -> list:
"""Returns list of domain directories.
Args:
home_dir (object): Home directory
Returns:
list: List of domain directories
"""
domain_dirs = []
for d in problem_domains:
domain_dir = home_dir / d.name
domain_dirs.append(domain_dir)
logging.debug("DIR - Domain dirs:" + '\n-'.join(
[str(d) for d in domain_dirs]))
return domain_dirs
##########
# get_subdomain_dirs
##########
def get_subdomain_dirs(domain_dir) -> list:
"""Returns list of subdomain dirs.
Args:
domain_dir ([type]): Domain directory.
Returns:
list: Returns list of subdomain directories.
"""
not_subdirs = (
'.ipynb_checkpoints'
)
p = Path(domain_dir)
subdirs = []
for f in p.glob('**/*'):
if f.is_dir():
dir_name = get_dirname(f)
logging.debug(f"Check dir - {dir_name}")
if dir_name not in not_subdirs:
subdirs.append(f)
logging.debug("DIR - Subdomain dirs:" + '\n-'.join(
[str(d) for d in subdirs]))
return subdirs
##########
# Challenge csv name
##########
def get_chall_csv_filename(sub_dir) -> str:
"""Returns csv name containing challenge informatin.
Args:
sub_dir ([type]): sub directory name
Returns:
str: csv filename
"""
p = Path(sub_dir)
for f in p.glob('**/*'):
filename = str(f)
if filename.endswith(".csv"):
return filename
raise Exception(f"No csv located in {sub_dir}")
##########
# pre readme
##########
def make_readme_setup(name: str, url: str):
"""Creates pre-readme in file."""
filename = constants.PRE_README_FILENAME
    if not os.path.exists(filename):
with open(filename, 'w') as outfile:
outfile.write(f"# {name}")
outfile.write(f"\nContains solutions to [{name}]({url}).")
return
##########
# Make file
##########
def make_file(filename: str, name: str, url: str) -> None:
"""Checks if file exists. If it doesn't exist, creates file."""
exists = os.path.exists(filename)
logging.debug(f"{filename} - {exists}")
if os.path.exists(filename):
return
logging.debug(f"FILE - Creating {filename}")
with open(filename, 'w') as outfile:
outfile.write(f"Solution to [{name}]({url})")
return
##########
# Update github
##########
def update_github(home_dir: object, commit_msg: str) -> None:
"""Updates github directory.
Args:
home_dir (object): home dir pathlib
commit_msg (str): Commit message
"""
repo = Repo(home_dir)
repo.git.add(update = True)
repo.index.commit(commit_msg)
logging.debug(f"Committing: {commit_msg}")
origin = repo.remote(name = 'origin')
origin.push()
logging.debug("Pushed to repo.")
def get_solution_commit_msg(domain: Path, subdomain: Path, chall_name: str) -> str:
"""Returns commit message for adding solution."""
domain_name = get_dirname(domain)
subdomain_name = get_dirname(subdomain)
return f"Solution to {domain_name} {subdomain_name} - {chall_name}"
| 23.933036
| 92
| 0.610707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,406
| 0.448797
|
bbb3c58e4009e852c11d677005377bc7618636b3
| 2,581
|
py
|
Python
|
tests/examples/minlplib/ex6_2_8.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 2
|
2021-07-03T13:19:10.000Z
|
2022-02-06T10:48:13.000Z
|
tests/examples/minlplib/ex6_2_8.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 1
|
2021-07-04T14:52:14.000Z
|
2021-07-15T10:17:11.000Z
|
tests/examples/minlplib/ex6_2_8.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | null | null | null |
# NLP written by GAMS Convert at 04/21/18 13:51:45
#
# Equation counts
# Total E G L N X C B
# 2 2 0 0 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 4 4 0 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 7 4 3 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(1E-6,1),initialize=0.7154)
m.x3 = Var(within=Reals,bounds=(1E-6,1),initialize=0.00336)
m.x4 = Var(within=Reals,bounds=(1E-6,1),initialize=0.28124)
m.obj = Objective(expr=log(2.4088*m.x2 + 8.8495*m.x3 + 2.0086*m.x4)*(10.4807341082197*m.x2 + 38.5043409542885*m.x3 +
8.73945638067505*m.x4) + 0.303602206615077*m.x2 - 3.98949602721008*m.x3 + 0.0423576909050935*m.x4
+ 0.240734108219679*log(m.x2)*m.x2 + 2.64434095428848*log(m.x3)*m.x3 + 0.399456380675047*log(
m.x4)*m.x4 - 0.240734108219679*log(2.4088*m.x2 + 8.8495*m.x3 + 2.0086*m.x4)*m.x2 -
2.64434095428848*log(2.4088*m.x2 + 8.8495*m.x3 + 2.0086*m.x4)*m.x3 - 0.399456380675047*log(2.4088
*m.x2 + 8.8495*m.x3 + 2.0086*m.x4)*m.x4 + 11.24*log(m.x2)*m.x2 + 36.86*log(m.x3)*m.x3 + 9.34*log(
m.x4)*m.x4 - 11.24*log(2.248*m.x2 + 7.372*m.x3 + 1.868*m.x4)*m.x2 - 36.86*log(2.248*m.x2 + 7.372*
m.x3 + 1.868*m.x4)*m.x3 - 9.34*log(2.248*m.x2 + 7.372*m.x3 + 1.868*m.x4)*m.x4 + log(2.248*m.x2 +
7.372*m.x3 + 1.868*m.x4)*(2.248*m.x2 + 7.372*m.x3 + 1.868*m.x4) + 2.248*log(m.x2)*m.x2 + 7.372*
log(m.x3)*m.x3 + 1.868*log(m.x4)*m.x4 - 2.248*log(2.248*m.x2 + 5.82088173817021*m.x3 +
0.382446861901943*m.x4)*m.x2 - 7.372*log(0.972461133672523*m.x2 + 7.372*m.x3 + 1.1893141713454*
m.x4)*m.x3 - 1.868*log(1.86752460515164*m.x2 + 2.61699842799583*m.x3 + 1.868*m.x4)*m.x4 -
12.7287341082197*log(m.x2)*m.x2 - 45.8763409542885*log(m.x3)*m.x3 - 10.607456380675*log(m.x4)*
m.x4, sense=minimize)
m.c2 = Constraint(expr= m.x2 + m.x3 + m.x4 == 1)
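# Hedged solve sketch (added for illustration; assumes an NLP solver such as
# Ipopt is available on the PATH).
if __name__ == '__main__':
    SolverFactory('ipopt').solve(m, tee=True)
    m.display()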
| 57.355556
| 120
| 0.484696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 697
| 0.27005
|
bbb5023852d401d52fa04fa5275d3dce5dbc5d18
| 3,790
|
py
|
Python
|
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/raw/GL/NV/geometry_program4.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/raw/GL/NV/geometry_program4.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/raw/GL/NV/geometry_program4.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
'''OpenGL extension NV.geometry_program4
The official definition of this extension is available here:
http://oss.sgi.com/projects/ogl-sample/registry/NV/geometry_program4.txt
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes
EXTENSION_NAME = 'GL_NV_geometry_program4'
GL_LINES_ADJACENCY_EXT = constant.Constant( 'GL_LINES_ADJACENCY_EXT', 0xA )
GL_LINE_STRIP_ADJACENCY_EXT = constant.Constant( 'GL_LINE_STRIP_ADJACENCY_EXT', 0xB )
GL_TRIANGLES_ADJACENCY_EXT = constant.Constant( 'GL_TRIANGLES_ADJACENCY_EXT', 0xC )
GL_TRIANGLE_STRIP_ADJACENCY_EXT = constant.Constant( 'GL_TRIANGLE_STRIP_ADJACENCY_EXT', 0xD )
GL_GEOMETRY_PROGRAM_NV = constant.Constant( 'GL_GEOMETRY_PROGRAM_NV', 0x8C26 )
GL_MAX_PROGRAM_OUTPUT_VERTICES_NV = constant.Constant( 'GL_MAX_PROGRAM_OUTPUT_VERTICES_NV', 0x8C27 )
GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV = constant.Constant( 'GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV', 0x8C28 )
GL_GEOMETRY_VERTICES_OUT_EXT = constant.Constant( 'GL_GEOMETRY_VERTICES_OUT_EXT', 0x8DDA )
GL_GEOMETRY_INPUT_TYPE_EXT = constant.Constant( 'GL_GEOMETRY_INPUT_TYPE_EXT', 0x8DDB )
GL_GEOMETRY_OUTPUT_TYPE_EXT = constant.Constant( 'GL_GEOMETRY_OUTPUT_TYPE_EXT', 0x8DDC )
GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT = constant.Constant( 'GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT', 0x8C29 )
GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT = constant.Constant( 'GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT', 0x8DA7 )
GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT', 0x8DA8 )
GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT', 0x8DA9 )
GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT = constant.Constant( 'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT', 0x8CD4 )
GL_PROGRAM_POINT_SIZE_EXT = constant.Constant( 'GL_PROGRAM_POINT_SIZE_EXT', 0x8642 )
glProgramVertexLimitNV = platform.createExtensionFunction(
'glProgramVertexLimitNV', dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum, constants.GLint,),
doc = 'glProgramVertexLimitNV( GLenum(target), GLint(limit) ) -> None',
argNames = ('target', 'limit',),
)
glFramebufferTextureEXT = platform.createExtensionFunction(
'glFramebufferTextureEXT', dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum, constants.GLenum, constants.GLuint, constants.GLint,),
doc = 'glFramebufferTextureEXT( GLenum(target), GLenum(attachment), GLuint(texture), GLint(level) ) -> None',
argNames = ('target', 'attachment', 'texture', 'level',),
)
glFramebufferTextureLayerEXT = platform.createExtensionFunction(
'glFramebufferTextureLayerEXT', dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum, constants.GLenum, constants.GLuint, constants.GLint, constants.GLint,),
doc = 'glFramebufferTextureLayerEXT( GLenum(target), GLenum(attachment), GLuint(texture), GLint(level), GLint(layer) ) -> None',
argNames = ('target', 'attachment', 'texture', 'level', 'layer',),
)
glFramebufferTextureFaceEXT = platform.createExtensionFunction(
'glFramebufferTextureFaceEXT', dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum, constants.GLenum, constants.GLuint, constants.GLint, constants.GLenum,),
doc = 'glFramebufferTextureFaceEXT( GLenum(target), GLenum(attachment), GLuint(texture), GLint(level), GLenum(face) ) -> None',
argNames = ('target', 'attachment', 'texture', 'level', 'face',),
)
def glInitGeometryProgram4NV():
'''Return boolean indicating whether this extension is available'''
return extensions.hasGLExtension( EXTENSION_NAME )
| 54.927536
| 129
| 0.814248
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,537
| 0.405541
|
bbb5a419508bb770bb1a305a0d8d6adcb54e823a
| 244
|
py
|
Python
|
android_notification.py
|
jbytes1027/notify-mirror
|
b4a6bb94578dd4c9130731f153f901868f26cded
|
[
"CC0-1.0"
] | null | null | null |
android_notification.py
|
jbytes1027/notify-mirror
|
b4a6bb94578dd4c9130731f153f901868f26cded
|
[
"CC0-1.0"
] | null | null | null |
android_notification.py
|
jbytes1027/notify-mirror
|
b4a6bb94578dd4c9130731f153f901868f26cded
|
[
"CC0-1.0"
] | null | null | null |
class AndroidNotification:
def __init__(self, header, body, application, package, id_num):
self.application = application
self.body = body
self.package = package
self.header = header
self.id = id_num
| 30.5
| 67
| 0.643443
| 243
| 0.995902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
bbb6e0ba861fbcb599f8a5421f34367fccb32fdd
| 1,158
|
py
|
Python
|
src/redcli/application/services/logger.py
|
Zhouhao12345/redcli
|
8a8260b0799e8524d0c339df8dfe6bcfb22f1841
|
[
"MIT"
] | 6
|
2019-12-02T02:38:40.000Z
|
2021-02-05T06:40:56.000Z
|
src/redcli/application/services/logger.py
|
Zhouhao12345/redcli
|
8a8260b0799e8524d0c339df8dfe6bcfb22f1841
|
[
"MIT"
] | null | null | null |
src/redcli/application/services/logger.py
|
Zhouhao12345/redcli
|
8a8260b0799e8524d0c339df8dfe6bcfb22f1841
|
[
"MIT"
] | 1
|
2019-12-02T04:19:08.000Z
|
2019-12-02T04:19:08.000Z
|
from ..constant import Service as Service_Key
from .base import Service
import logging
class LoggerService(Service):
def init(self, services):
config_service = services.get_service(Service_Key.CONFIG_LOCAL)
self.date_format = config_service.get_config_value(
"LOGGER", "DateFormat")
self.format_str = config_service.get_config_value(
"LOGGER", "FormatString"
)
level_str = config_service.get_config_value(
"LOGGER", "LEVEL"
)
self.level = getattr(logging, level_str)
self.file_path = config_service.get_config_value(
"LOGGER", "FilePath"
)
def start(self):
self._logging = logging
self._logging.basicConfig(
filename=self.file_path,
level=self.level,
filemode="w",
format=self.format_str,
datefmt=self.date_format,
)
def close(self):
del self._logging
del self.file_path
del self.level
del self.format_str
del self.date_format
@property
def logging(self):
return self._logging
| 27.571429
| 71
| 0.607945
| 1,069
| 0.923143
| 0
| 0
| 61
| 0.052677
| 0
| 0
| 78
| 0.067358
|
bbb715e2f390dc18e086c2e84959bf7e23c5b5d0
| 324
|
py
|
Python
|
Exec/GravityTests/zeldovich_dm/analysis/zeldovich.py
|
Gosenca/axionyx_1.0
|
7e2a723e00e6287717d6d81b23db32bcf6c3521a
|
[
"BSD-3-Clause-LBNL"
] | 72
|
2017-06-16T17:20:25.000Z
|
2021-12-18T12:45:58.000Z
|
Exec/GravityTests/zeldovich_dm/analysis/zeldovich.py
|
Gosenca/axionyx_1.0
|
7e2a723e00e6287717d6d81b23db32bcf6c3521a
|
[
"BSD-3-Clause-LBNL"
] | 37
|
2017-05-07T04:20:04.000Z
|
2022-02-03T19:16:59.000Z
|
Exec/GravityTests/zeldovich_dm/analysis/zeldovich.py
|
Gosenca/axionyx_1.0
|
7e2a723e00e6287717d6d81b23db32bcf6c3521a
|
[
"BSD-3-Clause-LBNL"
] | 30
|
2017-05-18T21:14:45.000Z
|
2022-03-23T20:50:24.000Z
|
import numpy as np
def perturbed_x(q, z, z_c, k):
return (q - (1.0 + z_c) / (1.0 + z) * np.sin(k * q) / k)
def perturbed_v_x(q, z, z_c, k, H_0):
return (-H_0 * (1.0 + z_c) * np.sqrt(1.0 + z) * np.sin(k * q) / (k))
def get_scale_factor(z):
return 1.0 / (1.0 + z)
def get_redshift(a):
return 1.0 / a - 1.0
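# Hedged mini-demo (added for illustration; the redshift, caustic redshift and
# wavenumber values below are hypothetical).
if __name__ == '__main__':
    q = np.linspace(0.0, 1.0, 5)
    print(perturbed_x(q, z=5.0, z_c=2.0, k=2.0 * np.pi))
    print(get_scale_factor(5.0))  # expected: 1/6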
| 23.142857
| 72
| 0.530864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
bbb782cb3b44f4866569b38680bb03a8e7730b26
| 10,441
|
py
|
Python
|
pgtrigger/tests/test_multi_db.py
|
tiholic/django-pgtrigger
|
ef8878115bef1a933405ab86d7f7c117252d970c
|
[
"BSD-3-Clause"
] | 135
|
2020-06-27T14:02:46.000Z
|
2021-05-27T01:07:41.000Z
|
pgtrigger/tests/test_multi_db.py
|
tiholic/django-pgtrigger
|
ef8878115bef1a933405ab86d7f7c117252d970c
|
[
"BSD-3-Clause"
] | 25
|
2021-06-19T20:28:52.000Z
|
2022-03-22T23:14:59.000Z
|
pgtrigger/tests/test_multi_db.py
|
tiholic/django-pgtrigger
|
ef8878115bef1a933405ab86d7f7c117252d970c
|
[
"BSD-3-Clause"
] | 7
|
2020-06-29T02:26:05.000Z
|
2021-04-25T14:42:33.000Z
|
"""Tests multi-database support"""
import contextlib
import io
import sys
from django.core.management import call_command
import django.test
import pgtrigger
import pgtrigger.tests.models as test_models
@contextlib.contextmanager
def capture_stdout():
old_stdout = sys.stdout
sys.stdout = out = io.StringIO()
try:
yield out
finally:
sys.stdout = old_stdout
class ToLogRouter:
"""
Route the "ToLog" model to the "other" database
"""
route_app_labels = {'auth', 'contenttypes'}
def db_for_write(self, model, **hints):
"""
        Route writes of the ToLogModel to the "other" database.
"""
if model == test_models.ToLogModel:
return 'other'
return None
@django.test.override_settings(
DATABASE_ROUTERS=['pgtrigger.tests.test_multi_db.ToLogRouter']
)
class MultiDB(django.test.TestCase):
databases = ['default', 'other']
def setUp(self):
# Trigger installation is originally executed during
# test case setup, before any settings are overridden. Uninstall
# and re-install all triggers to make sure they are properly installed.
        # Doing so also ensures that pruning across multiple databases works
with self.settings(
DATABASE_ROUTERS=['pgtrigger.tests.test_multi_db.ToLogRouter']
):
call_command('pgtrigger', 'uninstall')
call_command('pgtrigger', 'install')
def test_full_ls(self):
with capture_stdout() as captured:
call_command('pgtrigger', 'ls')
lines = sorted(captured.getvalue().split('\n'))
assert lines == [
'',
'tests.CustomSoftDelete:soft_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.FSM:fsm'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.SoftDelete:soft_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.TestTrigger:protect_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.TestTrigger:protect_misc_insert'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:after_update_row_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:after_update_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:update_of_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
]
def test_single_db_enable(self):
call_command('pgtrigger', 'disable')
call_command('pgtrigger', 'enable', '--database', 'other')
with capture_stdout() as captured:
call_command('pgtrigger', 'ls')
lines = sorted(captured.getvalue().split('\n'))
assert lines == [
'',
'tests.CustomSoftDelete:soft_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
'tests.FSM:fsm'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
'tests.SoftDelete:soft_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
'tests.TestTrigger:protect_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
'tests.TestTrigger:protect_misc_insert'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
'tests.ToLogModel:after_update_row_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:after_update_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:update_of_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
]
def test_single_db_disable(self):
call_command('pgtrigger', 'disable', '--database', 'other')
with capture_stdout() as captured:
call_command('pgtrigger', 'ls')
lines = sorted(captured.getvalue().split('\n'))
assert lines == [
'',
'tests.CustomSoftDelete:soft_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.FSM:fsm'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.SoftDelete:soft_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.TestTrigger:protect_delete'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.TestTrigger:protect_misc_insert'
'\tdefault'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:after_update_row_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
'tests.ToLogModel:after_update_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
'tests.ToLogModel:update_of_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[91mDISABLED\x1b[0m',
]
def test_single_db_ls(self):
"""Only list a single database"""
with capture_stdout() as captured:
call_command('pgtrigger', 'ls', '--database', 'other')
lines = sorted(captured.getvalue().split('\n'))
assert lines == [
'',
'tests.ToLogModel:after_update_row_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:after_update_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:update_of_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
]
def test_single_db_uninstall(self):
"""Uninstall a single database and verify results"""
call_command('pgtrigger', 'uninstall', '--database', 'default')
with capture_stdout() as captured:
call_command('pgtrigger', 'ls')
lines = sorted(captured.getvalue().split('\n'))
assert lines == [
'',
'tests.CustomSoftDelete:soft_delete'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.FSM:fsm' '\tdefault' '\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.SoftDelete:soft_delete'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.TestTrigger:protect_delete'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.TestTrigger:protect_misc_insert'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.ToLogModel:after_update_row_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:after_update_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:update_of_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
]
def test_single_db_install(self):
"""Install a single database and verify results"""
call_command('pgtrigger', 'uninstall')
call_command('pgtrigger', 'install', '--database', 'other')
with capture_stdout() as captured:
call_command('pgtrigger', 'ls')
lines = sorted(captured.getvalue().split('\n'))
assert lines == [
'',
'tests.CustomSoftDelete:soft_delete'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.FSM:fsm' '\tdefault' '\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.SoftDelete:soft_delete'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.TestTrigger:protect_delete'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.TestTrigger:protect_misc_insert'
'\tdefault'
'\t\x1b[91mUNINSTALLED\x1b[0m',
'tests.ToLogModel:after_update_row_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:after_update_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
'tests.ToLogModel:update_of_statement_test'
'\tother'
'\t\x1b[92mINSTALLED\x1b[0m'
'\t\x1b[92mENABLED\x1b[0m',
]
def test_invalid_args(self):
with self.assertRaises(ValueError):
pgtrigger.get('uri', database='other')
| 37.422939
| 79
| 0.514702
| 9,940
| 0.952016
| 158
| 0.015133
| 9,861
| 0.94445
| 0
| 0
| 5,172
| 0.495355
|
bbb7c8daf68cfc0db3531104ebe1585494d2cd41
| 192
|
py
|
Python
|
jude_customization/jude_customization/doctype/atlantic_fluid_configuration/test_atlantic_fluid_configuration.py
|
mymi14s/jude_customization
|
6de7173a03cedf8c712c2d05453c821dff7eec58
|
[
"MIT"
] | null | null | null |
jude_customization/jude_customization/doctype/atlantic_fluid_configuration/test_atlantic_fluid_configuration.py
|
mymi14s/jude_customization
|
6de7173a03cedf8c712c2d05453c821dff7eec58
|
[
"MIT"
] | null | null | null |
jude_customization/jude_customization/doctype/atlantic_fluid_configuration/test_atlantic_fluid_configuration.py
|
mymi14s/jude_customization
|
6de7173a03cedf8c712c2d05453c821dff7eec58
|
[
"MIT"
] | 2
|
2020-11-09T16:22:52.000Z
|
2020-12-19T20:52:10.000Z
|
# Copyright (c) 2021, Anthony Emmanuel, github.com/mymi14ss and Contributors
# See license.txt
# import frappe
import unittest
class TestAtlanticFluidConfiguration(unittest.TestCase):
pass
| 21.333333
| 76
| 0.807292
| 62
| 0.322917
| 0
| 0
| 0
| 0
| 0
| 0
| 108
| 0.5625
|
bbb7c9d675be21d26531a6e1f3de3d231a427a1e
| 1,224
|
py
|
Python
|
scale/scale/local_settings_TRAVIS-CI.py
|
stevevarner/scale
|
9623b261db4ddcf770f00df16afc91176142bb7c
|
[
"Apache-2.0"
] | null | null | null |
scale/scale/local_settings_TRAVIS-CI.py
|
stevevarner/scale
|
9623b261db4ddcf770f00df16afc91176142bb7c
|
[
"Apache-2.0"
] | null | null | null |
scale/scale/local_settings_TRAVIS-CI.py
|
stevevarner/scale
|
9623b261db4ddcf770f00df16afc91176142bb7c
|
[
"Apache-2.0"
] | null | null | null |
# Settings file for use with travis-ci
# Include all the default settings.
from settings import *
# Use the following lines to enable developer/debug mode.
DEBUG = False
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# Set the external URL context here
FORCE_SCRIPT_NAME = '/'
USE_X_FORWARDED_HOST = True
ALLOWED_HOSTS = ["*"]
STATIC_ROOT = 'static'
STATIC_URL = '/static/'
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# Not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'
SECRET_KEY = "0fnk28edjh"
# The template database to use when creating your new database.
# By using your own template that already includes the postgis extension,
# you can avoid needing to run the unit tests as a PostgreSQL superuser.
#POSTGIS_TEMPLATE = 'scale'
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'scale',
'USER': 'postgres',
'PASSWORD': '',
'HOST': 'localhost',
},
}
# Master settings
MESOS_MASTER = 'zk://localhost:2181/mesos'
# Metrics collection directory
METRICS_DIR = '/tmp'
| 26.042553
| 73
| 0.718137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 903
| 0.737745
|
bbbaccac8596eecef7a731177661a6286ed440a5
| 1,316
|
py
|
Python
|
scoring/dictionary/YAQ93.py
|
majazeh/risloo-samples
|
aadf27912a5044663698fa14fa781c644ea3f548
|
[
"Unlicense"
] | null | null | null |
scoring/dictionary/YAQ93.py
|
majazeh/risloo-samples
|
aadf27912a5044663698fa14fa781c644ea3f548
|
[
"Unlicense"
] | null | null | null |
scoring/dictionary/YAQ93.py
|
majazeh/risloo-samples
|
aadf27912a5044663698fa14fa781c644ea3f548
|
[
"Unlicense"
] | 1
|
2021-03-07T09:15:55.000Z
|
2021-03-07T09:15:55.000Z
|
f1 = 'intentionally_not_thinking_about_upsetting_things'
f2 = 'substance_abuse'
f3 = 'denial_of_unhappiness'
f4 = 'excessive_rationality_and_control'
f5 = 'suppression_of_anger'
f6 = 'psychosomatic_symptoms'
f7 = 'denial_of_memories'
f8 = 'withdrawal_from_people'
f9 = 'avoidance_through_sleep_and_lack_of_energy'
f10 = 'distraction_through_activity'
f11 = 'self_soothing_like_eating_shopping_etc'
f12 = 'passive_blocking_of_upsetting_emotions'
f13 = 'passive_distraction_fantasy_daydreaming_television'
f14 = 'avoidance_of_upsetting_situations'
factors_names = (f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f11,f12,f13,f14)
factors = {
1 :(f1,)
, 2 :(f2,)
, 3 :(f3,)
, 4 :(f3,)
, 5 :(f4,)
, 6 :(f5,)
, 7:(f2,)
, 8 :(f7 ,)
, 9 :(f2,f11,)
, 10 :(f6 ,)
, 11 :(f12 ,)
, 12 :(f6 ,)
, 13 :(f5 , f8,)
, 14 :(f9 ,)
, 15 :(f6 ,)
, 16 :(f13 ,)
, 17 :(f4,)
, 18 :(f5 ,)
, 19 :(f4,)
, 20 :(f8 ,)
, 21 :(f7,)
, 22 :(f9 ,)
, 23 :(f10 ,)
, 24 :(f10 ,)
, 25 :(f13 ,)
, 26 :(f11 ,)
, 27 :(f7 ,)
, 28:(f10 ,)
, 29 :(f3,)
, 30 :(f8 ,)
, 31 :(f1 ,)
, 32 :(f1 ,)
, 33 :(f12 ,)
, 34 :(f3,)
, 35 :(f4,)
, 36 :(f11 ,)
, 37 :(f14 ,)
, 38 :(f6,)
, 39 :(f12 ,)
, 40 :(f4 , f12,)
}
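# Illustrative lookups (not part of the original scoring data): each item
# number maps to the factor name(s) it loads on, e.g.
#   factors[9]  -> (f2, f11)   # substance abuse + self-soothing
#   factors[40] -> (f4, f12)   # rationality/control + passive blocking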
| 21.225806
| 68
| 0.521277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 458
| 0.348024
|
bbbae9f34a49725dac76a6810057af802194d6d9
| 327
|
py
|
Python
|
labgraph/websockets/ws_server/ws_server_stream_message.py
|
Yunusbcr/labgraph
|
a00ae7098b7b0e0eda8ce2e7e62dae86854616fb
|
[
"MIT"
] | 124
|
2021-07-14T21:25:59.000Z
|
2022-03-08T20:40:16.000Z
|
labgraph/websockets/ws_server/ws_server_stream_message.py
|
Yunusbcr/labgraph
|
a00ae7098b7b0e0eda8ce2e7e62dae86854616fb
|
[
"MIT"
] | 46
|
2021-07-16T18:41:11.000Z
|
2022-03-31T20:53:00.000Z
|
labgraph/websockets/ws_server/ws_server_stream_message.py
|
Yunusbcr/labgraph
|
a00ae7098b7b0e0eda8ce2e7e62dae86854616fb
|
[
"MIT"
] | 22
|
2021-07-16T18:34:56.000Z
|
2022-03-31T15:12:06.000Z
|
#!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import numpy as np
from ...messages import Message
class WSStreamMessage(Message):
"""
A message representing data that was/will be communicated
to WebSocket.
"""
samples: np.float64
stream_name: str
stream_id: str
| 18.166667
| 61
| 0.703364
| 192
| 0.587156
| 0
| 0
| 0
| 0
| 0
| 0
| 168
| 0.513761
|
bbbb396fd5ddce2bf132ab9fc786735f3c60216f
| 11,400
|
py
|
Python
|
scripts/lib/smart.py
|
lpenuelac/ImageAnalysis
|
a01b1278cca92e45fef6f5e41d1310cfbb041308
|
[
"MIT"
] | 93
|
2015-11-26T14:15:51.000Z
|
2022-03-10T13:54:21.000Z
|
scripts/lib/smart.py
|
lpenuelac/ImageAnalysis
|
a01b1278cca92e45fef6f5e41d1310cfbb041308
|
[
"MIT"
] | 19
|
2017-04-06T11:09:21.000Z
|
2022-03-05T20:12:39.000Z
|
scripts/lib/smart.py
|
lpenuelac/ImageAnalysis
|
a01b1278cca92e45fef6f5e41d1310cfbb041308
|
[
"MIT"
] | 30
|
2017-06-12T16:08:51.000Z
|
2022-01-28T17:34:04.000Z
|
# code to estimate world surface elevation and EKF yaw error from
# image direct pose information.
# - triangulate image features (in 3d) based on camera poses
# - use essential/fundamental matrix + camera pose to estimate yaw error
# - use affine transformation + camera pose to estimate yaw error
import cv2
import math
import numpy as np
import os
from props import getNode
import props_json
from . import camera
from . import image
from .logger import log, qlog
from . import project
from . import srtm
r2d = 180 / math.pi
d2r = math.pi / 180
smart_node = getNode("/smart", True)
# compute the 3d triangulation of the matches between a pair of images
def triangulate_features(i1, i2):
# quick sanity checks
if i1 == i2:
return None
if not i2.name in i1.match_list:
return None
if len(i1.match_list[i2.name]) == 0:
return None
if not i1.kp_list or not len(i1.kp_list):
i1.load_features()
if not i2.kp_list or not len(i2.kp_list):
i2.load_features()
# camera calibration
K = camera.get_K()
IK = np.linalg.inv(K)
# get poses
rvec1, tvec1 = i1.get_proj()
rvec2, tvec2 = i2.get_proj()
R1, jac = cv2.Rodrigues(rvec1)
PROJ1 = np.concatenate((R1, tvec1), axis=1)
R2, jac = cv2.Rodrigues(rvec2)
PROJ2 = np.concatenate((R2, tvec2), axis=1)
# setup data structures for cv2 call
uv1 = []; uv2 = []; indices = []
for pair in i1.match_list[i2.name]:
p1 = i1.kp_list[ pair[0] ].pt
p2 = i2.kp_list[ pair[1] ].pt
uv1.append( [p1[0], p1[1], 1.0] )
uv2.append( [p2[0], p2[1], 1.0] )
pts1 = IK.dot(np.array(uv1).T)
pts2 = IK.dot(np.array(uv2).T)
points = cv2.triangulatePoints(PROJ1, PROJ2, pts1[:2], pts2[:2])
points /= points[3]
return points
# find (forward) affine transformation between feature pairs
def find_affine(i1, i2):
# quick sanity checks
if i1 == i2:
return None
if not i2.name in i1.match_list:
return None
if len(i1.match_list[i2.name]) == 0:
return None
if not i1.kp_list or not len(i1.kp_list):
i1.load_features()
if not i2.kp_list or not len(i2.kp_list):
i2.load_features()
# affine transformation from i2 uv coordinate system to i1
uv1 = []; uv2 = []; indices = []
for pair in i1.match_list[i2.name]:
uv1.append( i1.kp_list[ pair[0] ].pt )
uv2.append( i2.kp_list[ pair[1] ].pt )
uv1 = np.float32([uv1])
uv2 = np.float32([uv2])
affine, status = \
cv2.estimateAffinePartial2D(uv2, uv1)
return affine
# return individual components of affine transform: rot, tx, ty, sx,
# sy (units are degrees and pixels)
def decompose_affine(affine):
tx = affine[0][2]
ty = affine[1][2]
a = affine[0][0]
b = affine[0][1]
c = affine[1][0]
d = affine[1][1]
sx = math.sqrt( a*a + b*b )
if a < 0.0:
sx = -sx
sy = math.sqrt( c*c + d*d )
if d < 0.0:
sy = -sy
angle_deg = math.atan2(-b,a) * 180.0/math.pi
if angle_deg < -180.0:
angle_deg += 360.0
if angle_deg > 180.0:
angle_deg -= 360.0
return (angle_deg, tx, ty, sx, sy)
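# quick sanity sketch for decompose_affine (illustrative, not from the
# source): a hand-built rotation + translation should decompose back into
# its components; note atan2(-b, a) flips the sign of the input angle.
def check_decompose_affine():
    t = 30.0 * d2r
    affine = np.array([[ math.cos(t), math.sin(t),  5.0],
                       [-math.sin(t), math.cos(t), -2.0]])
    rot, tx, ty, sx, sy = decompose_affine(affine)
    assert abs(rot + 30.0) < 1e-9 and tx == 5.0 and ty == -2.0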
# average of the triangulated points (converted to positive elevation)
def estimate_surface_elevation(i1, i2):
points = triangulate_features(i1, i2)
(ned1, ypr1, quat1) = i1.get_camera_pose()
(ned2, ypr2, quat2) = i2.get_camera_pose()
diff = np.array(ned2) - np.array(ned1)
dist_m = np.linalg.norm( diff )
# num_matches = points.shape[1]
if points is None:
return None, None, dist_m
else:
        # points are triangulated in the NED coordinates, so
# invert the vertical (down) average before returning the
# answer.
return -np.average(points[2]), np.std(points[2]), dist_m
# Estimate image pose yaw error (based on the found pairs' affine
# transform, original image pose, and gps positions; assumes a mostly
# nadir camera pose.)  After computing the affine transform, project
# image 2's center uv into image 1's uv space and compute the approximate
# course in local uv space, then add this to the direct pose yaw estimate
# and compare to the gps course.
def estimate_yaw_error(i1, i2):
affine = find_affine(i1, i2)
if affine is None:
return None, None, None, None
# fyi ...
# print(i1.name, 'vs', i2.name)
# print(" affine:\n", affine)
(rot, tx, ty, sx, sy) = decompose_affine(affine)
# print(" ", rot, tx, ty, sx, sy)
if abs(ty) > 0:
weight = abs(ty / tx)
else:
weight = abs(tx)
# ground course between camera poses
(ned1, ypr1, quat1) = i1.get_camera_pose()
(ned2, ypr2, quat2) = i2.get_camera_pose()
diff = np.array(ned2) - np.array(ned1)
dist = np.linalg.norm( diff )
dir = diff / dist
print(" dist:", dist, 'ned dir:', dir[0], dir[1], dir[2])
crs_gps = 90 - math.atan2(dir[0], dir[1]) * r2d
if crs_gps < 0: crs_gps += 360
if crs_gps > 360: crs_gps -= 360
# center pixel of i2 in i1's uv coordinate system
(w, h) = camera.get_image_params()
cx = int(w*0.5)
cy = int(h*0.5)
print("center:", [cx, cy])
newc = affine.dot(np.float32([cx, cy, 1.0]))[:2]
cdiff = [ newc[0] - cx, cy - newc[1] ]
#print("new center:", newc)
#print("center diff:", cdiff)
# estimated course based on i1 pose and [local uv coordinate
# system] affine transform
crs_aff = 90 - math.atan2(cdiff[1], cdiff[0]) * r2d
(_, air_ypr1, _) = i1.get_aircraft_pose()
#print(" aircraft yaw: %.1f" % air_ypr1[0])
#print(" affine course: %.1f" % crs_aff)
#print(" ground course: %.1f" % crs_gps)
crs_fit = air_ypr1[0] + crs_aff
yaw_error = crs_gps - crs_fit
if yaw_error < -180: yaw_error += 360
if yaw_error > 180: yaw_error -= 360
print(" estimated yaw error: %.1f" % yaw_error)
# aircraft yaw (est) + affine course + yaw error = ground course
return yaw_error, dist, crs_aff, weight
# compute the pairwise surface estimate and then update the property
# tree records
def update_surface_estimate(i1, i2):
avg, std, dist_m = estimate_surface_elevation(i1, i2)
if avg is None:
return None, None
i1_node = smart_node.getChild(i1.name, True)
i2_node = smart_node.getChild(i2.name, True)
tri1_node = i1_node.getChild("tri_surface_pairs", True)
tri2_node = i2_node.getChild("tri_surface_pairs", True)
# update pairwise info in the property tree
#weight = len(i1.match_list[i2.name])
weight = dist_m * dist_m
pair1_node = tri1_node.getChild(i2.name, True)
pair2_node = tri2_node.getChild(i1.name, True)
pair1_node.setFloat("surface_m", float("%.1f" % avg))
pair1_node.setInt("weight", weight)
pair1_node.setFloat("stddev", float("%.1f" % std))
pair1_node.setInt("dist_m", dist_m)
pair2_node.setFloat("surface_m", float("%.1f" % avg))
pair2_node.setInt("weight", weight)
pair2_node.setFloat("stddev", float("%.1f" % std))
pair2_node.setInt("dist_m", dist_m)
# update the average surface values
cutoff_std = 25 # more than this suggests a bad set of matches
sum1 = 0
count1 = 0
for child in tri1_node.getChildren():
pair_node = tri1_node.getChild(child)
surf = pair_node.getFloat("surface_m")
weight = pair_node.getInt("weight")
stddev = pair_node.getFloat("stddev")
if stddev < cutoff_std:
sum1 += surf * weight
count1 += weight
if count1 > 0:
i1_node.setFloat("tri_surface_m", float("%.1f" % (sum1 / count1)))
sum2 = 0
count2 = 0
for child in tri2_node.getChildren():
pair_node = tri2_node.getChild(child)
surf = pair_node.getFloat("surface_m")
weight = pair_node.getInt("weight")
stddev = pair_node.getFloat("stddev")
if stddev < cutoff_std:
sum2 += surf * weight
count2 += weight
if count2 > 0:
i2_node.setFloat("tri_surface_m", float("%.1f" % (sum2 / count2)))
return avg, std
# compute the pairwise yaw error estimate and then update the property
# tree records
def update_yaw_error_estimate(i1, i2):
yaw_error, dist, crs_affine, weight = estimate_yaw_error(i1, i2)
if yaw_error is None:
return 0
i1_node = smart_node.getChild(i1.name, True)
yaw_node = i1_node.getChild("yaw_pairs", True)
# update pairwise info in the property tree
pair_node = yaw_node.getChild(i2.name, True)
pair_node.setFloat("yaw_error", "%.1f" % yaw_error)
pair_node.setFloat("dist_m", "%.1f" % dist)
pair_node.setFloat("relative_crs", "%.1f" % crs_affine)
pair_node.setFloat("weight", "%.1f" % weight)
sum = 0
count = 0
for child in yaw_node.getChildren():
pair_node = yaw_node.getChild(child)
yaw_error = pair_node.getFloat("yaw_error")
        weight = pair_node.getFloat("weight")
dist_m = pair_node.getFloat("dist_m")
if dist_m >= 0.5 and abs(yaw_error) <= 30:
sum += yaw_error * weight
count += weight
#else:
# log("yaw error ignored:", i1.name, i2.name, "%.1fm" % dist_m,
# "%.1f(deg)" % yaw_error)
if count > 0:
i1_node.setFloat("yaw_error", float("%.1f" % (sum / count)))
return sum / count
else:
return 0
def get_yaw_error_estimate(i1):
i1_node = smart_node.getChild(i1.name, True)
if i1_node.hasChild("yaw_error"):
return i1_node.getFloat("yaw_error")
else:
return 0.0
# return the average of estimated surfaces below the image pair
def get_surface_estimate(i1, i2):
i1_node = smart_node.getChild(i1.name, True)
i2_node = smart_node.getChild(i2.name, True)
tri1_node = i1_node.getChild("tri_surface_pairs", True)
tri2_node = i2_node.getChild("tri_surface_pairs", True)
count = 0
sum = 0
if i1_node.hasChild("tri_surface_m"):
sum += i1_node.getFloat("tri_surface_m")
count += 1
if i2_node.hasChild("tri_surface_m"):
sum += i2_node.getFloat("tri_surface_m")
count += 1
if count > 0:
return sum / count
# no triangulation estimate yet, fall back to SRTM lookup
g1 = i1_node.getFloat("srtm_surface_m")
g2 = i2_node.getFloat("srtm_surface_m")
ground_m = (g1 + g2) * 0.5
qlog(" SRTM ground (no triangulation yet): %.1f" % ground_m)
return ground_m
# find srtm surface altitude under each camera pose
def update_srtm_elevations(proj):
for image in proj.image_list:
ned, ypr, quat = image.get_camera_pose()
surface = srtm.ned_interp([ned[0], ned[1]])
image_node = smart_node.getChild(image.name, True)
image_node.setFloat("srtm_surface_m", float("%.1f" % surface))
def set_yaw_error_estimates(proj):
for image in proj.image_list:
image_node = smart_node.getChild(image.name, True)
yaw_node = image_node.getChild("yaw_pairs", True)
yaw_error_deg = yaw_node.getFloat("yaw_error")
image.set_aircraft_yaw_error_estimate(yaw_error_deg)
def load(analysis_dir):
surface_file = os.path.join(analysis_dir, "smart.json")
props_json.load(surface_file, smart_node)
def save(analysis_dir):
surface_file = os.path.join(analysis_dir, "smart.json")
props_json.save(surface_file, smart_node)
| 33.236152
| 78
| 0.635351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,110
| 0.272807
|
bbbbbd636a90f7a83f3583eed93d7178ff373d2b
| 162
|
py
|
Python
|
notification/urls.py
|
zaska-afk/twitterclone-zaska-afk
|
9a2c0d86f4c359fc6f943e2f0cb56c1681bc2bb2
|
[
"MIT"
] | null | null | null |
notification/urls.py
|
zaska-afk/twitterclone-zaska-afk
|
9a2c0d86f4c359fc6f943e2f0cb56c1681bc2bb2
|
[
"MIT"
] | null | null | null |
notification/urls.py
|
zaska-afk/twitterclone-zaska-afk
|
9a2c0d86f4c359fc6f943e2f0cb56c1681bc2bb2
|
[
"MIT"
] | null | null | null |
from django.urls import path
from notification import views
urlpatterns = [
path('notifications/<int:id>/', views.notification_view, name='notifications'),
]
| 27
| 83
| 0.759259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 40
| 0.246914
|
bbbdb4df5c383e2a41743d8bacdff942a6c94c7d
| 3,229
|
py
|
Python
|
portal/migrations/versions/4b1e5b7b69eb_.py
|
ivan-c/truenth-portal
|
0b9d39ae43f42ea3413ed9634f295f5d856cbc77
|
[
"BSD-3-Clause"
] | 3
|
2017-01-15T10:11:57.000Z
|
2018-10-02T23:46:44.000Z
|
portal/migrations/versions/4b1e5b7b69eb_.py
|
pep8speaks/true_nth_usa_portal
|
31ff755b0cfe61ab908e2a399e3c41ef17ca8c16
|
[
"BSD-3-Clause"
] | 876
|
2016-04-04T20:45:11.000Z
|
2019-02-28T00:10:36.000Z
|
portal/migrations/versions/4b1e5b7b69eb_.py
|
pep8speaks/true_nth_usa_portal
|
31ff755b0cfe61ab908e2a399e3c41ef17ca8c16
|
[
"BSD-3-Clause"
] | 9
|
2016-04-13T01:18:55.000Z
|
2018-09-19T20:44:23.000Z
|
"""empty message
Revision ID: 4b1e5b7b69eb
Revises: 13d1c714823a
Create Date: 2017-01-19 12:36:55.339537
"""
# revision identifiers, used by Alembic.
revision = '4b1e5b7b69eb'
down_revision = '13d1c714823a'
import re
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker
from portal.models.audit import Audit
from portal.models.user import User
Session = sessionmaker()
def extract_context(comment):
contexts = [
('login', ['login', 'logout']),
('assessment', ['patient report', 'questionnaireresponse']),
('authentication', ['assuming identity', 'service',
'inadequate permission', 'identity challenge',
'access token']),
('intervention', ['intervention', r'client .* assuming role',
r'client .* releasing role', r'updated .* using']),
('account', ['register', 'merging', 'account', 'marking deleted',
'purging', 'registration']),
('user', ['time of death', 'deceased', 'demographics']),
('organization', ['organization', r'adding .* to']),
('consent', ['consent']),
('observation', ['observation', r'set codeableconcept .* on user']),
('group', ['group']),
('procedure', ['procedure']),
('relationship', ['relationship']),
('role', ['role']),
('tou', ['tou']),
('other', ['remote', 'test'])
]
for ct in contexts:
for searchterm in ct[1]:
if re.search(searchterm, comment):
return ct[0]
return 'other'
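# illustrative spot checks (comments only, assuming lowercased input as in
# upgrade() below):
#   extract_context("user 123 login ok") -> 'login'
#   extract_context("updated consent")   -> 'consent'
#   extract_context("unrecognized text") -> 'other'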
def upgrade():
op.add_column('audit', sa.Column('subject_id', sa.Integer()))
op.create_foreign_key('audit_subject_id_fkey', 'audit',
'users', ['subject_id'], ['id'])
op.add_column('audit', sa.Column('context', sa.Text(), nullable=True))
# copying user_id to subject_id for existing audit rows
bind = op.get_bind()
session = Session(bind=bind)
for audit in session.query(Audit):
# use user_id as subject_id by default
audit.subject_id = audit.user_id
# use 'other' as context by default
audit.context = "other"
if audit.comment:
# if comment references changed user, use that as subject_id
audit_comment_list = audit.comment.lower().split()
if ("user" in audit_comment_list and
len(audit_comment_list) > audit_comment_list.index("user") +
1):
subj_id = audit_comment_list[audit_comment_list.index(
"user") + 1]
if subj_id.isdigit() and session.query(User).filter_by(id=subj_id).first():
audit.subject_id = int(subj_id)
# if possible, use context extracted from comment
audit.context = extract_context(audit.comment.lower())
session.commit()
op.alter_column('audit', 'subject_id', nullable=False)
op.alter_column('audit', 'context', nullable=False)
def downgrade():
op.drop_column('audit', 'context')
op.drop_constraint('audit_subject_id_fkey', 'audit', type_='foreignkey')
op.drop_column('audit', 'subject_id')
| 33.28866
| 91
| 0.59585
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,270
| 0.393311
|
bbbf9065c2092f796276d52d4458d16e60c9b5cb
| 97
|
py
|
Python
|
tadataka/__init__.py
|
IshitaTakeshi/DVO
|
2c5a3db1db7e651bfaa7808bbf79a6c1c6a42fc5
|
[
"Apache-2.0"
] | 7
|
2019-12-20T07:19:11.000Z
|
2021-07-22T03:25:12.000Z
|
tadataka/__init__.py
|
IshitaTakeshi/DVO
|
2c5a3db1db7e651bfaa7808bbf79a6c1c6a42fc5
|
[
"Apache-2.0"
] | null | null | null |
tadataka/__init__.py
|
IshitaTakeshi/DVO
|
2c5a3db1db7e651bfaa7808bbf79a6c1c6a42fc5
|
[
"Apache-2.0"
] | null | null | null |
from tadataka.visual_odometry import VisualOdometry
from tadataka.camera import CameraParameters
| 32.333333
| 51
| 0.896907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
bbc06ee508b4f1069613557ce6ed45315a87cb10
| 639
|
py
|
Python
|
bisect/36003.py
|
simonjayhawkins/pandas
|
9f571c58d7796dac8fd1aa2301cf4aa30ad7143a
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2022-02-22T17:13:16.000Z
|
2022-02-22T17:13:16.000Z
|
bisect/36003.py
|
simonjayhawkins/pandas
|
9f571c58d7796dac8fd1aa2301cf4aa30ad7143a
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
bisect/36003.py
|
simonjayhawkins/pandas
|
9f571c58d7796dac8fd1aa2301cf4aa30ad7143a
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
import datetime
import pandas as pd
import pandas.testing as tm
print(pd.__version__)
df = pd.DataFrame(
{
"A": ["X", "Y"],
"B": [
datetime.datetime(2005, 1, 1, 10, 30, 23, 540000),
datetime.datetime(3005, 1, 1, 10, 30, 23, 540000),
],
}
)
print(df)
print(df.dtypes)
result = df.groupby("A").B.max()
print(result)
expected = pd.Series(
[
pd.Timestamp("2005-01-01 10:30:23.540000"),
datetime.datetime(3005, 1, 1, 10, 30, 23, 540000),
],
index=pd.Index(["X", "Y"], dtype="object", name="A"),
name="B",
)
tm.assert_series_equal(result, expected)
| 18.257143
| 62
| 0.56025
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 63
| 0.098592
|
bbc4c27f8757a25bed0fa10e2091d91ec6470472
| 1,090
|
py
|
Python
|
investment_report/urls.py
|
uktrade/pir-api
|
79747ceab042c42c287e2b7471f6dade70f68693
|
[
"MIT"
] | 1
|
2021-02-02T19:08:55.000Z
|
2021-02-02T19:08:55.000Z
|
investment_report/urls.py
|
uktrade/invest-pir-api
|
be56efddf9dfdf81c8557441a9a54d9a4dd4bab1
|
[
"MIT"
] | 21
|
2018-07-10T10:20:47.000Z
|
2022-03-24T09:36:29.000Z
|
investment_report/urls.py
|
uktrade/pir-api
|
79747ceab042c42c287e2b7471f6dade70f68693
|
[
"MIT"
] | 1
|
2021-02-04T11:28:37.000Z
|
2021-02-04T11:28:37.000Z
|
from django.conf.urls import url
from investment_report.views import utils
urlpatterns = [
    url(r'preview/(?P<lang>[\w-]+)/(?P<market>[\w-]+)/(?P<sector>[\w-]+)/pdf',
utils.investment_report_pdf,
{'moderated': False},
name='preview_investment_report_pdf'),
    url(r'current/(?P<lang>[\w-]+)/(?P<market>[\w-]+)/(?P<sector>[\w-]+)/pdf',
utils.investment_report_pdf,
name='investment_report_pdf'
),
    url(r'preview/(?P<lang>[\w-]+)/(?P<market>[\w-]+)/(?P<sector>[\w-]+)/html',
utils.investment_report_html,
{'moderated': False},
name='investment_report_html'
),
    url(r'current/(?P<lang>[\w-]+)/(?P<market>[\w-]+)/(?P<sector>[\w-]+)/html',
utils.investment_report_html,
{'moderated': True},
name='investment_report_html'
),
url('pir_csv', utils.pir_csv, name='pir_csv'),
url('devcss.css', utils.dev_css, name='dev_css'),
url('devcsslast.css', utils.dev_css_last, name='last_dev_css'),
url('devcssplain.css', utils.dev_css_plain, name='plain_dev_css'),
]
| 32.058824
| 78
| 0.589908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 510
| 0.46789
|
bbc5bdb8d114fadf0d6064c5f98954f19d27c09e
| 2,823
|
py
|
Python
|
docc/image.py
|
pirate42/docc
|
de8e27ce5ec2e1288bff8c17a1566bbc7a26ade1
|
[
"MIT"
] | 1
|
2017-08-18T08:03:55.000Z
|
2017-08-18T08:03:55.000Z
|
docc/image.py
|
pirate42/docc
|
de8e27ce5ec2e1288bff8c17a1566bbc7a26ade1
|
[
"MIT"
] | null | null | null |
docc/image.py
|
pirate42/docc
|
de8e27ce5ec2e1288bff8c17a1566bbc7a26ade1
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from docc.exceptions import APIError
class Image(object):
"""Represent an Image object (name and distribution information)"""
def __init__(self, identifier, name, distribution):
self.id = identifier
self.name = name
self.distribution = distribution
def __repr__(self):
return "<%s: %s>" % (self.id, self.name)
def __str__(self):
return "%s: %s, %s" % (self.id, self.name, self.distribution)
def __eq__(self, other):
return (
isinstance(other, self.__class__) and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self.__eq__(other)
def destroy(self, service):
"""Destroy this image"""
response = service.get("images/%s/destroy" % self.id)
status = response['status']
return status == 'OK'
@staticmethod
def get(service, identifier):
"""Return the Image given an identifier and None if not found.
:param identifier: TODO
:param service: The service object for the Digital Ocean account
that holds the images
"""
try:
response = service.get('images/%s' % identifier)
        except APIError:
            return None
encoded_image = response['image']
i = Image(encoded_image['id'],
encoded_image['name'],
encoded_image['distribution']
)
return i
@staticmethod
def __images(service, my_filter=None):
"""Return the a list containing all the know images.
:param service: The service object for the Digital Ocean account that
holds the images
        :param my_filter: Should be absent, 'my_images', or 'global'. If
        absent this will return all the images you have access to. 'my_images'
        will return the images you stored and 'global' the images available to
        all customers.
"""
if my_filter is None:
response = service.get("images")
else:
response = service.get("images", {'filter': my_filter})
encoded_images = response['images']
result = []
for encoded_image in encoded_images:
i = Image(encoded_image['id'], encoded_image['name'],
encoded_image['distribution'])
result.append(i)
return result
@staticmethod
def images(service):
"""Return all the known images included mine"""
return Image.__images(service)
@staticmethod
def my_images(service):
"""Return my images"""
return Image.__images(service, 'my_images')
@staticmethod
def global_images(service):
"""Return globally available images"""
return Image.__images(service, 'global')
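# Illustrative usage (assumes a configured docc service object):
#   img = Image.get(service, 12345)   # None if the identifier is unknown
#   mine = Image.my_images(service)   # only the images you stored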
| 29.715789
| 77
| 0.597237
| 2,768
| 0.980517
| 0
| 0
| 1,912
| 0.677294
| 0
| 0
| 1,010
| 0.357775
|
bbc710f8e283a69d3d2753dd21f132cb54a0681b
| 1,607
|
py
|
Python
|
ml_app_profile_analysis/ml_usage_analysis/fw_purpose_stat.py
|
RiS3-Lab/ModelXRay
|
60a98718eaa91eeb15d0737a9728cee4d714edd2
|
[
"Apache-2.0"
] | 13
|
2021-04-05T08:37:37.000Z
|
2021-12-27T13:14:02.000Z
|
ml_app_profile_analysis/ml_usage_analysis/fw_purpose_stat.py
|
RiS3-Lab/ModelXRay
|
60a98718eaa91eeb15d0737a9728cee4d714edd2
|
[
"Apache-2.0"
] | 1
|
2021-05-21T01:54:51.000Z
|
2021-11-19T07:36:47.000Z
|
ml_app_profile_analysis/ml_usage_analysis/fw_purpose_stat.py
|
RiS3-Lab/ModelXRay
|
60a98718eaa91eeb15d0737a9728cee4d714edd2
|
[
"Apache-2.0"
] | 2
|
2022-01-18T07:37:31.000Z
|
2022-03-10T19:42:35.000Z
|
#!/usr/bin/env python
import argparse
fwmap = {'tensorflow':1,'caffe':2,'sensetime':3,'ncnn':4,'other':5,'mxnet':6,'uls':7,'mace':8,'tflite':9}
def printres(th, res):
th.append('sum')
print "%15s"%(" "),
for i in th:
print i,
print ""
for use in res:
print "%15s"%(use),
fwres = res[use]
sum = 0
for key in th[:-1]:
val = fwres[key]
#print "(%s,%d)"%(fw,fwres[fw]),
#print "%d\t"%(fwres[fw]),
#sum = sum + fwres[fw]
print "%d\t"%(val),
sum = sum + val
print "%d\t"%(sum),
print ""
def process_filelist(filelist):
fdlist = []
for f in filelist:
fd = open(f,'r').readlines()
fdlist.append(fd)
fcmap = {'ocr':15,'speech':26,'idcard':37,'bankcard':48,'recog':59,'liveness':70,'track':81,'handdetect':92,'handwriting':103,'iris':114}
restable = {}
for key in fcmap:
res = {}
for fw in fwmap:
sum = 0
for fd in fdlist:
fields = fd[fcmap[key]+fwmap[fw] - 1].split(':')
#print(fields)
sum = sum + int(fields[1])
res[fw] = sum
restable[key] = res
thead = fwmap.keys()
return (thead,restable)
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog='modelxray')
parser.add_argument('filelist',
help = 'list of files to be processed, like a,b,c')
args = parser.parse_args()
filelist = args.filelist.split(',')
(th,res) = process_filelist(filelist)
printres(th, res)
| 29.218182
| 141
| 0.514001
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 389
| 0.242066
|
bbc76fa30b8fa88196b49d21f794c0cf68d9f17b
| 595
|
py
|
Python
|
email-service/rest_utils.py
|
Rudrarka/Alert-System
|
ed6d0c0566e70a13d13e6c65fa790f25d07fa99e
|
[
"MIT"
] | null | null | null |
email-service/rest_utils.py
|
Rudrarka/Alert-System
|
ed6d0c0566e70a13d13e6c65fa790f25d07fa99e
|
[
"MIT"
] | null | null | null |
email-service/rest_utils.py
|
Rudrarka/Alert-System
|
ed6d0c0566e70a13d13e6c65fa790f25d07fa99e
|
[
"MIT"
] | null | null | null |
import os
import requests
class RestApi:
def __init__(self, base_url: str, api_key: str):
self.url = base_url
self.headers = {'X-API-Key': api_key}
def patch_json(self, path, json):
resp = requests.patch(self.url + path, headers=self.headers, json=json)
resp.raise_for_status()
return resp.json()
class AlertApi(RestApi):
def __init__(self):
super().__init__(os.getenv('APP_URL'), os.getenv('API_KEY'))
def acknowledge_mail_sent(self, id):
params = {"email_sent": True}
return self.patch_json(f"/{id}", params)
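# Illustrative usage (assumes APP_URL and API_KEY are set in the environment):
#   api = AlertApi()
#   api.acknowledge_mail_sent(42)  # PATCH {APP_URL}/42 with {"email_sent": true}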
| 28.333333
| 79
| 0.638655
| 565
| 0.94958
| 0
| 0
| 0
| 0
| 0
| 0
| 49
| 0.082353
|
bbc82f2eff9c39c28075f8ddd776994572815053
| 1,660
|
py
|
Python
|
model/documentmanifest.py
|
beda-software/fhir-py-experements
|
363cfb894fa6f971b9be19340cae1b0a3a4377d8
|
[
"MIT"
] | null | null | null |
model/documentmanifest.py
|
beda-software/fhir-py-experements
|
363cfb894fa6f971b9be19340cae1b0a3a4377d8
|
[
"MIT"
] | null | null | null |
model/documentmanifest.py
|
beda-software/fhir-py-experements
|
363cfb894fa6f971b9be19340cae1b0a3a4377d8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.1-9346c8cc45 (http://hl7.org/fhir/StructureDefinition/DocumentManifest) on 2020-02-03.
# 2020, SMART Health IT.
import sys
from dataclasses import dataclass, field
from typing import ClassVar, Optional, List
from .backboneelement import BackboneElement
from .codeableconcept import CodeableConcept
from .domainresource import DomainResource
from .fhirdate import FHIRDate
from .fhirreference import FHIRReference
from .identifier import Identifier
@dataclass
class DocumentManifestRelated(BackboneElement):
""" Related things.
Related identifiers or resources associated with the DocumentManifest.
"""
resource_type: ClassVar[str] = "DocumentManifestRelated"
identifier: Optional[Identifier] = None
ref: Optional[FHIRReference] = None
@dataclass
class DocumentManifest(DomainResource):
""" A list that defines a set of documents.
A collection of documents compiled for a purpose together with metadata
that applies to the collection.
"""
resource_type: ClassVar[str] = "DocumentManifest"
masterIdentifier: Optional[Identifier] = None
identifier: Optional[List[Identifier]] = None
status: str = None
type: Optional[CodeableConcept] = None
subject: Optional[FHIRReference] = None
created: Optional[FHIRDate] = None
author: Optional[List[FHIRReference]] = None
recipient: Optional[List[FHIRReference]] = None
source: Optional[str] = None
description: Optional[str] = None
content: List[FHIRReference] = field(default_factory=list)
related: Optional[List[DocumentManifestRelated]] = None
| 33.2
| 113
| 0.751807
| 1,108
| 0.66747
| 0
| 0
| 1,130
| 0.680723
| 0
| 0
| 493
| 0.296988
|
bbc90b64134d3ab6cf219527bed05e8373ac58a3
| 717
|
py
|
Python
|
tests/core/test_lc.py
|
atsuki-kuwata/jaxsot
|
7de5dd964c951661892c79e4447e9f314885a0a9
|
[
"MIT"
] | 2
|
2022-03-01T23:50:08.000Z
|
2022-03-22T15:25:34.000Z
|
tests/core/test_lc.py
|
atsuki-kuwata/jaxsot
|
7de5dd964c951661892c79e4447e9f314885a0a9
|
[
"MIT"
] | 8
|
2022-02-19T00:06:34.000Z
|
2022-03-31T00:09:54.000Z
|
tests/core/test_lc.py
|
atsuki-kuwata/jaxsot
|
7de5dd964c951661892c79e4447e9f314885a0a9
|
[
"MIT"
] | 1
|
2022-03-01T22:39:00.000Z
|
2022-03-01T22:39:00.000Z
|
""" test for lc
"""
import pytest
import numpy as np
from jaxsot.core.weight import comp_weight, comp_omega
from jaxsot.core.lc import gen_lightcurve
from jaxsot.io.earth import binarymap
def test_lc():
mmap=binarymap(nside=16,show=False)
nside=16
inc=0.0
Thetaeq=np.pi
zeta=np.pi/3.0
Pspin=23.9344699/24.0
wspin=2*np.pi/Pspin
Porb=40.0
worb=2.*np.pi/Porb
N=1024
obst=np.linspace(0.0,Porb,N)
Thetav=worb*obst
Phiv=np.mod(wspin*obst,2*np.pi)
omega=comp_omega(nside)
WI,WV=comp_weight(nside,zeta,inc,Thetaeq,Thetav,Phiv,omega)
W=WI*WV
lc=gen_lightcurve(W,mmap,0.0)
assert np.abs(np.sum(lc)-63856.86)<0.1
if __name__=="__main__":
test_lc()
| 21.727273
| 63
| 0.679219
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 30
| 0.041841
|
bbca0d47c796ae1504de61caad61827265203834
| 4,057
|
py
|
Python
|
src/squad/kinematics/base.py
|
douglasdaly/spot-robot
|
7a4fdd7eb5fe5fc2d31180ed6b9f7ea21647bea2
|
[
"MIT"
] | null | null | null |
src/squad/kinematics/base.py
|
douglasdaly/spot-robot
|
7a4fdd7eb5fe5fc2d31180ed6b9f7ea21647bea2
|
[
"MIT"
] | null | null | null |
src/squad/kinematics/base.py
|
douglasdaly/spot-robot
|
7a4fdd7eb5fe5fc2d31180ed6b9f7ea21647bea2
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict
from squad.config import config
from squad.exceptions import FrozenError
class BodyParameters:
"""
Storage class for (static) body data/parameters.
"""
__slots__ = (
"_frozen",
"body_length_units",
"body_angle_units",
"l_body",
"w_body",
"h_body",
"l_hip",
"l_femur",
"l_leg",
"l_rod",
"l_rod_arm",
"l_rod_femur",
"h_rod_femur",
"l_rod_leg",
"cm_dx",
"cm_dy",
"cm_dz",
"leg_alpha_min",
"leg_alpha_max",
"leg_beta_min",
"leg_beta_max",
"leg_gamma_min",
"leg_gamma_max",
)
def __init__(self, **kwargs: float) -> None:
self._frozen = False
self.body_angle_units = kwargs.pop(
"body_angle_units",
config.body_angle_units,
)
self.body_length_units = kwargs.pop(
"body_length_units",
config.body_length_units,
)
self.l_body = kwargs.pop("l_body", config.l_body)
self.w_body = kwargs.pop("w_body", config.w_body)
self.h_body = kwargs.pop("h_body", config.h_body)
self.l_hip = kwargs.pop("l_hip", config.l_hip)
self.l_femur = kwargs.pop("l_femur", config.l_femur)
self.l_leg = kwargs.pop("l_leg", config.l_leg)
self.l_rod = kwargs.pop("l_rod", config.l_rod)
self.l_rod_arm = kwargs.pop("l_rod_arm", config.l_rod_arm)
self.l_rod_femur = kwargs.pop("l_rod_femur", config.l_rod_femur)
self.h_rod_femur = kwargs.pop("h_rod_femur", config.h_rod_femur)
self.l_rod_leg = kwargs.pop("l_rod_leg", config.l_rod_leg)
self.cm_dx = kwargs.pop("cm_dx", config.cm_dx)
self.cm_dy = kwargs.pop("cm_dy", config.cm_dy)
self.cm_dz = kwargs.pop("cm_dz", config.cm_dz)
self.leg_alpha_min = kwargs.pop("leg_alpha_min", config.leg_alpha_min)
self.leg_alpha_max = kwargs.pop("leg_alpha_max", config.leg_alpha_max)
self.leg_beta_min = kwargs.pop("leg_beta_min", config.leg_beta_min)
self.leg_beta_max = kwargs.pop("leg_beta_max", config.leg_beta_max)
self.leg_gamma_min = kwargs.pop("leg_gamma_min", config.leg_gamma_min)
self.leg_gamma_max = kwargs.pop("leg_gamma_max", config.leg_gamma_max)
self._frozen = True
def __repr__(self) -> str:
return repr(self.__getstate__())
def __setattr__(self, __name: str, __value: Any) -> None:
if hasattr(self, "_frozen") and self._frozen:
raise FrozenError(
"BodyParameters objects are frozen and cannot be modified"
)
return super().__setattr__(__name, __value)
def __getitem__(self, key: str) -> float:
try:
return getattr(self, key)
except AttributeError:
raise KeyError(key)
def __getstate__(self) -> Dict[str, Any]:
state = {}
for name in (x for x in self.__slots__ if x != "_frozen"):
state[name] = getattr(self, name)
return state
def __setstate__(self, state: Dict[str, Any]) -> None:
object.__setattr__(self, "_frozen", False)
for k, v in state.items():
setattr(self, k, v)
object.__setattr__(self, "_frozen", True)
def to_dict(self) -> Dict[str, float]:
"""Gets the parameters for this body in dictionary form.
Returns
-------
dict
The data dictionary representation of this object's data.
"""
return self.__getstate__()
@classmethod
def from_dict(cls, data: Dict[str, float]) -> "BodyParameters":
"""Instantiates a new object from the given data.
Parameters
----------
data : dict
The data to use to create the new body parameters object.
Returns
-------
BodyParameters
The new instance of the body parameters from the `data`
given.
"""
return cls(**data)
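# Illustrative round trip (hypothetical values; defaults come from squad.config):
#   bp = BodyParameters(l_body=250.0)
#   bp.l_body = 300.0                  # raises FrozenError: instances are frozen
#   clone = BodyParameters.from_dict(bp.to_dict())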
| 32.198413
| 78
| 0.590338
| 3,951
| 0.973872
| 0
| 0
| 450
| 0.110919
| 0
| 0
| 1,198
| 0.295292
|
bbcac82869e4955029c31c4d5ba367911fd7fe18
| 2,833
|
py
|
Python
|
deep_image_compression/single_psnr.py
|
LichengXiao2017/deep-image-compression
|
cf6e5699bad4d7b4a0dd8db6da72aa0c56e3d1e4
|
[
"MIT"
] | 9
|
2020-01-09T21:15:17.000Z
|
2022-02-08T12:41:54.000Z
|
deep_image_compression/single_psnr.py
|
LichengXiao2017/deep-image-compression
|
cf6e5699bad4d7b4a0dd8db6da72aa0c56e3d1e4
|
[
"MIT"
] | 8
|
2019-10-15T23:50:03.000Z
|
2021-11-10T19:40:15.000Z
|
deep_image_compression/single_psnr.py
|
LichengXiao2017/enas-image-compression
|
cf6e5699bad4d7b4a0dd8db6da72aa0c56e3d1e4
|
[
"MIT"
] | 3
|
2019-10-16T06:06:49.000Z
|
2020-07-06T15:02:09.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2019 Licheng Xiao. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
import numpy
import math
import cv2
import os
import logging
from os import listdir
from os.path import isfile, join
from absl import flags
flags.DEFINE_string("original_img", default=None,
help="Path for original image file.")
flags.DEFINE_string("compressed_img", default=None,
help="Path for compressed image file.")
flags.DEFINE_string("reconstructed_img", default=None,
help="Path for reconstructed image file.")
FLAGS = flags.FLAGS
class SingleEvaluator:
def get_psnr_msssim_bpp(self, original_img, reconstructed_img, compressed_img):
psnr = 0
msssim = 0
bpp = 0
try:
sess = tf.Session()
original = cv2.imread(original_img)
contrast = cv2.imread(reconstructed_img)
original = numpy.expand_dims(original, axis=0)
contrast = numpy.expand_dims(contrast, axis=0)
original_tensor = tf.convert_to_tensor(original, dtype=tf.uint8)
contrast_tensor = tf.convert_to_tensor(contrast, dtype=tf.uint8)
msssim_tensor = tf.image.ssim_multiscale(
original_tensor, contrast_tensor, 255)
psnr_tensor = tf.image.psnr(original_tensor, contrast_tensor, 255)
msssim = sess.run(msssim_tensor)
psnr = sess.run(psnr_tensor)
            _, h, w, _ = numpy.shape(contrast)
bpp = os.path.getsize(compressed_img) * 8 / (h * w)
except Exception as e:
logging.error(e)
if psnr == 0:
logging.error('Error occurs, please check log for details.')
else:
            logging.info('psnr: %s\nms_ssim: %s\nbpp: %s', psnr, msssim, bpp)
return psnr, msssim, bpp
def main(_):
single_evaluator = SingleEvaluator()
single_evaluator.get_psnr_msssim_bpp(FLAGS.original_img,
FLAGS.reconstructed_img,
FLAGS.compressed_img)
if __name__ == "__main__":
tf.app.run()
| 37.276316
| 83
| 0.614896
| 1,311
| 0.46276
| 0
| 0
| 0
| 0
| 0
| 0
| 926
| 0.326862
|
bbcbbaa39a696df212fa670accc00b3ebb116dbd
| 984
|
py
|
Python
|
message/tests/utils/creators.py
|
ThePokerFaCcCe/messenger
|
2db3d5c2ccd05ac40d2442a13d664ca9ad3cb14c
|
[
"MIT"
] | null | null | null |
message/tests/utils/creators.py
|
ThePokerFaCcCe/messenger
|
2db3d5c2ccd05ac40d2442a13d664ca9ad3cb14c
|
[
"MIT"
] | null | null | null |
message/tests/utils/creators.py
|
ThePokerFaCcCe/messenger
|
2db3d5c2ccd05ac40d2442a13d664ca9ad3cb14c
|
[
"MIT"
] | null | null | null |
from message.models import Message, DeletedMessage, TextContent
from user.tests.utils import create_active_user
from conversation.tests.utils import create_private_chat
def create_text_content(text='hello') -> TextContent:
return TextContent.objects.create(text=text)
def create_message(sender=None, chat=None, content=None,
content_type=Message.ContentTypeChoices.TEXT
) -> Message:
sender = sender if sender else create_active_user()
data = {
'sender': sender,
'chat': chat if chat else create_private_chat(sender),
'content': content if content else create_text_content(),
'content_type': content_type,
}
return Message.objects.create(**data)
def create_deleted_msg(msg=None, user=None) -> DeletedMessage:
user = create_active_user() if not user else user
return DeletedMessage.objects.create(
message=msg if msg else create_message(sender=user),
user=user)
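# Illustrative usage inside a Django test with database access:
#   msg = create_message()             # builds sender, chat and content on the fly
#   deleted = create_deleted_msg(msg)  # records a deletion for a fresh active user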
| 31.741935
| 65
| 0.707317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 44
| 0.044715
|
bbcbbc848a277e8f627eea2957cd5be0baa2e598
| 350
|
py
|
Python
|
excercises/6-0001/finger_excercises/iteration-1.py
|
obsessedyouth/simulacra
|
530155664daf1aff06cb575c4c4073acbacdb32d
|
[
"MIT"
] | null | null | null |
excercises/6-0001/finger_excercises/iteration-1.py
|
obsessedyouth/simulacra
|
530155664daf1aff06cb575c4c4073acbacdb32d
|
[
"MIT"
] | null | null | null |
excercises/6-0001/finger_excercises/iteration-1.py
|
obsessedyouth/simulacra
|
530155664daf1aff06cb575c4c4073acbacdb32d
|
[
"MIT"
] | null | null | null |
"""
Replace the comment in the following code with a while loop.
numXs = int(input('How many times should I print the letter X? '))
toPrint = "
#concatenate X to toPrint numXs times
print(toPrint)
"""
numXs = int(input('How many times should I print the letter X? '))
toPrint = ""
while numXs > 0:
toPrint += "X"
numXs -= 1
print(toPrint)
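# note: the while loop above is equivalent to the one-liner
#   print("X" * numXs)
# evaluated before numXs is counted down to 0.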
| 20.588235
| 66
| 0.677143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 251
| 0.717143
|
bbcbf2b2a1cb0a8d47a18b0768f2227527626ab0
| 7,784
|
py
|
Python
|
services/temperature_bands/temperature_bands_pb2.py
|
phgupta/XBOS
|
acc59f33600943569d62c145dae11a1775296b44
|
[
"BSD-2-Clause"
] | 27
|
2016-04-26T17:26:56.000Z
|
2021-08-22T15:11:55.000Z
|
services/temperature_bands/temperature_bands_pb2.py
|
phgupta/XBOS
|
acc59f33600943569d62c145dae11a1775296b44
|
[
"BSD-2-Clause"
] | 75
|
2017-02-17T18:00:37.000Z
|
2019-06-20T04:12:08.000Z
|
services/temperature_bands/temperature_bands_pb2.py
|
vishalbelsare/XBOS
|
1fea0b024d97ae142d97b3a94510403928ed44b7
|
[
"BSD-2-Clause"
] | 20
|
2017-07-28T14:50:04.000Z
|
2020-01-16T05:04:54.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: temperature_bands.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='temperature_bands.proto',
package='temperature_bands',
syntax='proto3',
serialized_options=_b('P\001'),
serialized_pb=_b('\n\x17temperature_bands.proto\x12\x11temperature_bands\"k\n\x0fScheduleRequest\x12\x10\n\x08\x62uilding\x18\x01 \x01(\t\x12\x0c\n\x04zone\x18\x02 \x01(\t\x12\r\n\x05start\x18\x03 \x01(\x03\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x03\x12\x0e\n\x06window\x18\x05 \x01(\t\x12\x0c\n\x04unit\x18\x06 \x01(\t\"^\n\rSchedulePoint\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x17\n\x0ftemperature_low\x18\x02 \x01(\x01\x12\x18\n\x10temperature_high\x18\x03 \x01(\x01\x12\x0c\n\x04unit\x18\x04 \x01(\t2\xc3\x01\n\tSchedules\x12Z\n\x0eGetComfortband\x12\".temperature_bands.ScheduleRequest\x1a .temperature_bands.SchedulePoint\"\x00\x30\x01\x12Z\n\x0eGetDoNotExceed\x12\".temperature_bands.ScheduleRequest\x1a .temperature_bands.SchedulePoint\"\x00\x30\x01\x42\x02P\x01\x62\x06proto3')
)
_SCHEDULEREQUEST = _descriptor.Descriptor(
name='ScheduleRequest',
full_name='temperature_bands.ScheduleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='building', full_name='temperature_bands.ScheduleRequest.building', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='zone', full_name='temperature_bands.ScheduleRequest.zone', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start', full_name='temperature_bands.ScheduleRequest.start', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end', full_name='temperature_bands.ScheduleRequest.end', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='window', full_name='temperature_bands.ScheduleRequest.window', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='unit', full_name='temperature_bands.ScheduleRequest.unit', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=46,
serialized_end=153,
)
_SCHEDULEPOINT = _descriptor.Descriptor(
name='SchedulePoint',
full_name='temperature_bands.SchedulePoint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='temperature_bands.SchedulePoint.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='temperature_low', full_name='temperature_bands.SchedulePoint.temperature_low', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='temperature_high', full_name='temperature_bands.SchedulePoint.temperature_high', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='unit', full_name='temperature_bands.SchedulePoint.unit', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=155,
serialized_end=249,
)
DESCRIPTOR.message_types_by_name['ScheduleRequest'] = _SCHEDULEREQUEST
DESCRIPTOR.message_types_by_name['SchedulePoint'] = _SCHEDULEPOINT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ScheduleRequest = _reflection.GeneratedProtocolMessageType('ScheduleRequest', (_message.Message,), dict(
DESCRIPTOR = _SCHEDULEREQUEST,
__module__ = 'temperature_bands_pb2'
# @@protoc_insertion_point(class_scope:temperature_bands.ScheduleRequest)
))
_sym_db.RegisterMessage(ScheduleRequest)
SchedulePoint = _reflection.GeneratedProtocolMessageType('SchedulePoint', (_message.Message,), dict(
DESCRIPTOR = _SCHEDULEPOINT,
__module__ = 'temperature_bands_pb2'
# @@protoc_insertion_point(class_scope:temperature_bands.SchedulePoint)
))
_sym_db.RegisterMessage(SchedulePoint)
DESCRIPTOR._options = None
_SCHEDULES = _descriptor.ServiceDescriptor(
name='Schedules',
full_name='temperature_bands.Schedules',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=252,
serialized_end=447,
methods=[
_descriptor.MethodDescriptor(
name='GetComfortband',
full_name='temperature_bands.Schedules.GetComfortband',
index=0,
containing_service=None,
input_type=_SCHEDULEREQUEST,
output_type=_SCHEDULEPOINT,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetDoNotExceed',
full_name='temperature_bands.Schedules.GetDoNotExceed',
index=1,
containing_service=None,
input_type=_SCHEDULEREQUEST,
output_type=_SCHEDULEPOINT,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_SCHEDULES)
DESCRIPTOR.services_by_name['Schedules'] = _SCHEDULES
# @@protoc_insertion_point(module_scope)
| 38.92
| 784
| 0.751413
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,116
| 0.27184
|
bbd055bb557e945cf33d02d1ba1783662b82c31f
| 63
|
py
|
Python
|
bot/exchange/__init__.py
|
colticol/bold-arbitrager
|
aef6683a3f41d47a51d24367819d183ecacea3a0
|
[
"Apache-2.0"
] | null | null | null |
bot/exchange/__init__.py
|
colticol/bold-arbitrager
|
aef6683a3f41d47a51d24367819d183ecacea3a0
|
[
"Apache-2.0"
] | null | null | null |
bot/exchange/__init__.py
|
colticol/bold-arbitrager
|
aef6683a3f41d47a51d24367819d183ecacea3a0
|
[
"Apache-2.0"
] | null | null | null |
from . import exchange_controller
from . import exchange_client
| 31.5
| 33
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
bbd06fd058120fffbc5ec8c48b2f8310d80c11b4
| 2,265
|
py
|
Python
|
test_proj/tests/test_runner.py
|
andrewbird2/django-data-validation
|
8c3a8e14440f0299a5f4b188dfaa7864b53b5123
|
[
"MIT"
] | 1
|
2020-08-05T16:42:48.000Z
|
2020-08-05T16:42:48.000Z
|
test_proj/tests/test_runner.py
|
andrewbird2/django-data-validation
|
8c3a8e14440f0299a5f4b188dfaa7864b53b5123
|
[
"MIT"
] | 1
|
2020-11-04T07:06:37.000Z
|
2020-11-04T07:06:37.000Z
|
test_proj/tests/test_runner.py
|
andrewbird2/django-data-validation
|
8c3a8e14440f0299a5f4b188dfaa7864b53b5123
|
[
"MIT"
] | 1
|
2020-11-04T02:16:05.000Z
|
2020-11-04T02:16:05.000Z
|
from unittest import mock
from django.core.management import call_command
import pytest
from app1.models import TestModel
from datavalidation.models import Validator
from datavalidation.results import Status
from datavalidation.runners import ModelValidationRunner, ObjectValidationRunner
def test_model_runner_with_bad_model():
""" check that ModelValidationRunner handles bad input """
try:
ModelValidationRunner(Validator).run()
assert False, "expected an exception"
except ValueError as e:
assert e.args == ("no data validation methods on model Validator",)
def test_model_runner_with_bad_method():
""" check that ModelValidationRunner handles bad input """
try:
ModelValidationRunner(TestModel, method_names=["not_a_method"]).run()
assert False, "expected as exception"
except ValueError as e:
assert e.args == ("not_a_method is not a data validator on TestModel",)
@pytest.mark.django_db
def test_model_runner_cli_success():
""" test ./manage.py validate app1.TestModel """
with mock.patch("sys.exit") as mocked_exit:
call_command("validate", "app1.TestModel")
mocked_exit.assert_called_with(0)
@pytest.mark.django_db
def test_model_runner_cli_failure():
""" test ./manage.py run_data_validation
    Some tests raise exceptions, so this should fail (exit_code==1)
"""
with mock.patch("sys.exit") as mocked_exit:
call_command("validate")
mocked_exit.assert_called_with(1)
@pytest.mark.django_db
def test_object_runner():
""" test the ObjectValidationRunner """
validator = Validator.objects.get(
app_label=TestModel._meta.app_label,
model_name=TestModel.__name__,
method_name=TestModel.check_foobar.__name__,
)
assert validator.status == Status.UNINITIALIZED
obj_pass = TestModel.objects.first()
result = ObjectValidationRunner(obj_pass).run()
assert result == (1, 0, 0)
validator.refresh_from_db()
assert validator.status == Status.PASSING
obj_fail, = TestModel.objects.generate(failing=1)
result = ObjectValidationRunner(obj_fail).run()
assert result == (0, 1, 0)
validator.refresh_from_db()
assert validator.status == Status.FAILING
| 31.458333
| 80
| 0.72362
| 0
| 0
| 0
| 0
| 1,307
| 0.577042
| 0
| 0
| 537
| 0.237086
|
bbd1b55c9769411d9a683ac06b192b84c0c94cde
| 2,479
|
py
|
Python
|
FlaskApp/flask_server.py
|
pjneelam/pjneelam.eportfolio2022
|
3f55c1da6214e3eabab949ff83b34c0553c52866
|
[
"CC-BY-3.0"
] | null | null | null |
FlaskApp/flask_server.py
|
pjneelam/pjneelam.eportfolio2022
|
3f55c1da6214e3eabab949ff83b34c0553c52866
|
[
"CC-BY-3.0"
] | null | null | null |
FlaskApp/flask_server.py
|
pjneelam/pjneelam.eportfolio2022
|
3f55c1da6214e3eabab949ff83b34c0553c52866
|
[
"CC-BY-3.0"
] | null | null | null |
#to create the flask page
#import flask
#flask library was installed in the command line/computer terminal first
#Source: PythonHow https://pythonhow.com/python-tutorial/flask/How-making-a-website-with-Python-works/
#Python assigns the name "__main__" to the script when the script is executed.
#The debug parameter is set to True to trace Python errors.
#Note: in a production environment it must be set to False to avoid security issues.
#returning HTML in Flask, create a homepage.html in another folder
#add render_template method
from flask import Flask, render_template
#pip install flask-mysqldb in cmd
#from flask_mysqldb import MySQL
#from mysql.connector.connection import MySQLConnection
#from sql_connection import get_sql_connection
#connection with mysql not established
app = Flask(__name__)
@app.route('/')
#to go directly to the home page, add another route
@app.route('/homepage')
def homepage():
return render_template('homepage.html')
#add another page: market page
@app.route('/flask_server')
#this Python file should have been named Market (to match the web page it serves!!!)
#add list / dictionaries
#Iteration will be necessary - access in html
def market():
items = [
{'product_id': 1, 'product_name': 'rice', 'unit_id': '2', 'product_price_unit': 1.65},
{'product_id': 2, 'product_name': 'toothpaste', 'unit_id': '1', 'product_price_unit': 1.40},
{'product_id': 3, 'product_name': 'soap', 'unit_id': '1', 'product_price_unit': 0.45},
{'product_id': 4, 'product_name': 'toothbrush', 'unit_id': '1', 'product_price_unit': 1.20},
{'product_id': 5, 'product_name': 'flour', 'unit_id': '2', 'product_price_unit': 0.90},
{'product_id': 6, 'product_name': 'facemask', 'unit_id': '1', 'product_price_unit': 2.95}
]
#send some random data from Python to market.html: add key name 'items'
return render_template('market.html', items=items)
if __name__ == '__main__':
app.run(debug=True)
#to style your web page, can use styling framework "Bootstrap" - https://getbootstrap.com/docs/4.5/getting-started/introduction/#starter-template
#copy and page in html page created
#IP/page set up: http://127.0.0.1:5000/
#page created: http://127.0.0.1:5000/market
#to synchronise your code changes with the web page, RUN the program and check that Debug mode is on in the Terminal below
#to turn it on, run code: set FLASK_DEBUG=1
| 42.741379
| 146
| 0.709964
| 0
| 0
| 0
| 0
| 1,093
| 0.440904
| 0
| 0
| 1,964
| 0.792255
|
bbd25b797957a5f28a70032817bbe3816cae3981
| 3,152
|
py
|
Python
|
mozdns/mixins.py
|
jlin/inventory
|
c098c98e570c3bf9fadfd811eb75e1213f6ea428
|
[
"BSD-3-Clause"
] | 22
|
2015-01-16T01:36:32.000Z
|
2020-06-08T00:46:18.000Z
|
mozdns/mixins.py
|
jlin/inventory
|
c098c98e570c3bf9fadfd811eb75e1213f6ea428
|
[
"BSD-3-Clause"
] | 8
|
2015-12-28T18:56:19.000Z
|
2019-04-01T17:33:48.000Z
|
mozdns/mixins.py
|
jlin/inventory
|
c098c98e570c3bf9fadfd811eb75e1213f6ea428
|
[
"BSD-3-Clause"
] | 13
|
2015-01-13T20:56:22.000Z
|
2022-02-23T06:01:17.000Z
|
from settings import MOZDNS_BASE_URL
from gettext import gettext as _
from string import Template
class DisplayMixin(object):
# Knobs
justs = {
'pk_just': 10,
'rhs_just': 1,
'ttl_just': 1,
'rdtype_just': 4,
'rdclass_just': 3,
'prio_just': 1,
'lhs_just': 40,
'extra_just': 1
}
def bind_render_record(self, pk=False, show_ttl=False):
template = Template(self.template).substitute(**self.justs)
bind_name = self.fqdn + "."
if show_ttl:
ttl_ = self.ttl
else:
ttl_ = '' if self.ttl is None else self.ttl
return template.format(
bind_name=bind_name, rdtype=self.rdtype, rdclass='IN',
ttl_=ttl_, **vars(self)
)
class ObjectUrlMixin(object):
"""
This is a mixin that adds important url methods to a model. This
class uses the ``_meta.db_table`` instance variable of an object to
calculate URLs. Because of this, you must use the app label of your
class when declaring urls in your urls.py.
"""
# TODO. using app_label breaks shit. Go through all the models and
# assign a better field. Something like "url handle". TODO2. Using
# db_table for now. It looks weird, but it works.
def get_absolute_url(self):
return self.get_fancy_edit_url()
def get_history_url(self):
return "/reversion_compare/history_view/{0}/{1}/".format(
self.rdtype, self.pk
)
def get_edit_url(self):
"""
Return the edit url of an object.
"""
return self.get_fancy_edit_url()
def get_fancy_edit_url(self):
return MOZDNS_BASE_URL + _(
"/record/update/{0}/{1}/").format(self.rdtype, self.pk)
def get_delete_url(self):
"""
Return the delete url of an object.
"""
return MOZDNS_BASE_URL + "/{0}/{1}/delete/".format(
self._meta.db_table, self.pk
)
def get_create_url(self):
"""
Return the create url of the type of object.
"""
return MOZDNS_BASE_URL + "/{0}/create/".format(self._meta.db_table)
def get_delete_redirect_url(self):
return '/core/search/'
class DBTableURLMixin(object):
def get_fancy_edit_url(self):
return self.get_edit_url()
def get_edit_url(self):
"""
        Return the edit url of an object.
"""
return MOZDNS_BASE_URL + "/{0}/{1}/update/".format(
self._meta.db_table, self.pk
)
def get_delete_url(self):
"""
Return the delete url of an object.
"""
return MOZDNS_BASE_URL + "/{0}/{1}/delete/".format(
self._meta.db_table, self.pk
)
def get_absolute_url(self):
"""
        Return the absolute url of an object.
"""
return MOZDNS_BASE_URL + "/{0}/{1}/".format(
self._meta.db_table, self.pk
)
def get_create_url(self):
"""
Return the create url of the type of object.
"""
return MOZDNS_BASE_URL + "/{0}/create/".format(self._meta.db_table)
| 28.396396
| 75
| 0.583756
| 3,045
| 0.966053
| 0
| 0
| 0
| 0
| 0
| 0
| 1,163
| 0.368972
|
bbd2f4baf55ab29dd9d52744e70bd79da375026a
| 2,361
|
py
|
Python
|
db_cloudant2.py
|
trishasarkar/IBM-PLS-deployed
|
959fef4c43a6a33bcb6e96b14046dcbd220e0b2a
|
[
"MIT"
] | null | null | null |
db_cloudant2.py
|
trishasarkar/IBM-PLS-deployed
|
959fef4c43a6a33bcb6e96b14046dcbd220e0b2a
|
[
"MIT"
] | null | null | null |
db_cloudant2.py
|
trishasarkar/IBM-PLS-deployed
|
959fef4c43a6a33bcb6e96b14046dcbd220e0b2a
|
[
"MIT"
] | null | null | null |
from ibmcloudant.cloudant_v1 import CloudantV1, Document
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
CLOUDANT_URL="https://apikey-v2-1mfs4kqo2nmnc2sdtgp9ji8myznbgm6mivk0o93pfopt:f70c9a73c52d287d3271ddc3dba6a30a@dc1a5ff5-996b-475c-8b7e-da87f4bf33a3-bluemix.cloudantnosqldb.appdomain.cloud"
CLOUDANT_APIKEY="C8J8TcTL_T9YlMtyA6itWueAqAdkgGXbwOc8RA2omfCd"
CLOUDANT_USERNAME="apikey-v2-1mfs4kqo2nmnc2sdtgp9ji8myznbgm6mivk0o93pfopt"
CLOUDANT_PASSWORD="f70c9a73c52d287d3271ddc3dba6a30a"
authenticator = IAMAuthenticator(CLOUDANT_APIKEY)
service = CloudantV1(authenticator=authenticator)
service.set_service_url(CLOUDANT_URL)
response = service.get_server_information().get_result()
##### Create a DB
# response = service.put_database(db='usessions')
# print(response)
#####
##### Create a document
# products_doc = Document(
# id="001",
# data = get_data()
# )
#
# response = service.post_document(db='qdata', document=products_doc).get_result()
# print(response)
#####
#### Get data
response = service.get_document(
db='usessions',
doc_id='mCWSwjuLuH'
).get_result()
print(response)
_rev = response['_rev']
####
##### Update data
# new_doc = Document(
# rev=_rev,
# data=get_data()
# )
# response = service.put_document(
# db='qdata',
# doc_id = '001',
# document=new_doc
# ).get_result()
#####
# def create_question(request_dict):
# question=Document(
# id=request_dict['id'],
# type="question",
# question_text=request_dict['text'],
# options=request_dict['options']
# )
# response = service.post_document(db='questions_db', document=question).get_result()
# print(response)
#
# req={"id":"q003",
# "text":"This is the first question",
# "options":['option 1', 'option 2','option 3']
# }
#
# create_question(req)
#
# def update_question(doc_id, request_dict):
# question=Document(
# rev=request_dict['rev'],
# type="question",
# question_text=request_dict['text'],
# options=request_dict['options']
# )
# response = service.put_document(db='questions_db', doc_id=doc_id,document=question).get_result()
# print(response)
#
# req={
# "rev":"2-7b23212b63dd888e94c7379a109a30cf",
# "text":"This is not the first question",
# "options":['Noption 1', 'Noption 2','Noption 3']}
#
# doc_id="q003"
# update_question(doc_id, req)
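# A round-trip update sketch (assumes the same 'usessions' database and
# document as the get_document() call above). Cloudant rejects writes whose
# rev does not match the current head, so the fetched `_rev` must accompany
# the replacement document; the payload below is hypothetical.
updated_doc = Document(
    rev=_rev,
    data={'note': 'hypothetical payload'},
)
update_response = service.put_document(
    db='usessions',
    doc_id='mCWSwjuLuH',
    document=updated_doc,
).get_result()
print(update_response)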
| 26.829545
| 187
| 0.710292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,807
| 0.765354
|
bbd37b557f545c19f1a83004ba18612d1700a121
| 1,782
|
py
|
Python
|
core/migrations/0012_auto_20170423_1836.py
|
koatse/ilikethem
|
962374ff8179a533dba3b00422d11bb819d8acde
|
[
"MIT"
] | null | null | null |
core/migrations/0012_auto_20170423_1836.py
|
koatse/ilikethem
|
962374ff8179a533dba3b00422d11bb819d8acde
|
[
"MIT"
] | null | null | null |
core/migrations/0012_auto_20170423_1836.py
|
koatse/ilikethem
|
962374ff8179a533dba3b00422d11bb819d8acde
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-04-23 18:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0011_auto_20170422_1917'),
]
operations = [
migrations.AlterModelOptions(
name='propertytype',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='role',
options={'ordering': ['name']},
),
migrations.AlterField(
model_name='userprofile',
name='city_experience',
field=models.ManyToManyField(blank=True, related_name='city_experience', to='mygeo.City'),
),
migrations.AlterField(
model_name='userprofile',
name='financing_experience',
field=models.ManyToManyField(blank=True, to='experience.FinancingExperience'),
),
migrations.AlterField(
model_name='userprofile',
name='property_type',
field=models.ManyToManyField(blank=True, to='core.PropertyType'),
),
migrations.AlterField(
model_name='userprofile',
name='renovation_experience',
field=models.ManyToManyField(blank=True, to='experience.RenovationExperience'),
),
migrations.AlterField(
model_name='userprofile',
name='tax_experience',
field=models.ManyToManyField(blank=True, to='experience.TaxExperience'),
),
migrations.AlterField(
model_name='userprofile',
name='tenant_experience',
field=models.ManyToManyField(blank=True, to='experience.TenantExperience'),
),
]
| 33
| 102
| 0.597643
| 1,624
| 0.911336
| 0
| 0
| 0
| 0
| 0
| 0
| 512
| 0.287318
|
bbd438523d7877e78c030d4778fb9485bdd7f4ad
| 478
|
py
|
Python
|
cymon_v2/komand_cymon_v2/actions/__init__.py
|
killstrelok/insightconnect-plugins
|
911358925f4233ab273dbd8172e8b7b9188ebc01
|
[
"MIT"
] | 1
|
2020-03-18T09:14:55.000Z
|
2020-03-18T09:14:55.000Z
|
cymon_v2/komand_cymon_v2/actions/__init__.py
|
killstrelok/insightconnect-plugins
|
911358925f4233ab273dbd8172e8b7b9188ebc01
|
[
"MIT"
] | 1
|
2021-02-23T23:57:37.000Z
|
2021-02-23T23:57:37.000Z
|
cymon_v2/komand_cymon_v2/actions/__init__.py
|
killstrelok/insightconnect-plugins
|
911358925f4233ab273dbd8172e8b7b9188ebc01
|
[
"MIT"
] | null | null | null |
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .create_feed.action import CreateFeed
from .get_feed_details.action import GetFeedDetails
from .get_report_document.action import GetReportDocument
from .list_all_feeds.action import ListAllFeeds
from .list_user_feeds.action import ListUserFeeds
from .search.action import Search
from .submit_report.action import SubmitReport
from .submit_reports_in_bulk.action import SubmitReportsInBulk
from .update_feed.action import UpdateFeed
| 43.454545
| 62
| 0.864017
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 39
| 0.08159
|
bbd4c608ff119a6e8725d951c6333bfee210d76b
| 9,913
|
py
|
Python
|
appsite/resolver/models.py
|
inchiresolver/inchiresolver
|
6b3f080a4364ebe7499298e5a1b3cd4ed165322d
|
[
"BSD-3-Clause"
] | 3
|
2020-10-22T06:18:17.000Z
|
2021-03-19T16:49:00.000Z
|
appsite/resolver/models.py
|
inchiresolver/inchiresolver
|
6b3f080a4364ebe7499298e5a1b3cd4ed165322d
|
[
"BSD-3-Clause"
] | 11
|
2019-11-01T23:04:31.000Z
|
2022-02-10T12:32:11.000Z
|
appsite/resolver/models.py
|
inchiresolver/inchiresolver
|
6b3f080a4364ebe7499298e5a1b3cd4ed165322d
|
[
"BSD-3-Clause"
] | null | null | null |
import uuid
from urllib.parse import urljoin
from django.core.exceptions import FieldError
from multiselectfield import MultiSelectField
from rdkit import Chem
from django.db import models
from resolver import defaults
from inchi.identifier import InChIKey, InChI
class Inchi(models.Model):
id = models.UUIDField(primary_key=True, editable=False)
version = models.IntegerField(db_index=True, default=1)
block1 = models.CharField(db_index=True, max_length=14)
block2 = models.CharField(db_index=True, max_length=10)
block3 = models.CharField(db_index=True, max_length=1)
key = models.CharField(max_length=27, blank=True, null=True)
string = models.CharField(max_length=32768, blank=True, null=True)
is_standard = models.BooleanField(default=False)
safe_options = models.CharField(db_index=True, max_length=2, default=None, null=True)
entrypoints = models.ManyToManyField('EntryPoint', related_name='inchis')
added = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class JSONAPIMeta:
resource_name = 'inchis'
class Meta:
unique_together = ('block1', 'block2', 'block3', 'version', 'safe_options')
verbose_name = "InChI"
@classmethod
def create(cls, *args, **kwargs):
        if 'url_prefix' in kwargs:
            # pop the helper kwarg so it is not passed to the model constructor
            url_prefix = kwargs.pop('url_prefix')
        else:
            url_prefix = None
        inchi = cls(*args, **kwargs)
k = None
s = None
if 'key' in kwargs and kwargs['key']:
k = InChIKey(kwargs['key'])
if 'string' in kwargs and kwargs['string']:
s = InChI(kwargs['string'])
_k = InChIKey(Chem.InchiToInchiKey(kwargs['string']))
if k:
if not k.element['well_formatted'] == _k.element['well_formatted']:
raise FieldError("InChI key does not represent InChI string")
else:
k = _k
inchi.key = k.element['well_formatted_no_prefix']
inchi.version = k.element['version']
inchi.is_standard = k.element['is_standard']
inchi.block1 = k.element['block1']
inchi.block2 = k.element['block2']
inchi.block3 = k.element['block3']
if s:
inchi.string = s.element['well_formatted']
#if url_prefix:
# inchi.id = uuid.uuid5(uuid.NAMESPACE_URL, urljoin(url_prefix, inchi.key))
#else:
inchi.id = uuid.uuid5(uuid.NAMESPACE_URL, "/".join([
inchi.key,
str(kwargs.get('safe_options', None)),
]))
return inchi
def __str__(self):
return self.key
class Organization(models.Model):
id = models.UUIDField(primary_key=True, editable=False)
parent = models.ForeignKey('self', related_name='children', on_delete=models.SET_NULL, blank=True, null=True)
name = models.CharField(max_length=32768)
abbreviation = models.CharField(max_length=32, blank=True, null=True)
category = models.CharField(max_length=16, choices=(
('regulatory', 'Regulatory'),
('government', 'Government'),
('academia', 'Academia'),
('company', 'Company'),
('vendor', 'Vendor'),
('research', 'Research'),
('publishing', 'Publishing'),
('provider', 'Provider'),
('public', 'Public'),
('society', "Society"),
('charity', "Charity"),
('other', 'Other'),
('none', 'None'),
), default='none')
href = models.URLField(max_length=4096, blank=True, null=True)
added = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class JSONAPIMeta:
resource_name = 'organizations'
class Meta:
unique_together = ('parent', 'name')
@classmethod
def create(cls, *args, **kwargs):
organization = cls(*args, **kwargs)
organization.id = uuid.uuid5(uuid.NAMESPACE_URL, kwargs.get('name'))
return organization
def __str__(self):
return self.name
class Publisher(models.Model):
id = models.UUIDField(primary_key=True, editable=False)
parent = models.ForeignKey('self', related_name='children', on_delete=models.SET_NULL, null=True)
organization = models.ForeignKey('Organization', related_name='publishers', on_delete=models.SET_NULL, null=True)
category = models.CharField(max_length=16, choices=(
('entity', 'Entity'),
('service', 'Service'),
('network', 'Network'),
('division', 'Division'),
('group', 'Group'),
('person', 'Person'),
('other', 'Other'),
('none', 'None'),
), default='none')
name = models.CharField(max_length=1024)
email = models.EmailField(max_length=254, blank=True, null=True)
address = models.CharField(max_length=8192, blank=True, null=True)
href = models.URLField(max_length=4096, blank=True, null=True)
orcid = models.URLField(max_length=4096, blank=True, null=True)
added = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class JSONAPIMeta:
resource_name = 'publishers'
class Meta:
unique_together = ('organization', 'parent', 'name', 'href', 'orcid')
@classmethod
def create(cls, *args, **kwargs):
publisher = cls(*args, **kwargs)
publisher.id = uuid.uuid5(uuid.NAMESPACE_URL, "/".join([
str(kwargs.get('organization', None)),
str(kwargs.get('parent', None)),
str(kwargs.get('href', None)),
str(kwargs.get('orcid', None)),
kwargs.get('name')
]))
return publisher
def __str__(self):
return "%s[%s]" % (self.name, self.category)
class EntryPoint(models.Model):
id = models.UUIDField(primary_key=True, editable=False)
parent = models.ForeignKey('self', on_delete=models.SET_NULL, related_name='children', null=True)
category = models.CharField(max_length=16, choices=(
('self', 'Self'),
('site', 'Site'),
('api', 'API'),
('resolver', 'Resolver'),
), default='site')
publisher = models.ForeignKey("Publisher", related_name="entrypoints", on_delete=models.SET_NULL, null=True)
href = models.URLField(max_length=4096)
entrypoint_href = models.URLField(max_length=4096, blank=True, null=True)
name = models.CharField(max_length=255, blank=True, null=True)
description = models.TextField(max_length=32768, blank=True, null=True)
added = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class JSONAPIMeta:
resource_name = 'entrypoints'
class Meta:
unique_together = ('parent', 'publisher', 'href')
@classmethod
def create(cls, *args, **kwargs):
entrypoint = cls(*args, **kwargs)
entrypoint.id = uuid.uuid5(uuid.NAMESPACE_URL, "/".join([
str(kwargs.get('parent', None)),
str(kwargs.get('publisher')),
kwargs.get('href'),
]))
return entrypoint
def __str__(self):
return "%s [%s]" % (self.publisher, self.href)
class EndPoint(models.Model):
id = models.UUIDField(primary_key=True, editable=False)
entrypoint = models.ForeignKey('EntryPoint', related_name='endpoints', on_delete=models.SET_NULL, null=True)
uri = models.CharField(max_length=32768)
accept_header_media_types = models.ManyToManyField('MediaType', related_name='accepting_endpoints')
content_media_types = models.ManyToManyField('MediaType', related_name='delivering_endpoints')
request_schema_endpoint = models.ForeignKey('EndPoint', related_name='schema_requesting_endpoints',
on_delete=models.SET_NULL, null=True)
response_schema_endpoint = models.ForeignKey('EndPoint', related_name='schema_responding_endpoints',
on_delete=models.SET_NULL, null=True)
category = models.CharField(max_length=16, choices=(
('schema', 'Schema'),
('uritemplate', 'URI Template (RFC6570)'),
('documentation', 'Documentation (HTML, PDF)'),
), default='uritemplate')
request_methods = MultiSelectField(choices=defaults.http_verbs, default=['GET'])
description = models.TextField(max_length=32768, blank=True, null=True)
added = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class JSONAPIMeta:
resource_name = 'endpoints'
class Meta:
unique_together = ('entrypoint', 'uri')
def full_path_uri(self):
if self.entrypoint:
return self.entrypoint.href + "/" + self.uri
else:
return self.uri
@classmethod
def create(cls, *args, **kwargs):
endpoint = cls(*args, **kwargs)
endpoint.id = uuid.uuid5(uuid.NAMESPACE_URL, "/".join([
str(kwargs.get('entrypoint')),
kwargs.get('uri'),
]))
return endpoint
def __str__(self):
return "%s[%s]" % (self.entrypoint, self.uri)
class MediaType(models.Model):
id = models.UUIDField(primary_key=True, editable=False)
name = models.CharField(max_length=1024, blank=False, null=False, unique=True)
description = models.TextField(max_length=32768, blank=True, null=True)
added = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class JSONAPIMeta:
resource_name = 'mediatypes'
@classmethod
def create(cls, *args, **kwargs):
mediatype = cls(*args, **kwargs)
mediatype.id = uuid.uuid5(uuid.NAMESPACE_URL, "/".join([
str(kwargs.get('name'))
]))
return mediatype
def __str__(self):
return "%s" % self.name
| 36.988806
| 117
| 0.635226
| 9,628
| 0.97125
| 0
| 0
| 2,877
| 0.290225
| 0
| 0
| 1,564
| 0.157773
|
bbd5dfeac5416aaa4e83e0b9d53270b2988a24ca
| 2,995
|
py
|
Python
|
kit_django/content/apps.py
|
safakoner/kit
|
aec36a70137febfb5f3e3a9205ea58879736eea4
|
[
"MIT"
] | 6
|
2020-06-29T20:36:15.000Z
|
2021-09-08T23:34:01.000Z
|
kit_django/content/apps.py
|
safakoner/kit
|
aec36a70137febfb5f3e3a9205ea58879736eea4
|
[
"MIT"
] | 9
|
2021-03-30T13:46:29.000Z
|
2022-03-12T00:38:27.000Z
|
kit_django/content/apps.py
|
safakoner/kit
|
aec36a70137febfb5f3e3a9205ea58879736eea4
|
[
"MIT"
] | 1
|
2020-07-20T18:40:24.000Z
|
2020-07-20T18:40:24.000Z
|
#
# ----------------------------------------------------------------------------------------------------
# DESCRIPTION
# ----------------------------------------------------------------------------------------------------
## @package content @brief [ PACKAGE ] - Content.
## @dir content @brief [ DIRECTORY ] - Package root directory.
## @package content.admin @brief [ MODULE ] - Admin module.
## @file content/admin.py @brief [ FILE ] - Admin module file.
## @package content.apps @brief [ MODULE ] - App module.
## @file content/apps.py @brief [ FILE ] - App module file.
## @package content.handlers @brief [ MODULE ] - Signal handlers module.
## @file content/handlers.py @brief [ FILE ] - Signal handlers module file.
## @package content.models @brief [ MODULE ] - Models module.
## @file content/models.py @brief [ FILE ] - Models module file.
## @package content.serializers @brief [ MODULE ] - Serializers module.
## @file content/serializers.py @brief [ FILE ] - Serializers module file.
## @package content.tests @brief [ MODULE ] - Tests module.
## @file content/tests.py @brief [ FILE ] - Tests module file.
## @package content.urls @brief [ MODULE ] - URLs module.
## @file content/urls.py @brief [ FILE ] - URLs module file.
## @package content.views @brief [ MODULE ] - Views module.
## @file content/views.py @brief [ FILE ] - Views module file.
#
# ----------------------------------------------------------------------------------------------------
# IMPORTS
# ----------------------------------------------------------------------------------------------------
from django.apps import AppConfig
from django.db.models.signals import pre_save, post_save, pre_delete
#
# ----------------------------------------------------------------------------------------------------
# CODE
# ----------------------------------------------------------------------------------------------------
#
## @brief [ APP CONFIG CLASS ] - App config class.
class ContentConfig(AppConfig):
## [ str ] - Name.
name = 'content'
## [ str ] - Verbose Name.
verbose_name = 'Content'
#
## @brief Ready.
#
# @exception N/A
#
# @return None - None.
def ready(self):
from .models import Content, Item
from .handlers import preSave, postSave, preDelete
pre_save.connect(preSave , sender=Content)
post_save.connect(postSave , sender=Content)
pre_delete.connect(preDelete , sender=Content)
#
pre_save.connect(preSave , sender=Item)
post_save.connect(postSave , sender=Item)
pre_delete.connect(preDelete , sender=Item)
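# A sketch of the `content.handlers` module wired up above (hypothetical
# bodies; the real handlers are not shown in this file). Django calls each
# receiver with `sender` plus signal-specific keyword arguments.
def preSave(sender, instance, **kwargs):
    # runs just before a Content/Item row is written
    pass

def postSave(sender, instance, created, **kwargs):
    # `created` is True on the first save of the instance
    pass

def preDelete(sender, instance, **kwargs):
    # runs just before a Content/Item row is deleted
    pass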
| 42.785714
| 102
| 0.444741
| 702
| 0.234391
| 0
| 0
| 0
| 0
| 0
| 0
| 2,257
| 0.753589
|
bbd748e6a83de8ca6fc199a6dc4a8e503c1bdd1b
| 681
|
py
|
Python
|
Robot.py
|
rnandon/RobotsAndDinosaurs
|
ad83d61a800bcf307dfbbf925abe91bcad2763b4
|
[
"MIT"
] | null | null | null |
Robot.py
|
rnandon/RobotsAndDinosaurs
|
ad83d61a800bcf307dfbbf925abe91bcad2763b4
|
[
"MIT"
] | null | null | null |
Robot.py
|
rnandon/RobotsAndDinosaurs
|
ad83d61a800bcf307dfbbf925abe91bcad2763b4
|
[
"MIT"
] | null | null | null |
### IMPORTS
### ================================
from Combatant import Combatant
# Robot. Implementation of the Combatant class
class Robot(Combatant):
def __init__(self, name, weapon):
self.equipped_weapon = weapon
super().__init__(name, self.equipped_weapon.attack_power)
### METHODS
### =================================================================
def attack(self, opponent):
# Call the combatant attack method, then return damage and attack name for the user interface to use
attack_name = self.equipped_weapon.name
damage_dealt = super().attack(opponent)
return (damage_dealt, attack_name)
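# A usage sketch (hypothetical weapon stand-in; Combatant is assumed to apply
# the damage and return it, per the comment in attack() above):
class _Laser(object):
    name = 'laser'
    attack_power = 25

bender = Robot('Bender', _Laser())
target = Robot('Wall-E', _Laser())
damage_dealt, attack_name = bender.attack(target)
print(f"Bender hits with {attack_name} for {damage_dealt} damage")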
| 35.842105
| 108
| 0.577093
| 543
| 0.797357
| 0
| 0
| 0
| 0
| 0
| 0
| 287
| 0.421439
|
bbd76e1f33835fcc21edddd8477a6604c70dcdb3
| 5,143
|
py
|
Python
|
src/core.py
|
z62060037/ArtStationDownloader
|
f6e8a657dfd3584cbf870470f1b19dc4edf54e92
|
[
"MIT"
] | 1
|
2019-04-19T10:14:49.000Z
|
2019-04-19T10:14:49.000Z
|
src/core.py
|
z62060037/ArtStationDownloader
|
f6e8a657dfd3584cbf870470f1b19dc4edf54e92
|
[
"MIT"
] | null | null | null |
src/core.py
|
z62060037/ArtStationDownloader
|
f6e8a657dfd3584cbf870470f1b19dc4edf54e92
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""内核方法
Copyright 2018-2019 Sean Feng(sean@FantaBlade.com)
"""
import os
import re
from concurrent import futures
from multiprocessing import cpu_count
from urllib.parse import urlparse
import pafy
import requests
class Core:
def log(self, message):
print(message)
def __init__(self, log_print=None):
if log_print:
global print
print = log_print
max_workers = cpu_count()*4
self.executor = futures.ThreadPoolExecutor(max_workers)
self.executor_video = futures.ThreadPoolExecutor(1)
self.root_path = None
self.futures = []
def download_file(self, url, file_path, file_name):
file_full_path = os.path.join(file_path, file_name)
if os.path.exists(file_full_path):
self.log('[Exist][image][{}]'.format(file_full_path))
else:
r = requests.get(url)
os.makedirs(file_path, exist_ok=True)
with open(file_full_path, "wb") as code:
code.write(r.content)
self.log('[Finish][image][{}]'.format(file_full_path))
def download_video(self, id, file_path):
file_full_path = os.path.join(file_path, "{}.{}".format(id, 'mp4'))
if os.path.exists(file_full_path):
self.log('[Exist][video][{}]'.format(file_full_path))
else:
video = pafy.new(id)
best = video.getbest(preftype="mp4")
r = requests.get(best.url)
os.makedirs(file_path, exist_ok=True)
with open(file_full_path, "wb") as code:
code.write(r.content)
self.log('[Finish][video][{}]'.format(file_full_path))
def download_project(self, hash_id):
url = 'https://www.artstation.com/projects/{}.json'.format(hash_id)
r = requests.get(url)
j = r.json()
assets = j['assets']
title = j['slug'].strip()
# self.log('=========={}=========='.format(title))
username = j['user']['username']
for asset in assets:
assert(self.root_path)
user_path = os.path.join(self.root_path, username)
os.makedirs(user_path, exist_ok=True)
file_path = os.path.join(user_path, title)
            if not self.no_image and asset['has_image']:  # has an image
url = asset['image_url']
file_name = urlparse(url).path.split('/')[-1]
try:
self.futures.append(self.executor.submit(self.download_file,
url, file_path, file_name))
except Exception as e:
print(e)
            if not self.no_video and asset['has_embedded_player']:  # has an embedded video
player_embedded = asset['player_embedded']
id = re.search(
r'(?<=https://www\.youtube\.com/embed/)[\w_]+', player_embedded).group()
try:
self.futures.append(self.executor_video.submit(
self.download_video, id, file_path))
except Exception as e:
print(e)
def get_projects(self, username):
data = []
        if username != '':
page = 0
while True:
page += 1
url = 'https://www.artstation.com/users/{}/projects.json?page={}'.format(
username, page)
r = requests.get(url)
if not r.ok:
self.log("[Error] Please input right username")
break
j = r.json()
total_count = int(j['total_count'])
if total_count == 0:
self.log("[Error] Please input right username")
break
                if page == 1:
self.log('\n==========[{}] BEGIN=========='.format(username))
data_fragment = j['data']
data += data_fragment
self.log('\n==========Get page {}/{}=========='.format(page,
total_count // 50 + 1))
if page > total_count / 50:
break
return data
def download_by_username(self, username):
data = self.get_projects(username)
        if len(data) != 0:
future_list = []
for project in data:
future = self.executor.submit(
self.download_project, project['hash_id'])
future_list.append(future)
futures.wait(future_list)
def download_by_usernames(self, usernames, type):
self.no_image = type == 'video'
self.no_video = type == 'image'
        # deduplicate usernames and strip any URL prefix
username_set = set()
for username in usernames:
username = username.strip().split('/')[-1]
if username not in username_set:
username_set.add(username)
self.download_by_username(username)
futures.wait(self.futures)
self.log("\n========ALL DONE========")
| 37.816176
| 92
| 0.517986
| 4,929
| 0.951361
| 0
| 0
| 0
| 0
| 0
| 0
| 765
| 0.147655
|
bbd7d9dde1e4ce541f94c7b201986c3d8dd7319a
| 727
|
py
|
Python
|
home/migrations/0022_auto_20210409_1256.py
|
witty-technologies-empowerment/codeupblood
|
a0aa1725e5776d80e083b6d4e9e67476bb97e983
|
[
"MIT"
] | null | null | null |
home/migrations/0022_auto_20210409_1256.py
|
witty-technologies-empowerment/codeupblood
|
a0aa1725e5776d80e083b6d4e9e67476bb97e983
|
[
"MIT"
] | null | null | null |
home/migrations/0022_auto_20210409_1256.py
|
witty-technologies-empowerment/codeupblood
|
a0aa1725e5776d80e083b6d4e9e67476bb97e983
|
[
"MIT"
] | 1
|
2022-01-19T11:09:13.000Z
|
2022-01-19T11:09:13.000Z
|
# Generated by Django 3.1.6 on 2021-04-09 19:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0021_auto_20210409_1118'),
]
operations = [
migrations.AddField(
model_name='blog',
name='show',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='partner',
name='class_id',
field=models.CharField(default='stFkq', max_length=150),
),
migrations.AlterField(
model_name='sponsor',
name='class_id',
field=models.CharField(default='bXRRZ', max_length=150),
),
]
| 25.068966
| 68
| 0.566713
| 634
| 0.872077
| 0
| 0
| 0
| 0
| 0
| 0
| 142
| 0.195323
|
bbdb2c2c522ffff022d41a4d62480d42fe180e58
| 2,586
|
py
|
Python
|
Gui/CustomWidgets/PlottingWidgets/mayaViWidget.py
|
jpesperidiao/MOPA
|
6fbf097a22d42d82eeb36c6f247eb0d8fb6ac586
|
[
"MIT"
] | null | null | null |
Gui/CustomWidgets/PlottingWidgets/mayaViWidget.py
|
jpesperidiao/MOPA
|
6fbf097a22d42d82eeb36c6f247eb0d8fb6ac586
|
[
"MIT"
] | 1
|
2020-06-07T00:19:54.000Z
|
2020-06-07T00:19:54.000Z
|
Gui/CustomWidgets/PlottingWidgets/mayaViWidget.py
|
jpesperidiao/MOPA
|
6fbf097a22d42d82eeb36c6f247eb0d8fb6ac586
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
MOPA
An independet project
Método de Obtenção da Posição de Atirador
-------------------
begin : 2018-12-21
git sha : $Format:%H$
copyright : (C) 2018 by João P. Esperidião
email : joao.p2709@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from traits.api import HasTraits, Instance, on_trait_change
from traitsui.api import View, Item
from mayavi.core.ui.api import MayaviScene, MlabSceneModel, \
    SceneEditor
from PyQt5.QtWidgets import QWidget, QVBoxLayout
class Visualization(HasTraits):
"""
Class 'as is' from MayaVi API example.
Source: http://docs.enthought.com/mayavi/mayavi/auto/example_qt_embedding.html
"""
scene = Instance(MlabSceneModel, ())
@on_trait_change('scene.activated')
def update_plot(self):
# This function is called when the view is opened. We don't
# populate the scene when the view is not yet open, as some
# VTK features require a GLContext.
# We can do normal mlab calls on the embedded scene.
self.scene.mlab.test_points3d()
    # the layout of the dialog created
view = View(Item('scene', editor=SceneEditor(scene_class=MayaviScene),
height=250, width=300, show_label=False),
resizable=True # We need this to resize with the parent widget
)
class MayaViWidget(QWidget):
"""
A widget to embed MayaVi data visualization to GUI.
"""
    def __init__(self, parent=None):
        """
        Class constructor.
        :param parent: (QtWidgets) any QtWidgets object parent to new instance of MayaViWidget.
        """
        super(MayaViWidget, self).__init__(parent)
        self.parent = parent
        # Embed the traits-generated scene control in this widget, following
        # the Qt embedding example referenced in Visualization above.
        layout = QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        self.visualization = Visualization()
        self.ui = self.visualization.edit_traits(parent=self,
                                                 kind='subpanel').control
        layout.addWidget(self.ui)
| 39.784615
| 95
| 0.496906
| 1,202
| 0.463556
| 0
| 0
| 344
| 0.132665
| 0
| 0
| 1,816
| 0.700347
|
bbdb59840ecfefbcddcb7e8ef4a69bf99648feb6
| 1,953
|
py
|
Python
|
notebooks/debug_monuseg.py
|
voreille/2d_bispectrum_cnn
|
ba8f26f6a557602bc3343c4562c83a3de914c67e
|
[
"MIT"
] | null | null | null |
notebooks/debug_monuseg.py
|
voreille/2d_bispectrum_cnn
|
ba8f26f6a557602bc3343c4562c83a3de914c67e
|
[
"MIT"
] | null | null | null |
notebooks/debug_monuseg.py
|
voreille/2d_bispectrum_cnn
|
ba8f26f6a557602bc3343c4562c83a3de914c67e
|
[
"MIT"
] | null | null | null |
from pathlib import Path
import numpy as np
from PIL import Image, ImageSequence
import matplotlib.pyplot as plt
import tensorflow as tf
import tensorflow_io as tfio
from scipy.ndimage import rotate
from src.data.monuseg import get_dataset, tf_random_rotate, tf_random_crop
ds = get_dataset()
def random_crop(image, segmentation, size=(256, 256), rotation=False):
image_height, image_width, _ = image.shape
radius = np.sqrt(size[0]**2 + size[1]**2) / 2
if rotation:
angle = np.random.uniform(-180, 180)
dx = int((2 * radius - size[0]) // 2)
dy = int((2 * radius - size[1]) // 2)
else:
dx, dy = 0, 0
offset_height = np.random.randint(dx, high=image_height - size[0] - dx)
offset_width = np.random.randint(dy, high=image_width - size[1] - dy)
if rotation:
image_cropped = image[offset_height - dx:offset_height + dx + size[0],
offset_width - dy:offset_width + dy + size[1]]
seg_cropped = segmentation[offset_height - dx:offset_height + dx +
size[0], offset_width - dy:offset_width +
dy + size[1]]
image_rotated = rotate(image_cropped, angle, reshape=False, order=1)
seg_rotated = rotate(seg_cropped, angle, reshape=False, order=1)
seg_rotated = tf.where(seg_rotated > 0.5, x=1.0, y=0.0)
return (
image_rotated[dx:dx + size[0], dy:dy + size[1]],
seg_rotated[dx:dx + size[0], dy:dy + size[1]],
)
else:
return (image[offset_height:offset_height + size[0],
offset_width:offset_width + size[1]],
segmentation[offset_height:offset_height + size[0],
offset_width:offset_width + size[1]])
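# Note on the geometry above: `radius` is half the diagonal of the target
# crop, so a patch of side ~2*radius contains the size[0] x size[1] window
# under any rotation angle; dx/dy are the margins added on each side before
# rotating and sliced off afterwards.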
image, mask = next(ds.as_numpy_iterator())
image, mask = random_crop(image, mask, rotation=True)
print(f"yo la shape de liamg cest {image.shape}")
| 39.06
| 78
| 0.612391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 42
| 0.021505
|
bbdd2e1f7b9f04c68b368c1e67ead84b604e9837
| 526
|
py
|
Python
|
amqp_2phase/migrations/0002_create_event_index.py
|
cloud-taxi/django-amqp-2phase
|
d4825fa68bfba6a21c310b45f39a740840e7cae1
|
[
"BSD-3-Clause"
] | null | null | null |
amqp_2phase/migrations/0002_create_event_index.py
|
cloud-taxi/django-amqp-2phase
|
d4825fa68bfba6a21c310b45f39a740840e7cae1
|
[
"BSD-3-Clause"
] | null | null | null |
amqp_2phase/migrations/0002_create_event_index.py
|
cloud-taxi/django-amqp-2phase
|
d4825fa68bfba6a21c310b45f39a740840e7cae1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-08 05:45
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('amqp_2phase', '0001_create_events_table'),
]
atomic = False
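    # CREATE/DROP INDEX CONCURRENTLY cannot run inside a transaction block in
    # PostgreSQL, which is why this migration is marked atomic = False.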
operations = [
migrations.RunSQL("""create index concurrently idx_amqp_events_pending on amqp_events(id) where status = 1;""",
reverse_sql="""drop index concurrently idx_amqp_events_pending;"""),
]
| 29.222222
| 119
| 0.671103
| 378
| 0.718631
| 0
| 0
| 0
| 0
| 0
| 0
| 254
| 0.48289
|
bbddc18cfd9ad9a4f3ecf0a646125463caad5351
| 870
|
py
|
Python
|
investmap/migrations/0004_investmapdescriptiontabs.py
|
30Meridian/RozumneMistoSnapshot
|
67a83b3908674d01992561dfb37596e395b4d482
|
[
"BSD-3-Clause"
] | null | null | null |
investmap/migrations/0004_investmapdescriptiontabs.py
|
30Meridian/RozumneMistoSnapshot
|
67a83b3908674d01992561dfb37596e395b4d482
|
[
"BSD-3-Clause"
] | null | null | null |
investmap/migrations/0004_investmapdescriptiontabs.py
|
30Meridian/RozumneMistoSnapshot
|
67a83b3908674d01992561dfb37596e395b4d482
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import ckeditor_uploader.fields
class Migration(migrations.Migration):
dependencies = [
('weunion', '0001_initial'),
('investmap', '0003_auto_20161205_1926'),
]
operations = [
migrations.CreateModel(
name='InvestMapDescriptionTabs',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', serialize=False, primary_key=True)),
('slug', models.CharField(unique=True, max_length=32)),
('description', ckeditor_uploader.fields.RichTextUploadingField()),
('town', models.ForeignKey(to='weunion.Town')),
],
options={
'db_table': 'investmap_descriptions',
},
),
]
| 30
| 114
| 0.589655
| 729
| 0.837931
| 0
| 0
| 0
| 0
| 0
| 0
| 189
| 0.217241
|
bbdf983b5082ed39dd9de59fc5860b7d988b933b
| 290
|
py
|
Python
|
align/cli.py
|
kba/cis-ocrd-py
|
4b6d2a29ac82cf1ec734a87314a6c774271d09bc
|
[
"MIT"
] | null | null | null |
align/cli.py
|
kba/cis-ocrd-py
|
4b6d2a29ac82cf1ec734a87314a6c774271d09bc
|
[
"MIT"
] | null | null | null |
align/cli.py
|
kba/cis-ocrd-py
|
4b6d2a29ac82cf1ec734a87314a6c774271d09bc
|
[
"MIT"
] | null | null | null |
import click
from ocrd.decorators import ocrd_cli_options, ocrd_cli_wrap_processor
from align.aligner import Aligner
@click.command()
@ocrd_cli_options
def cis_ocrd_align(*args, **kwargs):
# kwargs['cache_enabled'] = False
return ocrd_cli_wrap_processor(Aligner, *args, **kwargs)
| 26.363636
| 69
| 0.786207
| 0
| 0
| 0
| 0
| 170
| 0.586207
| 0
| 0
| 33
| 0.113793
|
bbdfbb10eaebeeb5ca5805687d46a1d1d278b849
| 1,567
|
py
|
Python
|
test/SI_to_Conc/test_SI2Conc_OG_MO_AUMC_ICR_RMH.py
|
notZaki/DCE-DSC-MRI_CodeCollection
|
9e258e10559894951c7e6c5c00876e74ce103258
|
[
"Apache-2.0"
] | null | null | null |
test/SI_to_Conc/test_SI2Conc_OG_MO_AUMC_ICR_RMH.py
|
notZaki/DCE-DSC-MRI_CodeCollection
|
9e258e10559894951c7e6c5c00876e74ce103258
|
[
"Apache-2.0"
] | 1
|
2020-11-19T04:49:28.000Z
|
2020-11-19T04:49:28.000Z
|
test/SI_to_Conc/test_SI2Conc_OG_MO_AUMC_ICR_RMH.py
|
notZaki/DCE-DSC-MRI_CodeCollection
|
9e258e10559894951c7e6c5c00876e74ce103258
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import numpy as np
from ..helpers import osipi_parametrize
from . import SI2Conc_data
from src.original.OG_MO_AUMC_ICR_RMH.ExtendedTofts.DCE import dce_to_r1eff
from src.original.OG_MO_AUMC_ICR_RMH.ExtendedTofts.DCE import r1eff_to_conc
# All tests will use the same arguments and same data...
arg_names = 'label', 'fa', 'tr', 'T1base', 'BLpts', 'r1', 's_array', 'conc_array', 'a_tol', 'r_tol'
test_data = SI2Conc_data.SI2Conc_data()
# Use the test data to generate a parametrize decorator. This causes the following
# test to be run for every test case listed in test_data...
@osipi_parametrize(arg_names, test_data, xf_labels = [])
def test_OG_MO_AUMC_ICR_RMH_dce_to_r1eff(label, fa, tr, T1base, BLpts, r1, s_array, conc_array, a_tol, r_tol):
##Prepare input data
#Convert fa to radians
fa_rad=fa * np.pi/180.
#This function uses a value for S0, which would be the mean of the s_array from 1 to BLpts.
# It's from 1 rather than 0 because the code used to do the original conversion skips the first point of the SI curve
s0=np.mean(s_array[1:BLpts])
    #This function expects a signal array of shape (x,1) rather than (x,), so add another dimension to the signal array to make it (150,1) rather than (150,)
s_array=s_array[:,None].T
# run test
#The code uses two functions to get from SI to conc
r1_curve = dce_to_r1eff(s_array, [s0], 1/T1base, tr, fa_rad)
conc_curve = r1eff_to_conc(r1_curve, 1/T1base, r1)
np.testing.assert_allclose( conc_curve, [conc_array], rtol=r_tol, atol=a_tol )
| 40.179487
| 157
| 0.734525
| 0
| 0
| 0
| 0
| 968
| 0.617741
| 0
| 0
| 730
| 0.465858
|
bbdfc65e3dc96518486004ed92ed7315cea723ab
| 3,559
|
py
|
Python
|
src/docker_composer/runner/cmd/up.py
|
michascholl/docker-composer
|
d190f1db766e216654a4259785b9aaf802a9c64d
|
[
"Apache-2.0"
] | 4
|
2021-01-18T11:35:24.000Z
|
2021-08-30T16:19:39.000Z
|
src/docker_composer/runner/cmd/up.py
|
michascholl/docker-composer
|
d190f1db766e216654a4259785b9aaf802a9c64d
|
[
"Apache-2.0"
] | null | null | null |
src/docker_composer/runner/cmd/up.py
|
michascholl/docker-composer
|
d190f1db766e216654a4259785b9aaf802a9c64d
|
[
"Apache-2.0"
] | 1
|
2022-02-19T10:38:59.000Z
|
2022-02-19T10:38:59.000Z
|
# DO NOT EDIT: Autogenerated by src/docker_composer/_utils/generate_class.py
# for docker-compose version 1.25.0, build unknown
from typing import List, Optional
import attr
from docker_composer.base import DockerBaseRunner
@attr.s(auto_attribs=True)
class DockerComposeUp(DockerBaseRunner):
"""
Builds, (re)creates, starts, and attaches to containers for a service.
Unless they are already running, this command also starts any linked services.
The `docker-compose up` command aggregates the output of each container. When
the command exits, all containers are stopped. Running `docker-compose up -d`
starts the containers in the background and leaves them running.
If there are existing containers for a service, and the service's configuration
or image was changed after the container's creation, `docker-compose up` picks
up the changes by stopping and recreating the containers (preserving mounted
volumes). To prevent Compose from picking up changes, use the `--no-recreate`
flag.
If you want to force Compose to stop and recreate all containers, use the
`--force-recreate` flag.
Usage: up [options] [--scale SERVICE=NUM...] [SERVICE...]
"""
detach: Optional[bool] = None
"""Detached mode: Run containers in the background,
print new container names. Incompatible with
--abort-on-container-exit."""
no_color: Optional[bool] = None
"""Produce monochrome output."""
quiet_pull: Optional[bool] = None
"""Pull without printing progress information"""
no_deps: Optional[bool] = None
"""Don't start linked services."""
force_recreate: Optional[bool] = None
"""Recreate containers even if their configuration
and image haven't changed."""
always_recreate_deps: Optional[bool] = None
"""Recreate dependent containers.
Incompatible with --no-recreate."""
no_recreate: Optional[bool] = None
"""If containers already exist, don't recreate
them. Incompatible with --force-recreate and -V."""
no_build: Optional[bool] = None
"""Don't build an image, even if it's missing."""
no_start: Optional[bool] = None
"""Don't start the services after creating them."""
build: Optional[bool] = None
"""Build images before starting containers."""
abort_on_container_exit: Optional[bool] = None
"""Stops all containers if any container was
stopped. Incompatible with -d."""
timeout: Optional[int] = None
"""Use this timeout in seconds for container
shutdown when attached or when containers are
already running. (default: 10)"""
renew_anon_volumes: Optional[bool] = None
"""Recreate anonymous volumes instead of retrieving
data from the previous containers."""
remove_orphans: Optional[bool] = None
"""Remove containers for services not defined
in the Compose file."""
exit_code_from: Optional[str] = None
"""Return the exit code of the selected service
container. Implies --abort-on-container-exit."""
scale: Optional[dict] = None
"""Scale SERVICE to NUM instances. Overrides the
`scale` setting in the Compose file if present."""
_cmd: str = "up"
_options: List[str] = [
"detach",
"no_color",
"quiet_pull",
"no_deps",
"force_recreate",
"always_recreate_deps",
"no_recreate",
"no_build",
"no_start",
"build",
"abort_on_container_exit",
"renew_anon_volumes",
"remove_orphans",
]
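# A hedged construction sketch for the autogenerated runner above. Actual
# execution is delegated to DockerBaseRunner, which is not shown in this file,
# so only option assembly is illustrated; the attrs-generated __init__ accepts
# each documented option as a keyword.
up = DockerComposeUp(detach=True, build=True, remove_orphans=True)
# Presumably the base runner renders `_cmd` and `_options` as
# `docker-compose up --detach --build --remove-orphans`.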
| 37.463158
| 83
| 0.680528
| 3,302
| 0.927789
| 0
| 0
| 3,329
| 0.935375
| 0
| 0
| 2,497
| 0.701602
|
bbe0157ae7f26883dcb131a2958451cdb126b2d0
| 1,415
|
py
|
Python
|
app.py
|
hendrapaiton/mandalika
|
6ecbde9e3582d6532006e58531d5fc52feaa8854
|
[
"MIT"
] | 1
|
2020-02-19T07:45:53.000Z
|
2020-02-19T07:45:53.000Z
|
app.py
|
hendrapaiton/mandalika
|
6ecbde9e3582d6532006e58531d5fc52feaa8854
|
[
"MIT"
] | 1
|
2020-02-20T05:35:49.000Z
|
2020-02-26T08:14:41.000Z
|
app.py
|
hendrapaiton/mandalika
|
6ecbde9e3582d6532006e58531d5fc52feaa8854
|
[
"MIT"
] | 2
|
2020-02-25T16:46:39.000Z
|
2020-02-26T09:02:06.000Z
|
# Import from system libraries
from flask import Flask
from flask_bcrypt import Bcrypt
from flask_cors import CORS
from flask_jwt_extended import JWTManager
from flask_restful import Api
# Import from application modules
from errors import errors
from models.User import User
from models.db import initialize_db
from routes.api import initialize_routes
# Flask app instance with static (html, css and js) folder configuration
app = Flask(__name__)
# Flask Restful configuration with errors included
api = Api(app, errors=errors)
# Files for Configuration System in environment
app.config.from_envvar('ENV_FILE_LOCATION')
# BCrypt instances
bcrypt = Bcrypt(app)
# JWT instances
jwt = JWTManager(app)
# CORS enabled
CORS(app)
# Get roles for authenticated user
@jwt.user_claims_loader
def add_claims_to_access_token(user):
return {'roles': user.roles}
# Load user identity
@jwt.user_identity_loader
def user_identity_lookup(user):
return user.username
# Database Configuration Initialization
initialize_db(app)
# API (Routing) Configuration Initialization
initialize_routes(api)
# Admin account initialization for first uses
user = User.objects(username='admin@nj.net')
if not user:
login = User(username='admin@nj.net', password='enje123', roles=['admin'])
login.hash_password()
login.save()
# Running Flask Application when main class executed
if __name__ == '__main__':
app.run()
| 25.727273
| 78
| 0.786572
| 0
| 0
| 0
| 0
| 176
| 0.124382
| 0
| 0
| 593
| 0.419081
|
bbe0811cc9331cc6588c71ea015afd88bed83389
| 509
|
py
|
Python
|
openapi_core/validation/util.py
|
erpheus/openapi-core
|
b37510b0e306f6819971f64106617fb116974243
|
[
"BSD-3-Clause"
] | null | null | null |
openapi_core/validation/util.py
|
erpheus/openapi-core
|
b37510b0e306f6819971f64106617fb116974243
|
[
"BSD-3-Clause"
] | null | null | null |
openapi_core/validation/util.py
|
erpheus/openapi-core
|
b37510b0e306f6819971f64106617fb116974243
|
[
"BSD-3-Clause"
] | null | null | null |
"""OpenAPI core validation util module"""
from yarl import URL
def get_operation_pattern(server_url, request_url_pattern):
"""Return an updated request URL pattern with the server URL removed."""
if server_url[-1] == "/":
# operations have to start with a slash, so do not remove it
server_url = server_url[:-1]
if URL(server_url).is_absolute():
return request_url_pattern.replace(server_url, "", 1)
return URL(request_url_pattern).path_qs.replace(server_url, "", 1)
| 39.153846
| 76
| 0.70334
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 180
| 0.353635
|
bbe19a963ebbfb67a87214650a7ef4c055ac8952
| 2,071
|
py
|
Python
|
render/model.py
|
kennyngod/portfoliov2
|
3b931c35c342bfeea18fd2d97eadc65ed57c56a7
|
[
"CC-BY-3.0"
] | 1
|
2022-02-22T07:19:16.000Z
|
2022-02-22T07:19:16.000Z
|
render/model.py
|
kennyngod/portfoliov2
|
3b931c35c342bfeea18fd2d97eadc65ed57c56a7
|
[
"CC-BY-3.0"
] | null | null | null |
render/model.py
|
kennyngod/portfoliov2
|
3b931c35c342bfeea18fd2d97eadc65ed57c56a7
|
[
"CC-BY-3.0"
] | null | null | null |
"""Mostly helper functions to help with the driver."""
import json
import os
import pathlib
import sqlite3
import arrow # type: ignore
def sql_db():
"""Open a SQL connection and perform a query."""
db_path = pathlib.Path(os.getcwd())
db_path = pathlib.Path(db_path/'sql'/'portfolio.sqlite3')
con = sqlite3.connect(str(db_path))
cur = con.cursor()
    cur.execute(
"SELECT * "
"FROM skills "
"ORDER BY meter DESC"
)
skills = cur.fetchall()
con.close()
return skills
def create_json():
"""Create a context JSON file to render to jinja."""
skills_db = sql_db()
skills = []
for skill_db in skills_db:
skill = {}
skill['language'] = skill_db[0]
skill['time'] = get_time(skill_db[1])
skill['proficiency'] = skill_db[2]
skill['meter'] = skill_db[3]
skill['description'] = skill_db[4]
skill['filelink'] = skill_db[5]
if skill_db[6]:
skill['framework'] = skill_db[6]
skills.append(skill)
context = {"skills": skills}
data = {"template": "index.html", "context": context}
data = json.dumps(data)
# write to json file
path = pathlib.Path(os.getcwd())
path = str(path/'render/context.json')
with open(path, 'w+', encoding='utf-8') as outfile:
outfile.write(data)
def get_time(db_time):
"""Calculate the time difference from now to start time in database."""
now = arrow.now().format("YYYY-MM-DD")
arr_now = now.split('-')
arr_time = db_time.split('-')
time_diff = []
for time_now, time_time in zip(arr_now, arr_time):
time_now = int(time_now)
time_time = int(time_time)
diff = abs(time_now - time_time)
time_diff.append(round(diff))
    # don't care about the day
# check year
if time_diff[0] != 0:
if time_diff[0] == 1:
return f'{time_diff[0]} year'
return f'{time_diff[0]} years'
if time_diff[1] == 1:
return f'{time_diff[1]} month'
return f'{time_diff[1]} months'
| 29.169014
| 75
| 0.598262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 613
| 0.295992
|
bbe3022d59c8c55da8fc827a792032bc2f9f1ed9
| 1,053
|
py
|
Python
|
codes/exact/matStuff/lambdaFlucSparseSysRep.py
|
joshuahellier/PhDStuff
|
6fbe9e507c40e9017cde9312b0cfcc6ceefa284e
|
[
"MIT"
] | null | null | null |
codes/exact/matStuff/lambdaFlucSparseSysRep.py
|
joshuahellier/PhDStuff
|
6fbe9e507c40e9017cde9312b0cfcc6ceefa284e
|
[
"MIT"
] | null | null | null |
codes/exact/matStuff/lambdaFlucSparseSysRep.py
|
joshuahellier/PhDStuff
|
6fbe9e507c40e9017cde9312b0cfcc6ceefa284e
|
[
"MIT"
] | null | null | null |
import subprocess
import sys
import os
import math
# This code is meant to manage running multiple instances of my KMCLib codes at the same time,
# in the name of time efficiency
numLambda = 256
sysSize = 5
numVecs = 1
dataLocation = "exactSolns/thesisCorrections/low"
lambdaMin = 10.0**(-4)
lambdaMax = 10.0**(4)
rateStepSize = (lambdaMax-lambdaMin)/float(numLambda-1)
jobIndex = 513
botConc = 0.3
topConc = 0.1
boundMult = 1000.0
tolerance = 10.0**(-18)
runningJobs = []
for rateIndex in range(0, numLambda):
tempRate = lambdaMin + rateStepSize*rateIndex
# currentRate = tempRate
currentRate = math.exp(((tempRate-lambdaMin)*math.log(lambdaMax)+(lambdaMax-tempRate)*math.log(lambdaMin))/(lambdaMax-lambdaMin))
jobInput = "simpleGroundStateFinder.py "+str(botConc)+" "+str(topConc)+" "+str(currentRate)+" "+str(sysSize)+" "+str(numVecs)+" "+str(boundMult)+" "+str(tolerance)+" "+str(1)+" "+dataLocation+str(rateIndex)+"\n"
with open("jobInputs/testInput."+str(jobIndex), 'w') as f:
f.write(jobInput)
jobIndex += 1
| 33.967742
| 215
| 0.706553
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 269
| 0.255461
|
bbe30cef5cf3479058d3d846d89799afd1e7069c
| 3,997
|
py
|
Python
|
lib/galaxy/webapps/tool_shed/util/shed_statistics.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 2
|
2016-02-23T00:09:14.000Z
|
2019-02-11T07:48:44.000Z
|
lib/galaxy/webapps/tool_shed/util/shed_statistics.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | null | null | null |
lib/galaxy/webapps/tool_shed/util/shed_statistics.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 6
|
2015-05-27T13:09:50.000Z
|
2019-02-11T07:48:46.000Z
|
from time import strftime, gmtime
class ShedCounter( object ):
def __init__( self, model ):
# TODO: Enhance the ShedCounter to retrieve information from the db instead of displaying what's currently in memory.
self.model = model
self.generation_time = strftime( "%b %d, %Y", gmtime() )
self.repositories = 0
#self.new_repositories = 0
self.deleted_repositories = 0
self.invalid_tools = 0
self.valid_tools = 0
self.workflows = 0
self.proprietary_datatypes = 0
self.total_clones = 0
self.generate_statistics()
@property
def sa_session( self ):
"""Returns a SQLAlchemy session"""
return self.model.context
def generate_statistics( self ):
self.repositories = 0
#self.new_repositories = 0
self.deleted_repositories = 0
self.invalid_tools = 0
self.valid_tools = 0
self.workflows = 0
self.proprietary_datatypes = 0
self.total_clones = 0
for repository in self.sa_session.query( self.model.Repository ):
self.repositories += 1
self.total_clones += repository.times_downloaded
is_deleted = repository.deleted
#is_new = repository.is_new
#if is_deleted and is_new:
if is_deleted:
self.deleted_repositories += 1
# self.new_repositories += 1
#elif is_deleted:
# self.deleted_repositories += 1
#elif is_new:
# self.new_repositories += 1
else:
processed_guids = []
processed_invalid_tool_configs = []
processed_relative_workflow_paths = []
processed_datatypes = []
# A repository's metadata_revisions are those that ignore the value of the repository_metadata.downloadable column.
for metadata_revision in repository.metadata_revisions:
metadata = metadata_revision.metadata
if 'tools' in metadata:
tool_dicts = metadata[ 'tools' ]
for tool_dict in tool_dicts:
if 'guid' in tool_dict:
guid = tool_dict[ 'guid' ]
if guid not in processed_guids:
self.valid_tools += 1
processed_guids.append( guid )
if 'invalid_tools' in metadata:
invalid_tool_configs = metadata[ 'invalid_tools' ]
for invalid_tool_config in invalid_tool_configs:
if invalid_tool_config not in processed_invalid_tool_configs:
self.invalid_tools += 1
processed_invalid_tool_configs.append( invalid_tool_config )
if 'datatypes' in metadata:
datatypes = metadata[ 'datatypes' ]
for datatypes_dict in datatypes:
if 'extension' in datatypes_dict:
extension = datatypes_dict[ 'extension' ]
if extension not in processed_datatypes:
self.proprietary_datatypes += 1
processed_datatypes.append( extension )
if 'workflows' in metadata:
workflows = metadata[ 'workflows' ]
for workflow_tup in workflows:
relative_path, exported_workflow_dict = workflow_tup
if relative_path not in processed_relative_workflow_paths:
self.workflows += 1
processed_relative_workflow_paths.append( relative_path )
self.generation_time = strftime( "%b %d, %Y", gmtime() )
| 49.345679
| 131
| 0.536652
| 3,961
| 0.990993
| 0
| 0
| 114
| 0.028521
| 0
| 0
| 642
| 0.16062
|
bbe3888f6e45c812298b82e8ac836a9bd58f2e7b
| 2,461
|
py
|
Python
|
lib/helpertools/helpertools/geometry.py
|
aws-samples/amazon-textract-large-scale-selectable-pdf
|
e62b1c5ed79c02008783bc1361d70f0b5b69e0b7
|
[
"MIT-0"
] | 4
|
2022-01-12T15:30:54.000Z
|
2022-02-28T22:12:17.000Z
|
lib/helpertools/helpertools/geometry.py
|
aws-samples/amazon-textract-large-scale-selectable-pdf
|
e62b1c5ed79c02008783bc1361d70f0b5b69e0b7
|
[
"MIT-0"
] | null | null | null |
lib/helpertools/helpertools/geometry.py
|
aws-samples/amazon-textract-large-scale-selectable-pdf
|
e62b1c5ed79c02008783bc1361d70f0b5b69e0b7
|
[
"MIT-0"
] | 2
|
2021-10-24T01:43:12.000Z
|
2022-01-21T20:14:33.000Z
|
'''
Geometry helpers for axis-aligned bounding boxes.
'''
from typing import Dict, Optional
# classes
# -------
class BoundingBox():
'''
    Class to manipulate a bounding box (bbox). A bounding box is a rectangle aligned with
    the coordinate system. Bounding boxes are defined on a coordinate system with x
    pointing toward east and y pointing toward north. This is the usual coordinate
    system used in Euclidean geometry (i.e. lower left origin). Note that images
    generally have a coordinate system defined with an upper left origin.
'''
# constructors
def __init__(self, left: float, bottom: float, right: float, top: float) -> None:
'''
constructor.
'''
self.bounds = [left, bottom, right, top]
@classmethod
    def from_textract_bbox(cls, textract_bbox: Dict[str, float]) -> "BoundingBox":
'''
Construct a BoundingBox object from the bounding box defined by an AWS Textract
        output json. As Textract uses a coordinate system with an upper left origin
        (i.e. y pointing downward), bottom is defined as top+height (note
the "+", instead of a "-").
'''
return cls(
left=textract_bbox['Left'],
bottom=textract_bbox['Top']+textract_bbox['Height'],
right=textract_bbox['Left']+textract_bbox['Width'],
top=textract_bbox['Top'],
)
# class methods
    def scale(self, x_scale: float, y_scale: Optional[float]=None) -> None:
'''
        Scale a bounding box by an x and a y factor. If y_scale is not given, the bbox
        is scaled by x_scale in all directions.
'''
        if y_scale is None:
y_scale = x_scale
self.bounds[0] *= x_scale
self.bounds[1] *= y_scale
self.bounds[2] *= x_scale
self.bounds[3] *= y_scale
# overload methods
def __getitem__(self, key):
return self.bounds[key]
def __setitem__(self, key, value):
self.bounds[key] = value
# getters
@property
def left(self) -> float:
return self.bounds[0]
@property
def bottom(self) -> float:
return self.bounds[1]
@property
def right(self) -> float:
return self.bounds[2]
@property
def top(self) -> float:
return self.bounds[3]
@property
def width(self) -> float:
return abs(self.bounds[0]-self.bounds[2])
@property
def height(self) -> float:
return abs(self.bounds[3]-self.bounds[1])
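

# A minimal usage sketch; the coordinate values below are arbitrary
# placeholders, not taken from any real Textract output.
if __name__ == '__main__':
    bbox = BoundingBox.from_textract_bbox(
        {'Left': 0.1, 'Top': 0.2, 'Width': 0.3, 'Height': 0.4}
    )
    # bounds == [left=0.1, bottom=0.6, right=0.4, top=0.2]
    assert abs(bbox.width - 0.3) < 1e-9
    assert abs(bbox.height - 0.4) < 1e-9
    bbox.scale(2.0)  # y_scale omitted, so the box is scaled uniformly
    assert abs(bbox.width - 0.6) < 1e-9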
| 30.7625
| 90
| 0.604632
| 2,398
| 0.974401
| 0
| 0
| 1,088
| 0.442097
| 0
| 0
| 1,023
| 0.415685
|
bbe58ee718d54a29743fdde56951e945cc81bce6
| 378
|
py
|
Python
|
examples/chart-types/pie_chart.py
|
tcbegley/dash-google-charts
|
b8b22e5b6bac533167f218e3610697dec0c3e4ca
|
[
"Apache-2.0"
] | 6
|
2019-01-23T17:37:09.000Z
|
2020-11-17T16:12:27.000Z
|
examples/chart-types/pie_chart.py
|
tcbegley/dash-google-charts
|
b8b22e5b6bac533167f218e3610697dec0c3e4ca
|
[
"Apache-2.0"
] | 9
|
2019-01-25T11:09:17.000Z
|
2022-02-26T09:10:04.000Z
|
examples/chart-types/pie_chart.py
|
tcbegley/dash-google-charts
|
b8b22e5b6bac533167f218e3610697dec0c3e4ca
|
[
"Apache-2.0"
] | 1
|
2019-01-23T17:37:12.000Z
|
2019-01-23T17:37:12.000Z
|
import dash
from dash_google_charts import PieChart
app = dash.Dash()
app.layout = PieChart(
height="500px",
data=[
["Task", "Hours per Day"],
["Work", 11],
["Eat", 2],
["Commute", 2],
["Watch TV", 2],
["Sleep", 7],
],
options={"title": "My Daily Activities"},
)
if __name__ == "__main__":
app.run_server()
| 18
| 45
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 103
| 0.272487
|
bbe8dbb33350754634ad5a39bc45f35bec1cec43
| 4,183
|
py
|
Python
|
threats-monitoring/modules/thehive.py
|
filippostz/McAfee-MVISION-EDR-Integrations
|
0fbe1af15f844b796337ccd2ff219a0c4e625846
|
[
"Apache-2.0"
] | null | null | null |
threats-monitoring/modules/thehive.py
|
filippostz/McAfee-MVISION-EDR-Integrations
|
0fbe1af15f844b796337ccd2ff219a0c4e625846
|
[
"Apache-2.0"
] | null | null | null |
threats-monitoring/modules/thehive.py
|
filippostz/McAfee-MVISION-EDR-Integrations
|
0fbe1af15f844b796337ccd2ff219a0c4e625846
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Written by mohlcyber v.0.1 (15.04.2020)
# Edited by filippostz v.0.2 (24.09.2021)
import random
import sys
import socket
import requests
import json
import re
import smtplib
from datetime import datetime
from urllib.parse import urljoin
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
#Used for comments in Cases and Email
EDR_URL = 'https://ui.soc.eu-central-1.mcafee.com/monitoring/'
class TheHive():
def __init__(self, kwargs):
self.base_url = kwargs.get('url')
self.port = kwargs.get('port')
self.session = requests.Session()
self.verify = False
token = kwargs.get('token')
self.headers = {'Authorization': 'Bearer {0}'.format(token),
'Content-Type': 'application/json'}
self.artifacts = []
def create(self, event, eventType = "case"):
if eventType != "case" and eventType != "alert":
return 1
else:
try:
name = str(event['name'])
edr_severity = str(event['severity'])
if edr_severity == 's4' or edr_severity == 's5':
severity = 3
elif edr_severity == 's2' or edr_severity == 's3':
severity = 2
else:
severity = 1
self.artifacts.append(event['hashes']['md5'])
self.artifacts.append(event['hashes']['sha1'])
self.artifacts.append(event['hashes']['sha256'])
payload = {
'title': 'MVISION EDR Threat Detection - {0}'.format(name),
'description': 'This case has been created by MVISION EDR',
'severity': severity,
'type':'Detection',
'source':'edr',
'sourceRef':'ref-' + str(random.randint(10000, 99000)),
'tlp': 3,
'tags': ['edr', 'threat']
}
print('{0}:{1}/thehive/api/{2}'.format(self.base_url, self.port, eventType))
res = self.session.post('{0}:{1}/thehive/api/{2}'.format(self.base_url, self.port, eventType),
headers=self.headers, data=json.dumps(payload), verify=self.verify)
if res.ok:
print('SUCCESS: Successfully created case in TheHive - {0}.'.format(str(self.base_url)))
eventId = res.json()['id']
for artifact in self.artifacts:
self.add_observable(eventId, eventType, artifact)
else:
print('ERROR: HTTP {0} - {1}'.format(str(res.status_code), res.content))
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
            print("ERROR: TheHive Error in {location}.{funct_name}() - line {line_no} : {error}"
.format(location=__name__, funct_name=sys._getframe().f_code.co_name, line_no=exc_tb.tb_lineno,
error=str(e)))
def add_observable(self, eventId, eventType, artifact):
try:
payload = {
'dataType': 'hash',
'data': artifact,
'ioc': True,
'tlp': 3,
'tags': ['edr', 'threat'],
'message': 'MVISION EDR Threat Detection'
}
print('{0}:{1}/thehive/api/{2}/{3}/artifact'.format(self.base_url, self.port, eventType, str(eventId)))
self.session.post('{0}:{1}/thehive/api/{2}/{3}/artifact'.format(self.base_url, self.port, eventType, str(eventId)),
headers=self.headers, data=json.dumps(payload), verify=self.verify)
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
            print("ERROR: TheHive Error in {location}.{funct_name}() - line {line_no} : {error}"
.format(location=__name__, funct_name=sys._getframe().f_code.co_name, line_no=exc_tb.tb_lineno,
error=str(e)))
def run(self, event):
self.create(event)
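

if __name__ == '__main__':
    # A minimal usage sketch: the URL, port, token and hash values are
    # placeholders (assumptions), and the POST only succeeds against a
    # reachable TheHive instance.
    hive = TheHive({'url': 'https://thehive.example.com', 'port': 9000,
                    'token': 'YOUR_API_TOKEN'})
    hive.run({
        'name': 'Example detection',
        'severity': 's4',
        'hashes': {'md5': '0' * 32, 'sha1': '0' * 40, 'sha256': '0' * 64},
    })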
| 41.83
| 127
| 0.531198
| 3,731
| 0.891944
| 0
| 0
| 0
| 0
| 0
| 0
| 991
| 0.236911
|
bbe9079acec9fc7d47c390d5c89d9f262c9f1f50
| 518
|
py
|
Python
|
src/config.py
|
psu-os-rss/Rock-Paper-and-Scissors
|
05e9f51978cae1f05c9f06a71d9822ccfedbc5e1
|
[
"MIT"
] | null | null | null |
src/config.py
|
psu-os-rss/Rock-Paper-and-Scissors
|
05e9f51978cae1f05c9f06a71d9822ccfedbc5e1
|
[
"MIT"
] | 6
|
2020-08-03T20:55:44.000Z
|
2020-08-13T22:03:13.000Z
|
src/config.py
|
psu-os-rss/Rock-Paper-and-Scissors
|
05e9f51978cae1f05c9f06a71d9822ccfedbc5e1
|
[
"MIT"
] | null | null | null |
#parameters
accumulated_weight = 0.5
detector_u = 50
detector_b = 350
detector_r = 300
detector_l = 600
message_x = 10
message_y = 400
date_x = 0
date_y = 450
threshold_min=22
rate = 0.8
RGB_INT_MAX = 255
RGB_INT_MIN = 0
RGB_FLT_MAX = 255.0
RGB_FLT_MIN = 0.0
Blur_value = 7
text_color = (200,50,150)
rectangle_color = (0,0,255)
rectangle_thickness = 5
processing_frame = 35
font_scale = 0.7
thickness = 2
cv2adaptive_block = 11
cv2adaptive_param = 2
erodtime = 1
dilatetime = 2
circle_thickness = 10
circle_rate = 0.25
| 17.862069
| 27
| 0.758687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 11
| 0.021236
|
bbe94404de84755169d02669d387f24583e7d3f0
| 1,309
|
py
|
Python
|
ejercicios/arreglos/perimetro.py
|
leugimkm/Soluciones
|
d71601c8d9b5e86e926f48d9e49462af8a956b6d
|
[
"MIT"
] | 1
|
2022-02-02T04:44:56.000Z
|
2022-02-02T04:44:56.000Z
|
ejercicios/arreglos/perimetro.py
|
leugimkm/Soluciones
|
d71601c8d9b5e86e926f48d9e49462af8a956b6d
|
[
"MIT"
] | null | null | null |
ejercicios/arreglos/perimetro.py
|
leugimkm/Soluciones
|
d71601c8d9b5e86e926f48d9e49462af8a956b6d
|
[
"MIT"
] | null | null | null |
"""AyudaEnPython: https://www.facebook.com/groups/ayudapython
Generate a 25 x 40 matrix of random decimal numbers between 0 and 1.
Show the numbers on the perimeter and compute their sum.
"""
from random import random
from prototools import show_matrix
def solver_a():
"""
>>> solver_a()
[1, 2, 3, 4, 5, 16, 17, 18, 19, 20, 6, 11, 10, 15]
147
"""
#arr = [[random() for _ in range(40)] for _ in range(25)]
arr = [
[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15],
[16, 17, 18, 19, 20],
]
r_arr = list(map(list, zip(*arr)))
perimetro = [*arr[0], *arr[-1], *r_arr[0][1:-1], *r_arr[-1][1:-1]]
print(perimetro)
print(sum(perimetro))
def solver_b():
arr = [
[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15],
[16, 17, 18, 19, 20],
]
r_arr = list(map(list, zip(*arr)))
perimetro = [*arr[0], *arr[-1], *r_arr[0][1:-1], *r_arr[-1][1:-1]]
t = [[0 for _ in range(5)] for _ in range(4)]
t[0] = arr[0]
t[-1] = arr[-1]
for i in range(1, len(t[0]) - 1):
t[i][0] = r_arr[0][i]
t[i][-1] = r_arr[-1][i]
show_matrix(t)
print(sum(perimetro))
if __name__ == "__main__":
import doctest
doctest.testmod()
# solver_a()
# solver_b()
| 23.375
| 71
| 0.50573
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 371
| 0.283206
|
bbec33adffc946520b52f5157928147efe6188be
| 5,941
|
py
|
Python
|
HIS_void/rbac/admin.py
|
YuanchenZhu2020/HIS_void
|
7289bf537e9fc4b09750bbca76a4cc8354dc770f
|
[
"MIT"
] | null | null | null |
HIS_void/rbac/admin.py
|
YuanchenZhu2020/HIS_void
|
7289bf537e9fc4b09750bbca76a4cc8354dc770f
|
[
"MIT"
] | null | null | null |
HIS_void/rbac/admin.py
|
YuanchenZhu2020/HIS_void
|
7289bf537e9fc4b09750bbca76a4cc8354dc770f
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import gettext_lazy as _
from .models import (
ObjectPermission, URLPermission, UserGroup, UserInfo, Role, LoginLog
)
class UserCreationForm(forms.ModelForm):
"""
    All fields needed to create a new user in the admin, including the repeated-password confirmation field.
"""
password1 = forms.CharField(label = _("用户密码"), widget = forms.PasswordInput)
password2 = forms.CharField(label = _("确认密码"), widget = forms.PasswordInput)
class Meta:
model = UserInfo
fields = (
"username", "password",
"roles", "groups", "url_permissions", "obj_permissions",
"is_active", "is_admin", "is_superuser",
)
def clean_password2(self):
        # Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(_("前后输入的密码不匹配"))
return password2
def save(self, commit = True):
"""
        Hash the first password entry with the configured hasher and store it.
        @commit: whether to commit to the database
"""
user = super().save(commit = False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
"""
    Admin form for updating a user; includes all fields of the user model. The password is displayed as its hash parameters plus part of the ciphertext.
"""
password = ReadOnlyPasswordHashField(label = _("用户密码哈希"))
class Meta:
model = UserInfo
fields = (
"username", "password",
"roles", "groups", "url_permissions", "obj_permissions",
"is_active", "is_admin", "is_superuser",
)
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class UserAdmin(BaseUserAdmin):
    # form: form used when changing a user object
    # add_form: form used when creating a user object
form = UserChangeForm
add_form = UserCreationForm
list_display = ("username", "is_admin", "create_time", "last_login")
list_filter = ("is_admin", "create_time")
fieldsets = (
(None, {"fields": ("username", "password")}),
(_("角色"), {"fields": ("roles", )}),
(_("用户组"), {"fields": ("groups", )}),
(_("直接权限"), {"fields": ("url_permissions", "obj_permissions", )}),
(_("账号状态"), {"fields": ("is_active", "is_admin", "is_superuser")}),
)
    # UserAdmin uses get_fieldsets(); add_fieldsets defines the fields shown when creating an object.
add_fieldsets = (
(None, {
"classes": ("wide",),
"fields": (
"username", "password1", "password2",
),
}),
(_("角色"), {
"classes": ("wide", ),
"fields": ("roles", ),
}),
(_("用户组"), {
"classes": ("wide",),
"fields": ("groups", ),
}),
(_("直接权限"), {
"classes": ("wide",),
"fields": ("url_permissions", "obj_permissions", )
}),
(_("账号状态"), {
"classes": ("wide",),
"fields": (
"is_active", "is_admin", "is_superuser"
)
}),
)
search_fields = ("username", "is_admin")
ordering = ("username",)
filter_horizontal = ()
admin.site.register(UserInfo, UserAdmin)
admin.site.unregister(Group)
class UserGroupAdmin(admin.ModelAdmin):
list_display = (
"ug_id", "name",
)
list_filter = ("ug_id", "name")
search_fields = ("ug_id", "name")
fieldsets = (
(None, {"fields": ("ug_id", "name")}),
(_("直接权限"), {"fields": ("url_permissions", "obj_permissions")}),
(_("角色"), {"fields": ("roles", )}),
)
add_fieldsets = (
(None, {"classes": ("wide",), "fields": ("ug_id", "name", ),}),
(_("直接权限"), {"classes": ("wide",), "fields": ("url_permissions", "obj_permissions"),}),
(_("角色"), {"fields": ("roles", )}),
)
admin.site.register(UserGroup, UserGroupAdmin)
class URLPermissionAdmin(admin.ModelAdmin):
list_display = (
"name", "url", "codename", "create_time",
)
list_filter = ("name", "codename", "create_time", )
search_fields = ("name", "codename", "url")
admin.site.register(URLPermission, URLPermissionAdmin)
class ObjectPermissionAdmin(admin.ModelAdmin):
list_display = (
"name", "permission", "object_id", "create_time",
)
list_filter = ("name", "permission", "object_id", "create_time",)
search_fields = ("name", "permission", "object_id", )
admin.site.register(ObjectPermission, ObjectPermissionAdmin)
class RoleAdmin(admin.ModelAdmin):
list_display = (
"title", "description", "create_time"
)
list_filter = ("title", "create_time")
search_fields = ("title", "create_time")
fieldsets = (
(None, {"fields": ("title", "description")}),
(_("直接权限"), {"fields": ("url_permissions", "obj_permissions")}),
)
add_fieldsets = (
(None, {"classes": ("wide",), "fields": ("title", "description", ),}),
(_("直接权限"), {"classes": ("wide",), "fields": ("url_permissions", "obj_permissions"),}),
)
admin.site.register(Role, RoleAdmin)
class LoginLogAdmin(admin.ModelAdmin):
list_display = (
"id", "user", "login_time", "ip_address"
)
list_filter = ("user", "login_time", "ip_address")
search_fields = ("user", "login_time", "ip_address")
admin.site.register(LoginLog, LoginLogAdmin)
| 32.113514
| 96
| 0.56068
| 5,566
| 0.877503
| 0
| 0
| 0
| 0
| 0
| 0
| 2,465
| 0.388617
|
bbec39b874803a9ced574ab89af24276b12b55c2
| 4,698
|
py
|
Python
|
process.py
|
bisi-dev/wa-analytics
|
a657fd793a59fa551d5755877c4e6c814bc3d17c
|
[
"Apache-2.0"
] | 1
|
2022-01-09T21:57:56.000Z
|
2022-01-09T21:57:56.000Z
|
process.py
|
bisi-dev/wa-analytics
|
a657fd793a59fa551d5755877c4e6c814bc3d17c
|
[
"Apache-2.0"
] | null | null | null |
process.py
|
bisi-dev/wa-analytics
|
a657fd793a59fa551d5755877c4e6c814bc3d17c
|
[
"Apache-2.0"
] | null | null | null |
# import modules
import os
import re
import glob
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from wordcloud import WordCloud
class Analyse:
# Data Cleaning Function
def raw_to_df(self, file, key):
global df
# Time formatting
        split_formats = {
            "12hr": r"\d{1,2}/\d{1,2}/\d{2,4},\s\d{1,2}:\d{2}\s[APap][mM]\s-\s",
            "24hr": r"\d{1,2}/\d{1,2}/\d{2,4},\s\d{1,2}:\d{2}\s-\s",
            "custom": "",
        }
datetime_formats = {
"12hr": "%m/%d/%y, %I:%M %p - ",
"24hr": "%m/%d/%y, %H:%M - ",
"custom": "",
}
with open(file, "r", encoding="utf8") as raw_data:
# Converting the list split by newline char as one whole string
# As there can be multi-line messages
raw_string = " ".join(raw_data.read().split("\n"))
# Splits at all the date-time pattern,
# resulting in list of all the messages with user names
user_msg = re.split(split_formats[key], raw_string)[1:]
# Finds all the date-time patterns
date_time = re.findall(split_formats[key], raw_string)
# Export it to a df
df = pd.DataFrame({"date_time": date_time, "user_msg": user_msg})
# Converting date-time pattern which is of type String to datetime,
# Format is to be specified for the whole string
# where the placeholders are extracted by the method
df["date_time"] = pd.to_datetime(
df["date_time"], format=datetime_formats[key]
)
# Split user and msg
usernames = []
msgs = []
for i in df["user_msg"]:
# Lazy pattern match to first {user_name}
# pattern and splitting each msg from a user
            a = re.split(r"([\w\W]+?):\s", i)
# User typed messages
if a[1:]:
usernames.append(a[1])
msgs.append(a[2])
# Other notifications in the group(someone was added, some left...)
else:
usernames.append("grp_notif")
msgs.append(a[0])
# Creating new columns
df["user"] = usernames
df["msg"] = msgs
# Dropping the old user_msg col.
df.drop("user_msg", axis=1, inplace=True)
# Group Notifications
grp_notif = df[df["user"] == "grp_notif"]
# Media
# no. of images, images are represented by <media omitted>
media = df[df["msg"] == "<Media omitted> "]
# removing images
df.drop(media.index, inplace=True)
# removing grp_notif
df.drop(grp_notif.index, inplace=True)
# Reset Index
df.reset_index(inplace=True, drop=True)
return df
    # Function to get the total number of messages in the chat.
def messages_count(self):
return df.shape[0] - 1
    # Function to get the total number of users, plus per-user message counts.
def users_count(self):
msgs_per_user = df["user"].value_counts(sort=True)
df2 = msgs_per_user.to_frame()
df2.rename({"user": "FREQUENCY"}, axis=1, inplace=True)
return (df2.shape[0], df2)
# Function uses Wordcloud lib to create infographics on words used in chat.
def infographics(self):
# Version Control - Keep Directory Clean for repeated usage in server
# Check for Last Image file
list_of_files = glob.glob("./static/data/*.png")
latest_file = max(list_of_files, key=os.path.getctime)
# Get Filename without extension
basename, fileext = os.path.splitext(latest_file)
# Increase count
current = basename[14:]
v = re.findall("[0-9]+", current)
version = int(v[0])
version += 1
version = str(version)
# Rename it
        current = re.sub(r"\d+", "", current)
current_file = current + version + fileext
# Delete Previous File
os.remove(latest_file)
        # To disable the version-control logic above, comment it out and use:
# current_file = "test.png"
comment_words = " "
for val in df.msg.values:
val = str(val)
tokens = val.split()
for i in range(len(tokens)):
tokens[i] = tokens[i].lower()
for words in tokens:
comment_words = comment_words + words + " "
wordcloud = WordCloud(
width=800, height=800, background_color="black", min_font_size=10
).generate(comment_words)
wordcloud.to_file("./static/data/" + current_file)
return current_file
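

if __name__ == "__main__":
    # A minimal usage sketch; the export file name and the "12hr" time key
    # are assumptions about the caller's data.
    analyser = Analyse()
    analyser.raw_to_df("chat.txt", "12hr")
    print("messages:", analyser.messages_count())
    n_users, freq = analyser.users_count()
    print("users:", n_users)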
| 29.923567
| 79
| 0.561303
| 4,521
| 0.962324
| 0
| 0
| 0
| 0
| 0
| 0
| 1,740
| 0.37037
|
bbece8b4d9743d75c14096162d201bda457080e8
| 2,857
|
py
|
Python
|
server/rdp.py
|
husmen/Trajectory-Mapping
|
215d5a2c58482b7ddb168a50dd02c59ba285c8bd
|
[
"MIT"
] | 2
|
2019-08-06T07:28:45.000Z
|
2020-05-31T14:41:40.000Z
|
server/rdp.py
|
husmen/Trajectory-Mapping
|
215d5a2c58482b7ddb168a50dd02c59ba285c8bd
|
[
"MIT"
] | null | null | null |
server/rdp.py
|
husmen/Trajectory-Mapping
|
215d5a2c58482b7ddb168a50dd02c59ba285c8bd
|
[
"MIT"
] | 1
|
2019-01-07T10:14:50.000Z
|
2019-01-07T10:14:50.000Z
|
#!/usr/bin/python3
"""
rdp
Python implementation of the Ramer-Douglas-Peucker algorithm.
"""
import sys
import numpy as np
from math import radians, cos, sin, asin, sqrt
if sys.version_info[0] >= 3:
xrange = range
def pl_dist(point, start, end):
"""
Calculates the distance from ``point`` to the line given
by the points ``start`` and ``end``.
:param point: a point
:type point: numpy array
:param start: a point of the line
:type start: numpy array
:param end: another point of the line
:type end: numpy array
"""
if np.all(np.equal(start, end)):
return np.linalg.norm(point - start)
return np.divide(
np.abs(np.linalg.norm(np.cross(end - start, start - point))),
np.linalg.norm(end - start))
def rdp_rec(M, epsilon, dist=pl_dist):
"""
Simplifies a given array of points.
Recursive version.
:param M: an array
:type M: numpy array
:param epsilon: epsilon in the rdp algorithm
:type epsilon: float
:param dist: distance function
:type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pl_dist`
"""
dmax = 0.0
index = -1
for i in xrange(1, M.shape[0]):
d = dist(M[i], M[0], M[-1])
if d > dmax:
index = i
dmax = d
if dmax > epsilon:
r_1 = rdp_rec(M[:index + 1], epsilon, dist)
r_2 = rdp_rec(M[index:], epsilon, dist)
return np.vstack((r_1[:-1], r_2))
else:
return np.vstack((M[0], M[-1]))
def rdp(M, epsilon=0, dist=pl_dist):
"""
Simplifies a given array of points using the Ramer-Douglas-Peucker
algorithm.
Example:
>>> from rdp import rdp
>>> rdp([[1, 1], [2, 2], [3, 3], [4, 4]])
[[1, 1], [4, 4]]
This is a convenience wrapper around :func:`rdp.rdp_rec`
that detects if the input is a numpy array
in order to adapt the output accordingly. This means that
when it is called using a Python list as argument, a Python
list is returned, and in case of an invocation using a numpy
array, a NumPy array is returned.
Example:
>>> from rdp import rdp
>>> import numpy as np
>>> arr = np.array([1, 1, 2, 2, 3, 3, 4, 4]).reshape(4, 2)
>>> arr
array([[1, 1],
[2, 2],
[3, 3],
[4, 4]])
:param M: a series of points
:type M: numpy array with shape (n,d) where n is the number of points and d their dimension
:param epsilon: epsilon in the rdp algorithm
:type epsilon: float
:param dist: distance function
:type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pl_dist`
"""
if "numpy" in str(type(M)):
return rdp_rec(M, epsilon, dist)
return rdp_rec(np.array(M), epsilon, dist).tolist()
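

if __name__ == "__main__":
    # A small sketch with arbitrarily chosen points: the middle point lies
    # within epsilon of the line through the endpoints, so it is dropped.
    print(rdp([[0, 0], [1, 0.1], [2, 0]], epsilon=0.5))
    # -> [[0.0, 0.0], [2.0, 0.0]]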
| 26.453704
| 95
| 0.60098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,879
| 0.657683
|
bbed6bb7a92a7f22be0a1cdd4bf174a34f7f4719
| 7,655
|
py
|
Python
|
tests/Monkeypatching/test_Api_monkeypatching_api_get_by_id.py
|
LudwikaMalinowska/Automated-Testing-Project2
|
f0868700af8d6b946768d67b3c1768c2447f1a60
|
[
"MIT"
] | null | null | null |
tests/Monkeypatching/test_Api_monkeypatching_api_get_by_id.py
|
LudwikaMalinowska/Automated-Testing-Project2
|
f0868700af8d6b946768d67b3c1768c2447f1a60
|
[
"MIT"
] | null | null | null |
tests/Monkeypatching/test_Api_monkeypatching_api_get_by_id.py
|
LudwikaMalinowska/Automated-Testing-Project2
|
f0868700af8d6b946768d67b3c1768c2447f1a60
|
[
"MIT"
] | null | null | null |
import unittest
import requests
from assertpy import assert_that
from requests.exceptions import Timeout
from unittest.mock import Mock, patch
from src.Api import Api
from src.todos import todos
class TestApiMonkeyPatch(unittest.TestCase):
@patch('src.Api.Api', autospec=True)
def test_method_api_get_by_id_raises_timeout(self, mock_class):
mock_id = Mock()
mock_id.return_value = 1
mock_class.api_get_by_id.side_effect = Timeout
with self.assertRaises(Timeout):
mock_class.api_get_by_id(mock_id)
def test_method_api_get_by_id_assert_that_called_once(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id.assert_called_once()
def test_method_api_get_by_id_assert_that_called(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id(mock_id2)
mock_api.api_get_by_id.assert_called()
def test_method_api_get_by_id_assert_that_not_called(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id.assert_not_called()
def test_method_api_get_by_id_assert_that_called_with_id_1(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id.assert_called_with(mock_id)
def test_method_api_get_by_id_assert_that_called_once_with_id_1(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id.assert_called_once_with(mock_id)
def test_method_api_get_by_id_assert_that_response_equal_to_expected_userId_1_id_1_completed_false(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"data": todos[todo_id - 1], "status_code": 200}
response = mock_api.api_get_by_id(todo_id)
expected_todo = {
"userId": 1,
"id": 1,
"title": "delectus aut autem",
"completed": False
}
assert_that(response["data"]).is_equal_to(expected_todo)
def test_method_api_get_by_id_assert_that_response_contains_all_keys_userId_id_title_completed(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"data": todos[todo_id - 1], "status_code": 200}
response = mock_api.api_get_by_id(todo_id)
assert_that(response["data"]).contains_key("userId", "id", "title", "completed")
def test_method_api_get_by_id_assert_that_response_has_status_code_200(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"data": todos[todo_id - 1], "status_code": 200}
response = mock_api.api_get_by_id(todo_id)
assert_that(response).has_status_code(200)
def test_method_api_get_by_id_assert_that_response_dont_have_status_code_200(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"status_code": 408}
response = mock_api.api_get_by_id(todo_id)
assert_that(response["status_code"]).is_not_equal_to(200)
def test_method_api_get_by_id_assert_that_not_called_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_not_called()
def test_method_api_get_by_id_assert_that_called_once_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id(mock_id2)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_called_once()
def test_method_api_get_by_id_assert_that_called_with_id_1_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id2)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_called_with(mock_id)
def test_api_get_by_id_monkeypatch_called_once_with_id_1_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id(mock_id2)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_called_once_with(mock_id)
def test_method_api_get_by_id_no_parameter_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
with self.assertRaises(TypeError):
mock_api.api_get_by_id()
def test_method_api_get_by_id_assert_that_response_returns_ValueError_when_called_with_id_0_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 0
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = ValueError
assert_that(mock_api.api_get_by_id).raises(ValueError).when_called_with(todo_id)
def test_method_api_get_by_id_assert_that_response_returns_ValueError_when_called_with_id_300_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 300
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = ValueError
assert_that(mock_api.api_get_by_id).raises(ValueError).when_called_with(todo_id)
def test_method_api_get_by_id_assert_that_response_returns_TypeError_when_called_with_id_not_int_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = "1"
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = TypeError
assert_that(mock_api.api_get_by_id).raises(TypeError).when_called_with(todo_id)
def test_method_api_get_by_id_assert_that_response_returns_AttributeError_when_called_with_id_None_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = None
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = AttributeError
assert_that(mock_api.api_get_by_id).raises(AttributeError).when_called_with(todo_id)
if __name__ == '__main__':
unittest.main()
| 44.248555
| 119
| 0.678903
| 7,407
| 0.967603
| 0
| 0
| 304
| 0.039713
| 0
| 0
| 487
| 0.063619
|
bbed960d66995a862622a4fef9dc7cece3c6c141
| 361
|
py
|
Python
|
spark_auto_mapper_fhir/generator/test_generator_get_types_for_codeable_concepts.py
|
imranq2/SparkAutoMapper.FHIR
|
dd23b218fb0097d1edc2f3e688e8d6d4d7278bd2
|
[
"Apache-2.0"
] | 1
|
2020-10-31T23:25:07.000Z
|
2020-10-31T23:25:07.000Z
|
spark_auto_mapper_fhir/generator/test_generator_get_types_for_codeable_concepts.py
|
icanbwell/SparkAutoMapper.FHIR
|
98f368e781b46523142c7cb513c670d659a93c9b
|
[
"Apache-2.0"
] | null | null | null |
spark_auto_mapper_fhir/generator/test_generator_get_types_for_codeable_concepts.py
|
icanbwell/SparkAutoMapper.FHIR
|
98f368e781b46523142c7cb513c670d659a93c9b
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
from spark_auto_mapper_fhir.generator.fhir_xml_schema_parser import (
FhirXmlSchemaParser,
FhirCodeableType,
)
def test_generator_get_types_for_codeable_concepts() -> None:
print("")
codeable_types: List[
FhirCodeableType
] = FhirXmlSchemaParser.get_types_for_codeable_concepts()
print(codeable_types)
| 24.066667
| 69
| 0.772853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2
| 0.00554
|
bbed9aef40b38f60b39c83a06b6bc7dda41d6fe6
| 3,112
|
py
|
Python
|
hard-gists/6572592/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 21
|
2019-07-08T08:26:45.000Z
|
2022-01-24T23:53:25.000Z
|
hard-gists/6572592/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 5
|
2019-06-15T14:47:47.000Z
|
2022-02-26T05:02:56.000Z
|
hard-gists/6572592/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 17
|
2019-05-16T03:50:34.000Z
|
2021-01-14T14:35:12.000Z
|
#!/usr/bin/env python
# Lastfm loved tracks to Google Music All Access playlist. As noted in the comments you do need the All Access subscription thing otherwise it will always find 0 songs.
#
# Written by Tim Hutt, tdhutt@gmail.com, based on this script:
#
# https://gist.github.com/oquno/3664731
#
# Today is the 15th of September 2013.
#
# Not really tested!
#
# Update on 20th of September 2016:
#
# * Changed login API to match gmusicapi (not tested at all)
#
# Instructions:
#
# 0. Install python and pip.
# 1. Download this to a file `lastfm_to_gmusic.py`
# 2. Make it executable: `chmod +x lastfm_to_gmusic.py`
# 3. Install `gmusicapi` using `pip`: `pip install gmusicapi`
# 4. Get a last.fm API key here: http://www.last.fm/api/account/create
# 5. Run it! `./lastfm_to_gmusic.py`.
#
# Troubleshooting:
#
# 1. It says "Login error": Go to your gmail and check that it didn't block any "suspicious logins".
# 2. It doesn't find any tracks: Update gmusicapi.
# 3. Something else: Email me. There's a small chance I'll reply.
#
#
import urllib2
import gmusicapi
from xml.etree.ElementTree import parse
def main():
# Gather required info.
google_username = raw_input("Google username: ").strip()
google_password = raw_input("Google password: ")
lastfm_username = raw_input("Lastfm username: ").strip()
lastfm_key = raw_input("Lastfm API key: ").strip()
# Log in.
api = gmusicapi.Mobileclient()
if not api.login(google_username, google_password, gmusicapi.Mobileclient.FROM_MAC_ADDRESS):
print "Login error"
return
# Get loved tracks.
loved = []
page = 1
while True:
url = "http://ws.audioscrobbler.com/2.0/?method=user.getlovedtracks&user=%s&api_key=%s&page=%d" % \
(lastfm_username, lastfm_key, page)
print("Fetching: " + url)
tree = parse(urllib2.urlopen(url)).getroot()
tracks = tree.findall('lovedtracks/track')
for track in tracks:
title = track.find('name').text
artist = track.find('artist/name').text
loved.append((artist,title))
if len(tracks) < 50:
break
page += 1
print("Got " + str(len(loved)) + " loved tracks")
if len(loved) == 0:
print "Exiting"
return
# Creating new playlist
playlist_id = api.create_playlist("Loved tracks")
to_add = []
# Search for each song in all access.
# This is quite a dirty way to do it, and the gmusicapi seems to be a little out of date
# hence the catch-all. This found 529 out of the 787 loved songs I have which is not too bad.
for target in loved:
try:
res = api.search_all_access(target[0] + " " + target[1], max_results=1)
to_add.append(res["song_hits"][0]["track"]["nid"])
except:
pass
print("Got " + str(len(to_add)) + " songs so far out of " + str(len(loved)))
print("Adding " + str(len(to_add)) + " songs to playlist")
api.add_songs_to_playlist(playlist_id, to_add)
print("Done! I hope.")
if __name__ == '__main__':
main()
| 31.12
| 168
| 0.645566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,677
| 0.538882
|
bbee0d1262c642ad50187e5394e6ab5c37bd528f
| 5,560
|
py
|
Python
|
tests/algorithms/test_gail.py
|
sony/nnabla-rl
|
6a9a91ac5363b8611e0c9f736590729952a8d460
|
[
"Apache-2.0"
] | 75
|
2021-06-14T02:35:19.000Z
|
2022-03-23T04:30:24.000Z
|
tests/algorithms/test_gail.py
|
sony/nnabla-rl
|
6a9a91ac5363b8611e0c9f736590729952a8d460
|
[
"Apache-2.0"
] | 2
|
2021-12-17T08:46:54.000Z
|
2022-03-15T02:04:53.000Z
|
tests/algorithms/test_gail.py
|
sony/nnabla-rl
|
6a9a91ac5363b8611e0c9f736590729952a8d460
|
[
"Apache-2.0"
] | 3
|
2021-06-15T13:32:57.000Z
|
2022-03-25T16:53:14.000Z
|
# Copyright 2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import nnabla as nn
import nnabla_rl.algorithms as A
import nnabla_rl.environments as E
from nnabla_rl.replay_buffer import ReplayBuffer
class TestGAIL():
def setup_method(self):
nn.clear_parameters()
def _create_dummy_buffer(self, env, batch_size=5):
experiences = generate_dummy_experiences(env, batch_size)
dummy_buffer = ReplayBuffer()
dummy_buffer.append_all(experiences)
return dummy_buffer
def test_algorithm_name(self):
dummy_env = E.DummyContinuous()
dummy_buffer = self._create_dummy_buffer(dummy_env)
gail = A.GAIL(dummy_env, dummy_buffer)
assert gail.__name__ == 'GAIL'
def test_discrete_action_env_unsupported(self):
'''
Check that error occurs when training on discrete action env
'''
dummy_env = E.DummyDiscrete()
dummy_env = EpisodicEnv(dummy_env, min_episode_length=3)
dummy_buffer = self._create_dummy_buffer(dummy_env, batch_size=15)
config = A.GAILConfig()
with pytest.raises(Exception):
A.GAIL(dummy_env, dummy_buffer, config=config)
def test_run_online_training(self):
'''
Check that no error occurs when calling online training
'''
dummy_env = E.DummyContinuous()
dummy_env = EpisodicEnv(dummy_env, min_episode_length=3)
dummy_buffer = self._create_dummy_buffer(dummy_env, batch_size=15)
config = A.GAILConfig(num_steps_per_iteration=5,
pi_batch_size=5,
vf_batch_size=2,
discriminator_batch_size=2,
sigma_kl_divergence_constraint=10.0,
maximum_backtrack_numbers=50)
gail = A.GAIL(dummy_env, dummy_buffer, config=config)
gail.train_online(dummy_env, total_iterations=5)
def test_run_offline_training(self):
'''
        Check that an error is raised when calling offline training
'''
dummy_env = E.DummyContinuous()
dummy_buffer = self._create_dummy_buffer(dummy_env)
gail = A.GAIL(dummy_env, dummy_buffer)
with pytest.raises(NotImplementedError):
gail.train_offline([], total_iterations=10)
def test_compute_eval_action(self):
dummy_env = E.DummyContinuous()
dummy_buffer = self._create_dummy_buffer(dummy_env)
gail = A.GAIL(dummy_env, dummy_buffer)
state = dummy_env.reset()
state = np.float32(state)
action = gail.compute_eval_action(state)
assert action.shape == dummy_env.action_space.shape
def test_parameter_range(self):
with pytest.raises(ValueError):
A.GAILConfig(gamma=-0.1)
with pytest.raises(ValueError):
A.GAILConfig(num_steps_per_iteration=-1)
with pytest.raises(ValueError):
A.GAILConfig(sigma_kl_divergence_constraint=-0.1)
with pytest.raises(ValueError):
A.GAILConfig(maximum_backtrack_numbers=-0.1)
with pytest.raises(ValueError):
A.GAILConfig(conjugate_gradient_damping=-0.1)
with pytest.raises(ValueError):
A.GAILConfig(conjugate_gradient_iterations=-5)
with pytest.raises(ValueError):
A.GAILConfig(vf_epochs=-5)
with pytest.raises(ValueError):
A.GAILConfig(vf_batch_size=-5)
with pytest.raises(ValueError):
A.GAILConfig(vf_learning_rate=-0.5)
with pytest.raises(ValueError):
A.GAILConfig(discriminator_learning_rate=-0.5)
with pytest.raises(ValueError):
A.GAILConfig(discriminator_batch_size=-5)
with pytest.raises(ValueError):
A.GAILConfig(policy_update_frequency=-5)
with pytest.raises(ValueError):
A.GAILConfig(discriminator_update_frequency=-5)
with pytest.raises(ValueError):
A.GAILConfig(adversary_entropy_coef=-0.5)
def test_latest_iteration_state(self):
'''
Check that latest iteration state has the keys and values we expected
'''
dummy_env = E.DummyContinuous()
dummy_buffer = self._create_dummy_buffer(dummy_env)
gail = A.GAIL(dummy_env, dummy_buffer)
gail._v_function_trainer_state = {'v_loss': 0.}
gail._discriminator_trainer_state = {'reward_loss': 1.}
latest_iteration_state = gail.latest_iteration_state
assert 'v_loss' in latest_iteration_state['scalar']
assert 'reward_loss' in latest_iteration_state['scalar']
assert latest_iteration_state['scalar']['v_loss'] == 0.
assert latest_iteration_state['scalar']['reward_loss'] == 1.
if __name__ == "__main__":
from testing_utils import EpisodicEnv, generate_dummy_experiences
pytest.main()
else:
from ..testing_utils import EpisodicEnv, generate_dummy_experiences
| 37.823129
| 77
| 0.673561
| 4,568
| 0.821583
| 0
| 0
| 0
| 0
| 0
| 0
| 1,053
| 0.189388
|
bbef40d1d77a7ea412c5b45aa8b16fa7be4ecbe1
| 23,124
|
py
|
Python
|
authkit/authenticate/__init__.py
|
bobrock/AuthKit
|
ba82501d9dff699be9eef33266aecd03d016cec2
|
[
"MIT"
] | null | null | null |
authkit/authenticate/__init__.py
|
bobrock/AuthKit
|
ba82501d9dff699be9eef33266aecd03d016cec2
|
[
"MIT"
] | null | null | null |
authkit/authenticate/__init__.py
|
bobrock/AuthKit
|
ba82501d9dff699be9eef33266aecd03d016cec2
|
[
"MIT"
] | 1
|
2020-06-24T19:20:13.000Z
|
2020-06-24T19:20:13.000Z
|
"""Authentication middleware
This module provides one piece of middleware named
``authkit.authenticate.middleware`` which is used to intercept responses with
a specified status code, present a user with a means of authenticating
themselves and handle the sign in process.
Each of the authentication methods supported by the middleware is described in
detail in the main AuthKit manual. The methods include:
* HTTP Basic (``basic``)
* HTTP Digest (``digest``)
* OpenID Passurl (``openid``)
* Form and Cookie (``form``)
* Forward (``forward``)
* Redirect (``redirect``)
The authenticate middleware can be configured directly or by means of a Paste
deploy config file as used by Pylons. It can be used directly like this:
.. code-block:: Python
from authkit.authenticate import middleware, test_app
from paste.httpserver import serve
import sys
app = middleware(
test_app,
enable = True,
method = 'passurl',
cookie_secret='some_secret',
)
serve(app, host='0.0.0.0', port=8000)
"""
import types
import warnings
import logging
import os
import os.path
from paste.util.import_string import eval_import
from multi import MultiHandler, status_checker
from pkg_resources import iter_entry_points, load_entry_point
from paste.deploy.converters import asbool
import paste.httpexceptions
import webob.exc
from authkit.authorize import authorize_request
from authkit.permissions import RemoteUser, no_authkit_users_in_environ, \
AuthKitConfigError
# Main middleware base classes
class AuthKitAuthHandler(object):
"""
The base class for all middleware responsible for handling
authentication and setting whatever needs to be set so that the
``AuthKitUserSetter`` middleware can set REMOTE_USER on subsequent
requests. ``AuthKitAuthHandler``s only get inserted into the
middleware stack if an appropriate status code (as set in the
``authkit.setup.intercept`` config option) is intercepted by the
authentication middleware.
"""
pass
class AuthKitUserSetter(object):
"""
The base class for all middleware responsible for attempting to set
    REMOTE_USER on each request. The class is overridden by the individual
handlers.
"""
pass
# Setting up logging
log = logging.getLogger('authkit.authenticate')
def strip_base(conf, base):
result = {}
for key in conf.keys():
if key.startswith(base):
result[key[len(base):]] = conf[key]
return result
def swap_underscore(*confs):
results = []
for conf in confs:
result = {}
for k,v in conf.items():
result[k.replace('.','_')] = v
results.append(result)
return results
def valid_password(environ, username, password):
"""
A function which can be used with the ``basic`` and ``form`` authentication
methods to validate a username and passowrd.
This implementation is used by default if no other method is specified. It
checks the for an ``authkit.users`` object present in the ``environ``
dictionary under the ``authkit.users`` key and uses the information there
to validate the username and password.
In this implementation usernames are case insensitive and passwords are
case sensitive. The function returns ``True`` if the user ``username`` has
the password specified by ``password`` and returns ``False`` if the user
doesn't exist or the password is incorrect.
If you create and specify your own ``authkit.users`` object with the same
API, this method will also work correctly with your custom solution. See
the AuthKit manual for information on the user management api, how to
specify a different ``authkit.users`` object (say to read user information
from a file rather than have it specified directly) and for information on
how to create your own ``Users`` objects.
"""
log.debug("valid_password called. username: %s", username)
if not environ.has_key('authkit.users'):
raise no_authkit_users_in_environ
users = environ['authkit.users']
if not users.user_exists(username):
return False
elif users.user_has_password(username.lower(), password):
return True
return False
def digest_password(environ, realm, username):
"""
This is similar to ``valid_password()`` but is used with the ``digest``
authentication method and rather than checking a username and password and
returning ``True`` or ``False`` it takes the realm and username as input,
looks up the correct password and and returns a digest by calling the
``authkit.authenticate.digest.digest_password()`` function with the
parameters ``realm``, ``username`` and ``password`` respectively. The
digest returned is then compared with the one submitted by the browser.
As with ``valid_password()`` this method is designed to work with the user
management API so you can use it with ``authkit.users`` objects or your own
custom ``Users`` objects. Alternatively you can specify your own function
which can lookup the password in whichever way you prefer, perhaps from a
database or LDAP connection.
Only required if you intend to use HTTP digest authentication.
"""
log.debug(
"digest_password called. username: %s, realm: %s", username, realm
)
if not environ.has_key('authkit.users'):
raise no_authkit_users_in_environ
users = environ['authkit.users']
if users.user_exists(username):
password = users.user(username)['password']
return digest.digest_password(realm, username, password)
# After speaking to Clark Evans who wrote the origianl code, this is the
# correct thing:
return None
class AddUsersObjectToEnviron(object):
"""Simple middleware which adds a Users object to the environ."""
def __init__(self, app, key, value, *k, **p):
self.app = app
self.k = k
self.p = p
self.key = key
self.value = value
def __call__(self, environ, start_response):
p = {}
p.update(self.p)
p['environ'] = environ
environ[self.key] = self.value(*self.k, **p)
return self.app(environ, start_response)
def get_authenticate_function(app, authenticate_conf, format, prefix):
"""
Sets up the users object, adds the middleware to add the users object
to the environ and then returns authenticate methods to check a password
and a digest.
"""
function = None
users = None
if len(authenticate_conf) < 1:
raise AuthKitConfigError('Expected at least one authenticate key, not'
' %r'%authenticate_conf)
if authenticate_conf.keys() == ['function']:
function = authenticate_conf['function']
if isinstance(function, (str, unicode)):
function = eval_import(function)
else:
user_conf = strip_base(authenticate_conf, 'user.')
if not user_conf:
raise AuthKitConfigError('No authenticate function or users specified')
else:
if user_conf.has_key('encrypt'):
if format == 'digest':
raise AuthKitConfigError('Encryption cannot be used with '
'digest authentication because the server needs to '
'know the password to generate the digest, try basic '
'or form and cookie authentication instead')
enc_func = eval_import(user_conf['encrypt'])
secret = user_conf.get('encrypt.secret','')
def encrypt(password):
return enc_func(password, secret)
else:
encrypt = None
user_object = 'authkit.users.UsersFromString'
if 'type' in user_conf.keys():
user_object = user_conf['type']
if isinstance(user_object, (str, unicode)):
user_object = eval_import(user_object)
if not hasattr(user_object, "api_version"):
users = user_object(user_conf['data'], encrypt)
app = AddToEnviron(app, 'authkit.users', users)
log.debug("authkit.users added to environ")
elif user_object.api_version == 0.4:
app = AddUsersObjectToEnviron(
app,
'authkit.users',
user_object,
encrypt=encrypt,
data=user_conf.get('data'),
)
log.debug("Setting up authkit.users middleware")
else:
raise Exception(
'Unknown API version %s for user management API'%(
                        user_object.api_version,
)
)
if format == 'basic':
function = valid_password
log.debug("valid_password chosen %r", function)
elif format == 'digest':
log.debug("digest_password chosen %r", function)
function = digest_password
else:
raise Exception('Invalid format for authenticate function %r'
% format)
return app, function, users
def get_template(template_conf, prefix):
"""
Another utility method to reduce code duplication. This function parses a
template from one of the available template options:
``string``
The template as a string
``file``
A file containing the template
``obj``
A paste eval_import string or callable which returns a string
authkit.form.template.string =
authkit.form.template.file =
authkit.form.template.obj =
"""
template = None
if len(template_conf) != 1:
raise AuthKitConfigError('Expected one template entry, not %r' %
(', '.join(template_conf.keys())))
if template_conf.keys()[0] not in ['string', 'file', 'obj']:
raise AuthKitConfigError("Template option can only be 'string', 'file'"
" or 'obj'")
if template_conf.keys()[0] == 'string':
template = template_conf['string']
elif template_conf.keys()[0] == 'file':
if not os.path.exists(template_conf['file']):
raise AuthKitConfigError('No such file %r exists. It was specified'
' by config option %r' %
(template_conf['file'], prefix+'file'))
fp = open(template_conf['file'], 'r')
template = fp.read()
fp.close()
if not template:
raise AuthKitConfigError('No data in template file %s specified by'
' config option %r' %
(template_conf['file'], prefix+'file'))
elif template_conf.keys()[0] == 'obj':
template = eval_import(template_conf['obj'])
if not template:
raise AuthKitConfigError('No data in template obj %s specified by '
'config option %r' %
(template_conf['obj'], prefix+'obj'))
else:
raise AuthKitConfigError("Unknown option %r" %
(prefix+template_conf.keys()[0]))
if not template:
raise AuthKitConfigError("The template loaded did not contain any data")
if isinstance(template, (str, unicode)):
def render_template():
return template
return render_template
return template
#
# Main middleware creator
#
class AddToEnviron(object):
"""
Simple middleware which adds a key to the ``environ`` dictionary.
Used to add the ``authkit.users`` key to the environ when this is
appropriate.
"""
def __init__(self, app, key, object):
self.app = app
self.key = key
self.object = object
def __call__(self, environ, start_response):
environ[self.key] = self.object
return self.app(environ, start_response)
class AddDictToEnviron(object):
"""Simple middleware which adds the values of a dict to the environ."""
def __init__(self, app, dct):
self.app = app
self.dct = dct
def __call__(self, environ, start_response):
environ.update(self.dct)
return self.app(environ, start_response)
class RequireEnvironKey(object):
def __init__(self, app, key, missing_error=None):
self.app = app
self.key = key
self.missing_error = missing_error or \
'Missing the key %(key)s from the environ. Have you setup the ' \
'correct middleware?'
def __call__(self, environ, start_response):
if not environ.has_key(self.key):
raise Exception(self.missing_error%{'key':self.key})
return self.app(environ, start_response)
def get_methods():
"""Get a dictionary of the available method entry points."""
available_methods = {}
for method_handler in iter_entry_points(group='authkit.method', name=None):
available_methods[method_handler.name] = method_handler
return available_methods
def load_method(name, from_these=None):
if from_these:
return from_these[name].load()
else:
return load_entry_point('AuthKit','authkit.method',name)
def load_config(options, app_conf, prefix):
merged = strip_base(app_conf, prefix)
# Now override the auth_conf_options with the manaully specified options
for key, value in options.items():
if merged.has_key(key):
warnings.warn(
                'Key %s with value %r set in the config file is being '
                'replaced with value %r set in the application'%(
                    key,
                    merged[key],
value
)
)
merged[key.replace('_','.')] = value
return merged
class HTTPExceptionHandler(object):
"""
catches exceptions and turns them into proper HTTP responses
Attributes:
``warning_level``
This attribute determines for what exceptions a stack
trace is kept for lower level reporting; by default, it
only keeps stack trace for 5xx, HTTPServerError exceptions.
To keep a stack trace for 4xx, HTTPClientError exceptions,
set this to 400.
This middleware catches any exceptions (which are subclasses of
``HTTPException``) and turns them into proper HTTP responses.
Note if the headers have already been sent, the stack trace is
always maintained as this indicates a programming error.
Note that you must raise the exception before returning the
app_iter, and you cannot use this with generator apps that don't
raise an exception until after their app_iter is iterated over.
.. note::
This originally came from paste.httpexceptions.HTTPExceptionHandler
and is patched with comments below for compatibility with
webob + Python 2.4
"""
def __init__(self, application, warning_level=None):
assert not warning_level or ( warning_level > 99 and
warning_level < 600)
self.warning_level = warning_level or 500
self.application = application
def __call__(self, environ, start_response):
# Note that catching the webob exception is for Python 2.4 support.
# In the brave new world of new-style exceptions (derived from object)
# multiple inheritance works like you'd expect: the NotAuthenticatedError
        # is caught because it descends from both the paste and webob exceptions.
# In the old world (2.4-), the webob exception needs to be in the catch list
environ['paste.httpexceptions'] = self
environ.setdefault('paste.expected_exceptions',
[]).extend([paste.httpexceptions.HTTPException,
webob.exc.HTTPException])
try:
return self.application(environ, start_response)
except (paste.httpexceptions.HTTPException,
webob.exc.HTTPException), exc:
return exc(environ, start_response)
def middleware(app, app_conf=None, global_conf=None, prefix='authkit.',
handle_httpexception=True, middleware=None, **options):
"""
This function sets up the AuthKit authenticate middleware and its use and
options are described in detail in the AuthKit manual.
The function takes the following arguments and returns a WSGI application
wrapped in the appropriate AuthKit authentication middleware based on the
options specified:
``app``
The WSGI application the authenticate middleware should wrap
``app_conf``
A paste deploy ``app_conf`` dictionary to be used to setup the
middleware
``global_conf``
A paste deploy ``global_conf`` dictionary
``prefix``
The prefix which all authkit related options in the config file will
have prefixed to their names. This defaults to ``authkit.`` and
shouldn't normally need overriding.
``middleware``
A make_middleware function which should be called directly instead of
loading and calling a function based on the method name. If this is
        set then ``authkit.setup.method`` should not be set.
``**options``
Any AuthKit options which are setup directly in Python code. If
specified, these options will override any options specifed in a config
file.
All option names specified in the config file will have their prefix
removed and any ``.`` characters replaced by ``_`` before the options
specified by ``options`` are merged in. This means that the the option
``authkit.cookie.name`` specified in a config file sets the same options as
``cookie_name`` specified directly as an option.
"""
if handle_httpexception:
app = HTTPExceptionHandler(app)
# Configure the config files
if global_conf is None:
global_conf = {}
if app_conf is None:
app_conf = {}
if not isinstance(app_conf, dict):
raise AuthKitConfigError(
"Expected app_conf to be paste deploy app_conf dictionary "
"from not %r" % app_conf
)
# Merge config file and options
available_methods = get_methods()
all_conf = load_config(options, app_conf, prefix)
if middleware is not None and all_conf.has_key('setup.method'):
raise AuthKitConfigError(
'You cannot specify a middleware function '
'and an authkit.setup.method'
)
if not middleware and not all_conf.has_key('setup.method'):
raise AuthKitConfigError('No authkit.setup.method was specified')
# Add the configuration to the environment
enable_ = asbool(all_conf.get('setup.enable', True))
all_conf['setup.enable'] = enable_
app = AddToEnviron(app, 'authkit.config', all_conf)
if all_conf.has_key('setup.fakeuser'):
app = AddToEnviron(app, 'REMOTE_USER', all_conf['setup.fakeuser'])
# Check to see if middleware is disabled
if enable_ == False:
warnings.warn("AuthKit middleware has been turned off by the config "
"option authkit.setup.enable")
return app
# Status Checking/Changing Middleware
    intercept = [str(x).strip()
                 for x in all_conf.get('setup.intercept', '401').split(',')]
    if '401' not in intercept:
warnings.warn(
"AuthKit is configured via the authkit.setup.intercept option not "
"to intercept 401 responses so the authentication middleware will "
"not be triggered even if a 401 Unauthenticated response is "
"returned.")
if middleware:
prefix_ = prefix
app = middleware(
app,
auth_conf=all_conf,
app_conf=app_conf,
global_conf=global_conf,
prefix=prefix_,
)
else:
methods = [method.strip() for method in all_conf['setup.method'].split(',')]
log.debug("Trying to load the following methods: %r", methods)
for method in methods:
if method in ['setup','config']:
raise AuthKitConfigError("The name %s is reserved cannot be used "
"as a method name" % method)
            if method not in available_methods:
raise AuthKitConfigError(
'The authkit method %r is not available. The available methods '
                    'are %s and %s' % (
all_conf['setup.method'],
', '.join(available_methods.keys()[:-1]),
available_methods.keys()[-1],
)
)
prefix_ = prefix+method+'.'
auth_conf = strip_base(all_conf, method+'.')
app = available_methods[method].load()(
app,
auth_conf=auth_conf,
app_conf=app_conf,
global_conf=global_conf,
prefix=prefix_,
)
app = AddDictToEnviron(
app,
{
            'authkit.config': strip_base(all_conf, 'config.'),
            'authkit.intercept': intercept,
            'authkit.authenticate': True,
}
)
return app
def sample_app(environ, start_response):
"""
A sample WSGI application that returns a 401 status code when the path
``/private`` is entered, triggering the authenticate middleware to
prompt the user to sign in.
If used with the authenticate middleware's form method, the path
``/signout`` will display a signed out message if
``authkit.cookie.signout = /signout`` is specified in the config file.
If used with the authenticate middleware's forward method, the path
``/signin`` should be used to display the sign in form.
The path ``/`` always displays the environment.
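
    A minimal way to serve this sample for manual testing (a sketch only;
    the option values are illustrative and assume the ``form`` and
    ``cookie`` methods are installed and configured)::

        from paste.httpserver import serve
        serve(middleware(sample_app, setup_method='form, cookie',
                         form_authenticate_user_data='visitor:open_sesame',
                         cookie_secret='secret encryption string'),
              host='0.0.0.0', port=8080)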
"""
    if environ['PATH_INFO'] == '/private':
authorize_request(environ, RemoteUser())
if environ['PATH_INFO'] == '/signout':
start_response('200 OK', [('Content-type', 'text/plain; charset=UTF-8')])
        if 'REMOTE_USER' in environ:
return ["Signed Out"]
else:
return ["Not signed in"]
elif environ['PATH_INFO'] == '/signin':
start_response('200 OK', [('Content-type', 'text/plain; charset=UTF-8')])
return ["Your application would display a \nsign in form here."]
else:
start_response('200 OK', [('Content-type', 'text/plain; charset=UTF-8')])
result = ['You Have Access To This Page.\n\nHere is the environment...\n\n']
        for k, v in environ.items():
            result.append('%s: %s\n' % (k, v))
return result
avg_line_length: 38.604341 | max_line_length: 84 | alphanum_fraction: 0.625195
count_classes: 4866 | score_classes: 0.210431
count_generators: 0 | score_generators: 0
count_decorators: 0 | score_decorators: 0
count_async_functions: 0 | score_async_functions: 0
count_documentation: 11706 | score_documentation: 0.506227
hexsha: bbefdf91c1e6ecf066af1879e3918f12b778aa84 | size: 11398 | ext: py | lang: Python
max_stars_repo_path: options_chain_pull.py | max_stars_repo_name: anupamsharma01/python
max_stars_repo_head_hexsha: f415aa663c9e83ff8ab615da93a5a71ec877834b | max_stars_repo_licenses: ["blessing"]
max_stars_count: 2 | max_stars_repo_stars_event_min_datetime: 2020-12-25T22:30:52.000Z | max_stars_repo_stars_event_max_datetime: 2021-11-26T14:08:12.000Z
max_issues_repo_path: options_chain_pull.py | max_issues_repo_name: anupamsharma01/python_options_trade
max_issues_repo_head_hexsha: f415aa663c9e83ff8ab615da93a5a71ec877834b | max_issues_repo_licenses: ["blessing"]
max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: options_chain_pull.py | max_forks_repo_name: anupamsharma01/python_options_trade
max_forks_repo_head_hexsha: f415aa663c9e83ff8ab615da93a5a71ec877834b | max_forks_repo_licenses: ["blessing"]
max_forks_count: 3 | max_forks_repo_forks_event_min_datetime: 2020-04-10T15:00:10.000Z | max_forks_repo_forks_event_max_datetime: 2021-08-19T21:20:19.000Z
import requests
import sqlite3
import datetime
import dateutil.relativedelta
import calendar
import time
import openpyxl
KEY = 'STOCKTIPS'
# Arguments
in_file = r'C:\Anupam\market\stock_options_api-master\trading_api\tdameritrade\my_programs\data\program_in.txt'
out_file=r'C:\Anupam\market\stock_options_api-master\trading_api\tdameritrade\my_programs\data\program_out.txt'
script='C:/Anupam/market/stock_options_api-master/trading_api/tdameritrade/my_programs/options_chain_pull.py'
debug = 'true'
f_in = open(in_file)
equity_list = f_in.readlines()
equity_list = [l.replace('\n','') for l in equity_list]
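# each line of program_in.txt is expected to look like 'AAPL,<market_time>'
# (the comma split inside the main loop below relies on this shape)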
f_out = open(out_file,'w')
print ('EQUITY | CMP | 52WkRange', file=f_out)
#sqlite3 connection
connection = sqlite3.connect(r'C:\Anupam\Technical\sqlite\db\mydb.db')
cursor = connection.cursor()
create_sql = """CREATE TABLE IF NOT EXISTS chain (
equity text NOT NULL,
symbol text NOT NULL,
cmp real NOT NULL, --added from stocks
_52WkRange text NOT NULL, --added from stocks
strikePrice real NOT NULL,
last real NOT NULL,
bid real NOT NULL,
ask real NOT NULL,
bidSize real NOT NULL,
askSize real NOT NULL,
totalVolume real NOT NULL,
volatility real NOT NULL,
putCall text NOT NULL,
inTheMoney text NOT NULL,
daysToExpiration int NOT NULL,
timeValue real NOT NULL,
theoreticalVolatility real NOT NULL
);"""
drop_sql = "DROP TABLE CHAIN"
select_sql = "SELECT * FROM CHAIN"
delete_sql = "DELETE FROM CHAIN"
if (debug == 'true'):
print ('create_sql==',create_sql)
print ('delete_sql==',delete_sql)
#cursor.execute(drop_sql)
cursor.execute(create_sql)
cursor.execute(delete_sql)
connection.commit()
cursor.execute(select_sql)
row=cursor.fetchall()
print (row)
# Declare
#start = datetime.now()
args_list = []
count = str(250)
myFormat = "%Y-%m-%d"
today = datetime.date.today()
rd = dateutil.relativedelta.relativedelta(days=1, weekday=dateutil.relativedelta.FR)
next_friday = today + rd
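# example: if today is Tuesday 2021-04-06, relativedelta(days=1, weekday=FR)
# rolls forward to Friday 2021-04-09, i.e. the first Friday on or after
# tomorrow (so a run on a Friday yields the following week's Friday)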
if (debug == 'true'):
print ('today=',today)
print('next_friday=',str(next_friday))
#debug: currently active - overrides next_friday with an expiration 17 days out
next_friday=today+datetime.timedelta(days=17)
print('next_friday=', str(next_friday))
#debug starts
#equity='AAPL'
count=40
start_date=next_friday
#active_day variables start - syncup from excel_pull
#CUSTOMIZATION BLOCK starts
debug='false'
skip_days=0 #set to 0 when placing the order today; set to 1 to screen for tomorrow and the day after
#CUSTOMIZATION BLOCK ends
curr_date = datetime.date.today() + datetime.timedelta(days=skip_days)
if (curr_date.isoweekday() == 6):
curr_date = curr_date + datetime.timedelta(days=2)
elif (curr_date.isoweekday() == 7):
curr_date = curr_date + datetime.timedelta(days=1)
if curr_date.isoweekday() in (5, 6):
next_date = curr_date + datetime.timedelta(days=8 - curr_date.isoweekday())
else:
next_date = curr_date + datetime.timedelta(days=1)
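# example: run on a Friday, curr_date stays Friday and next_date jumps to
# Monday (8 - isoweekday 5 = 3 days); weekend runs first roll curr_date
# forward to Monday, so next_date becomes Tuesday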
print (curr_date, calendar.day_name[curr_date.weekday()], curr_date.isoweekday())
print (next_date, calendar.day_name[next_date.weekday()], next_date.isoweekday())
active_day_today = calendar.day_name[curr_date.weekday()]
active_day_tomorrow = calendar.day_name[next_date.weekday()]
print (active_day_today, active_day_tomorrow)
#active_day variables end
# for NEXT WEEK FRIDAY DEBUG only
#next_friday = next_friday + datetime.timedelta(days=7)
#start_date = start_date + datetime.timedelta(days=7)
#print("NEXT WEEK next_friday-start_date", next_friday, start_date)
# END OF NEXT WEEK DEBUG
for equity in equity_list:
#EQUITY STOCK CODE
time.sleep(1.01)
equity, mkt_time = equity.split(",")
equity = equity.strip()
print('equity=', equity)
start_equity = datetime.datetime.now()
url = 'https://api.tdameritrade.com/v1/marketdata/'+equity+'/quotes?apikey='+KEY
#url1 = 'https://api.tdameritrade.com/v1/marketdata/AAPL/quotes?apikey=STOCKTIPS'
r = requests.get(url)
payload = r.json()
if (debug=='true'):
print(url)
print ('r=',r)
print ('r.text=',r.text)
print ('payload=',payload)
equity = payload[equity]['symbol']
cmp = payload[equity]['regularMarketLastPrice'] #lastPrice
_52WkLow = round(payload[equity]['52WkLow'])
_52WkHigh = round(payload[equity]['52WkHigh'])
if (debug=='true'):
print ('equity=',equity)
print ('cmp=',cmp)
print ('EQUITY | CMP | 52WkRange', file=f_out)
print (equity, '|', cmp, '|', _52WkLow, '-', _52WkHigh, file=f_out)
#OPTION CHAIN CODE
url = 'https://api.tdameritrade.com/v1/marketdata/chains?apikey=' + KEY + \
'&symbol=' + equity + '&contractType=' + 'PUT' + '&range=OTM' + '&fromDate=' + \
str(start_date) + '&toDate=' + str(next_friday) + '&strikeCount=' + str(count) # + '&strike<170.0'
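    # a representative chains request (symbol and dates are illustrative):
    #   https://api.tdameritrade.com/v1/marketdata/chains?apikey=STOCKTIPS
    #     &symbol=AAPL&contractType=PUT&range=OTM
    #     &fromDate=2021-04-23&toDate=2021-04-23&strikeCount=40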
r = requests.get(url) # <Response [200]>
payload = r.json()
if (debug == 'true'):
print('URL==', url)
print(r.text)
print(payload)
symbol = payload['symbol']
# Get Puts
for keyy, valuee in payload["putExpDateMap"].items():
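        # keys of putExpDateMap look like '2021-04-23:7' (expiry date plus
        # days-to-expiration); the '%f' directive below happens to swallow
        # the numeric suffix after the ':'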
d = datetime.datetime.strptime(keyy, "%Y-%m-%d:%f")
ex_date = d.strftime(myFormat)
for key, value in valuee.items():
for v in value:
                args = [v['symbol'], payload["symbol"], v['strikePrice'],
                        v['last'], v['bid'], v['ask'], v['bidSize'],
                        v['askSize'], v['totalVolume'], v['volatility'],
                        v['putCall'], ex_date, v['inTheMoney'],
                        v['daysToExpiration'], v['timeValue'],
                        v['theoreticalVolatility']]
if (debug == 'true'):
print (v['strikePrice'] ,'CMP=', float(cmp))
if (v['strikePrice'] < float(cmp)):
args_list.append(args)
if (debug == 'true'):
print ('args_list=',args_list)
insert_sql = "INSERT INTO CHAIN (" \
+ " equity, symbol, cmp, _52WkRange, strikePrice, last, bid, ask, bidSize, askSize, totalVolume, volatility, putCall, inTheMoney, daysToExpiration, timeValue, theoreticalVolatility " \
+ ") values ('" \
+ payload['symbol'] + "','" \
+ v['symbol'] + "'," \
+ str(cmp) + "," \
+ str("'" + str(_52WkLow) + "-" + str(_52WkHigh)) + "'" + "," \
+ str(v['strikePrice']) + "," \
+ str(v['last']) + "," \
+ str(v['bid']) + "," \
+ str(v['ask']) + "," \
+ str(v['bidSize']) + "," \
+ str(v['askSize']) + "," \
+ str(v['totalVolume']) + "," \
+ str(v['volatility']) + ",'" \
+ str(v['putCall']) + "','" \
+ str(v['inTheMoney']) + "'," \
+ str(v['daysToExpiration']) + "," \
+ str(v['timeValue']) + "," \
+ str(v['theoreticalVolatility']) \
+ ")"
if (debug == 'true'):
print ('insert_sql==',insert_sql)
cursor.execute(insert_sql)
connection.commit()
# FINAL RESULT SQLs
wbkName_out = r'C:\Anupam\market\consolidated_excel_data.xlsx'
wbk_out = openpyxl.load_workbook(wbkName_out)
wks_out = wbk_out[active_day_today+'-'+active_day_tomorrow]
#WRITE OUTPUT TO EXCEL
select_sql1 = "select distinct equity, market_time from chain order by equity;"
print ('select_sql1=',select_sql1)
print ("-----------------", file=f_out)
select_sql2 = "select distinct equity, cmp, _52WkRange from chain order by equity;"
print ('select_sql2=',select_sql2)
cursor.execute(select_sql2)
rows = cursor.fetchall()
idx=2
#wks_out.cell(row=1, column=3).value = " ".join(["EQUITY" , " | " , "CMP" , "|" , "52WkRange"])
wks_out.cell(row=1, column=2).value = "52WkRange"
wks_out.cell(row=1, column=5).value = "CMP"
for row in rows:
if (debug == 'true'):
print('select_sql2:', row[0], "|" ,row[1], "|", row[2])
#wks_out.cell(row=idx, column=3).value = " ".join([str(row[0]) , "|" , str(row[1]) , "|" , str(row[2])])
wks_out.cell(row=idx, column=2).value = str(row[2])
wks_out.cell(row=idx, column=5).value = row[1]
    idx += 1
if (debug == 'true'):
print ('select_sql2:idx=',idx)
print ("-----------------", file=f_out)
select_sql3 = "select equity, strikeprice, bid, round(bid*100/strikeprice,2) prem_per from chain " + \
"where equity||strikeprice in (select equity||max(strikeprice) from chain group by equity) order by equity;"
print ('select_sql3=',select_sql3)
cursor.execute(select_sql3)
rows = cursor.fetchall()
idx=2
#wks_out.cell(row=1, column=4).value = " ".join(["EQUITY" , " | " , "STRIKEPRICE" , "|" , "BID", "|", "PREM_PCT"])
wks_out.cell(row=1, column=3).value = "EQUITY"
wks_out.cell(row=1, column=6).value = "STRIKEPRICE"
wks_out.cell(row=1, column=7).value = "BID"
wks_out.cell(row=1, column=8).value = "PREM_PCT"
for row in rows:
print(row[0], "|" ,row[1], "|", row[2], "|", row[3])
#wks_out.cell(row=idx, column=4).value = " ".join([str(row[0]) , "|" , str(row[1]) , "|" , str(row[2]), "|" , str(row[3])])
wks_out.cell(row=idx, column=3).value = str(row[0])
wks_out.cell(row=idx, column=6).value = row[1]
wks_out.cell(row=idx, column=7).value = row[2]
wks_out.cell(row=idx, column=8).value = row[3]
    idx += 1
if (debug == 'true'):
print ('select_sql3:idx=',idx)
print ("-----------------", file=f_out)
select_sql4 = "select equity, strikeprice, round(((cmp-strikeprice)*-100/cmp),1) prc_diff, bid, round(bid*100/strikeprice,1) prem_per from chain a " + \
"where bid>=0.05 and (prc_diff <=-5 and prc_diff >= -12) or (prc_diff <= -14 and prc_diff >= -20) " + \
"order by equity, prc_diff;"
print ('select_sql4=',select_sql4)
cursor.execute(select_sql4)
rows = cursor.fetchall()
idx=2
wks_out.cell(row=1, column=9).value = " ".join(["EQUITY" , " | " , "STRIKEPRICE" , "|", "PCT_DIFF", "|" , "BID", "|", "PREM_PCT"])
prev_eq = None
for row in rows:
    new_eq = row[0]
    # write a separator into the text report whenever the equity changes
    if prev_eq is not None and prev_eq != new_eq:
        print ("---", file=f_out)
    if (debug == 'true'):
        print(row[0], "|" ,row[1], "|", row[2], "|", row[3], "|", row[4], file=f_out)
    wks_out.cell(row=idx, column=9).value = " ".join([str(row[0]), "|", str(row[1]), "|", str(row[2]), "|", str(row[3]), "|", str(row[4])])
    idx += 1
    if (debug == 'true'):
        print('idx=', idx)
    prev_eq = new_eq
wbk_out.save(wbkName_out)
wbk_out.close()
avg_line_length: 37.993333 | max_line_length: 277 | alphanum_fraction: 0.5887
count_classes: 0 | score_classes: 0
count_generators: 0 | score_generators: 0
count_decorators: 0 | score_decorators: 0
count_async_functions: 0 | score_async_functions: 0
count_documentation: 4750 | score_documentation: 0.41674