| max_stars_repo_path (string, lengths 3-269) | max_stars_repo_name (string, lengths 4-119) | max_stars_count (int64, 0-191k) | id (string, lengths 1-7) | content (string, lengths 6-1.05M) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
ngcccbase/tests/test_verifier.py
|
killerstorm/ngcccbase
| 31
|
12780151
|
<filename>ngcccbase/tests/test_verifier.py
#!/usr/bin/env python
import unittest
from ngcccbase.verifier import Verifier, hash_decode
from ngcccbase.services.electrum import (
ElectrumInterface, EnhancedBlockchainState)
class FakeBlockchainState(object):
def get_height(self):
return 100
def get_merkle(self, tx_hash):
l = ["3a459eab5f0cf8394a21e04d2ed3b2beeaa59795912e20b9c680e9db74dfb18c",
"f6ae335dc2d2aecb6a255ebd03caaf6820e6c0534531051066810080e0d822c8",
"15eca0aa3e2cc2b9b4fbe0629f1dda87f329500fcdcd6ef546d163211266b3b3"]
return {'merkle': l, 'block_height': 99, 'pos': 1}
def get_header(self, tx_hash):
r = "9cdf7722eb64015731ba9794e32bdefd9cf69b42456d31f5e59aedb68c57ed52"
return {'merkle_root': r, 'timestamp': 123}
class TestVerifier(unittest.TestCase):
def setUp(self):
fake_blockchain_state = FakeBlockchainState()
self.verifier = Verifier(fake_blockchain_state)
def test_get_confirmations(self):
self.verifier.verified_tx['test'] = (95, 111, 1)
self.assertEqual(self.verifier.get_confirmations('test'), 6)
self.verifier.verified_tx['test'] = (101, 111, 1)
self.assertEqual(self.verifier.get_confirmations('test'), 0)
self.assertEqual(self.verifier.get_confirmations(''), None)
del self.verifier.verified_tx['test']
def test_get_merkle_root(self):
# r = root, s = start, l = merkle hash list
r = "56dee62283a06e85e182e2d0b421aceb0eadec3d5f86cdadf9688fc095b72510"
self.assertEqual(self.verifier.get_merkle_root([], r, 0), r)
# example from pycoin/merkle.py
r = "30325a06daadcefb0a3d1fe0b6112bb6dfef794316751afc63f567aef94bd5c8"
s = "67ffe41e53534805fb6883b4708fd3744358f99e99bc52111e7a17248effebee"
l = ["c8b336acfc22d66edf6634ce095b888fe6d16810d9c85aff4d6641982c2499d1"]
self.assertEqual(self.verifier.get_merkle_root(l, s, 0), r)
# example from here: https://bitcointalk.org/index.php?topic=44707.0
r = "9cdf7722eb64015731ba9794e32bdefd9cf69b42456d31f5e59aedb68c57ed52"
s = "be38f46f0eccba72416aed715851fd07b881ffb7928b7622847314588e06a6b7"
l = ["3a459eab5f0cf8394a21e04d2ed3b2beeaa59795912e20b9c680e9db74dfb18c",
"f6ae335dc2d2aecb6a255ebd03caaf6820e6c0534531051066810080e0d822c8",
"<KEY>"]
self.assertEqual(self.verifier.get_merkle_root(l, s, 1), r)
s = "59d1e83e5268bbb491234ff23cbbf2a7c0aa87df553484afee9e82385fc7052f"
l = ["d173f2a12b6ff63a77d9fe7bbb590bdb02b826d07739f90ebb016dc9297332be",
"13a3595f2610c8e4d727130daade66c772fdec4bd2463d773fd0f85c20ced32d",
"<KEY>"]
self.assertEqual(self.verifier.get_merkle_root(l, s, 3), r)
def test_verify_merkle(self):
h = "be38f46f0eccba72416aed715851fd07b881ffb7928b7622847314588e06a6b7"
self.verifier.verify_merkle(h)
self.assertEqual(self.verifier.get_confirmations(h), 2)
def test_random_merkle(self):
server_url = "electrum.pdmc.net"
ei = ElectrumInterface(server_url, 50001)
bcs = EnhancedBlockchainState(server_url, 50001)
self.verifier.blockchain_state = bcs
h = '265db1bc122c4dae20dd0b55d55c7b270fb1378054fe624457b73bc28b5edd55'
self.verifier.verify_merkle(h)
self.assertTrue(self.verifier.get_confirmations(h) > 3)
if __name__ == '__main__':
unittest.main()
| 2.484375
| 2
|
scripts/plot.py
|
Rabscuttler/heat
| 3
|
12780152
|
# general plotting functions
import matplotlib.pyplot as plt
# plot the given hourly profile
def hourly_profile(profile):
hourly_profile_building('SFH',profile)
hourly_profile_building('MFH',profile)
hourly_profile_building('COM',profile)
def hourly_profile_building(building,profile):
    for (name, data) in profile[building].items():
data.plot(label=name, use_index=False)
plt.title('Hourly Profiles for ' + building)
plt.xlabel('Hour of the day')
plt.ylabel('Normalised Demand')
plt.legend(loc='upper right')
plt.show()
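# --- Hypothetical usage sketch (added for illustration; not part of the original script).
# The profile layout below -- a dict keyed by building type, each holding named pandas
# Series of 24 hourly values -- is an assumption inferred from the plotting code above.
if __name__ == "__main__":
    import pandas as pd
    profile = {
        building: {"space_heat": pd.Series([1.0 / 24] * 24),
                   "hot_water": pd.Series([1.0 / 24] * 24)}
        for building in ("SFH", "MFH", "COM")
    }
    hourly_profile(profile)  # draws one figure per building type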
| 3.578125
| 4
|
python/435.non-overlapping-intervals.py
|
Zhenye-Na/leetcode
| 10
|
12780153
|
#
# @lc app=leetcode id=435 lang=python3
#
# [435] Non-overlapping Intervals
#
from typing import List
class Solution:
def eraseOverlapIntervals(self, intervals: List[List[int]]) -> int:
if not intervals:
return 0
intervals = sorted(intervals, key=lambda x: x[1])
self.index = 0
self.result = 1
for i in range(1, len(intervals)):
if intervals[i][0] >= intervals[self.index][1]:
self.result += 1
self.index = i
return len(intervals) - self.result
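# --- Hypothetical usage sketch (added for illustration; not part of the original file).
# Sorting by earliest end time keeps [1,2], [2,3] and [3,4]; only [1,3] has to be removed.
if __name__ == "__main__":
    print(Solution().eraseOverlapIntervals([[1, 2], [2, 3], [3, 4], [1, 3]]))  # -> 1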
| 3.375
| 3
|
backend/flask-api/migrations/versions/d77d5b7c3921_.py
|
lucasbibianot/inova-cnj-time16
| 0
|
12780154
|
<filename>backend/flask-api/migrations/versions/d77d5b7c3921_.py
"""Retirando complemento 5008
Revision ID: d77d5b7c3921
Revises: 495061e1bbe4
Create Date: 2020-10-18 14:28:57.319452
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '495061e1bbe4'
branch_labels = None
depends_on = None
def upgrade():
op.execute("delete from tb_desc_complemento where cd_tpu_complemento in ('5008', '')")
def downgrade():
pass
| 1.460938
| 1
|
pythonAlgorithm/highlevel/Big Integer Addition.py
|
Sky-zzt/lintcodePractice
| 1
|
12780155
|
class Solution:
"""
    @param num1: a non-negative integer given as a string
    @param num2: a non-negative integer given as a string
    @return: the sum of num1 and num2, as a string
    Given two non-negative integers num1 and num2 represented as strings, return the sum of num1 and num2.
    Example
    Example 1:
    Input : num1 = "123", num2 = "45"
    Output : "168"
    Notice
    The lengths of num1 and num2 are both less than 5100.
    num1 and num2 contain only the digits 0-9.
    num1 and num2 do not contain any leading zeros.
    You may not use any built-in BigInteger library or convert the inputs directly to integers.
https://blog.csdn.net/csdnsevenn/article/details/84753109?utm_medium=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-3.add_param_isCf&depth_1-utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-3.add_param_isCf
"""
def addStrings(self, nums1, nums2):
# write your code here
max_len = max(len(nums1), len(nums2))
nums1 = list(nums1)[:: -1]
nums2 = list(nums2)[:: -1]
nums3 = [0 for i in range(max_len)]
# Add
id_ = 0
while id_ < max_len:
num1 = int(nums1[id_]) if id_ < len(nums1) else 0
num2 = int(nums2[id_]) if id_ < len(nums2) else 0
nums3[id_] = num1 + num2
id_ += 1
# Carry
for i in range(len(nums3) - 1):
if nums3[i] > 9:
nums3[i + 1] += nums3[i] // 10
nums3[i] %= 10
# Deal with the last digit
if nums3[-1] > 9:
nums3 += [nums3[-1] // 10]
nums3[-2] %= 10
return ''.join(str(x) for x in nums3[:: -1])
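# --- Quick sanity check (added for illustration; it mirrors the example in the docstring above).
if __name__ == "__main__":
    print(Solution().addStrings("123", "45"))  # -> 168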
| 3.921875
| 4
|
src/utils/dictutils.py
|
sourav1122/foremast-brain
| 23
|
12780156
|
def retrieveKVList(dicts):
    keys = []
    values = []
    for key, value in dicts.items():
        keys.append(key)
        values.append(value)
    return keys, values
def convertDictKey(mydict, replacefrom, replaceto):
    # Iterate over a snapshot of the keys: popping entries from the dict while
    # iterating over it directly can raise "dictionary changed size during iteration".
    for key in list(mydict):
        mydict[key.replace(replacefrom, replaceto)] = mydict.pop(key)
    return mydict
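# --- Hypothetical usage sketch (added for illustration; not part of the original module).
if __name__ == "__main__":
    keys, values = retrieveKVList({"cpu.usage": 0.5, "mem.usage": 0.7})
    print(keys, values)  # ['cpu.usage', 'mem.usage'] [0.5, 0.7]
    print(convertDictKey({"cpu.usage": 0.5}, ".", "_"))  # {'cpu_usage': 0.5}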
| 3.359375
| 3
|
migrations/versions/eeafa5624ec7_artpiece_slug.py
|
cclrobotics/ARTBot
| 5
|
12780157
|
<gh_stars>1-10
"""artpiece_slug
Revision ID: eeafa5624ec7
Revises: a<PASSWORD>
Create Date: 2020-02-09 05:42:17.894691
"""
# revision identifiers, used by Alembic.
revision = 'eeafa5624ec7'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
from slugify import slugify
from alembic import op
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from migrations.utils.session import session_scope
Base = declarative_base()
class ArtpieceModel(Base):
__tablename__ = 'artpieces'
id = sa.Column(sa.Integer, primary_key=True)
slug = sa.Column(sa.String(60), nullable=True, unique=True, index=True)
title = sa.Column(sa.String(50), nullable=False)
submit_date = sa.Column(sa.DateTime(), nullable=False)
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('artpieces', sa.Column('slug', sa.String(length=60), nullable=True))
op.create_index(op.f('ix_artpieces_slug'), 'artpieces', ['slug'], unique=True)
# ### end Alembic commands ###
with session_scope() as session:
artpieces = session.query(ArtpieceModel).order_by(ArtpieceModel.submit_date.asc()).all()
# create unique slugs
slug_counts = dict()
for artpiece in artpieces:
slug = slugify(artpiece.title)
count = (slug_counts.get(slug) or 0) + 1
artpiece.slug = f'{slug}#{count}'
slug_counts[slug] = count
op.alter_column('artpieces', 'slug', nullable=False)
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_artpieces_slug'), table_name='artpieces')
op.drop_column('artpieces', 'slug')
# ### end Alembic commands ###
| 1.703125
| 2
|
src/voter.py
|
keyan/e2e_voting
| 1
|
12780158
|
import random
from typing import Optional
from src.tablet import Tablet
from src.sbb import SBBContents
from src.sv_vote import SVVote
DEFAULT_NUM_CANDIDATES = 2
class Voter:
def __init__(self, voter_id: int, M: int, vote: Optional[int] = None):
self.voter_id: int = voter_id
self.ballot_hash: str = ''
self.bid: Optional[int] = None
self.M: int = M
self.vote: Optional[int] = vote
def do_vote(self, tablet: Tablet):
if self.vote is None or self.vote >= self.M:
self.vote = random.choice(range(DEFAULT_NUM_CANDIDATES))
print(f'Voter ID: {self.voter_id}, vote is: {self.vote}')
self.bid, self.ballot_hash = tablet.send_vote(self.vote)
def verify(self, sbb_contents: SBBContents) -> bool:
if self.bid is None:
raise Exception('Voter does not have a valid bid, cannot verify vote')
return sbb_contents.get_bid_receipt(self.bid) == self.ballot_hash
| 2.96875
| 3
|
test_herencia.py
|
Ivan395/Python
| 0
|
12780159
|
#! /usr/bin/python3
# -*- coding: utf-8 -*-
from cuadrado import Cuadrado
def run():
cuad = Cuadrado(1,2,3)
print(cuad.show())
if __name__ == '__main__':
run()
| 2.53125
| 3
|
problem/01000~09999/02004/2004.py3.py
|
njw1204/BOJ-AC
| 1
|
12780160
|
<reponame>njw1204/BOJ-AC
MIN = lambda a, b: a if a < b else b
# Legendre's formula: the exponent of the prime x in n!
def FactoPowerCount(n, x):
    count = 0
    while n // x > 0:
        count += n // x
        n //= x
    return count
# Trailing zeros of C(n, m) = n! / (m! * (n-m)!): the smaller of the
# exponents of 2 and 5 in the binomial coefficient.
n, m = map(int, input().split())
print(MIN(FactoPowerCount(n, 2) - FactoPowerCount(m, 2) - FactoPowerCount(n - m, 2),
          FactoPowerCount(n, 5) - FactoPowerCount(m, 5) - FactoPowerCount(n - m, 5)))
| 3.015625
| 3
|
lbm/src/core/obstacle.py
|
jviquerat/lbm
| 32
|
12780161
|
### ************************************************
### Class defining an obstacle in the lattice
class obstacle:
def __init__(self, name, n_pts, n_spts, type, size, pos):
self.name = name
self.n_pts = n_pts
self.n_spts = n_spts
self.type = type
self.size = size
self.pos = pos
def set_polygon(self, polygon):
self.polygon = polygon
def set_tag(self, tag):
self.tag = tag
def fill(self, area, boundary, ibb):
self.area = area
self.boundary = boundary
self.ibb = ibb
| 3.15625
| 3
|
events/views.py
|
wwangwe/Team-213-A-Back-End
| 0
|
12780162
|
from __future__ import unicode_literals
from django.shortcuts import render
from datetime import date, timedelta
# django:
from django.views.generic import ListView, DetailView
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils.dates import MONTHS_ALT
# thirdparties:
import six
# utils
from .models import Event
from events.utils.displays import month_display, day_display
from events.utils.mixins import JSONResponseMixin
from events.utils import common as c
CALENDAR_LOCALE = getattr(settings, 'CALENDAR_LOCALE', 'en_US.utf8')
class GenericEventView(JSONResponseMixin, ListView):
model = Event
def render_to_response(self, context, **kwargs):
if self.request.is_ajax():
return self.render_to_json_response(context, **kwargs)
return super(GenericEventView, self).render_to_response(
context, **kwargs
)
def get_context_data(self, **kwargs):
context = super(GenericEventView, self).get_context_data(**kwargs)
self.net, self.category, self.tag = c.get_net_category_tag(
self.request
)
if self.category is not None:
context['cal_category'] = self.category
if self.tag is not None:
context['cal_tag'] = self.tag
return context
class EventMonthView(GenericEventView):
template_name = 'event_month_list.html'
def get_year_and_month(self, net, qs, **kwargs):
"""
Get the year and month. First tries from kwargs, then from
querystrings. If none, or if cal_ignore qs is specified,
sets year and month to this year and this month.
"""
now = c.get_now()
year = now.year
month = now.month + net
month_orig = None
if 'cal_ignore=true' not in qs:
            if 'year' in self.kwargs and 'month' in self.kwargs:  # try kwargs
year, month_orig = map(
int, (self.kwargs['year'], self.kwargs['month'])
)
month = month_orig + net
else:
try: # try querystring
year = int(self.request.GET['cal_year'])
month_orig = int(self.request.GET['cal_month'])
month = month_orig + net
except Exception:
pass
        # return the year and month, and any errors that may have occurred due
        # to an invalid month/year being given.
return c.clean_year_month(year, month, month_orig)
def get_month_events(self, *args, **kwargs):
return Event.objects.all_month_events(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(EventMonthView, self).get_context_data(**kwargs)
qs = self.request.META['QUERY_STRING']
year, month, error = self.get_year_and_month(self.net, qs)
mini = True if 'cal_mini=true' in qs else False
# get any querystrings that are not next/prev/year/month
if qs:
qs = c.get_qs(qs)
# add a dict containing the year, month, and month name to the context
current = dict(
year=year, month_num=month, month=MONTHS_ALT[month][:3]
)
context['current'] = current
display_month = MONTHS_ALT[month]
if isinstance(display_month, six.binary_type):
display_month = display_month.decode('utf-8')
context['month_and_year'] = u"%(month)s, %(year)d" % (
{'month': display_month, 'year': year}
)
if error: # send any year/month errors
context['cal_error'] = error
# List enables sorting. As far as I can tell, .order_by() can't be used
# here because we need it ordered by l_start_date.hour (simply ordering
# by start_date won't work). The only alternative I've found is to use
# extra(), but this would likely require different statements for
# different databases...
all_month_events = list(self.get_month_events(
year, month, self.category, loc=True
))
all_month_events.sort(key=lambda x: x.l_start_date.hour)
start_day = getattr(settings, "CALENDAR_START_DAY", 0)
context['calendar'] = month_display(
year, month, all_month_events, start_day, self.net, qs, mini,
request=self.request,
)
context['show_events'] = False
if getattr(settings, "CALENDAR_SHOW_LIST", False):
context['show_events'] = True
context['events'] = c.order_events(all_month_events, d=True) \
if self.request.is_ajax() else c.order_events(all_month_events)
return context
class EventDayView(GenericEventView):
template_name = 'event_day_list.html'
def get_month_events(self, *args, **kwargs):
return Event.objects.all_month_events(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(EventDayView, self).get_context_data(**kwargs)
kw = self.kwargs
y, m, d = map(int, (kw['year'], kw['month'], kw['day']))
year, month, day, error = c.clean_year_month_day(y, m, d, self.net)
if error:
context['cal_error'] = error
# Note that we don't prefetch 'cancellations' because they will be
# prefetched later (in day_display in displays.py)
all_month_events = self.get_month_events(
year, month, self.category, self.tag
)
self.events = day_display(
year, month, all_month_events, day
)
context['events'] = self.events
display_month = MONTHS_ALT[month]
if isinstance(display_month, six.binary_type):
display_month = display_month.decode('utf-8')
context['month'] = display_month
context['month_num'] = month
context['year'] = year
context['day'] = day
context['month_day_year'] = u"%(month)s %(day)d, %(year)d" % (
{'month': display_month, 'day': day, 'year': year}
)
# for use in the template to build next & prev querystrings
context['next'], context['prev'] = c.get_next_and_prev(self.net)
return context
class EventDetailView(DetailView):
model = Event
context_object_name = 'event'
def get_object(self):
return get_object_or_404(
Event.objects.prefetch_related(
'location', 'categories'
),
pk=self.kwargs['pk']
)
| 1.90625
| 2
|
tests/test_evaluate.py
|
Tommo565/titanic-mlflow
| 1
|
12780163
|
<reponame>Tommo565/titanic-mlflow
import os
import mlflow
import sklearn
from src.utils import (
load_config,
load_logger,
load_parameters,
)
from src.ingest_split import ingest_split
from src.preprocessing_pipeline import create_preprocessing_pipeline
from src.models import create_logreg_model
from src.model_pipeline import evaluate_model
def test_evaluate():
config = load_config(".env.test")
logger = load_logger(
app_name=config["app_name"],
logs_path=config["logs_path"]
)
# Configure MLFlow
mlflow.set_tracking_uri(config["mlflow_tracking_uri"])
mlflow.set_experiment(config["mlflow_experiment"])
# Start MLFlow Tracking
with mlflow.start_run():
parameters = load_parameters(parameters_path=config["parameters_path"])
# Ingest the data
X_train, X_test, y_train, y_test, X_holdout = ingest_split(
train_test_raw_path=config["train_test_raw_path"],
holdout_raw_path=config["holdout_raw_path"],
target=parameters["target"],
ingest_split_parameters=parameters["ingest_split_parameters"]
)
# Create the preprocessing pipeline
preprocessing_pipeline = create_preprocessing_pipeline(
pipeline_parameters=parameters["pipeline_parameters"]
)
# Create a model with hyperparameters
model, model_name, cv = create_logreg_model(
logreg_hyperparameters=parameters["logreg_hyperparameters"]
)
# Run the function
model = evaluate_model(
preprocessing_pipeline=preprocessing_pipeline,
model=model,
X_train=X_train,
y_train=y_train,
X_test=X_test,
y_test=y_test,
artifact_path=config["artifact_path"],
cv=cv
)
assert isinstance(
model, sklearn.linear_model._logistic.LogisticRegression
)
| 2.53125
| 3
|
src/kusto/azext_kusto/vendored_sdks/kusto/aio/_kusto_management_client_async.py
|
tilnl/azure-cli-extensions
| 0
|
12780164
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
from ._configuration_async import KustoManagementClientConfiguration
from .operations_async import ClusterOperations
from .operations_async import ClusterPrincipalAssignmentOperations
from .operations_async import DatabaseOperations
from .operations_async import DatabasePrincipalAssignmentOperations
from .operations_async import AttachedDatabaseConfigurationOperations
from .operations_async import DataConnectionOperations
from .operations_async import OperationOperations
from .. import models
class KustoManagementClient(object):
"""The Azure Kusto management API provides a RESTful set of web services that interact with Azure Kusto services to manage your clusters and databases. The API enables you to create, update, and delete clusters and databases.
:ivar cluster: ClusterOperations operations
:vartype cluster: azure.mgmt.kusto.aio.operations_async.ClusterOperations
:ivar cluster_principal_assignment: ClusterPrincipalAssignmentOperations operations
:vartype cluster_principal_assignment: azure.mgmt.kusto.aio.operations_async.ClusterPrincipalAssignmentOperations
:ivar database: DatabaseOperations operations
:vartype database: azure.mgmt.kusto.aio.operations_async.DatabaseOperations
:ivar database_principal_assignment: DatabasePrincipalAssignmentOperations operations
:vartype database_principal_assignment: azure.mgmt.kusto.aio.operations_async.DatabasePrincipalAssignmentOperations
:ivar attached_database_configuration: AttachedDatabaseConfigurationOperations operations
:vartype attached_database_configuration: azure.mgmt.kusto.aio.operations_async.AttachedDatabaseConfigurationOperations
:ivar data_connection: DataConnectionOperations operations
:vartype data_connection: azure.mgmt.kusto.aio.operations_async.DataConnectionOperations
:ivar operation: OperationOperations operations
:vartype operation: azure.mgmt.kusto.aio.operations_async.OperationOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Gets subscription credentials which uniquely identify Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = KustoManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.cluster = ClusterOperations(
self._client, self._config, self._serialize, self._deserialize)
self.cluster_principal_assignment = ClusterPrincipalAssignmentOperations(
self._client, self._config, self._serialize, self._deserialize)
self.database = DatabaseOperations(
self._client, self._config, self._serialize, self._deserialize)
self.database_principal_assignment = DatabasePrincipalAssignmentOperations(
self._client, self._config, self._serialize, self._deserialize)
self.attached_database_configuration = AttachedDatabaseConfigurationOperations(
self._client, self._config, self._serialize, self._deserialize)
self.data_connection = DataConnectionOperations(
self._client, self._config, self._serialize, self._deserialize)
self.operation = OperationOperations(
self._client, self._config, self._serialize, self._deserialize)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "KustoManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
| 1.757813
| 2
|
Python/test/currencies.py
|
yrtf/QuantLib-SWIG
| 231
|
12780165
|
"""
Copyright (C) 2021 <NAME>
This file is part of QuantLib, a free-software/open-source library
for financial quantitative analysts and developers - http://quantlib.org/
QuantLib is free software: you can redistribute it and/or modify it
under the terms of the QuantLib license. You should have received a
copy of the license along with this program; if not, please email
<<EMAIL>>. The license is also available online at
<http://quantlib.org/license.shtml>.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
"""
import unittest
import QuantLib as ql
class CurrencyTest(unittest.TestCase):
def test_default_currency_constructor(self):
"""Testing default currency constructor"""
fail_msg = "Failed to create default currency."
default_ccy = ql.Currency()
self.assertTrue(default_ccy.empty(), fail_msg)
def test_eur_constructor(self):
"""Testing EUR constructor"""
fail_msg = "Failed to create EUR currency."
eur = ql.EURCurrency()
self.assertFalse(eur.empty(), fail_msg)
def test_bespoke_currency_constructor(self):
"""Testing bespoke currency constructor"""
fail_msg = "Failed to create bespoke currency."
custom_ccy = ql.Currency(
"CCY", "CCY", 100, "#", "", 100, ql.Rounding(), "")
self.assertFalse(custom_ccy.empty(), fail_msg)
if __name__ == '__main__':
print('testing QuantLib ' + ql.__version__)
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(CurrencyTest, 'test'))
unittest.TextTestRunner(verbosity=2).run(suite)
| 3.09375
| 3
|
example_racktests/6_create_host_with_minimum_bigger_disk_to_verify_rackattack_virtual.py
|
eyal-stratoscale/pyracktest
| 0
|
12780166
|
from strato.racktest.infra.suite import *
class Test:
HOSTS = dict(it=dict(rootfs="rootfs-basic", minimumDisk1SizeGB=20))
def run(self):
partitions = host.it.ssh.run.script("cat /proc/partitions")
vdaSizeKB = int(partitions.split('\n')[2][13:13 + 12].strip())
TS_ASSERT_LESS_THAN(18, vdaSizeKB / 1024 / 1024)
| 1.921875
| 2
|
Python/detect_loop_in_linkedList.py
|
Shrenik811rp/Hacktoberfest
| 1
|
12780167
|
<gh_stars>1-10
# Python demonstration of detecting a loop/cycle in a linked list using three different approaches:
# 1. Two-pointer approach
# 2. Hashing approach
# 3. Marking visited nodes and checking whether a cycle exists.
# Two-pointer approach
def detectLoop_TwoPointer(head):
    slow = head  # slow pointer starts at the head of the linked list
    fast = head.next  # fast pointer starts at the node after the head
    flag = 0  # flag stays 0 until a cycle is detected
    while slow and fast and fast.next:  # stop once slow, fast or the node after fast is None
        slow = slow.next  # the slow pointer advances one node
        fast = fast.next.next  # the fast pointer advances two nodes
        if slow == fast:
            flag = 1  # slow and fast meet at the same node, so there is a cycle
            break  # leave the loop
    if flag:
        return True
    return False
# Hashing approach
def detectLoop_Hashing(head):
    hashmap = set()  # set of nodes visited so far
    temp = head  # start walking from the head node
    while temp:  # keep going while temp is not None
        if temp in hashmap:  # the node is already in the set, so we are visiting it a second time: there is a cycle
            return True
        hashmap.add(temp)  # first time we see this node, so record it
        temp = temp.next  # move on to the next node
    return False
# Marking visited nodes by re-pointing each visited node's next to a sentinel
# (note: this approach rewires the list while walking it).
def detectLoop(head):
    temp = ""  # sentinel object used to mark visited nodes
    while head != None:  # keep walking until we fall off the end of the list
        # If the next pointer is None we reached the end, so there is no loop
        if head.next == None:
            return False
        if head.next == temp:  # the next node is already marked with the sentinel, so we have been here before
            return True
        next_pointer = head.next  # remember the next node
        head.next = temp  # mark the current node as visited by pointing it at the sentinel
        head = next_pointer  # move to the next node in the linked list
    return False
# Driver code
# Creating Node class
class Node:
# Constructor
def __init__(self, data):
self.data = data
self.next = None
# Creating Linked List class
class LinkedList:
# Constructor
def __init__(self):
self.head = None
    # insert method creates a new node with the given value and inserts it at the head of the linked list
def insert(self, value):
new_node = Node(value)
new_node.next = self.head
self.head = new_node
# Helper that reports whether a cycle was found, labelled with the approach used
def Found(found, approach):
    if found:
        print("Through " + approach + " approach: Cycle exists in linked list")
    else:
        print("Through " + approach + " approach: No cycle exists in linked list")
# Creating a linked list which doesn't contain cycle
linked_list = LinkedList()
linked_list.insert(20)
linked_list.insert(4)
linked_list.insert(15)
linked_list.insert(10)
## Checking without creating the cycle
print("**-------------------Without Cycle in linked list-------------------***")
Found(detectLoop_TwoPointer(linked_list.head), "Two-Pointer")  # check for a cycle with the two-pointer approach
Found(detectLoop_Hashing(linked_list.head), "Hashing")  # check for a cycle with the hashing approach
Found(detectLoop(linked_list.head), "marking visited nodes")  # check for a cycle by marking visited nodes
# Output:
"""
"***-------------------Without Cycle in linked list-------------------***"
Through Two-Pointer approch: No cycle exists in lisked list
Through Hashing approch: No cycle exists in lisked list
Through marking visited nodes approch: No cycle exists in lisked list
"""
# Creating a linked list which contains cycle
linked_list = LinkedList()
linked_list.insert(1)
linked_list.insert(2)
linked_list.insert(3)
linked_list.insert(4)
linked_list.insert(5)
# Creating a cycle in linked list
linked_list.head.next.next.next.next.next = (
    linked_list.head.next.next
)  # the tail node containing the value '1' now points back to the node containing the value '3'
# Checking after creating a cycle
print("\n***---------------------With Cycle in linked list---------------------***")
Found(detectLoop_TwoPointer(linked_list.head), "Two-Pointer")  # check for a cycle with the two-pointer approach
Found(detectLoop_Hashing(linked_list.head), "Hashing")  # check for a cycle with the hashing approach
Found(detectLoop(linked_list.head), "marking visited nodes")  # check for a cycle by marking visited nodes
# Output:
"""
***---------------------With Cycle in linked list---------------------***
Through Two-Pointer approach: Cycle exists in linked list
Through Hashing approach: Cycle exists in linked list
Through marking visited nodes approach: Cycle exists in linked list
"""
| 3.875
| 4
|
src/apps/devices/discoball.py
|
ajintom/music_sync
| 0
|
12780168
|
<reponame>ajintom/music_sync
import device
from phosphene.signal import *
from phosphene.signalutil import *
from phosphene.graphs import *
class DiscoBall(device.Device):
def __init__(self, port):
device.Device.__init__(self, "DiscoBall", port)
def setupSignal(self, signal):
def beats(s):
return numpymap(lambda (a, b): 1 if a > b * 1.414 else 0, zip(s.avg6, s.longavg6))
signal.beats = lift(beats)
signal.discoball = blend(beats,0.7)
def graphOutput(self, signal):
return boopGraph(signal.discoball[:4])
def redraw(self, signal):
data = self.truncate(signal.discoball[:4] * 255)
print data
self.port.write(self.toByteStream(data))
| 2.8125
| 3
|
Python/check-if-n-and-its-double-exist.py
|
RideGreg/LeetCode
| 1
|
12780169
|
<reponame>RideGreg/LeetCode
# Time: O(n)
# Space: O(n)
# 1346 weekly contest 175 2/8/2020
# Given an array arr of integers, check if there exists two integers N and M such that N is the double of M ( i.e. N = 2 * M).
#
# More formally check if there exists two indices i and j such that :
#
# i != j
# 0 <= i, j < arr.length
# arr[i] == 2 * arr[j]
class Solution(object):
def checkIfExist(self, arr):
"""
:type arr: List[int]
:rtype: bool
"""
lookup = set()
for x in arr:
if 2*x in lookup or \
(x%2 == 0 and x//2 in lookup):
return True
lookup.add(x)
return False
print(Solution().checkIfExist([10,2,5,3])) # True
print(Solution().checkIfExist([3,1,7,11])) # False
| 3.796875
| 4
|
examples/tf/trpo_cartpole_recurrent.py
|
icml2020submission6857/metarl
| 2
|
12780170
|
#!/usr/bin/env python3
"""This is an example to train a task with TRPO algorithm.
It uses an LSTM-based recurrent policy.
Here it runs CartPole-v1 environment with 100 iterations.
Results:
AverageReturn: 100
RiseTime: itr 13
"""
from metarl.experiment import run_experiment
from metarl.np.baselines import LinearFeatureBaseline
from metarl.tf.algos import TRPO
from metarl.tf.envs import TfEnv
from metarl.tf.experiment import LocalTFRunner
from metarl.tf.optimizers import ConjugateGradientOptimizer
from metarl.tf.optimizers import FiniteDifferenceHvp
from metarl.tf.policies import CategoricalLSTMPolicy
def run_task(snapshot_config, *_):
"""Defines the main experiment routine.
Args:
snapshot_config (metarl.experiment.SnapshotConfig): Configuration
values for snapshotting.
*_ (object): Hyperparameters (unused).
"""
with LocalTFRunner(snapshot_config=snapshot_config) as runner:
env = TfEnv(env_name='CartPole-v1')
policy = CategoricalLSTMPolicy(name='policy', env_spec=env.spec)
baseline = LinearFeatureBaseline(env_spec=env.spec)
algo = TRPO(env_spec=env.spec,
policy=policy,
baseline=baseline,
max_path_length=100,
discount=0.99,
max_kl_step=0.01,
optimizer=ConjugateGradientOptimizer,
optimizer_args=dict(hvp_approach=FiniteDifferenceHvp(
base_eps=1e-5)))
runner.setup(algo, env)
runner.train(n_epochs=100, batch_size=4000)
run_experiment(
run_task,
snapshot_mode='last',
seed=1,
)
| 2.453125
| 2
|
examples/ae-benchmarks/model/xceptionnet.py
|
lijiansong/singa
| 0
|
12780171
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# the code is modified from
# https://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/xception.py
from singa import autograd
from singa import module
class Block(autograd.Layer):
def __init__(self,
in_filters,
out_filters,
reps,
strides=1,
padding=0,
start_with_relu=True,
grow_first=True):
super(Block, self).__init__()
if out_filters != in_filters or strides != 1:
self.skip = autograd.Conv2d(in_filters,
out_filters,
1,
stride=strides,
padding=padding,
bias=False)
self.skipbn = autograd.BatchNorm2d(out_filters)
else:
self.skip = None
self.layers = []
filters = in_filters
if grow_first:
self.layers.append(autograd.ReLU())
self.layers.append(
autograd.SeparableConv2d(in_filters,
out_filters,
3,
stride=1,
padding=1,
bias=False))
self.layers.append(autograd.BatchNorm2d(out_filters))
filters = out_filters
for i in range(reps - 1):
self.layers.append(autograd.ReLU())
self.layers.append(
autograd.SeparableConv2d(filters,
filters,
3,
stride=1,
padding=1,
bias=False))
self.layers.append(autograd.BatchNorm2d(filters))
if not grow_first:
self.layers.append(autograd.ReLU())
self.layers.append(
autograd.SeparableConv2d(in_filters,
out_filters,
3,
stride=1,
padding=1,
bias=False))
self.layers.append(autograd.BatchNorm2d(out_filters))
if not start_with_relu:
self.layers = self.layers[1:]
else:
self.layers[0] = autograd.ReLU()
if strides != 1:
self.layers.append(autograd.MaxPool2d(3, strides, padding + 1))
def __call__(self, x):
y = self.layers[0](x)
for layer in self.layers[1:]:
if isinstance(y, tuple):
y = y[0]
y = layer(y)
if self.skip is not None:
skip = self.skip(x)
skip = self.skipbn(skip)
else:
skip = x
y = autograd.add(y, skip)
return y
class Xception(module.Module):
"""
Xception optimized for the ImageNet dataset, as specified in
https://arxiv.org/pdf/1610.02357.pdf
"""
def __init__(self, num_classes=10, num_channels=3, in_size=299):
""" Constructor
Args:
num_classes: number of classes
"""
super(Xception, self).__init__()
self.num_classes = num_classes
self.input_size = in_size
self.dimension = 4
self.conv1 = autograd.Conv2d(num_channels, 32, 3, 2, 0, bias=False)
self.bn1 = autograd.BatchNorm2d(32)
self.conv2 = autograd.Conv2d(32, 64, 3, 1, 1, bias=False)
self.bn2 = autograd.BatchNorm2d(64)
# do relu here
self.block1 = Block(64,
128,
2,
2,
padding=0,
start_with_relu=False,
grow_first=True)
self.block2 = Block(128,
256,
2,
2,
padding=0,
start_with_relu=True,
grow_first=True)
self.block3 = Block(256,
728,
2,
2,
padding=0,
start_with_relu=True,
grow_first=True)
self.block4 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block5 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block6 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block7 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block8 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block9 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block10 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block11 = Block(728,
728,
3,
1,
start_with_relu=True,
grow_first=True)
self.block12 = Block(728,
1024,
2,
2,
start_with_relu=True,
grow_first=False)
self.conv3 = autograd.SeparableConv2d(1024, 1536, 3, 1, 1)
self.bn3 = autograd.BatchNorm2d(1536)
# do relu here
self.conv4 = autograd.SeparableConv2d(1536, 2048, 3, 1, 1)
self.bn4 = autograd.BatchNorm2d(2048)
self.globalpooling = autograd.MaxPool2d(10, 1)
if self.input_size == 299:
self.fc = autograd.Linear(2048, num_classes)
elif self.input_size == 416:
self.fc = autograd.Linear(32768, num_classes)
elif self.input_size == 720:
self.fc = autograd.Linear(401408, num_classes)
elif self.input_size == 1280:
self.fc = autograd.Linear(1968128, num_classes)
def features(self, input):
x = self.conv1(input)
x = self.bn1(x)
x = autograd.relu(x)
x = self.conv2(x)
x = self.bn2(x)
x = autograd.relu(x)
x = self.block1(x)
x = self.block2(x)
x = self.block3(x)
x = self.block4(x)
x = self.block5(x)
x = self.block6(x)
x = self.block7(x)
x = self.block8(x)
x = self.block9(x)
x = self.block10(x)
x = self.block11(x)
x = self.block12(x)
x = self.conv3(x)
x = self.bn3(x)
x = autograd.relu(x)
x = self.conv4(x)
x = self.bn4(x)
return x
def logits(self, features):
x = autograd.relu(features)
x = self.globalpooling(x)
x = autograd.flatten(x)
x = self.fc(x)
return x
def forward(self, input):
x = self.features(input)
x = self.logits(x)
return x
def loss(self, out, ty):
return autograd.softmax_cross_entropy(out, ty)
def optim(self, loss, dist_option, spars):
if dist_option == 'fp32':
self.optimizer.backward_and_update(loss)
elif dist_option == 'fp16':
self.optimizer.backward_and_update_half(loss)
elif dist_option == 'partialUpdate':
self.optimizer.backward_and_partial_update(loss)
elif dist_option == 'sparseTopK':
self.optimizer.backward_and_sparse_update(loss,
topK=True,
spars=spars)
elif dist_option == 'sparseThreshold':
self.optimizer.backward_and_sparse_update(loss,
topK=False,
spars=spars)
def set_optimizer(self, optimizer):
self.optimizer = optimizer
def create_model(pretrained=False, **kwargs):
"""Constructs a Xceptionnet model.
Args:
pretrained (bool): If True, returns a model pre-trained
"""
model = Xception(**kwargs)
return model
__all__ = ['Xception', 'create_model']
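# --- Hypothetical usage sketch (added for illustration; not part of the original file).
# Builds the network for 299x299 RGB inputs using the constructor arguments defined above.
if __name__ == "__main__":
    model = create_model(num_classes=10, num_channels=3, in_size=299)
    print(model.num_classes, model.input_size)  # 10 299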
| 2.078125
| 2
|
tests/continuous_tests/interval_tests/test_merges_with_interval.py
|
lycantropos/topo
| 0
|
12780172
|
<reponame>lycantropos/topo<filename>tests/continuous_tests/interval_tests/test_merges_with_interval.py
from tests.utils import implication
from topo.continuous import Interval
def test_reflexivity(interval: Interval) -> None:
assert interval.merges_with_interval(interval)
def test_intersecting_intervals(interval: Interval,
other_interval: Interval) -> None:
assert implication(interval.intersects_with_interval(other_interval),
interval.merges_with_interval(other_interval))
def test_overlapping_intervals(interval: Interval,
other_interval: Interval) -> None:
assert implication(interval.overlaps_interval(other_interval),
interval.merges_with_interval(other_interval))
def test_symmetry(interval: Interval, other_interval: Interval) -> None:
assert implication(interval.merges_with_interval(other_interval),
other_interval.merges_with_interval(interval))
| 2.171875
| 2
|
tests/masks/test_mask.py
|
j-h-m/Media-Journaling-Tool
| 0
|
12780173
|
import unittest
from tests.test_support import TestSupport
from mock import Mock
from maskgen.masks.donor_rules import VideoDonor, AudioDonor, AllStreamDonor, AllAudioStreamDonor, \
VideoDonorWithoutAudio, InterpolateDonor,AudioZipDonor
from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, \
get_end_time_from_segment, get_end_frame_from_segment
class TestDonorRules(TestSupport):
def test_video_donor(self):
graph = Mock()
def lkup_preds(x):
return {'b':['a'],'e':['d']}[x]
def lkup_edge(x,y):
return {'ab':{'op':'NoSelect'},'de':{'op':'SelectSomething','arguments': {'Start Time': 20, 'End Time':100}}}[x + y]
graph.predecessors = lkup_preds
graph.get_edge = lkup_edge
graph.dir = '.'
donor = VideoDonor(graph, 'e','f', 'x',(None,self.locateFile('tests/videos/sample1.mov')), (None,self.locateFile('tests/videos/sample1.mov')))
args = donor.arguments()
self.assertEqual(20, args['Start Time']['defaultvalue'])
self.assertEqual(100, args['End Time']['defaultvalue'])
segments = donor.create(arguments={'include audio':'yes','Start Time':30,'End Time':150})
for segment in segments:
if get_type_of_segment(segment) == 'audio':
self.assertEqual(115542,get_start_frame_from_segment(segment))
self.assertEqual(509061, get_end_frame_from_segment(segment))
else:
self.assertEqual(30, get_start_frame_from_segment(segment))
self.assertEqual(150, get_end_frame_from_segment(segment))
self.assertEqual(2620.0, get_start_time_from_segment(segment))
self.assertEqual(11543, int(get_end_time_from_segment(segment)))
donor = VideoDonor(graph, 'b','c','x', (None,self.locateFile('tests/videos/sample1.mov')), (None,self.locateFile('tests/videos/sample1.mov')))
args = donor.arguments()
self.assertEqual(1, args['Start Time']['defaultvalue'])
self.assertEqual(0, args['End Time']['defaultvalue'])
segments = donor.create(arguments={'include audio':'yes','Start Time':30,'End Time':150})
for segment in segments:
if get_type_of_segment(segment) == 'audio':
self.assertEqual(115542,get_start_frame_from_segment(segment))
self.assertEqual(509061, get_end_frame_from_segment(segment))
else:
self.assertEqual(30, get_start_frame_from_segment(segment))
self.assertEqual(150, get_end_frame_from_segment(segment))
self.assertEqual(2620.0, get_start_time_from_segment(segment))
self.assertEqual(11543, int(get_end_time_from_segment(segment)))
segments = donor.create(arguments={'include audio': 'no', 'Start Time': 30, 'End Time': 150})
self.assertEqual(0,len([segment for segment in segments if get_type_of_segment(segment) == 'audio']))
donor = VideoDonorWithoutAudio(graph, 'b','c', 'x', (None,self.locateFile('tests/videos/sample1.mov')),
(None,self.locateFile('tests/videos/sample1.mov')))
self.assertTrue('include audio' not in donor.arguments())
def test_audio_donor(self):
graph = Mock()
def lkup_preds(x):
return {'b': ['a'], 'e': ['d']}[x]
def lkup_edge(x, y):
return \
{'ab': {'op': 'NoSelect'}, 'ef': {'op': 'SelectSomething', 'arguments': {'Start Time': "00:00:00.000000"}}}[
x + y]
graph.predecessors = lkup_preds
graph.get_edge = lkup_edge
graph.dir = '.'
donor = AudioDonor(graph, 'e', 'f', 'x', (None, self.locateFile('tests/videos/sample1.mov')),
(None, self.locateFile('tests/videos/sample1.mov')))
args = donor.arguments()
self.assertEqual("00:00:00.000000", args['Start Time']['defaultvalue'])
self.assertEqual("00:00:00.000000", args['End Time']['defaultvalue'])
segments = donor.create(arguments={'Start Time': "00:00:01.11", 'End Time': "00:00:01.32"})
for segment in segments:
self.assertEqual(48951, get_start_frame_from_segment(segment))
self.assertEqual(58212, get_end_frame_from_segment(segment))
self.assertAlmostEqual(1109.97, get_start_time_from_segment(segment),places=1)
self.assertEqual(1320.0, int(get_end_time_from_segment(segment)))
donor = AllStreamDonor(graph, 'e', 'f', 'y', (None, self.locateFile('tests/videos/sample1.mov')),
(None, self.locateFile('tests/videos/sample1.mov')))
args = donor.arguments()
self.assertEqual(0,len(args))
segments = donor.create(arguments={})
types = set()
for segment in segments:
types.add(get_type_of_segment(segment))
if get_type_of_segment(segment) == 'audio':
self.assertEqual(1, get_start_frame_from_segment(segment))
self.assertEqual(2617262, get_end_frame_from_segment(segment))
self.assertAlmostEqual(0, get_start_time_from_segment(segment), places=1)
self.assertAlmostEqual(59348, int(get_end_time_from_segment(segment)))
else:
self.assertEqual(1, get_start_frame_from_segment(segment))
self.assertEqual(803, get_end_frame_from_segment(segment))
self.assertAlmostEqual(0, get_start_time_from_segment(segment), places=1)
self.assertAlmostEqual(59348, int(get_end_time_from_segment(segment)))
self.assertEqual(2,len(types))
donor = AllAudioStreamDonor(graph, 'e', 'f', 'y', (None, self.locateFile('tests/videos/sample1.mov')),
(None, self.locateFile('tests/videos/sample1.mov')))
self.assertEqual(0, len(donor.arguments()))
self.assertEqual(['audio'],donor.media_types())
def test_audio_zip_donor(self):
graph = Mock()
def lkup_preds(x):
return {'b': ['a'], 'e': ['d']}[x]
def lkup_edge(x, y):
return \
{'ab': {'op': 'NoSelect'}, 'ef': {'op': 'SelectSomething', 'arguments': {'Start Time': "00:00:00.000000"}}}[
x + y]
graph.predecessors = lkup_preds
graph.get_edge = lkup_edge
graph.dir = '.'
donor = AudioZipDonor(graph, 'e', 'f', 'x', (None, self.locateFile('tests/zips/test.wav.zip')),
(None, self.locateFile('tests/videos/sample1.mov')))
args = donor.arguments()
self.assertEqual("00:00:00.000000", args['Start Time']['defaultvalue'])
segments = donor.create(arguments={'Start Time': "00:00:09.11", 'End Time': "00:00:16.32", 'sample rate':44100})
for segment in segments:
self.assertEqual(401752, get_start_frame_from_segment(segment))
self.assertEqual(719713, get_end_frame_from_segment(segment))
self.assertAlmostEqual(9110, get_start_time_from_segment(segment),places=1)
self.assertEqual(16320.0, int(get_end_time_from_segment(segment)))
segments = donor.create(
arguments={'Start Time': "00:00:00.00", 'End Time': "00:00:00.00", 'sample rate': 44100})
for segment in segments:
self.assertEqual(1, get_start_frame_from_segment(segment))
self.assertEqual(1572865, get_end_frame_from_segment(segment))
self.assertAlmostEqual(0.0, get_start_time_from_segment(segment),places=1)
self.assertEqual(35665, int(get_end_time_from_segment(segment)))
def test_image_donor(self):
import numpy as np
from maskgen.image_wrap import ImageWrapper
graph = Mock()
def lkup_preds(x):
return {'b': ['a'], 'e': ['d']}[x]
def lkup_edge(x, y):
return \
{'ab': {'op': 'NoSelect'}, 'de': {'op': 'SelectRegion'}}[
x + y]
withoutalpha = ImageWrapper(np.zeros((400, 400, 3), dtype=np.uint8))
withAlpha = ImageWrapper(np.zeros((400, 400, 4), dtype=np.uint8))
mask = ImageWrapper(np.ones((400, 400),dtype = np.uint8)*255)
mask.image_array[0:30, 0:30] = 0
withAlpha.image_array[0:30, 0:30, 3] = 255
graph.predecessors = lkup_preds
graph.get_edge = lkup_edge
graph.dir = '.'
graph.get_edge_image = Mock(return_value=mask)
donor = InterpolateDonor(graph, 'e', 'f', 'x', (withoutalpha, self.locateFile('tests/videos/sample1.mov')),
(withAlpha, self.locateFile('tests/videos/sample1.mov')))
mask = donor.create(arguments={})
self.assertTrue(np.all(mask.image_array[0:30,0:30] == 255))
self.assertEquals(900,np.sum((mask.image_array/255)))
donor = InterpolateDonor(graph, 'b', 'c', 'x', (withoutalpha, self.locateFile('tests/videos/sample1.mov')),
(withAlpha, self.locateFile('tests/videos/sample1.mov')))
mask = donor.create(arguments={})
self.assertIsNone(mask)
donor = InterpolateDonor(graph, 'b', 'c', 'x', (withAlpha, self.locateFile('tests/videos/sample1.mov')),
(withAlpha, self.locateFile('tests/videos/sample1.mov')))
mask = donor.create(arguments={})
self.assertTrue(np.all(mask.image_array[0:30, 0:30] == 0))
self.assertEquals(159100, np.sum((mask.image_array / 255)))
if __name__ == '__main__':
unittest.main()
| 2.40625
| 2
|
6_1_2.py
|
rursvd/pynumerical2
| 0
|
12780174
|
%matplotlib inline
from numpy import linspace,sqrt
import matplotlib.pyplot as plt
x = linspace(-1,5,50)
y1 = 1.0/sqrt(x**2 + 1)
y2 = 1.0/sqrt(3 * x**2 + 1)
plt.plot(x,y1,label='plot 1')
plt.plot(x,y2,'--',label='plot 2')
plt.legend()
plt.show()
| 3.21875
| 3
|
ubersmith_client/ubersmith_request.py
|
internap/python-ubersmithclient
| 1
|
12780175
|
<filename>ubersmith_client/ubersmith_request.py
# Copyright 2017 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import abstractmethod
from requests import Timeout, ConnectionError
from ubersmith_client import exceptions
class UbersmithRequest(object):
def __init__(self, url, user, password, module, timeout):
self.url = url
self.user = user
self.password = password
self.module = module
self.methods = []
self.timeout = timeout
def __getattr__(self, function):
self.methods.append(function)
return self
@abstractmethod
def __call__(self, **kwargs):
raise AttributeError
def _process_request(self, method, **kwargs):
try:
return method(**kwargs)
except ConnectionError:
raise exceptions.UbersmithConnectionError(self.url)
except Timeout:
raise exceptions.UbersmithTimeout(self.url, self.timeout)
def _build_request_params(self, kwargs):
_methods = '.'.join(self.methods)
kwargs['method'] = '{0}.{1}'.format(self.module, _methods)
@staticmethod
def process_ubersmith_response(response):
if response.status_code < 200 or response.status_code >= 400:
raise exceptions.get_exception_for(status_code=response.status_code)
if response.headers['content-type'] == 'application/json':
response_json = response.json()
if not response_json['status']:
raise exceptions.UbersmithException(response_json['error_code'],
response_json['error_message'])
return response_json['data']
return response.content
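# --- Hypothetical sketch (added for illustration; _EchoRequest is invented here and is not
# part of the library). It shows how chained attribute access such as api.client.get(...)
# is turned into the Ubersmith method string "client.get" by _build_request_params.
class _EchoRequest(UbersmithRequest):
    def __call__(self, **kwargs):
        self._build_request_params(kwargs)
        method = kwargs.pop('method')
        self.methods = []  # reset so the instance can be reused for another call
        return method
if __name__ == "__main__":
    request = _EchoRequest('http://ubersmith.example/api/2.0/', 'user', 'secret', 'client', timeout=60)
    print(request.get(client_id=1))  # -> client.get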
| 2.296875
| 2
|
Learning/surface 3D plot radial.py
|
TOLOSAT/gravimetry-payload
| 1
|
12780176
|
#%% -*- coding: utf-8 -*-
"""
Created on Sun Apr 26 02:47:57 2020
plot spherical harmonics in 3D with a radial colormap
http://balbuceosastropy.blogspot.com/2015/06/spherical-harmonics-in-python.html
"""
from __future__ import division
import scipy as sci
import scipy.special as sp
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm, colors
#%% ===========================================================================
l = 4 #degree
m = 2 # order
PHI, THETA = np.mgrid[0:2*np.pi:200j, 0:np.pi:100j] #arrays of angular variables
R = np.abs(sp.sph_harm(m, l, PHI, THETA)) #Array with the absolute values of Ylm
"""
THETA = pi/2 - G_Lat*pi/180
PHI = G_Long*pi/180 + pi
R = G_Grid + 50000
"""
#Now we convert to cartesian coordinates
# for the 3D representation
X = R * np.sin(THETA) * np.cos(PHI)
Y = R * np.sin(THETA) * np.sin(PHI)
Z = R * np.cos(THETA)
N = R/R.max() # Normalize R for the plot colors to cover the entire range of colormap.
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'), figsize=(7,5))
im = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=cm.jet(N))
ax.set_title(r'$|Y^2_4|$', fontsize=20)
m = cm.ScalarMappable(cmap=cm.jet)
m.set_array(R) # Assign the unnormalized data array to the mappable
#so that the scale corresponds to the values of R
fig.colorbar(m, shrink=0.8);
#%% ===========================================================================
l = 4 # degree
m = 2 # order
PHI, THETA = np.mgrid[0:2*np.pi:200j, 0:np.pi:100j]
R = sp.sph_harm(m, l, PHI, THETA).real
X = R * np.sin(THETA) * np.cos(PHI)
Y = R * np.sin(THETA) * np.sin(PHI)
Z = R * np.cos(THETA)
#As R has negative values, we'll use an instance of Normalize
#see http://stackoverflow.com/questions/25023075/normalizing-colormap-used-by-facecolors-in-matplotlib
norm = colors.Normalize()
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'), figsize=(7,5))
m = cm.ScalarMappable(cmap=cm.jet)
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=cm.jet(norm(R)))
ax.set_title('real$(Y^2_4)$', fontsize=20)
m.set_array(R)
fig.colorbar(m, shrink=0.8);
#%% ===========================================================================
l = 4 # degree
m = 2 # order
PHI, THETA = np.mgrid[0:2*np.pi:300j, 0:np.pi:150j]
R = sp.sph_harm(m, l, PHI, THETA).real
s = 1
X = (s*R+1) * np.sin(THETA) * np.cos(PHI)
Y = (s*R+1) * np.sin(THETA) * np.sin(PHI)
Z = (s*R+1) * np.cos(THETA)
norm = colors.Normalize()
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'), figsize=(7,5))
m = cm.ScalarMappable(cmap=cm.jet)
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=cm.terrain(norm(R)))
ax.set_title('1 + real$(Y^2_4)$', fontsize=20)
m.set_array(R)
fig.colorbar(m, shrink=0.8);
#%%
| 2.5625
| 3
|
blog_content/models.py
|
paulootavio343/Blog
| 0
|
12780177
|
<filename>blog_content/models.py
from django.db import models
from django.utils.text import slugify
import os
from PIL import Image
from Blog import settings
from django.contrib.auth import get_user_model
User = get_user_model()
class Category(models.Model):
category_name = models.CharField(
max_length=64, unique=True, verbose_name='Nome')
category_slug = models.SlugField(
blank=True, null=True, unique=True, verbose_name='Slug')
def __str__(self):
return self.category_name
def save(self, *args, **kwargs):
if not self.category_slug:
new_slug = slugify(self.category_name)
self.category_slug = new_slug
super().save(*args, **kwargs)
class Meta:
verbose_name = 'Categoria'
verbose_name_plural = 'Categorias'
class Posts(models.Model):
user = models.ForeignKey(
User, on_delete=models.CASCADE, blank=False, null=False, verbose_name='Usuário'
)
post_category = models.ForeignKey(
Category, on_delete=models.CASCADE, blank=False, null=False, verbose_name='Categoria'
)
title = models.CharField(
max_length=64, blank=False, null=False, verbose_name='Título'
)
excerpt = models.CharField(
max_length=255, blank=False, null=False, verbose_name='Excerto'
)
keywords = models.CharField(
max_length=255, blank=False, null=False, verbose_name='Palavras chave'
)
slug = models.SlugField(blank=True, null=True)
content = models.TextField(
blank=False, null=False, verbose_name='Conteúdo'
)
image = models.ImageField(
blank=True, null=True, upload_to='post_img/%Y/%m/%d', verbose_name='Imagem'
)
publication_date = models.DateTimeField(
auto_now_add=True, verbose_name='Publicação'
)
update_date = models.DateTimeField(
auto_now=True, verbose_name='Atualização'
)
published = models.BooleanField(default=False, verbose_name='Publicado')
def __str__(self):
return self.title
def save(self, *args, **kwargs):
if not self.slug:
new_slug = slugify(self.title)
self.slug = new_slug
super().save(*args, **kwargs)
if self.image:
self.resize_image(self.image.name, 800)
@staticmethod
def resize_image(img_name, new_width):
img_path = os.path.join(settings.MEDIA_ROOT, img_name)
img = Image.open(img_path)
width, height = img.size
new_height = round((new_width * height) / width)
if width <= new_width:
img.close()
return
new_img = img.resize((new_width, new_height), Image.ANTIALIAS)
new_img.save(
img_path,
optimize=True,
quality=60
)
new_img.close()
class Meta:
verbose_name = 'Postagem'
verbose_name_plural = 'Postagens'
class Comentaries(models.Model):
post_comment = models.ForeignKey(
Posts, on_delete=models.CASCADE, blank=False, null=False, verbose_name='Post'
)
name = models.CharField(
max_length=64, blank=False, null=False, verbose_name='Nome'
)
email = models.EmailField(
max_length=64, blank=False, null=False, verbose_name='E-mail'
)
comment_title = models.CharField(
max_length=255, blank=False, null=False, verbose_name='Título'
)
message = models.TextField(
blank=False, null=False, verbose_name='Comentário'
)
comment_published = models.BooleanField(
default=False, verbose_name='Publicado'
)
comment_created = models.DateTimeField(
auto_now_add=True, verbose_name='Publicação'
)
comment_updated = models.DateTimeField(
auto_now=True, verbose_name='Atualização'
)
def __str__(self):
return self.message
class Meta:
verbose_name = 'Comentário'
verbose_name_plural = 'Comentários'
| 2.3125
| 2
|
python/dataserver.py
|
chasepd/hackerbox0065
| 2
|
12780178
|
from flask import Flask
from pycoingecko import CoinGeckoAPI
from time import sleep
from threading import Timer
cg = CoinGeckoAPI()
app = Flask(__name__)
coin_data = {}
coins_to_fetch = ["bitcoin", "ethereum", "litecoin", "monero", "dogecoin", "cardano", "tezos", "stellar"]
#Credit for RepeatedTimer class goes to MestreLion from https://stackoverflow.com/questions/474528/what-is-the-best-way-to-repeatedly-execute-a-function-every-x-seconds
class RepeatedTimer(object):
def __init__(self, interval, function, *args, **kwargs):
self._timer = None
self.interval = interval
self.function = function
self.args = args
self.kwargs = kwargs
self.is_running = False
self.start()
def _run(self):
self.is_running = False
self.start()
self.function(*self.args, **self.kwargs)
def start(self):
if not self.is_running:
self._timer = Timer(self.interval, self._run)
self._timer.start()
self.is_running = True
def stop(self):
self._timer.cancel()
self.is_running = False
def update_coin_data():
prices = cg.get_price(ids=coins_to_fetch, vs_currencies='usd')
for coin in coins_to_fetch:
coin_data[coin] = prices[coin]['usd']
@app.route("/<coin>")
def getPrice(coin):
try:
return str(coin_data[coin])
except KeyError:
return "NULL"
if __name__ == "__main__":
update_coin_data() #get initial values
rt = RepeatedTimer(10, update_coin_data)
app.run('0.0.0.0', 5000)
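# Minimal client sketch (hypothetical; assumes the server above is running on localhost:5000):
#     import requests
#     requests.get("http://localhost:5000/bitcoin").text   # latest cached USD price
#     requests.get("http://localhost:5000/unknown").text   # returns "NULL" for untracked coins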
| 2.9375
| 3
|
moving_zeros_to_the_end.py
|
StefanS97/free-time-fun
| 0
|
12780179
|
def move_zeros(array):
helper_array = []
zeros_counter = 0
helper_zeros = 0
for i in range(len(array)):
if array[i] == 0:
zeros_counter += 1
else:
helper_array.append(array[i])
if zeros_counter > 0:
while helper_zeros < zeros_counter:
helper_array.append(0)
helper_zeros += 1
return helper_array
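# Quick sanity check of move_zeros (illustrative only, not part of the original kata solution):
if __name__ == "__main__":
    assert move_zeros([0, 1, 0, 3, 12]) == [1, 3, 12, 0, 0]
    assert move_zeros([0, 0, 0]) == [0, 0, 0]
    assert move_zeros([]) == []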
| 3.65625
| 4
|
examples/src/main/python/download_http.py
|
apache/airavata-mft
| 7
|
12780180
|
import grpc
import MFTApi_pb2
import MFTApi_pb2_grpc
channel = grpc.insecure_channel('localhost:7004')
stub = MFTApi_pb2_grpc.MFTApiServiceStub(channel)
download_request = MFTApi_pb2.HttpDownloadApiRequest(sourceStoreId ="remote-ssh-storage",
sourcePath= "/tmp/a.txt",
sourceToken = "<PASSWORD>",
sourceType= "SCP",
targetAgent = "agent0",
mftAuthorizationToken = "")
result = stub.submitHttpDownload(download_request)
print(result)
## Sample output ##
# url: "http://localhost:3333/53937f40-d545-4180-967c-ddb193d672d8"
# targetAgent: "agent0"
| 2.40625
| 2
|
srcgen.py
|
emlynoregan/sutlstudio
| 0
|
12780181
|
import json
from decl import Decl, Dist
from google.appengine.ext import ndb
import webapp2
from google.appengine.api import users
import logging
class SrcGenBase(webapp2.RequestHandler):
def RequiredUser(self):
return None
def ProcessSrcGen(self, aUser):
raise Exception("ProcessSrcGen Not Overridden")
def get(self, *args):
self.process(*args)
def process(self, *args):
lresponseMessage = None
logging.debug("In Class: %s" % self.__class__.__name__)
try:
lgoogleUser = users.get_current_user()
lrequiredUserId = self.RequiredUser()
if not lgoogleUser and lrequiredUserId:
self.response.status = 401
lresponseMessage = "User required"
elif lrequiredUserId and (lgoogleUser.user_id() != lrequiredUserId):
                self.response.status = 403
lresponseMessage = "User not authorised (%s,%s)" % (lgoogleUser.user_id(), lrequiredUserId)
else:
logging.debug("User: %s" % lgoogleUser)
lresponseMessage = self.ProcessSrcGen(lgoogleUser)
except Exception, ex:
logging.exception("Error in %s.post" % self.__class__.__name__)
self.response.status = 500
lresponseMessage = unicode(ex)
logging.debug("Leaving SrcGenBase.process (%s): %s" % (self.response.status, lresponseMessage))
if lresponseMessage:
self.response.out.write(lresponseMessage)
@classmethod
def GetAPIPath(cls):
raise Exception("Not Implemented")
class SrcGenDecl(SrcGenBase):
def GetDeclId(self):
return self.request.get("id")
# def GetUserId(self):
# return self.request.get("userid")
def GetDecl(self):
lid = self.GetDeclId()
#luserId = self.GetUserId()
ldecl = Decl.GetById(lid)
return ldecl
def RequiredUser(self):
ldecl = self.GetDecl()
return ldecl.user_id if ldecl and not ldecl.published else None
def ProcessSrcGen(self, aUser):
ldecl = self.GetDecl()
if not ldecl:
self.response.status = 404
return "Decl not found"
else:
self.response.headers['Content-Type'] = 'application/json'
return json.dumps(ldecl.to_decljson(), indent=4, sort_keys=True)
@classmethod
def GetAPIPath(cls):
return "/srcgen/decl"
class SrcGenDist(SrcGenBase):
def GetDistId(self):
return self.request.get("id")
def GetPublishedOnly(self):
return self.request.get("publishedonly")
def GetDist(self):
lid = self.GetDistId()
#luserId = self.GetUserId()
ldist = Dist.GetById(lid)
return ldist
def RequiredUser(self):
ldist = self.GetDist()
return ldist.user_id if ldist and not ldist.published else None
def ProcessSrcGen(self, aUser):
ldist = self.GetDist()
if not ldist:
self.response.status = 404
return "Dist not found"
else:
self.response.headers['Content-Type'] = 'application/json'
luserId = (aUser if isinstance(aUser, basestring) else aUser.user_id()) if aUser else None
ldecls = ldist.GetAllDeclsForAncestorTransitive(aUser, self.GetPublishedOnly() or not luserId or (ldist.user_id != luserId))
ldeclsSource = [ldecl.to_decljson() for ldecl in ldecls]
return json.dumps(ldeclsSource, indent=2)
@classmethod
def GetAPIPath(cls):
return "/srcgen/dist"
class SrcGenDistLib(SrcGenBase):
def GetDistId(self):
return self.request.get("id")
def GetLibOnly(self):
return self.request.get("libonly")
def GetDist(self):
lid = self.GetDistId()
#luserId = self.GetUserId()
ldist = Dist.GetById(lid)
return ldist
def RequiredUser(self):
ldist = self.GetDist()
return ldist.user_id if ldist and not ldist.published else None
def ProcessSrcGen(self, aUser):
ldist = self.GetDist()
if not ldist:
self.response.status = 404
return "Dist not found"
else:
self.response.headers['Content-Type'] = 'application/json'
llibdecls = ldist.GetLibDecls(aUser, self.GetLibOnly())
#ldeclsSource = [ldecl.to_decljson() for ldecl in llibdecls]
return json.dumps(llibdecls, indent=2)
@classmethod
def GetAPIPath(cls):
return "/srcgen/distlib"
| 2.421875
| 2
|
pyarc/test/test_comparable_itemset.py
|
jirifilip/CBA
| 19
|
12780182
|
<gh_stars>10-100
import unittest
from pyarc.data_structures import (
Item,
Antecedent,
ComparableItemSet,
Transaction
)
class TestComparableItemSet(unittest.TestCase):
def test_compare(self):
row1 = [1, 1, 0]
header1 = ["A", "B", "C"]
transaction1 = Transaction(row1, header1, ("Class", 0))
item1 = Item("A", 1)
item2 = Item("B", 1)
item3 = Item("C", 0)
item4 = Item("B", 5)
ant1 = Antecedent([item1, item2])
ant2 = Antecedent([item2])
ant3 = Antecedent([item3])
ant4 = Antecedent([item4])
assert ant1 <= transaction1
assert ant2 <= transaction1
assert ant3 <= transaction1
self.assertFalse(ant4 <= transaction1)
assert transaction1 >= ant1
assert transaction1 >= ant2
assert transaction1 >= ant3
| 3.09375
| 3
|
analysis/lib/stats/blueprint.py
|
astutespruce/secas-blueprint
| 0
|
12780183
|
<reponame>astutespruce/secas-blueprint<gh_stars>0
from pathlib import Path
import numpy as np
import rasterio
from analysis.constants import BLUEPRINT, INPUT_AREA_VALUES, ACRES_PRECISION, M2_ACRES
from analysis.lib.raster import (
detect_data,
boundless_raster_geometry_mask,
extract_count_in_geometry,
summarize_raster_by_geometry,
)
src_dir = Path("data/inputs")
blueprint_filename = src_dir / "se_blueprint2021.tif"
bp_inputs_filename = src_dir / "input_areas.tif"
bp_inputs_mask_filename = src_dir / "input_areas_mask.tif"
def extract_by_geometry(geometries, bounds):
"""Calculate the area of overlap between geometries and Blueprint grids.
NOTE: Blueprint and inputs are on the same grid
Parameters
----------
geometries : list-like of geometry objects that provide __geo_interface__
bounds : list-like of [xmin, ymin, xmax, ymax]
Returns
-------
dict or None (if does not overlap Blueprint data)
{"shape_mask": <shape_mask_area>, "blueprint": [...], ...}
"""
# prescreen to make sure data are present
with rasterio.open(bp_inputs_mask_filename) as src:
if not detect_data(src, geometries, bounds):
return None
results = {}
# create mask and window
with rasterio.open(blueprint_filename) as src:
shape_mask, transform, window = boundless_raster_geometry_mask(
src, geometries, bounds, all_touched=False
)
# square meters to acres
cellsize = src.res[0] * src.res[1] * M2_ACRES
# DEBUG:
# print(
# f"Memory of shape mask: {shape_mask.size * shape_mask.itemsize / (1024 * 1024):0.2f} MB",
# shape_mask.dtype,
# )
results = {
"shape_mask": (
((~shape_mask).sum() * cellsize)
.round(ACRES_PRECISION)
.astype("float32")
)
}
# Nothing in shape mask, return None
if results["shape_mask"] == 0:
return None
blueprint_counts = extract_count_in_geometry(
blueprint_filename,
shape_mask,
window,
np.arange(len(BLUEPRINT)),
boundless=True,
)
results["blueprint"] = (
(blueprint_counts * cellsize).round(ACRES_PRECISION).astype("float32")
)
bp_input_counts = extract_count_in_geometry(
bp_inputs_filename,
shape_mask,
window,
bins=range(0, len(INPUT_AREA_VALUES)),
boundless=True,
)
results["inputs"] = (
(bp_input_counts * cellsize).round(ACRES_PRECISION).astype("float32")
)
return results
def summarize_by_unit(geometries, out_dir):
"""Summarize by HUC12 or marine lease block
Parameters
----------
geometries : Series of pygeos geometries, indexed by HUC12 / marine lease block id
out_dir : str
"""
summarize_raster_by_geometry(
geometries,
extract_by_geometry,
outfilename=out_dir / "blueprint.feather",
progress_label="Summarizing Blueprint and Input Areas",
)
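# Minimal usage sketch (hypothetical; assumes the rasters referenced above exist and that
# `aoi` is a geometry exposing __geo_interface__ with `aoi_bounds` as [xmin, ymin, xmax, ymax]):
#     results = extract_by_geometry([aoi], bounds=aoi_bounds)
#     if results is not None:
#         print(results["shape_mask"], results["blueprint"], results["inputs"])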
| 2.359375
| 2
|
tmp/app.py
|
ArvisP/Arvis-Rubix
| 2
|
12780184
|
from flask import Flask, render_template, request, session, redirect, url_for
from models import db, User#, Places
from forms import SignupForm, LoginForm
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:edzh@localhost:5432/rubix'
db.init_app(app)
app.secret_key = "development-key"
@app.route('/')
def index():
return render_template('index.html')
@app.route('/about')
def about():
return render_template("about.html")
@app.route('/profile')
def profile():
return render_template('profile.html')
@app.route('/learnmore')
def learnmore():
return render_template('learnmore.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
# Disable access to login page if user is already logged in.
if 'email' in session:
return redirect(url_for('home'))
form = SignupForm()
# Checks if form fields are filled
# if it is, create a new user with provided credentials
if request.method == 'POST':
        if not form.validate():
return render_template('signup.html', form=form)
else:
newuser = User(form.first_name.data, form.last_name.data, form.email.data, form.password.data)
db.session.add(newuser)
db.session.commit()
session['email'] = newuser.email
return redirect(url_for('home'))
elif request.method == 'GET':
return render_template('signup.html', form=form)
@app.route('/home')
def home():
if 'email' not in session:
return redirect(url_for('login'))
return render_template('home.html')
# Route to the Login Page
@app.route('/login', methods=['GET', 'POST'])
def login():
# Disable access to login page if user is already logged in.
    if 'email' in session:
return redirect(url_for('home'))
form = LoginForm()
if request.method == 'POST':
# Checks if form fields are filled
        if not form.validate():
return render_template('login.html', form=form)
else:
email = form.email.data
password = form.password.data
user = User.query.filter_by(email=email).first()
# If user exists and password is correct
# Create new session
if user is not None and user.check_password(password):
session['email'] = form.email.data
return redirect(url_for('home'))
else:
return redirect(url_for('login'))
elif request.method == 'GET':
return render_template('login.html', form=form)
@app.route('/logout')
def logout():
session.pop('email', None)
return redirect(url_for('index'))
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == "__main__":
app.run(debug=True)
| 3.15625
| 3
|
klimalogger/data_builder.py
|
wuan/klimalogger
| 5
|
12780185
|
<filename>klimalogger/data_builder.py
import socket
import datetime
import pytz
from injector import singleton, inject
from .config import Config
@singleton
class DataBuilder(object):
@inject
def __init__(self, configuration: Config):
self.location = configuration.client_location_name
self.host_name = configuration.client_host_name
self.timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC).isoformat()
self.data = []
def add(self, sensor: str, measurement_type: str, measurement_unit: str, measurement_value: str,
is_calculated: bool = False):
if measurement_value is not None:
self.data += [self.create(sensor, measurement_type, measurement_unit, measurement_value, is_calculated)]
def create(self, sensor: str, measurement_type: str, measurement_unit: str, measurement_value: str,
is_calculated: bool = False):
return {
"measurement": "data",
"tags": {
"host": self.host_name,
"location": self.location,
"type": measurement_type,
"unit": measurement_unit,
"sensor": sensor,
"calculated": is_calculated
},
"time": self.timestamp,
"fields": {
"value": measurement_value
}
}
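# Minimal usage sketch (hypothetical; assumes `config` is a Config instance exposing
# client_location_name and client_host_name):
#     builder = DataBuilder(configuration=config)
#     builder.add(sensor='bme280', measurement_type='temperature',
#                 measurement_unit='degC', measurement_value='21.3')
#     builder.data  # a single InfluxDB-style point tagged with host, location, type and unit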
| 2.53125
| 3
|
2020/day09.py
|
zzl0/aoc
| 2
|
12780186
|
from utils import *
def search(counter, target):
for a in counter:
b = target - a
if (a == b and counter[b] >= 2) or (a != b and counter[b]):
return True
return False
def day9_1(nums):
counter, i = Counter(nums[:25]), 0
for j in range(25, len(nums)):
if not search(counter, nums[j]):
return nums[j]
counter[nums[j]] += 1
counter[nums[i]] -= 1
i += 1
def day9_2(nums):
i, j, s = 0, 0, 0
target = 57195069
while j < len(nums):
s += nums[j]
while s > target:
s -= nums[i]
i += 1
if s == target and i != j:
break
j += 1
arr = nums[i: j+1]
return min(arr) + max(arr)
if __name__ == "__main__":
nums = data(9, int)
print(f'day9_1: {day9_1(nums)}')
print(f'day9_2: {day9_2(nums)}')
# day9_1: 57195069
# day9_2: 7409241
# python3 day09.py 0.08s user 0.01s system 95% cpu 0.092 total
| 3.296875
| 3
|
year/2020/04/passport_validator.py
|
nbalas/advent_of_code
| 0
|
12780187
|
<filename>year/2020/04/passport_validator.py
from operator import methodcaller
from logs.setup_logs import init_logs
from readers.file_reader import FileReader
import re
logger = init_logs(__name__)
VALID_EYE_COLORS = ["amb", "blu", "brn", "gry", "grn", "hzl", "oth"]
REQUIRED_FIELDS = {
"byr": lambda x: 1920 <= int(x) <= 2002,
"iyr": lambda x: 2010 <= int(x) <= 2020,
"eyr": lambda x: 2020 <= int(x) <= 2030,
"hgt": lambda x: 150 <= int(x[:len(x)-2]) <= 193 if "cm" in x else 59 <= int(x[:len(x)-2]) <= 76 if re.search('in|cm', x) != None else False,
"hcl": lambda x: re.search('^#[0-9a-f]{6}$', x) is not None,
"ecl": lambda x: x in VALID_EYE_COLORS,
"pid": lambda x: re.search('^[0-9]{9}$', x) is not None
}
OPTIONAL_FIELDS = ["cid"]
def main():
passports = format_passports(FileReader.read_input_as_string())
valid_passports = list(filter(validate_passport, passports))
logger.info("There are {} valid passports.".format(len(valid_passports)))
def format_passports(raw_input):
raw_passports = raw_input.split("\n\n")
parsed_passports = list(map(lambda s: re.split(' |\n', s), raw_passports))
formatted_passports = list(map(lambda p: dict(map(methodcaller('split', ':'), p)), parsed_passports))
return formatted_passports
def validate_passport(passport):
logger.debug("Processing passport {}".format(passport))
for field in REQUIRED_FIELDS.keys():
if field not in passport or not REQUIRED_FIELDS[field](passport[field]):
logger.info("Passport is invalid! Missing or invalid required field {}".format(field))
return False
logger.info("Passport is valid!")
return True
if __name__ == '__main__':
main()
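# Illustrative checks of two of the field validators above (hypothetical values):
#     REQUIRED_FIELDS['hcl']('#123abc')     # True
#     REQUIRED_FIELDS['hcl']('#123abz')     # False (z is not a hex digit)
#     REQUIRED_FIELDS['pid']('000000001')   # True
#     REQUIRED_FIELDS['pid']('0123456789')  # False (ten digits instead of nine)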
| 2.9375
| 3
|
src/tt_storage/tt_storage/models.py
|
serhii73/the-tale
| 0
|
12780188
|
import uuid
from django.db import models
from . import conf
class Item(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
owner = models.PositiveIntegerField(db_index=True)
storage = models.IntegerField(default=0)
data = models.JSONField(default=dict)
base_type = models.CharField(max_length=conf.ITEM_TYPE_LENGTH)
full_type = models.CharField(max_length=conf.ITEM_TYPE_LENGTH)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
db_table = 'items'
class LogRecord(models.Model):
id = models.BigAutoField(primary_key=True)
transaction = models.UUIDField(default=uuid.uuid4)
item = models.UUIDField()
type = models.IntegerField()
data = models.JSONField(default=dict)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
db_table = 'log_records'
index_together = [('item', 'created_at')]
| 2.265625
| 2
|
freyr_app/core/processing/sentiment.py
|
blanchefort/freyrmonitoring
| 2
|
12780189
|
<filename>freyr_app/core/processing/sentiment.py
import os
from typing import List
import torch
from transformers import BertTokenizer, BertForSequenceClassification
from django.conf import settings
from ..models import categories
from ..nn import CategoryClassifier
from .nlp import preprocess_text
class Sentimenter:
"""Класс для быстрого определния сентимента батча текстов
"""
def __init__(self, model_type='rubert-base-cased-sentiment'):
device = 'cuda' if torch.cuda.is_available() else 'cpu'
self.device = torch.device(device)
self.model_path = settings.ML_MODELS
self.tokenizer = BertTokenizer.from_pretrained(f'{self.model_path}/{model_type}')
self.model = BertForSequenceClassification.from_pretrained(f'{self.model_path}/{model_type}')
self.model.to(self.device)
self.model.eval()
@torch.no_grad()
def __call__(self, texts: List[str]) -> List[int]:
texts = list(map(preprocess_text, texts))
result_labels = []
for batch in range(0, len(texts), settings.BATCH_SIZE):
inputs = self.tokenizer(
texts[batch:batch+settings.BATCH_SIZE],
padding=True,
truncation=True,
max_length=512,
return_tensors='pt')
logits = self.model(**inputs.to(self.device))[0]
probabilities = torch.softmax(logits, dim=1).to('cpu')
labels = torch.argmax(probabilities, dim=1)
result_labels.extend(labels.tolist())
return result_labels
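# Minimal usage sketch (hypothetical; assumes Django settings are configured and the
# fine-tuned model files exist under settings.ML_MODELS):
#     sentimenter = Sentimenter()
#     labels = sentimenter(['Отличные новости!', 'Это ужасно.'])  # one class id per input text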
| 2.109375
| 2
|
djnic/web/views_dominio.py
|
avdata99/nic
| 8
|
12780190
|
<filename>djnic/web/views_dominio.py<gh_stars>1-10
from django.conf import settings
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page, cache_control
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from core.views import AnalyticsViewMixin
from dominios.models import Dominio, STATUS_DISPONIBLE
from cambios.data import get_ultimos_caidos
from dominios.data import (get_ultimos_registrados, get_judicializados,
get_primeros_registrados, get_futuros,
get_por_caer)
class DominioView(AnalyticsViewMixin, DetailView):
model = Dominio
context_object_name = "dominio"
template_name = "web/bootstrap-base/dominio.html"
def get_object(self):
return Dominio.objects.get(uid=self.kwargs['uid'])
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['site_title'] = f'Dominio {self.object.full_domain()}'
context['site_description'] = f'Datos del Dominio {self.object.full_domain()}'
context['estado'] = 'Disponible' if self.object.estado == STATUS_DISPONIBLE else 'No disponible'
        # sort the changes, most recent first
cambios = self.object.cambios.prefetch_related('campos').order_by('-momento')
        # Even though NIC publishes them, it is not in our interest to expose
        # certain personal data
campos_a_ocultar = [
"admin_cp", "admin_domicilio", "admin_fax", "admin_tel", "reg_cp",
"reg_documento", "reg_domicilio", "reg_domicilio_exterior", "reg_fax",
"reg_fax_exterior", "reg_telefono", "reg_telefono_exterior",
"registrant_legal_uid", "resp_cp", "resp_domicilio", "resp_fax",
"resp_telefono", "tech_cp", "tech_domicilio", "tech_fax", "tech_telefono"
]
ncambios = []
for cambio in cambios:
chg = {
'have_changes': cambio.have_changes,
'momento': cambio.momento,
'campos_cambiados': []
}
if cambio.have_changes:
for campo in cambio.campos.all():
campo_dict = {
'campo': campo.campo,
'anterior': campo.anterior,
'nuevo': campo.nuevo
}
if campo.campo in campos_a_ocultar:
campo_dict['anterior'] = '[protegido]'
campo_dict['nuevo'] = '[protegido]'
chg['campos_cambiados'].append(campo_dict)
ncambios.append(chg)
context['cambios'] = ncambios
return context
class UltimosCaidos(AnalyticsViewMixin, TemplateView):
template_name = "web/bootstrap-base/dominios/ultimos-caidos.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['site_title'] = 'Ultimos dominios caidos'
context['site_description'] = 'Lista de los últimos dominios caidos'
context['ultimos_caidos'] = get_ultimos_caidos(limit=500)
# limit for non logged users
if not self.request.user.is_authenticated:
context['ultimos_caidos'] = context['ultimos_caidos'][:5]
return context
class UltimosRegistrados(AnalyticsViewMixin, TemplateView):
template_name = "web/bootstrap-base/dominios/ultimos-registrados.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
        context['site_title'] = 'Ultimos dominios registrados'
        context['site_description'] = 'Lista de los últimos dominios registrados'
context['ultimos_registrados'] = get_ultimos_registrados(limit=500)
# limit for non logged users
if not self.request.user.is_authenticated:
context['ultimos_registrados'] = context['ultimos_registrados'][:5]
return context
class DominiosAntiguosView(AnalyticsViewMixin, TemplateView):
template_name = "web/bootstrap-base/dominios/antiguos.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
        context['site_title'] = 'Dominios más antiguos'
        context['site_description'] = 'Lista de los primeros dominios registrados'
context['dominios'] = get_primeros_registrados(limit=500)
# limit for non logged users
if not self.request.user.is_authenticated:
context['dominios'] = context['dominios'][:5]
return context
class Judicializados(AnalyticsViewMixin, TemplateView):
template_name = "web/bootstrap-base/dominios/judicializados.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['site_title'] = 'Dominios judicializados'
context['site_description'] = 'Lista de los dominios vencidos sin caer'
context['dominios'] = get_judicializados(limit=500)
# limit for non logged users
if not self.request.user.is_authenticated:
context['dominios'] = context['dominios'][:5]
return context
class DominiosVencimientoLargoView(AnalyticsViewMixin, TemplateView):
""" Dominios que vencen más en el futuro """
template_name = "web/bootstrap-base/dominios/futuros.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['site_title'] = 'Dominios futuros'
context['site_description'] = 'Dominios que vencen más en el futuro'
context['dominios'] = get_futuros(limit=500)
# limit for non logged users
if not self.request.user.is_authenticated:
context['dominios'] = context['dominios'][:5]
return context
class PorCaerView(AnalyticsViewMixin, TemplateView):
template_name = "web/bootstrap-base/dominios/por-caer.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['site_title'] = 'Dominios apunto de caer'
context['site_description'] = 'Dominios vencidos y listos para liberarse'
context['por_caer'] = get_por_caer(limit=500)
# limit for non logged users
if not self.request.user.is_authenticated:
context['por_caer'] = context['por_caer'][:5]
return context
| 1.921875
| 2
|
_notebooks/make_notebooks.py
|
DTUWindEnergy/TopFarm2
| 4
|
12780191
|
import os
import json
import pprint
import shutil
from _notebooks.notebook import Notebook
from topfarm.easy_drivers import EasyDriverBase
# def get_cells(nb):
# cells = []
# for cell in nb['cells']:
# if cell['cell_type'] == 'code' and len(cell['source']) > 0 and '%%include' in cell['source'][0]:
# cells.extend(load_notebook(cell['source'][0].replace('%%include', '').strip())['cells'])
# else:
# cells.append(cell)
# return cells
#
# def load_notebook(f):
# with open(f) as fid:
# nb = json.load(fid)
#
# nb['cells'] = get_cells(nb)
# return nb
#
#
# def save_notebook(nb, f):
# with open(f, 'w') as fid:
# json.dump(nb, fid, indent=4)
# # fid.write(pprint.pformat(nb))
def make_tutorials():
path = os.path.dirname(__file__) + "/templates/"
for f in [f for f in os.listdir(path) if f.endswith('.ipynb')]:
nb = Notebook(path + f)
nb.replace_include_tag()
nb.save(os.path.dirname(__file__) + "/../tutorials/" + f)
# with open(os.path.dirname(__file__) + "/../tutorials/" + f, 'w') as fid:
# json.dump(nb, fid)
def doc_header(name):
nb = Notebook(os.path.dirname(__file__) + "/elements/doc_setup.ipynb")
nb.cells[0]['source'][0] = nb.cells[0]['source'][0].replace('[name]', name)
return nb.cells
def make_doc_notebooks(notebooks):
src_path = os.path.dirname(__file__) + "/elements/"
dst_path = os.path.dirname(__file__) + "/../docs/notebooks/"
if os.path.isdir(dst_path):
try:
shutil.rmtree(dst_path)
except PermissionError:
pass
os.makedirs(dst_path, exist_ok=True)
for name in notebooks:
nb = Notebook(src_path + name + ".ipynb")
t = '[Try this yourself](https://colab.research.google.com/github/DTUWindEnergy/TopFarm2/blob/master/docs/notebooks/%s.ipynb) (requires google account)'
nb.insert_markdown_cell(1, t % name)
code = """%%capture
# Install Topfarm if needed
import importlib
if not importlib.util.find_spec("topfarm"):
!pip install topfarm
"""
        if name not in ['loads', 'wake_steering_and_loads', 'layout_and_loads']:
nb.insert_code_cell(2, code)
nb.save(dst_path + name + ".ipynb")
def check_notebooks(notebooks=None):
import matplotlib.pyplot as plt
def no_show(*args, **kwargs):
pass
plt.show = no_show # disable plt show that requires the user to close the plot
path = os.path.dirname(__file__) + "/elements/"
if notebooks is None:
notebooks = [f for f in os.listdir(path) if f.endswith('.ipynb')]
else:
notebooks = [f + '.ipynb' for f in notebooks]
for f in notebooks:
nb = Notebook(path + f)
nb.check_code()
nb.check_links()
if __name__ == '__main__':
notebooks = ['constraints', 'cost_models', 'drivers', 'loads', 'problems',
'roads_and_cables', 'wake_steering_and_loads', 'layout_and_loads',
'bathymetry',]
notebooks.remove('wake_steering_and_loads')
notebooks.remove('loads')
check_notebooks(notebooks)
make_doc_notebooks(notebooks)
print('Done')
| 2.375
| 2
|
cobi_loss.py
|
JungHeeKim29/DiffHDRsyn
| 15
|
12780192
|
<reponame>JungHeeKim29/DiffHDRsyn
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models.vgg as vgg
from collections import namedtuple
class ContextualBilateralLoss(nn.Module):
"""
Creates a criterion that measures the contextual bilateral loss.
Parameters
---
weight_sp : float, optional
a balancing weight between spatial and feature loss.
band_width : int, optional
a band_width parameter described as :math:`h` in the paper.
use_vgg : bool, optional
if you want to use VGG feature, set this `True`.
vgg_layer : str, optional
        intermediate layer name for VGG feature.
Now we support layer names:
`['relu1_2', 'relu2_2', 'relu3_4', 'relu4_4', 'relu5_4']`
"""
def __init__(self,
weight_sp: float = 0.1,
band_width: float = 0.5,
loss_type: str = 'cosine',
use_vgg: bool = True,
vgg_layer: str = 'relu3_4',
device: int = 1,
patch_size : int= 8):
super(ContextualBilateralLoss, self).__init__()
self.band_width = band_width
self.device = device
self.patch_size = patch_size
if use_vgg:
self.vgg_model = VGG19().cuda(device = device)
self.vgg_layer = vgg_layer
self.register_buffer(
name='vgg_mean',
tensor=torch.tensor(
[[[0.485]], [[0.456]], [[0.406]]], requires_grad=False)
)
self.register_buffer(
name='vgg_std',
tensor=torch.tensor(
[[[0.229]], [[0.224]], [[0.225]]], requires_grad=False)
)
def forward(self, x, y):
if hasattr(self, 'vgg_model'):
assert x.shape[1] == 3 and y.shape[1] == 3,\
'VGG model takes 3 chennel images.'
# normalization
x = x.sub(self.vgg_mean.detach().cuda(self.device))\
.div(self.vgg_std.detach().cuda(self.device))
y = y.sub(self.vgg_mean.detach().cuda(self.device))\
.div(self.vgg_std.detach().cuda(self.device))
# picking up vgg feature maps
x = getattr(self.vgg_model(x), self.vgg_layer)
y = getattr(self.vgg_model(y), self.vgg_layer)
            output = self.contextual_bilateral_loss(x, y, band_width=self.band_width)
else :
x_patch = x.unfold(1,3,3).unfold(2, self.patch_size,self.patch_size)\
.unfold(3, self.patch_size, self.patch_size)
x_patch = x_patch.reshape([-1,self.patch_size, self.patch_size])
x_patch = x_patch.unsqueeze(0)
y_patch = y.unfold(1,3,3).unfold(2,self.patch_size,self.patch_size)\
.unfold(3,self.patch_size,self.patch_size)
y_patch = y_patch.reshape([-1,self.patch_size, self.patch_size])
y_patch = y_patch.unsqueeze(0)
            output = self.contextual_bilateral_loss(x_patch, y_patch, band_width=self.band_width)
return output
# TODO: Operation check
def contextual_bilateral_loss(self,
x: torch.Tensor,
y: torch.Tensor,
weight_sp: float = 0.1,
band_width: float = 0.5,
loss_type: str = 'cosine'):
"""
Computes Contextual Bilateral (CoBi) Loss between x and y,
proposed in https://arxiv.org/pdf/1905.05169.pdf.
Parameters
---
x : torch.Tensor
features of shape (N, C, H, W).
y : torch.Tensor
features of shape (N, C, H, W).
band_width : float, optional
a band-width parameter used to convert distance to similarity.
in the paper, this is described as :math:`h`.
loss_type : str, optional
a loss type to measure the distance between features.
Note: `l1` and `l2` frequently raises OOM.
Returns
---
cx_loss : torch.Tensor
contextual loss between x and y (Eq (1) in the paper).
k_arg_max_NC : torch.Tensor
indices to maximize similarity over channels.
"""
assert x.size() == y.size(), 'input tensor must have the same size.'
# spatial loss
grid = self.compute_meshgrid(x.shape).to(self.device)
dist_raw = self.compute_l2_distance(grid, grid)
dist_tilde = self.compute_relative_distance(dist_raw)
cx_sp = self.compute_cx(dist_tilde, band_width)
# feature loss
if loss_type == 'cosine':
dist_raw = self.compute_cosine_distance(x, y)
elif loss_type == 'l1':
dist_raw = self.compute_l1_distance(x, y)
elif loss_type == 'l2':
dist_raw = self.compute_l2_distance(x, y)
dist_tilde = self.compute_relative_distance(dist_raw)
cx_feat = self.compute_cx(dist_tilde, band_width)
# combined loss
cx_combine = (1. - weight_sp) * cx_feat + weight_sp * cx_sp
k_max_NC, _ = torch.max(cx_combine, dim=2, keepdim=True)
cx = k_max_NC.mean(dim=1)
cx_loss = torch.mean(-torch.log(cx + 1e-5))
return cx_loss
def compute_cx(self, dist_tilde, band_width):
w = torch.exp((1 - dist_tilde) / band_width) # Eq(3)
cx = w / torch.sum(w, dim=2, keepdim=True) # Eq(4)
return cx
def compute_relative_distance(self, dist_raw):
dist_min, _ = torch.min(dist_raw, dim=2, keepdim=True)
dist_tilde = dist_raw / (dist_min + 1e-5)
return dist_tilde
def compute_cosine_distance(self, x, y):
# mean shifting by channel-wise mean of `y`.
y_mu = y.mean(dim=(0, 2, 3), keepdim=True)
x_centered = x - y_mu
y_centered = y - y_mu
# L2 normalization
x_normalized = F.normalize(x_centered, p=2, dim=1)
y_normalized = F.normalize(y_centered, p=2, dim=1)
# channel-wise vectorization
N, C, *_ = x.size()
x_normalized = x_normalized.reshape(N, C, -1) # (N, C, H*W)
y_normalized = y_normalized.reshape(N, C, -1) # (N, C, H*W)
        # cosine similarity
cosine_sim = torch.bmm(x_normalized.transpose(1, 2),
y_normalized) # (N, H*W, H*W)
# convert to distance
dist = 1 - cosine_sim
return dist
# TODO: Considering avoiding OOM.
def compute_l1_distance(self, x: torch.Tensor, y: torch.Tensor):
N, C, H, W = x.size()
x_vec = x.view(N, C, -1)
y_vec = y.view(N, C, -1)
dist = x_vec.unsqueeze(2) - y_vec.unsqueeze(3)
dist = dist.sum(dim=1).abs()
dist = dist.transpose(1, 2).reshape(N, H*W, H*W)
dist = dist.clamp(min=0.)
return dist
# TODO: Considering avoiding OOM.
def compute_l2_distance(self, x, y):
N, C, H, W = x.size()
x_vec = x.view(N, C, -1)
y_vec = y.view(N, C, -1)
x_s = torch.sum(x_vec ** 2, dim=1)
y_s = torch.sum(y_vec ** 2, dim=1)
A = y_vec.transpose(1, 2) @ x_vec
dist = y_s - 2 * A + x_s.transpose(0, 1)
dist = dist.transpose(1, 2).reshape(N, H*W, H*W)
dist = dist.clamp(min=0.)
return dist
def compute_meshgrid(self, shape):
N, C, H, W = shape
rows = torch.arange(0, H, dtype=torch.float32) / (H + 1)
cols = torch.arange(0, W, dtype=torch.float32) / (W + 1)
feature_grid = torch.meshgrid(rows, cols)
feature_grid = torch.stack(feature_grid).unsqueeze(0)
feature_grid = torch.cat([feature_grid for _ in range(N)], dim=0)
return feature_grid
class VGG19(nn.Module):
def __init__(self, requires_grad=False):
super(VGG19, self).__init__()
vgg_pretrained_features = vgg.vgg19(pretrained=True).features
self.slice1 = nn.Sequential()
self.slice2 = nn.Sequential()
self.slice3 = nn.Sequential()
self.slice4 = nn.Sequential()
for x in range(4):
self.slice1.add_module(str(x), vgg_pretrained_features[x])
for x in range(4, 9):
self.slice2.add_module(str(x), vgg_pretrained_features[x])
for x in range(9, 18):
self.slice3.add_module(str(x), vgg_pretrained_features[x])
for x in range(18, 27):
self.slice4.add_module(str(x), vgg_pretrained_features[x])
if not requires_grad:
for param in self.parameters():
param.requires_grad = False
def forward(self, X):
h = self.slice1(X)
h_relu1_2 = h
h = self.slice2(h)
h_relu2_2 = h
h = self.slice3(h)
h_relu3_4 = h
h = self.slice4(h)
h_relu4_4 = h
vgg_outputs = namedtuple(
"VggOutputs", ['relu1_2', 'relu2_2','relu3_4', 'relu4_4'])
out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_4, h_relu4_4)
return out
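# Minimal usage sketch (hypothetical; assumes a CUDA device 0 is available and the
# pretrained VGG19 weights can be downloaded):
#     criterion = ContextualBilateralLoss(use_vgg=True, vgg_layer='relu3_4', device=0)
#     x = torch.rand(1, 3, 64, 64).cuda(0)
#     y = torch.rand(1, 3, 64, 64).cuda(0)
#     loss = criterion(x, y)  # scalar CoBi loss between the two batches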
| 2.4375
| 2
|
treefort.py
|
tedder/priceonomics-sandbox
| 0
|
12780193
|
<reponame>tedder/priceonomics-sandbox<gh_stars>0
#!/usr/bin/env python3
import csv
import sys
import collections
import statistics
cityvals = collections.defaultdict(list)
# notes:
# - data seems to be really clean, actually. after normalizing the case of the city/state they came together.
# - how to represent large cities? (e.g., Manhattan vs New York City, Pasadena vs Los Angeles)
# * leaving alone
# * alternative: collect into census MSAs
# - some outliers with zero reviews- eg a $10000 tree fort in Park City. They could be excluded but where's the cutoff? Perhaps < 2 reviews?
# - some outliers, like Indianapolis and Austin. Austin is an outlier for the number of entries, and Indianapolis is an outlier for the median price.
with open(sys.argv[1]) as csvfile:
r = csv.DictReader(csvfile, fieldnames=['id', 'city', 'state', 'price', 'reviews'])
r.__next__() # skip header
for row in r:
intprice = round(float(row['price']))
# ensure we have the correct number of columns
if len(row.items()) != 5 or intprice < 1 or len(row['city']) == 0 or len(row['state']) == 0:
print('this seems to be a bogus row: {}'.format(' - '.join((row['city'], row['price'], row['reviews']))))
# quick code to exclude entries with less than two reviews
#if round(float(row['reviews'])) < 2:
# continue
# making a composite key so we don't end up counting Portland Maine in the Portland Oregon bucket.
citystate = '{}--{}'.format(row['city'].title(), row['state'].upper())
cityvals[citystate].append(intprice)
citymedians = collections.defaultdict(list)
for row in sorted(cityvals.items(), key=lambda x: len(x[1]), reverse=True)[:100]:
citymedians[row[0]] = (round(statistics.median(row[1])), len(row[1]), round(statistics.pstdev(row[1])))
print(','.join(('city', 'state', 'price median', 'listings', 'price stddev')))
for row in sorted(citymedians.items(), key=lambda x: x[1], reverse=True):
splitcity = row[0].split('--')
# weird/hacky looking code here because row[1] is a list.
print(','.join(splitcity) + ',' + ','.join([str(x) for x in row[1]]))
| 2.953125
| 3
|
src/kestrue/naturals.py
|
brunolange/kestrue
| 0
|
12780194
|
<gh_stars>0
from functools import reduce
zero = lambda f: lambda x: x
one = lambda f: lambda x: f(x)
two = lambda f: lambda x: f(f(x))
three = lambda f: lambda x: f(f(f(x)))
four = lambda f: lambda x: f(f(f(f(x))))
five = lambda f: lambda x: f(f(f(f(f(x)))))
r"""
succ := \n.\fx.f(nfx)
>>> succ(2)
n -> \gy.g(gy)
-> fx.f(f(f(x))) # 3
"""
succ = lambda nat: lambda f: lambda x: f(nat(f)(x))
def to_int(nat):
"""
>>> to_int(lambda f: lambda x: x)
0
>>> to_int(lambda f: lambda x: f(x))
1
>>> to_int(lambda f: lambda x: f(f(f(f(f(f(f(x))))))))
7
"""
return nat(lambda acc: acc + 1)(0)
def from_int(i):
return reduce(lambda acc, _: succ(acc), range(i), zero)
r"""
add := \ab.\fx.af(bfx)
>>> add(2)(3)
a -> \gy.g(gy)
b -> \hz.h(h(hz))
-> \fx: f(f(bfx))
-> \fx: f(f(f(f(f(x))))) # 5
"""
add = lambda a: lambda b: lambda f: lambda x: a(f)(b(f)(x))
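# A small sanity check tying the pieces together (illustrative only):
if __name__ == "__main__":
    assert to_int(succ(two)) == 3
    assert to_int(add(two)(three)) == 5
    assert to_int(from_int(7)) == 7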
| 3.21875
| 3
|
custom_exceptions.py
|
kamens/gae_bingo
| 34
|
12780195
|
<filename>custom_exceptions.py
class InvalidRedirectURLError(Exception):
"""Raised when there is a redirect attempt to an absolute url."""
pass
| 1.960938
| 2
|
Sequences/moreprint.py
|
zahraaliaghazadeh/python
| 0
|
12780196
|
<reponame>zahraaliaghazadeh/python<gh_stars>0
name = "Tim"
age = 10
print(name, age, "Python", 2020)
print(name, age, "Python", 2020, sep=", ")
# sep=", " separates the printed items with a comma and a space
| 3.453125
| 3
|
tests/main/views/test_brief_response.py
|
uk-gov-mirror/alphagov.digitalmarketplace-api
| 25
|
12780197
|
<filename>tests/main/views/test_brief_response.py
"""Tests for brief response views in app/views/brief_responses.py."""
from datetime import datetime, timedelta
from freezegun import freeze_time
import json
import mock
import pytest
from dmapiclient.audit import AuditTypes
from app.main.views.brief_responses import COMPLETED_BRIEF_RESPONSE_STATUSES
from app.models import db, Lot, Brief, BriefResponse, AuditEvent, Service, Framework, SupplierFramework
from dmutils.formats import DATE_FORMAT, DATETIME_FORMAT
from tests.bases import BaseApplicationTest, JSONUpdateTestMixin
from tests.helpers import FixtureMixin
class BaseBriefResponseTest(BaseApplicationTest, FixtureMixin):
def example_brief_data(self):
return {
'title': 'My Test Brief Title',
'specialistRole': 'developer',
'location': 'Wales',
'essentialRequirements': [
'Essential Requirement 1',
u'Essential Requirement 2 £Ⰶⶼ',
u"Essential Requirement 3 \u200d\u2029\u202f",
u"Essential Requirement 4\"\'<&%",
u"Essential Requirement 5",
],
'niceToHaveRequirements': [
'Nice to have requirement 1',
'Nice to have requirement 2',
'Nice to have requirement 3',
'Nice to have requirement 4',
'Nice to have requirement 5'
],
}
def example_brief_response_data(self):
return {
"essentialRequirementsMet": True,
"essentialRequirements": [
{"evidence": "Essential evidence 1"},
{"evidence": "Essential evidence 2 £Ⰶⶼ."},
{"evidence": "Essential evidence 3 \u200d\u2029\u202f"},
{"evidence": "Essential evidence 4\"\'<&%"},
],
"niceToHaveRequirements": [],
"respondToEmailAddress": "<EMAIL>",
}
def setup(self):
super(BaseBriefResponseTest, self).setup()
self.supplier_ids = self.setup_dummy_suppliers(2)
supplier_frameworks = [
SupplierFramework(supplier_id=supplier_id, framework_id=5)
for supplier_id in self.supplier_ids
]
brief = Brief(
data=self.example_brief_data(),
status='live', framework_id=5, lot=Lot.query.get(5)
)
service = Service(
service_id='1234560987654321',
data={'locations': [brief.data['location']]},
status='published',
framework_id=5,
lot_id=5,
supplier_id=0,
)
specialist_brief = Brief(
data=self.example_brief_data(),
status='live', framework_id=5, lot=Lot.query.get(6)
)
specialist_service = Service(
service_id='1234560987654322',
data={'developerLocations': [specialist_brief.data['location']],
'developerPriceMin': "0",
'developerPriceMax': "1000"},
status='published',
framework_id=5,
lot_id=6,
supplier_id=0,
)
db.session.add_all([service, specialist_service, brief, specialist_brief] + supplier_frameworks)
db.session.commit()
self.brief_id = brief.id
self.specialist_brief_id = specialist_brief.id
def setup_dummy_brief_response(
self, brief_id=None, supplier_id=0, submitted_at=datetime(2016, 1, 2), award_details=None, data=None
):
brief_response = BriefResponse(
data=data if data else self.example_brief_response_data(),
supplier_id=supplier_id, brief_id=brief_id or self.brief_id,
submitted_at=submitted_at,
award_details=award_details if award_details else {}
)
db.session.add(brief_response)
db.session.commit()
return brief_response.id
def setup_dummy_awarded_brief_response(self, brief_id=None, awarded_at=None, data=None):
self.setup_dummy_briefs(1, status="closed", brief_start=brief_id or self.brief_id)
awarded_brief_response_id = self.setup_dummy_brief_response(
brief_id=brief_id or self.brief_id,
award_details={'pending': True},
data=data if data else self.example_brief_response_data(),
)
awarded_brief_response = BriefResponse.query.get(awarded_brief_response_id)
awarded_brief_response.awarded_at = awarded_at or datetime.utcnow()
db.session.add(awarded_brief_response)
db.session.commit()
return awarded_brief_response.id
def create_brief_response(self, supplier_id=0, brief_id=None, data=None):
brief_responses_data = {
'briefId': brief_id or self.brief_id,
'supplierId': supplier_id,
}
if data:
brief_responses_data = dict(data, **brief_responses_data)
return self.client.post(
'/brief-responses',
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': brief_responses_data,
'page_questions': list(data) if data else None
}),
content_type='application/json'
)
def get_brief_response(self, brief_response_id):
return self.client.get('/brief-responses/{}'.format(brief_response_id))
def list_brief_responses(self, parameters={}):
return self.client.get('/brief-responses', query_string=parameters)
def _update_brief_response(self, brief_response_id, brief_response_data):
return self.client.post(
'/brief-responses/{}'.format(brief_response_id),
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': brief_response_data,
'page_questions': list(brief_response_data.keys())
}),
content_type='application/json'
)
def _submit_brief_response(self, brief_response_id):
return self.client.post(
'/brief-responses/{}/submit'.format(brief_response_id),
data=json.dumps({
'updated_by': '<EMAIL>',
}),
content_type='application/json'
)
class TestCreateBriefResponse(BaseBriefResponseTest, JSONUpdateTestMixin):
endpoint = '/brief-responses'
method = 'post'
def test_create_new_brief_response_with_no_page_questions(self, live_dos_framework):
res = self.create_brief_response()
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 201, data
assert data['briefResponses']['supplierName'] == 'Supplier 0'
assert data['briefResponses']['briefId'] == self.brief_id
def test_create_new_brief_response_with_page_questions(self, live_dos_framework):
res = self.create_brief_response(data={
"respondToEmailAddress": "<EMAIL>"
})
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 201, data
assert data['briefResponses']['supplierName'] == 'Supplier 0'
assert data['briefResponses']['briefId'] == self.brief_id
def test_create_new_brief_response_with_expired_framework(self, expired_dos_framework):
res = self.create_brief_response()
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 201, data
assert data['briefResponses']['supplierName'] == 'Supplier 0'
assert data['briefResponses']['briefId'] == self.brief_id
def test_create_new_brief_response_with_missing_answer_to_page_question_will_error(self, live_dos_framework):
res = self.client.post(
'/brief-responses',
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': {
'briefId': self.brief_id,
'supplierId': 0,
'respondToEmailAddress': '<EMAIL>'
},
'page_questions': ['respondToEmailAddress', 'availability']
}),
content_type='application/json'
)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data == {'error': {'availability': 'answer_required'}}
def test_create_brief_response_creates_an_audit_event(self, live_dos_framework):
res = self.create_brief_response()
assert res.status_code == 201, res.get_data(as_text=True)
audit_events = AuditEvent.query.filter(
AuditEvent.type == AuditTypes.create_brief_response.value
).all()
assert len(audit_events) == 1
assert audit_events[0].data == {
'briefResponseId': json.loads(res.get_data(as_text=True))['briefResponses']['id'],
'briefResponseJson': {
'briefId': self.brief_id,
'supplierId': 0,
},
'supplierId': 0,
}
def test_cannot_create_brief_response_with_empty_json(self, live_dos_framework):
res = self.client.post(
'/brief-responses',
data=json.dumps({
'updated_by': '<EMAIL>',
}),
content_type='application/json'
)
assert res.status_code == 400
def test_cannot_create_brief_response_without_supplier_id(self, live_dos_framework):
res = self.client.post(
'/brief-responses',
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': {
"briefId": self.brief_id
}
}),
content_type='application/json'
)
assert res.status_code == 400
assert 'supplierId' in res.get_data(as_text=True)
def test_cannot_create_brief_response_without_brief_id(self, live_dos_framework):
res = self.client.post(
'/brief-responses',
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': {
"supplierId": 0
}
}),
content_type='application/json'
)
assert res.status_code == 400
assert 'briefId' in res.get_data(as_text=True)
def test_cannot_create_brief_response_with_non_integer_supplier_id(self, live_dos_framework):
res = self.create_brief_response(supplier_id='not a number')
assert res.status_code == 400
assert 'Invalid supplier ID' in res.get_data(as_text=True)
def test_cannot_create_brief_response_with_non_integer_brief_id(self, live_dos_framework):
res = self.create_brief_response(brief_id='not a number')
assert res.status_code == 400
assert 'Invalid brief ID' in res.get_data(as_text=True)
def test_cannot_create_brief_response_when_brief_doesnt_exist(self, live_dos_framework):
res = self.create_brief_response(brief_id=self.brief_id + 100)
assert res.status_code == 400
assert 'Invalid brief ID' in res.get_data(as_text=True)
def test_cannot_create_brief_response_when_supplier_doesnt_exist(self, live_dos_framework):
res = self.create_brief_response(supplier_id=999)
assert res.status_code == 400
assert 'Invalid supplier ID' in res.get_data(as_text=True)
def test_cannot_create_brief_response_when_supplier_isnt_eligible(self, live_dos_framework):
res = self.create_brief_response(supplier_id=1)
assert res.status_code == 400
assert 'Supplier is not eligible to apply to this brief' in res.get_data(as_text=True)
def test_cannot_create_a_brief_response_if_framework_status_is_not_live_or_expired(self, live_dos_framework):
framework_id = live_dos_framework['id']
for framework_status in [status for status in Framework.STATUSES if status not in ('live', 'expired')]:
db.session.execute(
"UPDATE frameworks SET status=:status WHERE id = :framework_id",
{
'status': framework_status,
'framework_id': framework_id,
},
)
db.session.commit()
res = self.create_brief_response()
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data == {'error': 'Brief framework must be live or expired'}
def test_cannot_respond_to_a_brief_that_isnt_live(self, live_dos_framework):
brief = Brief(
data={}, status='draft', framework_id=5, lot=Lot.query.get(5)
)
db.session.add(brief)
db.session.commit()
res = self.create_brief_response(brief_id=brief.id)
assert res.status_code == 400
assert "Brief must be live" in res.get_data(as_text=True)
def test_cannot_respond_to_a_brief_more_than_once_from_the_same_supplier(self, live_dos_framework):
self.create_brief_response()
res = self.create_brief_response()
assert res.status_code == 400, res.get_data(as_text=True)
assert 'Brief response already exists' in res.get_data(as_text=True)
def test_day_rate_should_be_less_than_service_max_price(self, live_dos_framework):
res = self.create_brief_response(
brief_id=self.specialist_brief_id,
data={"dayRate": "100000"}
)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data["error"]["dayRate"] == 'max_less_than_min'
def test_cannot_create_brief_response_with_invalid_json(self, live_dos_framework):
res = self.client.post(
'/brief-responses',
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': {
"supplierId": 0,
"briefId": self.brief_id,
"essentialRequirementsMet": 'string'
},
'page_questions': ["essentialRequirementsMet"]
}),
content_type='application/json'
)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data['error']['essentialRequirementsMet'] == 'not_required_value'
def test_create_digital_specialists_brief_response(self, live_dos_framework):
res = self.create_brief_response(
brief_id=self.specialist_brief_id,
data={
"essentialRequirementsMet": True,
"respondToEmailAddress": "<EMAIL>",
"availability": "24/12/2016",
"dayRate": "500",
}
)
assert res.status_code == 201
def test_cannot_respond_to_a_brief_with_wrong_number_of_essential_or_nice_to_have_reqs(self, live_dos_framework):
res = self.client.post(
'/brief-responses',
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': {
"supplierId": 0,
"briefId": self.brief_id,
"essentialRequirements": [{'evidence': 'Some'}],
"niceToHaveRequirements": [{'yesNo': True, 'evidence': 'Some'}]
},
'page_questions': ["essentialRequirements", "niceToHaveRequirements"]
}),
content_type='application/json'
)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400, res.get_data(as_text=True)
assert data['error']['essentialRequirements'] == 'answer_required'
assert data['error']['niceToHaveRequirements'] == 'answer_required'
class TestUpdateBriefResponse(BaseBriefResponseTest):
def setup(self):
super(TestUpdateBriefResponse, self).setup()
res = self.create_brief_response()
self.brief_response_id = json.loads(res.get_data(as_text=True))['briefResponses']['id']
def test_update_brief_response_succeeds_and_creates_audit_event(self, live_dos_framework):
res = self._update_brief_response(
self.brief_response_id, {'essentialRequirementsMet': True}
)
assert res.status_code == 200
data = json.loads(res.get_data(as_text=True))['briefResponses']
assert data['id'] == self.brief_response_id
assert data['briefId'] == self.brief_id
assert data['supplierId'] == 0
assert data['essentialRequirementsMet'] is True
audit_events = AuditEvent.query.filter(
AuditEvent.type == AuditTypes.update_brief_response.value
).all()
assert len(audit_events) == 1
assert audit_events[0].data == {
'briefResponseId': self.brief_response_id,
'briefResponseData': {'essentialRequirementsMet': True},
'supplierId': 0,
}
def test_update_brief_response_with_expired_framework(self, expired_dos_framework):
res = self._update_brief_response(self.brief_response_id, {'respondToEmailAddress': '<EMAIL>'})
assert res.status_code == 200
def test_update_brief_response_that_does_not_exist_will_404(self, live_dos_framework):
res = self._update_brief_response(100, {'respondToEmailAddress': '<EMAIL>'})
assert res.status_code == 404
def test_can_not_update_brief_response_for_framework_that_is_not_live_or_expired(self, live_dos_framework):
for framework_status in [status for status in Framework.STATUSES if status not in ('live', 'expired')]:
db.session.execute(
"UPDATE frameworks SET status=:status WHERE slug='digital-outcomes-and-specialists'",
{'status': framework_status},
)
db.session.commit()
res = self._update_brief_response(
self.brief_response_id,
{'respondToEmailAddress': '<EMAIL>'}
)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data == {'error': 'Brief framework must be live or expired'}
def test_can_not_update_brief_response_if_supplier_is_ineligible_for_brief(self, live_dos_framework):
with mock.patch('app.main.views.brief_responses.get_supplier_service_eligible_for_brief') as mock_patch:
mock_patch.return_value = None
res = self._update_brief_response(self.brief_response_id, {'respondToEmailAddress': '<EMAIL>'})
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data == {'error': 'Supplier is not eligible to apply to this brief'}
@pytest.mark.parametrize('brief_status', ['closed', 'cancelled', 'unsuccessful', 'withdrawn', 'draft'])
def test_cannot_update_brief_response_when_brief_is_not_live(self, live_dos_framework, brief_status):
# Create dummy brief and brief_response
self.setup_dummy_briefs(1, status=brief_status, brief_start=1234)
brief_response_id = self.setup_dummy_brief_response(brief_id=1234)
# Update brief response
res = self._update_brief_response(brief_response_id, {'respondToEmailAddress': '<EMAIL>'})
assert res.status_code == 400
data = json.loads(res.get_data(as_text=True))
assert data == {'error': "Brief must have 'live' status for the brief response to be updated"}
def test_can_not_submit_a_brief_response_that_already_been_awarded(self, live_dos_framework):
# As above, but for an awarded Brief
awarded_brief_response_id = self.setup_dummy_awarded_brief_response(brief_id=111)
res = self._update_brief_response(awarded_brief_response_id, {'respondToEmailAddress': '<EMAIL>'})
assert res.status_code == 400
data = json.loads(res.get_data(as_text=True))
assert data == {'error': "Brief must have 'live' status for the brief response to be updated"}
def test_update_brief_response_with_missing_answer_to_page_question_will_error(self, live_dos_framework):
res = self.client.post(
'/brief-responses/{}'.format(self.brief_response_id),
data=json.dumps({
'updated_by': '<EMAIL>',
'briefResponses': {'respondToEmailAddress': '<EMAIL>'},
'page_questions': ['respondToEmailAddress', 'niceToHaveRequirements']
}),
content_type='application/json'
)
assert res.status_code == 400
data = json.loads(res.get_data(as_text=True))
assert data == {'error': {'niceToHaveRequirements': 'answer_required'}}
def test_essential_requirements_met_must_be_answered_as_true(self, live_dos_framework):
res = self._update_brief_response(
self.brief_response_id, {'essentialRequirementsMet': False}
)
assert res.status_code == 400
data = json.loads(res.get_data(as_text=True))
assert data["error"] == {'essentialRequirementsMet': 'not_required_value'}
def test_cannot_update_brief_response_with_wrong_number_of_essential_or_nice_to_have_reqs(self, live_dos_framework):
res = self._update_brief_response(
self.brief_response_id,
{
"essentialRequirements": [{'evidence': 'Some'}],
"niceToHaveRequirements": [{'yesNo': True, 'evidence': 'Some'}]
}
)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400, res.get_data(as_text=True)
assert data['error']['essentialRequirements'] == 'answer_required'
assert data['error']['niceToHaveRequirements'] == 'answer_required'
class TestSubmitBriefResponse(BaseBriefResponseTest):
valid_brief_response_data = {
'essentialRequirementsMet': True,
'essentialRequirements': [{'evidence': 'text'}] * 5,
'niceToHaveRequirements': [{'yesNo': True, 'evidence': 'text'}] * 5,
'availability': u'a',
'respondToEmailAddress': '<EMAIL>'
}
def setup(self):
super(TestSubmitBriefResponse, self).setup()
def _setup_existing_brief_response(self):
res = self.create_brief_response(data=self.valid_brief_response_data)
assert res.status_code == 201
self.brief_response_id = json.loads(res.get_data(as_text=True))['briefResponses']['id']
def test_valid_draft_brief_response_can_be_submitted_for_live_framework(self, live_dos_framework):
self._setup_existing_brief_response()
with freeze_time('2016-9-28'):
res = self._submit_brief_response(self.brief_response_id)
assert res.status_code == 200
brief_response = json.loads(res.get_data(as_text=True))['briefResponses']
assert brief_response['status'] == 'submitted'
assert brief_response['submittedAt'] == '2016-09-28T00:00:00.000000Z'
def test_valid_draft_brief_response_can_be_submitted_for_expired_framework(self, expired_dos_framework):
self._setup_existing_brief_response()
with freeze_time('2016-9-28'):
res = self._submit_brief_response(self.brief_response_id)
assert res.status_code == 200
brief_response = json.loads(res.get_data(as_text=True))['briefResponses']
assert brief_response['status'] == 'submitted'
assert brief_response['submittedAt'] == '2016-09-28T00:00:00.000000Z'
def test_submit_brief_response_creates_an_audit_event(self, live_dos_framework):
self._setup_existing_brief_response()
self._submit_brief_response(self.brief_response_id)
audit_events = AuditEvent.query.filter(
AuditEvent.type == AuditTypes.submit_brief_response.value
).all()
assert len(audit_events) == 1
assert audit_events[0].data == {
'briefResponseId': self.brief_response_id,
'supplierId': 0,
}
def test_submit_brief_response_that_doesnt_exist_will_404(self):
res = self._submit_brief_response(100)
assert res.status_code == 404
@pytest.mark.parametrize('brief_status', ['draft', 'closed', 'unsuccessful', 'cancelled', 'withdrawn'])
def test_can_not_submit_a_brief_response_for_a_non_live_brief(self, live_dos_framework, brief_status):
self.setup_dummy_briefs(1, status=brief_status, brief_start=1234)
# Create dummy brief_response which has been submitted
brief_response_id = self.setup_dummy_brief_response(brief_id=1234)
res = self._submit_brief_response(brief_response_id)
assert res.status_code == 400
data = json.loads(res.get_data(as_text=True))
assert data == {'error': "Brief must have 'live' status for the brief response to be submitted"}
def test_can_not_submit_a_brief_response_that_already_been_awarded(self, live_dos_framework):
# As above, but for an awarded Brief
awarded_brief_response_id = self.setup_dummy_awarded_brief_response(brief_id=111)
repeat_res = self._submit_brief_response(awarded_brief_response_id)
assert repeat_res.status_code == 400
data = json.loads(repeat_res.get_data(as_text=True))
assert data == {'error': "Brief must have 'live' status for the brief response to be submitted"}
@pytest.mark.parametrize('framework_status', [i for i in Framework.STATUSES if i not in ['live', 'expired']])
def test_can_not_submit_a_brief_response_for_a_framework_that_is_not_live_or_expired(
self,
live_dos_framework,
framework_status
):
# If a brief response already exists delete the last one. Suppliers can only have one response.
existing_brief_response = db.session.query(BriefResponse).all()
if existing_brief_response:
db.session.delete(existing_brief_response[-1])
db.session.commit()
# Create a brief response while the framework is live.
self._setup_existing_brief_response()
# Set framework status to the invalid status currently under test.
dos_framework = db.session.query(Framework).filter_by(slug='digital-outcomes-and-specialists').first()
dos_framework.status = framework_status
db.session.commit()
# Ensure error code on save attempt.
res = self._submit_brief_response(self.brief_response_id)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data == {'error': 'Brief framework must be live or expired'}
def test_can_not_submit_response_if_supplier_is_ineligble_for_brief(self, live_dos_framework):
self._setup_existing_brief_response()
with mock.patch('app.main.views.brief_responses.get_supplier_service_eligible_for_brief') as mock_patch:
mock_patch.return_value = None
res = self._submit_brief_response(self.brief_response_id)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data == {'error': 'Supplier is not eligible to apply to this brief'}
def test_can_submit_a_brief_response_with_no_nice_to_have_requirements(self, live_dos_framework):
brief = Brief.query.get(self.brief_id)
brief_data = brief.data.copy()
brief_data['niceToHaveRequirements'] = []
brief.data = brief_data
db.session.add(brief)
db.session.commit()
response_data = self.valid_brief_response_data
response_data.pop('niceToHaveRequirements')
create_res = self.create_brief_response(data=response_data)
brief_response_id = json.loads(create_res.get_data(as_text=True))['briefResponses']['id']
submit_res = self._submit_brief_response(brief_response_id)
assert submit_res.status_code == 200
def test_can_not_submit_an_invalid_brief_response(self, live_dos_framework):
res = self.create_brief_response()
brief_response_id = json.loads(res.get_data(as_text=True))['briefResponses']['id']
res = self._submit_brief_response(brief_response_id)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data == {
'error': {
'availability': 'answer_required',
'essentialRequirements': 'answer_required',
'essentialRequirementsMet': 'answer_required',
'niceToHaveRequirements': 'answer_required',
'respondToEmailAddress': 'answer_required'
}
}
class TestGetBriefResponse(BaseBriefResponseTest):
def setup(self):
super(TestGetBriefResponse, self).setup()
self.brief_response_id = self.setup_dummy_brief_response()
def test_get_brief_response(self):
res = self.get_brief_response(self.brief_response_id)
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert data['briefResponses']['id'] == self.brief_response_id
assert data['briefResponses']['supplierId'] == 0
def test_get_missing_brief_returns_404(self):
res = self.get_brief_response(999)
assert res.status_code == 404
class TestListBriefResponses(BaseBriefResponseTest):
def test_list_empty_brief_responses(self):
res = self.list_brief_responses()
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert data['briefResponses'] == []
assert 'self' in data['links'], data
@pytest.mark.parametrize("without_data", (False, True))
def test_list_brief_responses(self, without_data):
for i in range(3):
self.setup_dummy_brief_response()
res = self.list_brief_responses({"with-data": "false"} if without_data else {})
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 3
assert 'self' in data['links']
assert all("essentialRequirementsMet" in br for br in data['briefResponses'])
assert all(("essentialRequirements" in br) == (not without_data) for br in data['briefResponses'])
def test_list_brief_responses_pagination(self):
for i in range(8):
self.setup_dummy_brief_response()
res = self.list_brief_responses()
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 5
assert 'next' in data['links']
res = self.list_brief_responses(dict(page=2))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 3
assert 'prev' in data['links']
@pytest.mark.parametrize("without_data", (False, True))
def test_list_brief_responses_for_supplier_id(self, without_data):
for i in range(8):
self.setup_dummy_brief_response(supplier_id=0)
self.setup_dummy_brief_response(supplier_id=1)
res = self.list_brief_responses({"supplier_id": 1, **({"with-data": "false"} if without_data else {})})
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 8
assert all(br['supplierId'] == 1 for br in data['briefResponses'])
assert all("essentialRequirementsMet" in br for br in data['briefResponses'])
assert all(("essentialRequirements" in br) == (not without_data) for br in data['briefResponses'])
def test_list_brief_responses_for_brief_id(self):
brief = Brief(
data=self.example_brief_data(),
status='live', framework_id=5, lot=Lot.query.get(5)
)
db.session.add(brief)
db.session.commit()
another_brief_id = brief.id
for i in range(8):
self.setup_dummy_brief_response(brief_id=self.brief_id, supplier_id=0)
self.setup_dummy_brief_response(brief_id=another_brief_id, supplier_id=0)
res = self.list_brief_responses(dict(brief_id=another_brief_id))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 8
assert all(br['briefId'] == another_brief_id for br in data['briefResponses'])
def test_list_brief_responses_by_one_framework_slug(self, live_dos2_framework):
supplier_framework = SupplierFramework(supplier_id=0, framework_id=live_dos2_framework["id"])
dos2_brief = Brief(
data=self.example_brief_data(),
status='live', framework_id=live_dos2_framework["id"], lot=Lot.query.get(6)
)
db.session.add_all([dos2_brief, supplier_framework])
db.session.commit()
dos2_brief_id = dos2_brief.id
for i in range(3):
self.setup_dummy_brief_response(brief_id=self.brief_id, supplier_id=0)
self.setup_dummy_brief_response(brief_id=dos2_brief_id, supplier_id=0)
res = self.list_brief_responses(dict(framework='digital-outcomes-and-specialists-2'))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 3
assert all(
br['brief']['framework']['slug'] == "digital-outcomes-and-specialists-2"
for br in data['briefResponses']
)
assert 'self' in data['links']
def test_list_brief_responses_by_multiple_framework_slugs(self, live_dos2_framework):
supplier_framework = SupplierFramework(supplier_id=0, framework_id=live_dos2_framework["id"])
dos2_brief = Brief(
data=self.example_brief_data(),
status='live', framework_id=live_dos2_framework["id"], lot=Lot.query.get(6)
)
db.session.add_all([dos2_brief, supplier_framework])
db.session.commit()
dos2_brief_id = dos2_brief.id
for i in range(2):
self.setup_dummy_brief_response(brief_id=self.brief_id, supplier_id=0)
self.setup_dummy_brief_response(brief_id=dos2_brief_id, supplier_id=0)
res = self.list_brief_responses(dict(
framework='digital-outcomes-and-specialists, digital-outcomes-and-specialists-2'
))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 4
dos1_br = [
br for br in data['briefResponses']
if br['brief']['framework']['slug'] == "digital-outcomes-and-specialists"
]
dos2_br = [
br for br in data['briefResponses']
if br['brief']['framework']['slug'] == "digital-outcomes-and-specialists-2"
]
assert len(dos1_br) == len(dos2_br) == 2
def test_cannot_list_brief_responses_for_non_integer_brief_id(self):
res = self.list_brief_responses(dict(brief_id="not-valid"))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data['error'] == 'Invalid brief_id: not-valid'
def test_cannot_list_brief_responses_for_non_integer_supplier_id(self):
res = self.list_brief_responses(dict(supplier_id="not-valid"))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 400
assert data['error'] == 'Invalid supplier_id: not-valid'
def test_filter_brief_response_only_includes_submitted_pending_awarded_and_awarded_by_default(self):
self.setup_dummy_brief_response(submitted_at=None) # draft response not to be included in result
self.setup_dummy_brief_response()
self.setup_dummy_brief_response(award_details={"pending": True})
self.setup_dummy_awarded_brief_response(brief_id=111)
res = self.list_brief_responses()
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 3
for response in data['briefResponses']:
assert response["status"] in COMPLETED_BRIEF_RESPONSE_STATUSES
def test_filter_brief_response_list_by_draft_status(self):
self.setup_dummy_brief_response()
expected_brief_id = self.setup_dummy_brief_response(submitted_at=None)
res = self.list_brief_responses(dict(status='draft'))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 1
assert data['briefResponses'][0]['id'] == expected_brief_id
def test_filter_brief_response_list_by_submitted_status(self):
expected_brief_id = self.setup_dummy_brief_response()
self.setup_dummy_brief_response(submitted_at=None)
res = self.list_brief_responses(dict(status='submitted'))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 1
assert data['briefResponses'][0]['id'] == expected_brief_id
def test_filter_brief_response_list_for_all_statuses(self):
self.setup_dummy_brief_response(submitted_at=None)
self.setup_dummy_brief_response()
self.setup_dummy_brief_response(award_details={"pending": True})
self.setup_dummy_awarded_brief_response(brief_id=111)
res = self.list_brief_responses(dict(status='draft,submitted,awarded,pending-awarded'))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 4
def test_filter_responses_awarded_yesterday(self):
yesterday = datetime.utcnow() - timedelta(days=1)
self.setup_dummy_brief_response(submitted_at=None)
self.setup_dummy_awarded_brief_response(brief_id=111, awarded_at=yesterday)
self.setup_dummy_awarded_brief_response(brief_id=222, awarded_at=yesterday - timedelta(days=5))
self.setup_dummy_brief_response(award_details={"pending": True})
res = self.list_brief_responses(dict(awarded_at=yesterday.strftime(DATE_FORMAT)))
data = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert len(data['briefResponses']) == 1
assert data['briefResponses'][0]['awardedAt'] == yesterday.strftime(DATETIME_FORMAT)
| 2.125
| 2
|
python/python.py
|
stewpidtnlvr/Smartproxy
| 0
|
12780198
|
import urllib.request as request
url = 'https://ipinfo.io'
username = 'username'
password = 'password'
proxy = f'http://{username}:{password}@gate.<EMAIL>:7000'
query = request.build_opener(request.ProxyHandler({'http': proxy, 'https': proxy}))
print(query.open(url).read())
| 2.640625
| 3
|
Server/index.py
|
Nycrera/p-game
| 1
|
12780199
|
import socketio
import mysql.connector
class Player:
def __init__(self,name,id,points,banned,admin,mod):
self.name=name
self.id=id
self.points=points
self.banned=banned
self.admin=admin
self.mod=mod
| 2.28125
| 2
|
Practice Round/main.py
|
sbrodehl/Hashcode2k20
| 2
|
12780200
|
<reponame>sbrodehl/Hashcode2k20<filename>Practice Round/main.py
if __name__ == '__main__':
import importlib
import argparse
parser = argparse.ArgumentParser()
# need to be
parser.add_argument("input", help="input file")
parser.add_argument("--output", help="output file")
parser.add_argument("--solver", type=str, default="example")
args = parser.parse_args()
solver = None
# try load the given solver
try:
solver = importlib.import_module('.'.join(["solver", args.solver]))
except ImportError as e:
parser.print_help()
print(e)
exit(1)
# solver init with filepath
solver = solver.Solver(args.input)
# solve the problem with given input
success = solver.solve()
if not success:
raise RuntimeError("No solution found!")
# maybe create a solution file
if args.output:
solver.write(args.output)
| 2.921875
| 3
|
smt/SMT.py
|
jeanqasaur/jeeves
| 253
|
12780201
|
'''
Translate expressions to SMT import format.
'''
from Z3 import Z3
class UnsatisfiableException(Exception):
pass
# NOTE(JY): Think about if the solver needs to know about everything for
# negative constraints. I don't think so because enough things should be
# concrete that this doesn't matter.
def solve(constraints, defaults, desiredVars):
# NOTE(JY): This is just a sketch of what should go on...
# Implement defaults by adding values to the model and
#for v in jeeveslib.env.envVars:
# jeeveslib.solver.push()
# solver.assertConstraint(v = z3.BoolVal(True))
# if (solver.check() == solver.Unsat):
# jeeveslib.solver.pop()
# Now get the variables back from the solver by evaluating all
# variables in question...
# Now return the new environment...
#return NotImplemented
solver = Z3()
result = {}
for constraint in constraints:
if constraint.type != bool:
raise ValueError("constraints must be bools")
solver.boolExprAssert(constraint)
if not solver.check():
raise UnsatisfiableException("Constraints not satisfiable")
for default in defaults:
solver.push()
if default.type != bool:
raise ValueError("defaults must be bools")
solver.boolExprAssert(default)
if not solver.isSatisfiable():
solver.pop()
assert solver.check()
result = {}
for var in desiredVars:
result[var] = solver.evaluate(var)
assert (result[var] is True) or (result[var] is False)
return result
| 2.90625
| 3
|
watchmen/pipeline/core/context/unit_context.py
|
Insurance-Metrics-Measure-Advisory/watchman-data-connector
| 125
|
12780202
|
<reponame>Insurance-Metrics-Measure-Advisory/watchman-data-connector
from model.model.pipeline.pipeline import ProcessUnit
from watchmen.monitor.model.pipeline_monitor import UnitRunStatus
from watchmen.pipeline.core.context.stage_context import StageContext
class UnitContext:
stageContext: StageContext
unit: ProcessUnit
unitStatus: UnitRunStatus
def __init__(self, stageContext, unit):
self.stageContext = stageContext
self.unit = unit
self.unitStatus = UnitRunStatus()
| 1.945313
| 2
|
tensorflow/python/distribute/distribute_coordinator_context.py
|
EricRemmerswaal/tensorflow
| 190,993
|
12780203
|
<filename>tensorflow/python/distribute/distribute_coordinator_context.py<gh_stars>1000+
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The context retrieval method for distribute coordinator."""
import threading
_worker_context = threading.local()
def get_current_worker_context():
"""Returns the current task context."""
try:
return _worker_context.current
except AttributeError:
return None
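# Editor's note: an illustrative sketch (not part of TensorFlow) showing why a
# `threading.local` is used here — each thread only sees the context it set
# itself. The worker payload string is a made-up value for demonstration.
if __name__ == "__main__":
  def _demo_worker():
    _worker_context.current = "worker-0 context"  # visible only in this thread
    assert get_current_worker_context() == "worker-0 context"
  t = threading.Thread(target=_demo_worker)
  t.start()
  t.join()
  assert get_current_worker_context() is None  # main thread has no context set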
| 1.820313
| 2
|
trove/tests/api/root_on_create.py
|
dadu1688/trove
| 1
|
12780204
|
<gh_stars>1-10
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from trove.common import cfg
from proboscis import before_class
from proboscis import after_class
from proboscis import test
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_not_equal
from proboscis.asserts import assert_true
from trove import tests
from trove.tests.api.users import TestUsers
from trove.tests.api.instances import instance_info
from trove.tests import util
from trove.tests.api.databases import TestMysqlAccess
CONF = cfg.CONF
GROUP = "dbaas.api.root.oncreate"
@test(depends_on_classes=[TestMysqlAccess],
runs_after=[TestUsers],
groups=[tests.DBAAS_API, GROUP, tests.INSTANCES])
class TestRootOnCreate(object):
"""
Test 'CONF.root_on_create', which if True, creates the root user upon
database instance initialization.
"""
root_enabled_timestamp = 'Never'
@before_class
def setUp(self):
self.orig_conf_value = CONF.root_on_create
CONF.root_on_create = True
self.dbaas = util.create_dbaas_client(instance_info.user)
self.dbaas_admin = util.create_dbaas_client(instance_info.admin_user)
self.history = self.dbaas_admin.management.root_enabled_history
self.enabled = self.dbaas.root.is_root_enabled
@after_class
def tearDown(self):
CONF.root_on_create = self.orig_conf_value
@test
def test_root_on_create(self):
"""Test that root is enabled after instance creation"""
enabled = self.enabled(instance_info.id).rootEnabled
assert_equal(200, self.dbaas.last_http_code)
assert_true(enabled)
@test(depends_on=[test_root_on_create])
def test_history_after_root_on_create(self):
"""Test that the timestamp in the root enabled history is set"""
self.root_enabled_timestamp = self.history(instance_info.id).enabled
assert_equal(200, self.dbaas.last_http_code)
assert_not_equal(self.root_enabled_timestamp, 'Never')
@test(depends_on=[test_history_after_root_on_create])
def test_reset_root(self):
"""Test that root reset does not alter the timestamp"""
orig_timestamp = self.root_enabled_timestamp
self.dbaas.root.create(instance_info.id)
assert_equal(200, self.dbaas.last_http_code)
self.root_enabled_timestamp = self.history(instance_info.id).enabled
assert_equal(200, self.dbaas.last_http_code)
assert_equal(orig_timestamp, self.root_enabled_timestamp)
@test(depends_on=[test_reset_root])
def test_root_still_enabled(self):
"""Test that after root was reset, it's still enabled."""
enabled = self.enabled(instance_info.id).rootEnabled
assert_equal(200, self.dbaas.last_http_code)
assert_true(enabled)
| 1.835938
| 2
|
T53_MaxSubArray.py
|
zoubohao/LeetCodes
| 0
|
12780205
|
<gh_stars>0
from typing import List
### dp array
class Solution:
def maxSubArray(self, nums: List[int]) -> int:
n = len(nums)
dpArray = [0 for _ in range(n)]
ans = nums[0]
dpArray[0] = nums[0]
for i in range(1, n):
dpArray[i] = max(dpArray[i - 1] + nums[i], nums[i])
if dpArray[i] > ans:
ans = dpArray[i]
return ans
s = Solution()
nums = [-2,1,-3,4,-1,2,1,-5,4]
print(s.maxSubArray(nums))
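# Editor's note: a sketch of the same recurrence (Kadane's algorithm) kept in
# O(1) extra space instead of a full dp array; reuses `nums` defined above.
def max_sub_array_constant_space(nums: List[int]) -> int:
    best = cur = nums[0]
    for x in nums[1:]:
        cur = max(cur + x, x)   # dp[i] = max(dp[i - 1] + nums[i], nums[i])
        best = max(best, cur)
    return best
print(max_sub_array_constant_space(nums))  # expected 6, same as above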
| 3.1875
| 3
|
setup.py
|
oVirt/ovirt-scheduler-proxy
| 2
|
12780206
|
import os
from setuptools import setup, find_packages
# Utility function to read the README file.
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='ovirt-scheduler-proxy',
version=read('VERSION').strip(),
license='ASL2',
description='oVirt Scheduler Proxy',
author='<NAME>',
author_email='<EMAIL>',
url='http://www.ovirt.org/Features/oVirt_External_Scheduling_Proxy',
packages=find_packages("src"),
package_dir={'': 'src'},
long_description=read('README'),
)
| 1.65625
| 2
|
title_classification/train_word2vec.py
|
sun-yitao/PsychicLearners
| 14
|
12780207
|
<filename>title_classification/train_word2vec.py
import gensim
from gensim.models import Word2Vec
from gensim.utils import simple_preprocess
from pathlib import Path
from multiprocessing import cpu_count
psychic_learner_dir = Path.cwd().parent
data_dir = psychic_learner_dir / 'data'
with open(str(data_dir / 'titles.txt'), 'r') as f:
sentences = f.read().splitlines()
print(len(sentences))
for i, sentence in enumerate(sentences):
sentences[i] = simple_preprocess(sentence)
model = Word2Vec(sentences=sentences, size=100, ns_exponent=-0.5, workers=cpu_count())
model.save("word2vec.bin")
from gensim.models.keyedvectors import KeyedVectors
#model.wv.save_word2vec_format('word2vec.bin', binary=True)
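# Editor's note: a minimal sketch of reloading and querying the trained model.
# The probe word "shirt" is only an assumption about the vocabulary in
# titles.txt; replace it with any token that actually occurs there.
reloaded = Word2Vec.load("word2vec.bin")
probe = "shirt"  # hypothetical vocabulary word
if probe in reloaded.wv:
    print(reloaded.wv.most_similar(probe, topn=5))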
| 2.578125
| 3
|
memoboard/__init__.py
|
codebyravi/MemoBoard
| 0
|
12780208
|
<reponame>codebyravi/MemoBoard<gh_stars>0
"""
Everything that needs to be set up to get flask running is initialized in this file.
* set up and configure the app
* start the database (db)
* load all (!) models used (essential to create the database using db_create)
* load all (!) controllers
* load api
* Set up blueprints
"""
from flask import Flask, Blueprint
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from flask_htmlmin import HTMLMIN
from flask_restful import Api
from flask_compress import Compress
from flask_migrate import Migrate
db = SQLAlchemy()
migrate = Migrate()
htmlmin = HTMLMIN()
ma = Marshmallow()
compress = Compress()
api = Api()
def create_app(config):
# Set up app, database and login manager before importing models and controllers
# Important for db_create script
app = Flask(__name__)
app.config.from_object(config)
db.app = app
db.init_app(app)
migrate.init_app(app, db=db, directory=app.config['SQLALCHEMY_MIGRATE_REPO'])
ma.init_app(app)
# Enable Compress
compress.init_app(app)
# Enable HTMLMIN
htmlmin.init_app(app)
from memoboard.models import MemoList, MemoItem
from memoboard.controllers import main
# Flask-Restful api
from memoboard.api_resources import MemoListsResource, MemoListResource
from memoboard.api_resources import MemoListItemsResource, MemoListItemResource
api_bp = Blueprint('api', __name__)
api.init_app(api_bp)
# Register Blueprints
app.register_blueprint(main)
app.register_blueprint(api_bp, url_prefix='/api')
return app
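# Editor's note: an illustrative sketch of driving this factory from an entry
# point such as a run.py; `DevelopmentConfig` is a hypothetical config object,
# the real project presumably defines its own config classes elsewhere.
#
# from memoboard import create_app
# from config import DevelopmentConfig # hypothetical config class
#
# app = create_app(DevelopmentConfig)
# app.run(debug=True)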
| 2.703125
| 3
|
bin/utils.py
|
levathan/systemds
| 1
|
12780209
|
#!/usr/bin/env python
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
import sys
import os
from os.path import join, exists
from os import environ
import shutil
def get_env_systemds_root():
"""
Env variable error check and path location
return: String
Location of SYSTEMDS_ROOT
"""
systemds_root = os.environ.get('SYSTEMDS_ROOT')
if systemds_root is None:
#print('SYSTEMDS_ROOT not found')
#sys.exit()
fn = sys.argv[0]
systemds_root = fn[:fn.rfind('/')] + '/..'
return systemds_root
def get_env_spark_root():
"""
Env variable error check and path location
return: String
Location of SPARK_ROOT
"""
spark_root = environ.get('SPARK_ROOT')
if spark_root is None:
print('SPARK_ROOT not found')
sys.exit()
return spark_root
def find_file(name, path):
"""
Responsible for finding a specific file recursively given a location
"""
for root, dirs, files in os.walk(path):
if name in files:
return join(root, name)
def find_dml_file(systemds_root, script_file):
"""
Find the location of DML script being executed
return: String
Location of the dml script
"""
scripts_dir = join(systemds_root, 'scripts')
if not exists(script_file):
script_file_path = find_file(script_file, scripts_dir)
if script_file_path is not None:
return script_file_path
else:
print('Could not find DML script: ' + script_file)
sys.exit()
return script_file
def log4j_path(systemds_root):
"""
Create log4j.properties from the template if not exist
return: String
Location of log4j.properties path
"""
log4j_properties_path = join(systemds_root, 'conf', 'log4j.properties')
log4j_template_properties_path = join(systemds_root, 'conf', 'log4j.properties.template')
if not (exists(log4j_properties_path)):
shutil.copyfile(log4j_template_properties_path, log4j_properties_path)
print('... created ' + log4j_properties_path)
return log4j_properties_path
def config_path(systemds_root):
"""
Create SystemDS-config from the template if not exist
return: String
Location of SystemDS-config.xml
"""
systemds_config_path = join(systemds_root, 'conf', 'SystemDS-config.xml')
systemds_template_config_path = join(systemds_root, 'conf', 'SystemDS-config.xml.template')
if not (exists(systemds_config_path)):
shutil.copyfile(systemds_template_config_path, systemds_config_path)
print('... created ' + systemds_config_path)
return systemds_config_path
| 1.992188
| 2
|
battlenet/utils.py
|
BuloZB/battlenet
| 0
|
12780210
|
import unicodedata
import re
import urllib
_slugify_strip_re = re.compile(r'[^\w\s-]')
_slugify_hyphenate_re = re.compile(r'[-\s]+')
def slugify(value):
slug = unicode(_slugify_strip_re.sub('', normalize(value)).strip().lower())
slug = _slugify_hyphenate_re.sub('-', slug)
if not slug:
return quote(value)
return quote(slug)
def normalize(name):
if not isinstance(name, unicode):
name = name.decode('utf8')
return unicodedata.normalize('NFKC', name).encode('utf8')
def quote(name):
return urllib.quote(normalize(name))
def make_icon_url(region, icon, size='large'):
if size == 'small':
size = 18
else:
size = 56
return 'http://%s.media.blizzard.com/wow/icons/%d/%s.jpg' % (region, size, icon)
def make_connection():
if not hasattr(make_connection, 'Connection'):
from .connection import Connection
make_connection.Connection = Connection
return make_connection.Connection()
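# Editor's note: illustrative (Python 2) usage of the helpers above, with
# hypothetical inputs:
# slugify(u"Bleeding Hollow") -> 'bleeding-hollow'
# make_icon_url('us', 'inv_sword_04') -> 'http://us.media.blizzard.com/wow/icons/56/inv_sword_04.jpg'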
| 2.875
| 3
|
slippy/contact/lubricant_models.py
|
KDriesen/slippy
| 12
|
12780211
|
"""
Common sub models for lubricants
"""
import numpy as np
__all__ = ['constant_array_property', 'roelands', 'barus', 'nd_barus', 'nd_roelands', 'dowson_higginson',
'nd_dowson_higginson']
def constant_array_property(value: float):
""" Produce a closure that returns an index able constant value
Parameters
----------
value: float
The value of the constant
Returns
-------
inner: closure
A closure that returns a fully populated array the same size as the just_touching_gap keyword argument, this is
guaranteed to be in the current state dict, and therefore passed as a keyword when sub models are saved.
Notes
-----
Using this closure means that lubrication steps can be written for the general case, using indexing on fluid
properties.
See Also
--------
constant_array_property
Examples
--------
>>> closure = constant_array_property(1.23)
>>> constant_array = closure(just_touching_gap = np.ones((5,5)))
>>> constant_array.shape
(5, 5)
>>> constant_array[0,0]
1.23
"""
def inner(just_touching_gap: np.ndarray, **kwargs):
return np.ones_like(just_touching_gap) * value
return inner
def roelands(eta_0, pressure_0, z):
""" The roelands pressure viscosity equation
Parameters
----------
eta_0, pressure_0, z: float
Coefficients for the equation, see notes for details
Returns
-------
inner: closure
A callable that produces the viscosity terms according to the Roelands equation, see notes for details
Notes
-----
The roelands equation linking viscosity (eta) to the fluid pressure (p) is given by:
eta(p) = eta_0*exp((ln(eta_0)+9.67)*(-1+(1+(p/p_0)^z))
eta_0, p_0 and z are coefficients that depend on the oil and its temperature.
"""
ln_eta_0 = np.log(eta_0) + 9.67
def inner(pressure: np.ndarray, **kwargs):
return eta_0 * np.exp(ln_eta_0 * (-1 + (1 + pressure / pressure_0) ** z))
return inner
def nd_roelands(eta_0: float, pressure_0: float, pressure_hertzian: float, z: float):
""" The roelands pressure viscosity equation in a non dimentional form
Parameters
----------
eta_0, pressure_0, z: float
Coefficients for the equation, see notes for details
pressure_hertzian: float
The hertzian pressure used to non-dimensionalise the pressure term in the equation. Should be the same as is
used in the reynolds solver
Returns
-------
inner: closure
A callable that produces the non-dimensional viscosity according to the Roelands equation, see notes for details
Notes
-----
The roelands equation linking viscosity (eta) to the non-dimensional fluid pressure (nd_p) is given by:
eta(p)/eta_0 = exp((ln(eta_0)+9.67)*(-1+(1+(nd_p/p_0*p_h)^z))
eta_0, p_0 and z are coefficients that depend on the oil and its temperature.
p_h is the hertzian pressure used to non-dimensionalise the pressure term.
"""
ln_eta_0 = np.log(eta_0) + 9.67
p_all = pressure_hertzian / pressure_0
def inner(nd_pressure: np.ndarray, **kwargs):
return np.exp(ln_eta_0 * (-1 + (1 + p_all * nd_pressure) ** z))
return inner
def barus(eta_0: float, alpha: float):
""" The Barus pressure viscosity equation
Parameters
----------
eta_0, alpha: float
Coefficients in the equation, see notes for details
Returns
-------
inner: closure
A callable that returns the resulting viscosity according to the barus equation
Notes
-----
The Barus equation linking pressure (p) to viscosity (eta) is given by:
eta(p) = eta_0*exp(alpha*p)
In which eta_0 and alpha are coefficients which depend on the lubricant and its temperature
"""
def inner(pressure: np.ndarray, **kwargs):
return eta_0 * np.exp(alpha * pressure)
return inner
def nd_barus(pressure_hertzian: float, alpha: float):
""" A non dimentional form of the Barus equation
Parameters
----------
alpha: float
A coefficient in the Barus equation, see notes for details
pressure_hertzian: float
The hertzian pressure used to non dimensionalise the pressure
Returns
-------
inner: closure
A callable that will produce the non-dimensional viscosity according to the Barus equation
Notes
-----
The non-dimensional Barus equation relating the viscosity (eta) to the non-dimensional pressure (nd_p) is given by:
eta(p)/eta_0 = exp(alpha*p_h*nd_p)
In which alpha is a coefficient which will depend on the lubricant used and the temperature.
p_h is the hertzian pressure used to non-dimensionalise the pressure; this must be the same as is passed to the
reynolds solver.
"""
def inner(nd_pressure: np.ndarray, **kwargs):
return np.exp(alpha * pressure_hertzian * nd_pressure)
return inner
def dowson_higginson(rho_0: float):
""" The Dowson Higginson equation relating pressure to density
Parameters
----------
rho_0: float
A coefficient of the dowson higginson equation, see notes for details
Returns
-------
inner: closure
A callable that returns the density based on the pressure according to the dowson higginson equation
Notes
-----
The dowson higginson equation relating pressure (p) to density (rho) is given by:
rho(p) = rho_0 * (5.9e8+1.34*p)/(5.9e8+p)
In which rho_0 is the parameter of the equation which will depend on the lubricant used and its temperature
"""
def inner(pressure: np.ndarray, **kwargs):
return rho_0 * (5.9e8 + 1.34 * pressure) / (5.9e8 + pressure)
return inner
def nd_dowson_higginson(pressure_hertzian: float):
""" A non dimentional form of the Dowson Higginson equation relating pressure to density
Parameters
----------
pressure_hertzian: float
The hertzian pressure used to non-dimensionalise the pressure; this must match the pressure given to the
reynolds solver
Returns
-------
inner: closure
A callable that returns the non-dimensional density based on the non-dimensional pressure
Notes
-----
The non-dimensional dowson higginson equation relating non-dimensional pressure (nd_p) to density (rho) is given by:
rho(p)/rho_0 = (5.9e8+1.34*p_h*nd_p)/(5.9e8+p_h*nd_p)
In which p_h is the hertzian pressure used to non-dimensionalise the pressure and rho_0 is a parameter of the
dimensional form of the dowson higginson equation. Here the value rho(p)/rho_0 is returned
"""
constant = 5.9e8 / pressure_hertzian
def inner(nd_pressure: np.ndarray, **kwargs):
return (constant + 1.34 * nd_pressure) / (constant + nd_pressure)
return inner
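# Editor's note: a small usage sketch composing the closures above. The numeric
# coefficients are illustrative only, not recommended lubricant properties.
if __name__ == "__main__":
    viscosity = roelands(eta_0=0.08, pressure_0=1.96e8, z=0.68)
    density = dowson_higginson(rho_0=870.0)
    pressure = np.linspace(0.0, 1.0e9, 5)
    print(viscosity(pressure=pressure))  # Pa s at each pressure sample
    print(density(pressure=pressure))    # kg/m^3 at each pressure sample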
| 2.84375
| 3
|
fecha_horapy.py
|
miruimi/edem2021MDA
| 0
|
12780212
|
# -*- coding: utf-8 -*-
#!/usr/bin/python
import time
ahora = time.strftime("%c")
## date and time representation
print "Fecha y hora " + time.strftime("%c")
time.sleep(10)
| 3.75
| 4
|
plugins/quetz_content_trust/quetz_content_trust/repo_signer.py
|
maresb/quetz
| 108
|
12780213
|
import os
import shutil
from pathlib import Path
import conda_content_trust.signing as cct_signing
class RepoSigner:
def sign_repodata(self, repodata_fn, pkg_mgr_key):
final_fn = self.in_folder / "repodata_signed.json"
print("copy", repodata_fn, final_fn)
shutil.copyfile(repodata_fn, final_fn)
cct_signing.sign_all_in_repodata(str(final_fn), pkg_mgr_key)
print(f"Signed {final_fn}")
def __init__(self, in_folder, pkg_mgr_key):
self.in_folder = Path(in_folder).resolve()
f = os.path.join(self.in_folder, "repodata.json")
if os.path.isfile(f):
self.sign_repodata(Path(f), pkg_mgr_key)
| 2.46875
| 2
|
mmelemental/models/molecule/rdkit_mol.py
|
ccbiozhaw/mmelemental
| 6
|
12780214
|
<gh_stars>1-10
from pydantic import Field
from typing import List, Dict, Any
from .gen_mol import ToolkitMol
from mmelemental.util.decorators import require
try:
from rdkit import Chem
from rdkit.Chem import AllChem
except ImportError: # pragma: no cover
Chem = AllChem = None # pragma: no cover
class Bond:
"""RDKit-based bond order: {0: unspecified, 1: single, etc., up to 21}"""
orders = list(Chem.BondType.values.values())
class RDKitMol(ToolkitMol):
mol: Chem.rdchem.Mol = Field(..., description="Rdkit molecule object.")
@require("rdkit")
def __init__(self, **kwargs):
super().__init__(**kwargs)
@property
def dtype(self):
return "rdkit"
@classmethod
def gen3D(cls, mol, nConformers=1) -> Chem.rdchem.Mol:
"""Generates 3D coords for a molecule. Should be called only when instantiating a Molecule object.
:note: a single unique molecule is assumed.
"""
rdkmol = Chem.AddHs(mol)
# create n conformers for molecule
confargs = AllChem.EmbedMultipleConfs(rdkmol, nConformers)
# Energy optimize
for confId in confargs:
AllChem.UFFOptimizeMolecule(rdkmol, confId=confId)
return rdkmol
@classmethod
def remove_residues(cls, mol, residues: List[str]) -> Chem.rdchem.Mol:
atoms = mol.GetAtoms()
RWmol = Chem.RWMol(mol)
for atom in atoms:
if atom.GetPDBResidueInfo().GetResidueName() in residues:
RWmol.RemoveAtom(atom.GetIdx())
return Chem.Mol(RWmol)
@classmethod
def build(cls, inputs: Dict[str, Any], dtype: str = None) -> "RDKitMol":
"""Creates an instance of RDKitMol object storing rdkit.Chem.Mol.
This is done by parsing an input file (pdb, ...) or a chemical code (smiles, ...).
"""
if inputs.file:
filename = inputs.file.path
if not dtype:
dtype = filename.ext
if dtype == ".pdb" or dtype == "pdb":
rdkmol = Chem.MolFromPDBFile(filename, sanitize=False, removeHs=False)
elif dtype == ".mol" or dtype == "mol":
rdkmol = Chem.MolFromMolFile(filename, sanitize=False, removeHs=False)
elif dtype == ".mol2" or dtype == "mol2":
rdkmol = Chem.MolFromMol2File(filename, sanitize=False, removeHs=False)
elif dtype == ".tpl" or dtype == "tpl":
rdkmol = Chem.MolFromTPLFile(filename, sanitize=False, removeHs=False)
elif dtype == ".sdf" or dtype == "sdf":
rdkmols = Chem.SDMolSupplier(filename, sanitize=False, removeHs=False)
if len(rdkmols) > 1:
raise ValueError("SDF file should contain a single molecule")
else:
rdkmol = rdkmols[0] # should we support multiple molecules?
else:
raise ValueError(f"Unrecognized file type: {dtype}")
# construct RDKit molecule from identifiers
elif inputs.code:
code_type = inputs.code.code_type
function = getattr(
Chem, f"MolFrom{code_type}"
) # should work since validation already done by ChemCode!
rdkmol = function(inputs.code.code)
else:
raise ValueError("Missing input file or code.")
if inputs.code:
rdkmol = cls.gen3D(rdkmol)
return cls(mol=rdkmol)
| 2.140625
| 2
|
GradientBoosting/test_gradient_boosting.py
|
ajinChen/Sklearn_ML_Implementation
| 1
|
12780215
|
import numpy as np
from gradient_boosting import *
def test_train_predict():
X_train, y_train = load_dataset("data/tiny.rent.train")
X_val, y_val = load_dataset("data/tiny.rent.test")
y_mean, trees = gradient_boosting_mse(X_train, y_train, 5, max_depth=2, nu=0.1)
assert(np.around(y_mean, decimals=4)== 3839.1724)
y_hat_train = gradient_boosting_predict(X_train, trees, y_mean, nu=0.1)
assert(np.around(r2_score(y_train, y_hat_train), decimals=4)==0.5527)
y_hat = gradient_boosting_predict(X_val, trees, y_mean, nu=0.1)
assert(np.around(r2_score(y_val, y_hat), decimals=4)==0.5109)
| 2.765625
| 3
|
khoros/objects/roles.py
|
truthbyron/khoros
| 0
|
12780216
|
# -*- coding: utf-8 -*-
"""
:Module: khoros.objects.roles
:Synopsis: This module includes functions that handle roles and permissions.
:Usage: ``from khoros.objects import roles``
:Example: ``count = roles.get_total_role_count()``
:Created By: <NAME>
:Last Modified: <NAME>
:Modified Date: 17 Jul 2020
"""
from .. import api, liql, errors
from ..utils import log_utils
# Initialize the logger for this module
logger = log_utils.initialize_logging(__name__)
ROLE_TYPES = {
'board': 'b',
'category': 'c',
'group_hub': 'g',
'top_level': 't'
}
def get_total_role_count(khoros_object, return_dict=False, total=True, top_level=False, board=False, category=False,
group_hub=False):
"""This function retrieves the total role count for one or more role type(s).
.. versionadded:: 2.4.0
:param khoros_object: The core :py:class:`khoros.Khoros` object
:type khoros_object: class[khoros.Khoros]
:param return_dict: Determines if the data should be returned as a dictionary (``False`` by default)
:type return_dict: bool
:param total: Indicates that the total overall role count should be returned (``True`` by default)
:type total: bool
:param top_level: Indicates that the total top-level role count should be returned (``False`` by default)
:type top_level: bool
:param board: Indicates that the total board-level role count should be returned (``False`` by default)
:type board: bool
:param category: Indicates that the total category-level role count should be returned (``False`` by default)
:type category: bool
:param group_hub: Indicates that the total group hub-level role count should be returned (``False`` by default)
:type group_hub: bool
:returns: The role count(s) as an integer, tuple or dictionary, depending on the arguments supplied
:raises: :py:exc:`khoros.errors.exceptions.InvalidRoleTypeError`
"""
response = liql.perform_query(khoros_object, liql_query="SELECT id FROM roles", verify_success=True)
counts = {
'total': api.get_results_count(response)
}
if not total:
del counts['total']
if top_level or board or category or group_hub:
roles_dict = api.get_items_list(response)
count_types = {'top_level': top_level, 'board': board, 'category': category, 'group_hub': group_hub}
for count_type, should_count in count_types.items():
if should_count:
counts[count_type] = count_role_types(count_type, roles_dict)
if not return_dict:
if len(counts) == 1:
counts = counts.get(list(counts.keys())[0])
else:
counts = tuple(counts.values())
return counts
def count_role_types(role_type, roles_dict):
"""This function returns the total count for a specific role type.
.. versionadded:: 2.4.0
:param role_type: The role type for which to return the count (e.g. ``board``, ``category``, etc.)
:type role_type: str
:param roles_dict: Dictionary of the roles for a given Khoros Community environment
:type roles_dict: dict
:returns: The total count for the role type as an integer
:raises: :py:exc:`khoros.errors.exceptions.InvalidRoleTypeError`
"""
if role_type not in ROLE_TYPES.keys() and role_type not in ROLE_TYPES.values():
raise errors.exceptions.InvalidRoleTypeError(role_type=role_type)
elif role_type in ROLE_TYPES:
role_type = ROLE_TYPES.get(role_type)
count = 0
for role in roles_dict:
if role['id'].startswith(f"{role_type}:"):
count += 1
return count
def get_roles_for_user(khoros_object, user_id):
"""This function returns all roles associated with a given User ID.
.. versionadded:: 2.4.0
:param khoros_object: The core :py:class:`khoros.Khoros` object
:type khoros_object: class[khoros.Khoros]
:param user_id: The User ID for which to retrieve the roles data
:returns: A dictionary with data for each role associated with the given User ID
:raises: :py:exc:`khoros.errors.exceptions.GETRequestError`
"""
response = liql.perform_query(khoros_object, liql_query=f"SELECT * FROM roles WHERE users.id = '{user_id}'",
verify_success=True)
return api.get_items_list(response)
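# Editor's note: an illustrative sketch of calling the helpers above; `khoros`
# stands in for an already-authenticated khoros.Khoros instance and the user ID
# is a made-up value.
#
# from khoros import Khoros
# khoros = Khoros(defined_settings) # hypothetical connection settings
# counts = get_total_role_count(khoros, return_dict=True, board=True, category=True)
# user_roles = get_roles_for_user(khoros, user_id='216')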
| 2.625
| 3
|
machine/storage/backends/base.py
|
drdarina/slack-machine
| 111
|
12780217
|
<reponame>drdarina/slack-machine<filename>machine/storage/backends/base.py
class MachineBaseStorage:
"""Base class for storage backends
Extending classes should implement the five methods in this base class. Slack Machine takes
care of a lot of details regarding the persistent storage of data. So storage backends
**do not** have to deal with the following, because Slack Machine takes care of these:
- Serialization/Deserialization of data
- Namespacing of keys (so data stored by different plugins doesn't clash)
"""
def __init__(self, settings):
self.settings = settings
def get(self, key):
"""Retrieve data by key
:param key: key for which to retrieve data
:return: the raw data for the provided key, as (byte)string. Should return ``None`` when
the key is unknown or the data has expired.
"""
raise NotImplementedError
def set(self, key, value, expires=None):
"""Store data by key
:param key: the key under which to store the data
:param value: data as (byte)string
:param expires: optional expiration time in seconds, after which the data should not be
returned any more.
"""
raise NotImplementedError
def delete(self, key):
"""Delete data by key
:param key: key for which to delete the data
"""
raise NotImplementedError
def has(self, key):
"""Check if the key exists
:param key: key to check
:return: ``True/False`` whether the key exists
"""
raise NotImplementedError
def size(self):
"""Calculate the total size of the storage
:return: total size of storage in bytes (integer)
"""
raise NotImplementedError
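# Editor's note: a minimal, illustrative in-memory backend built on the base
# class above, with best-effort expiry; the project's real backends (e.g. its
# built-in memory or Redis storage) may differ in detail.
import sys
import time as _time
class MemoryStorage(MachineBaseStorage):
    def __init__(self, settings):
        super().__init__(settings)
        self._data = {}  # key -> (value, expiry timestamp or None)

    def get(self, key):
        stored = self._data.get(key)
        if stored is None:
            return None
        value, expiry = stored
        if expiry is not None and _time.time() > expiry:
            del self._data[key]  # data has expired, behave as if unknown
            return None
        return value

    def set(self, key, value, expires=None):
        expiry = _time.time() + expires if expires else None
        self._data[key] = (value, expiry)

    def delete(self, key):
        self._data.pop(key, None)

    def has(self, key):
        return self.get(key) is not None

    def size(self):
        return sum(sys.getsizeof(v) for v, _ in self._data.values())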
| 3.015625
| 3
|
source/utils/db/manager.py
|
goakgun/falcon-simple-api
| 0
|
12780218
|
<filename>source/utils/db/manager.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging
import sqlalchemy
from sqlalchemy import orm
from sqlalchemy.orm import scoping
from utils.db import models
from passlib.hash import sha256_crypt
class DBManager(object):
def __init__(self, connection=None, pool_recycle=None):
self.connection = connection
self.engine = sqlalchemy.create_engine(self.connection, pool_recycle=pool_recycle, pool_pre_ping=True)
self.DBSession = scoping.scoped_session(
orm.sessionmaker(
bind=self.engine,
autocommit=True
)
)
logging.info('Database connection has been established successfully.')
@property
def session(self):
return self.DBSession()
def setup(self, users):
# Normally we would add whatever db setup code we needed here.
# This will work fine for the ORM
try:
models.SAModel.metadata.create_all(self.engine)
except Exception as e:
logging.error('Could not initialize DB: {}'.format(e))
# Create Application Users
for user in users:
hashed_password = sha256_crypt.hash(user['password'])
user['password'] = <PASSWORD>
user_exists = self.DBSession.query(models.Users).filter_by(user_id=user['user_id'], email=user['email']).first() is not None
if not user_exists:
try:
self.DBSession.add(models.Users(**user))
except Exception as e:
logging.error('Could not insert user into DB: {}'.format(e))
| 2.53125
| 3
|
hello_helpers/src/hello_helpers/fit_plane.py
|
hcrlab/stretch_ros
| 0
|
12780219
|
<filename>hello_helpers/src/hello_helpers/fit_plane.py<gh_stars>0
#!/usr/bin/env python3
import numpy as np
import cv2
def fit_plane_to_height_image(height_image, mask):
# Perform a least squares fit of a plane to the masked region of
# the height_image. Find the 3 element vector a for the equation
# aX ~= z where X[:,i] = [x_i, y_i, 1]^T, z[i] = z_i and a=[alpha,
# beta, gamma] such that alpha*x + beta*y + gamma ~= z .
z = height_image[mask > 0]
nonzero = cv2.findNonZero(mask)
perform_test = False
if perform_test:
print('z.shape =', z.shape)
print(z)
for n in range(10):
test_x, test_y = nonzero[n][0]
test_z = height_image[test_y, test_x]
print('x, y, z, z_test =', test_x, test_y, test_z, z[n])
num_points, s1, s2 = nonzero.shape
nonzero = np.reshape(nonzero, (num_points, 2))
X_T = np.append(nonzero, np.ones((num_points,1)), axis=1)
a0 = np.matmul(z, X_T)
A1 = np.matmul(X_T.transpose(), X_T)
A1 = np.linalg.inv(A1)
a = np.matmul(a0, A1)
X = X_T.transpose()
# aX ~= z
return a, X, z
def fit_plane_to_height_image_error(a, X, z):
# Calculate the fit error for the plane.
z_fit = np.matmul(a, X)
fit_error = z - z_fit
return fit_error, z_fit
def svd_fit(points, verbose=False):
# calculate and subtract the mean
center = np.mean(points, axis=0)
if verbose:
print( 'center =', center )
# make the point distribution have zero mean
points_zero_mean = points - center
if verbose:
print( 'points_zero_mean[:5] =', points_zero_mean[:5] )
print( 'points_zero_mean.shape =', points_zero_mean.shape )
# find the covariance matrix, C, for the data
C = np.cov(points_zero_mean.transpose())
# find the SVD of the covariance matrix
u, s, vh = np.linalg.svd(C)
e0 = np.reshape(u[:, 0], (3,1))
e1 = np.reshape(u[:, 1], (3,1))
e2 = np.reshape(u[:, 2], (3,1))
center = np.reshape(center, (3,1))
return center, e0, e1, e2
class FitPlane():
def __init__(self):
self.d = None
self.n = None
# defines the direction from points to the camera
self.towards_camera = np.reshape(np.array([0.0, 0.0, -1.0]), (3,1))
def set_plane(self, n, d):
self.n = n
self.d = d
self.update()
def update(self):
return
def get_plane_normal(self):
return -self.n
def get_plane_coordinate_system(self):
z_p = -self.n
# two options to avoid selecting poor choice that is almost
# parallel to z_p
x_approx = np.reshape(np.array([1.0, 0.0, 0.0]), (3,1))
x_approx_1 = x_approx - (np.matmul(z_p.transpose(), x_approx) * z_p)
x_approx = np.reshape(np.array([0.0, 1.0, 0.0]), (3,1))
x_approx_2 = x_approx - (np.matmul(z_p.transpose(), x_approx) * z_p)
x_approx_1_mag = np.linalg.norm(x_approx_1)
x_approx_2_mag = np.linalg.norm(x_approx_2)
if x_approx_1_mag > x_approx_2_mag:
x_p = x_approx_1 / x_approx_1_mag
else:
x_p = x_approx_2 / x_approx_2_mag
y_p = np.reshape(np.cross(z_p.flatten(), x_p.flatten()), (3,1))
p_origin = self.d * self.n
return x_p, y_p, z_p, p_origin
def get_points_on_plane(self, plane_origin=None, side_length=1.0, sample_spacing=0.01):
x_p, y_p, z_p, p_origin = self.get_plane_coordinate_system()
h = side_length/2.0
if plane_origin is None:
plane_list = [np.reshape((x_p * alpha) + (y_p * beta) + p_origin, (3,))
for alpha in np.arange(-h, h, sample_spacing)
for beta in np.arange(-h, h, sample_spacing)]
else:
plane_origin = np.reshape(plane_origin, (3, 1))
plane_list = [np.reshape((x_p * alpha) + (y_p * beta) + plane_origin, (3,))
for alpha in np.arange(-h, h, sample_spacing)
for beta in np.arange(-h, h, sample_spacing)]
plane_array = np.array(plane_list)
return plane_array
def abs_dist(self, points_array):
out = np.abs(np.matmul(self.n.transpose(), points_array.transpose()) - self.d).flatten()
return out
def height(self, points_array):
# positive is closer to the camera (e.g., above floor)
# negative is farther from the camera (e.g., below floor)?
out = - (np.matmul(self.n.transpose(), points_array.transpose()) - self.d).flatten()
return out
def get_points_nearby(self, points_array, dist_threshold_mm):
# return points that are within a distance from the current plane
if (self.n is not None) and (self.d is not None):
dist = np.abs(np.matmul(self.n.transpose(), points_array.transpose()) - self.d).flatten()
# only points < dist_threshold meters away from the plane are
# considered in the fit dist_threshold = 0.2 #1.0 #0.5 #0.2
dist_threshold_m = dist_threshold_mm / 1000.0
thresh_test = np.abs(dist) < dist_threshold_m
points = points_array[thresh_test, :]
else:
points = points_array
return points
def fit_svd(self, points_array,
dist_threshold_mm=200.0,
prefilter_points=False,
verbose=True):
# relevant numpy documentation for SVD:
#
# "When a is a 2D array, it is factorized as u @ np.diag(s) @ vh"
#
#" The rows of vh are the eigenvectors of A^H A and the
# columns of u are the eigenvectors of A A^H. In both cases
# the corresponding (possibly non-zero) eigenvalues are given
# by s**2. "
if prefilter_points:
# only fit to points near the current plane
points = self.get_points_nearby(points_array, dist_threshold_mm)
else:
points = points_array
center, e0, e1, e2 = svd_fit(points, verbose)
# find the smallest eigenvector, which corresponds to the
# normal of the plane
n = e2
# ensure that the direction of the normal matches our convention
approximate_up = self.towards_camera
if np.matmul(n.transpose(), approximate_up) > 0.0:
n = -n
if verbose:
print( 'SVD fit' )
print( 'n =', n )
print( 'np.linalg.norm(n) =', np.linalg.norm(n) )
#center = np.reshape(center, (3,1))
d = np.matmul(n.transpose(), center)
if verbose:
print( 'd =', d )
self.d = d
self.n = n
if verbose:
print( 'self.d =', self.d )
print( 'self.n =', self.n )
self.update()
def fit_ransac(self, points_array,
dist_threshold=0.2,
ransac_inlier_threshold_m=0.04,
use_density_normalization=False,
number_of_iterations=100,
prefilter_points=False,
verbose=True):
# Initial RANSAC algorithm based on pseudocode on Wikipedia
# https://en.wikipedia.org/wiki/Random_sample_consensus
if prefilter_points:
# only fit to points near the current plane
dist_threshold_mm = dist_threshold * 1000.0
points = self.get_points_nearby(points_array, dist_threshold_mm)
else:
points = points_array
num_points = points.shape[0]
indices = np.arange(num_points)
ransac_threshold_m = ransac_inlier_threshold_m
min_num_inliers = 100
approximate_up = self.towards_camera
# should be well above the maximum achievable error, since
# error is average distance in meters
best_model_inlier_selector = None
best_model_inlier_count = 0
for i in range(number_of_iterations):
if verbose:
print( 'RANSAC iteration', i )
candidate_inliers = points[np.random.choice(indices, 3), :]
c0, c1, c2 = candidate_inliers
# fit plane to candidate inliers
n = np.cross(c1 - c0, c2 - c0)
if np.dot(n, approximate_up) > 0.0:
n = -n
n = np.reshape(n / np.linalg.norm(n), (3,1))
c0 = np.reshape(c0, (3,1))
d = np.matmul(n.transpose(), c0)
dist = np.abs(np.matmul(n.transpose(), points.transpose()) - d).flatten()
select_model_inliers = dist < ransac_threshold_m
if use_density_normalization:
inliers = points[select_model_inliers]
# square grid with this many bins to a side, small
# values (e.g., 10 and 20) can result in the fit being
# biased towards edges of the planar region
num_bins = 100 # num_bins x num_bins = total bins
density_image, mm_per_pix, x_indices, y_indices = create_density_image(inliers, self, image_width_pix=num_bins, view_width_m=5.0, return_indices=True)
density_image = np.reciprocal(density_image, where=density_image!=0.0)
number_model_inliers = np.int(np.round(np.sum(density_image[y_indices, x_indices])))
else:
number_model_inliers = np.count_nonzero(select_model_inliers)
if number_model_inliers > min_num_inliers:
if verbose:
print( 'model found with %d inliers' % number_model_inliers )
if number_model_inliers > best_model_inlier_count:
if verbose:
print( 'model has more inliers than the previous best model, so updating' )
best_model_n = n
best_model_d = d
best_model_inlier_count = number_model_inliers
best_model_inlier_selector = select_model_inliers
best_model_inliers = None
best_model_error = None
elif number_model_inliers == best_model_inlier_count:
if verbose:
print( 'model has the same number of inliers as the previous best model, so comparing' )
model_inliers = points[select_model_inliers]
# error is the average distance of points from the plane
# sum_i | n^T p_i - d |
# should be able to make this faster by selecting from the already computed distances
new_error = np.average(np.abs(np.matmul(n.transpose(), model_inliers.transpose()) - d))
if best_model_inliers is None:
best_model_inliers = points[best_model_inlier_selector]
if best_model_error is None:
# should be able to make this faster by
# selecting from the already computed
# distances
best_model_error = np.average(np.abs(np.matmul(best_model_n.transpose(), best_model_inliers.transpose()) - best_model_d))
if new_error < best_model_error:
if verbose:
print( 'model has a lower error than the previous model, so updating' )
best_model_n = n
best_model_d = d
best_model_inlier_count = number_model_inliers
best_model_inlier_selector = select_model_inliers
best_model_inliers = model_inliers
best_model_error = new_error
if best_model_inlier_count > 0:
if verbose:
print( 'RANSAC FINISHED' )
print( 'new model found by RANSAC:' )
self.d = best_model_d
self.n = best_model_n
if verbose:
print( 'self.d =', self.d )
print( 'self.n =', self.n )
self.update()
else:
print( 'RANSAC FAILED TO FIND A MODEL' )
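# Editor's note: a small self-check sketch (not part of the original module).
# It samples noisy points from a known plane z = 0.1 and recovers it with
# FitPlane.fit_svd; the noise level and point count are arbitrary choices.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    xy = rng.uniform(-1.0, 1.0, (500, 2))
    z = 0.1 + 0.001 * rng.randn(500)
    points = np.hstack([xy, z.reshape(-1, 1)])
    plane = FitPlane()
    plane.fit_svd(points, verbose=False)
    print('n =', plane.n.flatten(), 'd =', float(plane.d))  # expect n ~ [0, 0, 1], d ~ 0.1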
| 2.984375
| 3
|
service_catalog/serializers/instance_serializer.py
|
a-belhadj/squest
| 0
|
12780220
|
<reponame>a-belhadj/squest<gh_stars>0
from rest_framework import serializers
from service_catalog.models import Instance
class InstanceSerializer(serializers.ModelSerializer):
class Meta:
model = Instance
fields = '__all__'
read_only_fields = ['service', 'state']
| 1.898438
| 2
|
external_tests/get_tweets.py
|
garnachod/ConcursoPolicia
| 0
|
12780221
|
<filename>external_tests/get_tweets.py
from DBbridge.ConsultasCassandra import ConsultasCassandra
import codecs
if __name__ == '__main__':
cq = ConsultasCassandra()
i = 0
with codecs.open("millon_tweets.txt", "w", "utf-8") as f_out:
for tweet in cq.getTweetsTextAndLangAndID('es', limit=1000000):
f_out.write(str(i) + ";")
f_out.write(str(tweet.id_twitter) + ";")
text = tweet.status.replace("\n", ". ").replace("\r", ". ").replace(u"\u0085", ". ").replace(u"\u2028", ". ").replace(u"\u2029", ". ").replace(";", ",")
f_out.write(text + u"\n")
i += 1
| 2.5625
| 3
|
tools/etnaviv/mmt.py
|
ilbers/etna_viv
| 121
|
12780222
|
import struct
from collections import namedtuple
def read_1(f):
return f.read(1)[0]
def read_2(f):
return struct.unpack('<H', f.read(2))[0]
def read_4(f):
return struct.unpack('<I', f.read(4))[0]
def read_8(f):
return struct.unpack('<Q', f.read(8))[0]
def read_buffer(f):
length = read_4(f)
return f.read(length)
def read_str(f):
s = read_buffer(f)
assert(s[-1] == 0)
return s[0:-1]
LogMessage = namedtuple('LogMessage', ['msg'])
Open = namedtuple('Open', ['flags', 'mode', 'fd', 'path'])
Mmap = namedtuple('Mmap', ['offset', 'prot', 'flags', 'fd', 'region_id', 'start', 'length'])
Munmap = namedtuple('Munmap', ['offset', 'region_id', 'start', 'length', 'unk1', 'unk2'])
StoreInfo = namedtuple('StoreInfo', ['msg'])
Store = namedtuple('Store', ['region_id', 'offset', 'data'])
ProcessMap = namedtuple('ProcessMap', ['msg'])
# etnaviv specific
Commit = namedtuple('Commit', [])
def parse_mmt_file(f):
while True:
ch = f.read(1)
if ch == b'':
return
elif ch == b'=' or ch == b'-': # Comment
s = b''
while True: # read until \n
ch = f.read(1)
if ch == b'\n':
break
else:
s += ch
yield LogMessage(s)
elif ch == b'o': # open
flags = read_4(f)
mode = read_4(f)
fd = read_4(f)
path = read_str(f)
assert(read_1(f) == 10)
yield Open(flags, mode, fd, path)
elif ch == b'M': # mmap
offset = read_8(f)
prot = read_4(f)
flags = read_4(f)
fd = read_4(f)
region_id = read_4(f)
start = read_8(f)
length = read_8(f)
assert(read_1(f) == 10)
yield Mmap(offset, prot, flags, fd, region_id, start, length)
elif ch == b'u': # munmap
offset = read_8(f)
region_id = read_4(f)
start = read_8(f)
length = read_8(f)
unk1 = read_8(f)
unk2 = read_8(f)
assert(read_1(f) == 10)
yield Munmap(offset, region_id, start, length, unk1, unk2)
elif ch == b'x': # store_info
info = read_str(f)
assert(read_1(f) == 10)
yield StoreInfo(info)
elif ch == b'w': # store
region_id = read_4(f)
offset = read_4(f)
length = read_1(f)
data = f.read(length)
assert(read_1(f) == 10)
yield Store(region_id, offset, data)
elif ch == b'c': # commit
assert(read_1(f) == 10)
yield Commit()
elif ch == b'y': # process map
assert(read_8(f) == 1)
msg = read_buffer(f)
assert(read_1(f) == 10)
yield ProcessMap(msg)
else:
print('Unknown ', ch)
exit(1)
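# Editor's note: an illustrative driver for the parser above; 'trace.mmt' is a
# hypothetical capture file name.
if __name__ == '__main__':
    from collections import Counter
    counts = Counter()
    with open('trace.mmt', 'rb') as f:
        for record in parse_mmt_file(f):
            counts[type(record).__name__] += 1
    print(dict(counts))  # e.g. how many Mmap, Store, Commit records were seen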
| 2.40625
| 2
|
src/catalogo.py
|
pugaru/NGKS_Shop
| 0
|
12780223
|
from catalogo.models import Categoria, Produto
class Gerencia_categoria():
def Cria_categoria(request):
nome = request.POST.get("nome")
slug = request.POST.get("slug")
Categoria.objects.create(nome=nome, slug=slug)
def Atualiza_categoria(request, slug):
nome = request.POST.get("nome")
categoria = Categoria.objects.get(slug=slug)
categoria.nome = nome
categoria.save()
def Deleta_categoria(delete):
Categoria.objects.get(id=delete).delete()
class Gerencia_produto():
def Cria_produto(request):
nome = request.POST.get("nome")
slug = request.POST.get("slug")
categoria = request.POST.get("categoria")
descricao = request.POST.get("descricao")
price = request.POST.get("price")
Produto.objects.create(nome=nome, slug=slug, categoria_id=int(categoria), descricao=descricao, price=price)
def Atualiza_produto(request, slug):
nome = request.POST.get("nome")
categoria = request.POST.get("categoria")
descricao = request.POST.get("descricao")
price = request.POST.get("price")
produto = Produto.objects.get(slug=slug)
produto.nome = nome
produto.categoria_id = categoria
produto.descricao = descricao
produto.price = price
produto.save()
def Deleta_produto(delete):
Produto.objects.get(id=delete).delete()
| 2.578125
| 3
|
motor_controller/src/refactored/src/utilities.py
|
maf2418/Perceptbot
| 2
|
12780224
|
PKG_NAME = "motion_control"
PID_SETTINGS_FILE = "pid_settings"
ENCODER_SETTINGS_FILE = "encoder_settings"
LINE = 0.15 # M/S
TURN = 0.075 # M/S
BASE_WIDTH = 0.13 # m
TICKS_PER_METER = 209 # TICKS_PER_REV / (WHEEL_RADIUS * 2 * PI)
MAX_PWM = 16 # hard max to preserve motors via clamp
SPEED_AT_100_PWM = 3.0 # m/s, used to scale PID errors
DEFAULT_TURNING_RADIUS = 1 / (3.14 * BASE_WIDTH)
def update_PID_msg(msg, pwm, target_vel, encoder_vel, scale=1):
msg.PWM = pwm
msg.targetVel = target_vel * scale
msg.encoderVel = encoder_vel * scale
def clamp(val, min_val, max_val):
return max(min(val, max_val), min_val)
# numpad indexing
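# twist_to_index maps a Twist to a numpad-style index 1-9: 5 = stop, 8 = forward, 2 = reverse, and the turn component shifts one column left or right.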
def twist_to_index(twist):
vec_angular, vec_linear = twist.angular, twist.linear
if vec_angular.z > 0.05:
turn = 1
elif vec_angular.z < -0.05:
turn = -1
else:
turn = 0
if vec_linear.x > 0.05:
forward = 1
elif vec_linear.x < -0.05:
forward = -1
else:
forward = 0
return 5 - turn + 3 * forward
def twist_to_wheel_vel(twist):
vec_angular, vec_linear = twist.angular, twist.linear
left_vel = vec_linear.x - 0.5 * vec_angular.z * BASE_WIDTH
right_vel = vec_linear.x + 0.5 * vec_angular.z * BASE_WIDTH
return left_vel, right_vel
| 2.171875
| 2
|
mesh2sh/vonmises_kde.py
|
le-Greg/mesh2sh
| 0
|
12780225
|
# Adaptation from https://github.com/williamjameshandley/spherical_kde
# For the rule of thumb : https://arxiv.org/pdf/1306.0517.pdf
# Exact risk improvement of bandwidth selectors for kernel density estimation with directional data
# <NAME>
import math
import scipy.optimize
import scipy.special
import torch
from .geometry import spherical_to_cartesian
def logsinh(x):
"""
Compute log(sinh(x)), stably for large x.
:param x : torch.tensor, argument to evaluate at, must be positive
:return torch.tensor, log(sinh(x))
"""
if torch.any(x < 0):
raise ValueError("logsinh only valid for positive arguments")
return x + torch.log(0.5 - torch.exp(-2 * x) / 2)
def maxlikelihood_kappa(data) -> float:
"""
Estimate kappa if data follows a Von Mises Fisher distribution
https://en.wikipedia.org/wiki/Von_Mises%E2%80%93Fisher_distribution#Estimation_of_parameters
:param data : torch.tensor, [Nx3], xyz coordinates of points on the sphere
:return float
"""
r = data.mean(dim=0).square().sum(0).sqrt().item()
def a_p(kappa):
return scipy.special.iv(3 / 2, kappa) / scipy.special.iv(3 / 2 - 1, kappa) - r
opti_kappa = scipy.optimize.brentq(a_p, 1e-4, 1e2)
return opti_kappa
def h_rot(data):
"""
Rule-of-thumb bandwidth hrot for Kernel Density Estimation using Von Mises Fisher kernels
Typo in the original paper, see : https://github.com/egarpor/DirStats/blob/master/R/bw-pi.R
:param data : torch.tensor, [Nx3], xyz coordinates of points on the sphere
:return float
"""
kappa = maxlikelihood_kappa(data)
n = data.shape[0]
num = 8 * math.sinh(kappa) ** 2
den = (-2 * kappa * math.cosh(2 * kappa) + (1 + 4 * kappa ** 2) * math.sinh(2 * kappa)) * n
return (num / den) ** (1 / 6)
class SphereKDE:
"""
Spherical kernel density estimator, using Von Mises Fisher kernels
Inspired by https://github.com/williamjameshandley/spherical_kde
"""
def __init__(self, data_pts, chunk_matmul=10000):
self.pts = data_pts
self.device = data_pts.device
self.chunk_matmul = chunk_matmul
def __call__(self, sampling_pts, bandwidth):
sampling = sampling_pts.to(self.device)
kappa = torch.tensor(1 / (bandwidth ** 2), device=self.device)
logc = torch.log(kappa / (4 * math.pi)) - logsinh(kappa)
# kernels = logc + torch.matmul(sampling, self.pts.T) * kappa
# pdf = torch.exp(torch.logsumexp(kernels, dim=1)) / self.pts.shape[0]
pdf = torch.empty([sampling.shape[0]], device=sampling.device, dtype=sampling.dtype)
for i in range(sampling.shape[0] // self.chunk_matmul + 1): # Solve memory limitations
chk_sampling = sampling[i * self.chunk_matmul:(i + 1) * self.chunk_matmul]
kernels = logc + torch.matmul(chk_sampling, self.pts.T) * kappa
pdf[i * self.chunk_matmul:(i + 1) * self.chunk_matmul] = \
torch.exp(torch.logsumexp(kernels, dim=1)) / self.pts.shape[0]
return pdf
def vonmisesfisher_kde(data_theta, data_phi, x_theta, x_phi, bandwidth=None):
"""
Perform Von Mises-Fisher Kernel Density Estimation (used for spherical data)
:param data_theta: 1D torch.float tensor, containing theta values for training data, between 0 and pi
:param data_phi: 1D torch.float tensor, containing phi values for training data, between 0 and 2*pi
:param x_theta: torch.float tensor, containing theta values for sampling points, between 0 and pi
:param x_phi: torch.float tensor, containing phi values for sampling points, between 0 and 2*pi
    :param bandwidth: smoothing bandwidth. If None, the rule-of-thumb bandwidth is used
:return: torch tensor of interpolated values, of the same shape as x_theta and x_phi
"""
data_pts = torch.stack(spherical_to_cartesian(torch.ones_like(data_theta), data_theta, data_phi), dim=1)
if bandwidth is None:
bandwidth = h_rot(data_pts)
assert x_theta.shape == x_phi.shape
shape = x_theta.shape
x_theta, x_phi = x_theta.flatten(), x_phi.flatten()
sampling_pts = torch.stack(spherical_to_cartesian(torch.ones_like(x_theta), x_theta, x_phi), dim=1)
pdf = SphereKDE(data_pts)(sampling_pts, bandwidth=bandwidth)
return pdf.view(shape)
| 2.5625
| 3
|
learn/views/choose_exercise.py
|
Aigrefin/py3learn
| 0
|
12780226
|
from django.shortcuts import redirect
from learn.services.choice import random_choice, rythm_choice
def choose_rythm_notation_exercise(request, dictionary_pk):
if request.user.is_authenticated():
translation = rythm_choice(dictionary_pk, request.user)
else:
translation = random_choice(dictionary_pk)
if not translation:
return redirect('learn:come_back', dictionary_pk=dictionary_pk)
return redirect('learn:exercise', dictionary_pk=dictionary_pk, translation_pk=translation.id)
| 2.296875
| 2
|
solvcon/kerpak/gasdyn.py
|
j8xixo12/solvcon
| 16
|
12780227
|
# -*- coding: UTF-8 -*-
#
# Copyright (C) 2008-2011 <NAME> <<EMAIL>>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Gas dynamics solver of the Euler equations.
"""
from solvcon.kerpak.cuse import CuseSolver
from solvcon.kerpak.cuse import CuseCase
from solvcon.kerpak.cuse import CuseBC
from solvcon.anchor import Anchor
from solvcon.hook import BlockHook
################################################################################
# Utility.
################################################################################
class MovingShock(object):
"""
Define relations across a moving shock wave. Subscript 1 denotes
    quantities before the shock (not yet reached by it), subscript 2 denotes
    quantities after the shock (already passed by it).
@ivar ga: ratio of specific heat.
@itype ga: float
@ivar Ms: Mach number of shock wave.
@itype Ms: float
@ivar gasconst: gas constant.
@itype gasconst: float
"""
def __init__(self, ga, Ms, **kw):
self.ga = ga
self.Ms = Ms
self.gasconst = kw.pop('gasconst', 1.0)
@property
def ratio_p(self):
"""
ratio of upstream/downstream pressure.
"""
ga = self.ga
Ms = self.Ms
return (2*ga*Ms**2 - (ga-1))/(ga+1)
@property
def ratio_rho(self):
"""
ratio of upstream/downstream density.
"""
ga = self.ga
Ms = self.Ms
return (ga+1)*Ms**2/(2+(ga-1)*Ms**2)
@property
def ratio_T(self):
"""
ratio of upstream/downstream temperature.
"""
ga = self.ga
Ms = self.Ms
return self.ratio_p/self.ratio_rho
@property
def M2(self):
"""
Mach number behind standing normal shock wave.
"""
from math import sqrt
ga = self.ga
Ms = self.Ms
return sqrt(((ga-1)*Ms**2+2)/(2*ga*Ms**2-(ga-1)))
@property
def M2p(self):
"""
Mach number behind moving normal shock wave.
"""
from math import sqrt
M1 = self.Ms
M2 = self.M2
ratio_a = sqrt(self.ratio_T)
return M1/ratio_a - M2
def calc_temperature(self, p, rho):
"""
Calculate temperature according to given pressure and density.
@param p: pressure.
@type p: float
@param rho: density.
@type rho: float
@return: temperature
@rtype: float
"""
return p/(rho*self.gasconst)
def calc_speedofsound(self, p, rho):
"""
Calculate speed of sound according to given pressure and density.
@param p: pressure.
@type p: float
@param rho: density.
@type rho: float
@return: speed of sound
@rtype: float
"""
from math import sqrt
ga = self.ga
return sqrt(ga*p/rho)
def calc_speeds(self, p, rho):
"""
Calculate shock wave speed and upstream speed for static downstream.
@param p: downstream pressure.
@type p: float
@param rho: downstream density.
@type rho: float
@return: a 2-tuple for shock wave and upstream speeds.
@rtype: (float, float)
"""
M1 = self.Ms
M2 = self.M2
a1 = self.calc_speedofsound(p, rho)
a2 = self.calc_speedofsound(p*self.ratio_p, rho*self.ratio_rho)
return M1*a1, M1*a1 - M2*a2
###############################################################################
# Solver.
###############################################################################
class GasdynSolver(CuseSolver):
"""
Gas dynamics solver of the Euler equations.
"""
def __init__(self, blk, *args, **kw):
kw['nsca'] = 1
super(GasdynSolver, self).__init__(blk, *args, **kw)
from solvcon.dependency import getcdll
__clib_gasdyn_c = {
2: getcdll('gasdyn2d_c', raise_on_fail=False),
3: getcdll('gasdyn3d_c', raise_on_fail=False),
}
__clib_gasdyn_cu = {
2: getcdll('gasdyn2d_cu', raise_on_fail=False),
3: getcdll('gasdyn3d_cu', raise_on_fail=False),
}
del getcdll
@property
def _clib_gasdyn_c(self):
return self.__clib_gasdyn_c[self.ndim]
@property
def _clib_gasdyn_cu(self):
return self.__clib_gasdyn_cu[self.ndim]
@property
def _clib_mcu(self):
return self.__clib_gasdyn_cu[self.ndim]
_gdlen_ = 0
@property
def _jacofunc_(self):
return self._clib_gasdyn_c.calc_jaco
def calccfl(self, worker=None):
from ctypes import byref
if self.scu:
self._clib_gasdyn_cu.calc_cfl(self.ncuth,
byref(self.cumgr.exd), self.cumgr.gexd.gptr)
else:
self._clib_gasdyn_c.calc_cfl(byref(self.exd))
###############################################################################
# Case.
###############################################################################
class GasdynCase(CuseCase):
"""
Gas dynamics case.
"""
from solvcon.domain import Domain
defdict = {
'solver.solvertype': GasdynSolver,
'solver.domaintype': Domain,
}
del Domain
def load_block(self):
loaded = super(GasdynCase, self).load_block()
if hasattr(loaded, 'ndim'):
ndim = loaded.ndim
else:
ndim = loaded.blk.ndim
self.execution.neq = ndim+2
return loaded
###############################################################################
# Boundary conditions.
###############################################################################
class GasdynBC(CuseBC):
"""
Basic BC class for gas dynamics.
"""
from solvcon.dependency import getcdll
__clib_gasdynb_c = {
2: getcdll('gasdynb2d_c', raise_on_fail=False),
3: getcdll('gasdynb3d_c', raise_on_fail=False),
}
__clib_gasdynb_cu = {
2: getcdll('gasdynb2d_cu', raise_on_fail=False),
3: getcdll('gasdynb3d_cu', raise_on_fail=False),
}
del getcdll
@property
def _clib_gasdynb_c(self):
return self.__clib_gasdynb_c[self.svr.ndim]
@property
def _clib_gasdynb_cu(self):
return self.__clib_gasdynb_cu[self.svr.ndim]
class GasdynWall(GasdynBC):
_ghostgeom_ = 'mirror'
def soln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_wall_soln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_wall_soln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
def dsoln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_wall_dsoln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_wall_dsoln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
class GasdynNswall(GasdynWall):
def soln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_nswall_soln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_nswall_soln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
def dsoln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_nswall_dsoln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_nswall_dsoln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
class GasdynInlet(GasdynBC):
vnames = ['rho', 'v1', 'v2', 'v3', 'p', 'gamma']
vdefaults = {
'rho': 1.0, 'p': 1.0, 'gamma': 1.4, 'v1': 0.0, 'v2': 0.0, 'v3': 0.0,
}
_ghostgeom_ = 'mirror'
def soln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_inlet_soln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr,
self.value.shape[1], self.cuvalue.gptr)
else:
self._clib_gasdynb_c.bound_inlet_soln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_,
self.value.shape[1], self.value.ctypes._as_parameter_)
def dsoln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_inlet_dsoln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_inlet_dsoln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
###############################################################################
# Anchors.
###############################################################################
class GasdynIAnchor(Anchor):
"""
Basic initializing anchor class of GasdynSolver.
"""
def __init__(self, svr, **kw):
assert isinstance(svr, GasdynSolver)
self.gamma = float(kw.pop('gamma'))
super(GasdynIAnchor, self).__init__(svr, **kw)
def provide(self):
from solvcon.solver_legacy import ALMOST_ZERO
svr = self.svr
svr.amsca.fill(self.gamma)
svr.sol.fill(ALMOST_ZERO)
svr.soln.fill(ALMOST_ZERO)
svr.dsol.fill(ALMOST_ZERO)
svr.dsoln.fill(ALMOST_ZERO)
class UniformIAnchor(GasdynIAnchor):
def __init__(self, svr, **kw):
self.rho = float(kw.pop('rho'))
self.v1 = float(kw.pop('v1'))
self.v2 = float(kw.pop('v2'))
self.v3 = float(kw.pop('v3'))
self.p = float(kw.pop('p'))
super(UniformIAnchor, self).__init__(svr, **kw)
def provide(self):
super(UniformIAnchor, self).provide()
gamma = self.gamma
svr = self.svr
svr.soln[:,0].fill(self.rho)
svr.soln[:,1].fill(self.rho*self.v1)
svr.soln[:,2].fill(self.rho*self.v2)
vs = self.v1**2 + self.v2**2
if svr.ndim == 3:
vs += self.v3**2
svr.soln[:,3].fill(self.rho*self.v3)
svr.soln[:,svr.ndim+1].fill(self.rho*vs/2 + self.p/(gamma-1))
svr.sol[:] = svr.soln[:]
class GasdynOAnchor(Anchor):
"""
Calculates physical quantities for output. Implements (i) provide() and
(ii) postfull() methods.
@ivar gasconst: gas constant.
@itype gasconst: float.
"""
_varlist_ = ['v', 'rho', 'p', 'T', 'ke', 'a', 'M', 'sch']
def __init__(self, svr, **kw):
self.rsteps = kw.pop('rsteps', 1)
self.gasconst = kw.pop('gasconst', 1.0)
self.schk = kw.pop('schk', 1.0)
self.schk0 = kw.pop('schk0', 0.0)
self.schk1 = kw.pop('schk1', 1.0)
super(GasdynOAnchor, self).__init__(svr, **kw)
def _calculate_physics(self):
from ctypes import byref, c_double
svr = self.svr
der = svr.der
svr._clib_gasdyn_c.process_physics(byref(svr.exd),
c_double(self.gasconst),
der['v'].ctypes._as_parameter_,
der['w'].ctypes._as_parameter_,
der['wm'].ctypes._as_parameter_,
der['rho'].ctypes._as_parameter_,
der['p'].ctypes._as_parameter_,
der['T'].ctypes._as_parameter_,
der['ke'].ctypes._as_parameter_,
der['a'].ctypes._as_parameter_,
der['M'].ctypes._as_parameter_,
)
def _calculate_schlieren(self):
from ctypes import byref, c_double
svr = self.svr
sch = svr.der['sch']
svr._clib_gasdyn_c.process_schlieren_rhog(byref(svr.exd),
sch.ctypes._as_parameter_)
rhogmax = sch[svr.ngstcell:].max()
svr._clib_gasdyn_c.process_schlieren_sch(byref(svr.exd),
c_double(self.schk), c_double(self.schk0), c_double(self.schk1),
c_double(rhogmax), sch.ctypes._as_parameter_,
)
def provide(self):
from numpy import empty
svr = self.svr
der = svr.der
nelm = svr.ngstcell + svr.ncell
der['v'] = empty((nelm, svr.ndim), dtype=svr.fpdtype)
der['w'] = empty((nelm, svr.ndim), dtype=svr.fpdtype)
der['wm'] = empty(nelm, dtype=svr.fpdtype)
der['rho'] = empty(nelm, dtype=svr.fpdtype)
der['p'] = empty(nelm, dtype=svr.fpdtype)
der['T'] = empty(nelm, dtype=svr.fpdtype)
der['ke'] = empty(nelm, dtype=svr.fpdtype)
der['a'] = empty(nelm, dtype=svr.fpdtype)
der['M'] = empty(nelm, dtype=svr.fpdtype)
der['sch'] = empty(nelm, dtype=svr.fpdtype)
self._calculate_physics()
self._calculate_schlieren()
def postfull(self):
svr = self.svr
istep = self.svr.step_global
rsteps = self.rsteps
if istep > 0 and istep%rsteps == 0:
if svr.scu:
svr.cumgr.arr_from_gpu('amsca', 'soln', 'dsoln')
self._calculate_physics()
self._calculate_schlieren()
| 1.875
| 2
|
os_traits/hw/nic/__init__.py
|
mail2nsrajesh/os-traits
| 10
|
12780228
|
<reponame>mail2nsrajesh/os-traits
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# A few generalized capabilities of some NICs
TRAITS = [
'SRIOV', # NIC supports partitioning via SR-IOV
'MULTIQUEUE', # >1 receive and transmit queues
'VMDQ', # Virtual machine device queues
# Some NICs allow processing pipelines to be programmed via FPGAs embedded
# in the NIC itself...
'PROGRAMMABLE_PIPELINE',
]
| 0.914063
| 1
|
ml/ica.py
|
Max1993Liu/MLImplementation
| 0
|
12780229
|
"""
Independent Component analysis
Reference implementation: https://github.com/asdspal/dimRed/blob/master/ICA.ipynb
"""
| 1.148438
| 1
|
segmenters/nlp/JiebaSegmenter/tests/test_jiebasegmenter.py
|
saoc90/jina-hub
| 106
|
12780230
|
<reponame>saoc90/jina-hub
import os
import numpy as np
import pytest
from .. import JiebaSegmenter
cur_dir = os.path.dirname(os.path.abspath(__file__))
path_dict_file = os.path.join(cur_dir, 'dict.txt')
def test_jieba_segmenter():
segmenter = JiebaSegmenter(mode='accurate')
text = '今天是个大晴天!安迪回来以后,我们准备去动物园。'
docs_chunks = segmenter.segment(np.stack([text, text]))
assert len(docs_chunks) == 2
for chunks in docs_chunks:
assert len(chunks) == 14
def test_jieba_user_dir():
segmenter = JiebaSegmenter()
text = '今天是个大晴天!安迪回来以后,我们准备去动物园。thisisnotachineseword'
docs_chunks = segmenter.segment(np.stack([text, text]))
assert len(docs_chunks) == 2
for chunks in docs_chunks:
assert len(chunks) == 15
segmenter = JiebaSegmenter(user_dict_file=path_dict_file)
text = '今天是个大晴天!安迪回来以后,我们准备去动物园。thisisnotachineseword'
docs_chunks = segmenter.segment(np.stack([text, text]))
assert len(docs_chunks) == 2
for chunks in docs_chunks:
assert len(chunks) == 20
def test_jieba_user_dir_file_not_found():
with pytest.raises(FileNotFoundError):
JiebaSegmenter(user_dict_file='/this/path/does/not/exist.txt')
| 2.359375
| 2
|
main.py
|
citReyJoshua/assignment_statement_lexical_analyzer
| 0
|
12780231
|
from lexical_analyzer.assignment_analyzer import analyze
if __name__ == '__main__':
assignment_statements = []
# get string statements from file
with open('test-cases.txt', 'r') as file:
for line in file:
assignment_statements.append(line.rstrip('\n'))
# lexically analyze
validation_results = analyze(assignment_statements)
# display results
for statement, validation in zip(assignment_statements, validation_results):
print (f'\n{statement} -> {validation}')
print('-----------------------------------------------------')
| 3.25
| 3
|
pystatic/error/message.py
|
pystatic/pystatic
| 0
|
12780232
|
<filename>pystatic/error/message.py<gh_stars>0
from typing import Optional, Tuple
from pystatic.error.level import Level
from pystatic.error.position import Position
class Message:
__slots__ = ["level", "msg"]
def __init__(self, level: Level, msg: str) -> None:
self.level = level
self.msg = msg
def get_position(self) -> Optional[Position]:
return None
def __lt__(self, other: "Message"):
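        # Messages without a position sort before positioned ones; otherwise order by source position.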
pos = self.get_position()
other_pos = other.get_position()
if pos is None:
return True
elif other_pos is None:
return False
else:
return pos < other_pos
def __str__(self):
return self.msg
class PositionMessage(Message):
def __init__(
self,
level: Level,
pos: Position,
msg: str,
):
super().__init__(level, msg)
self.pos = pos
def get_position(self) -> Optional[Position]:
return self.pos
def __str__(self):
return (
"line: "
+ str(self.pos.lineno)
+ " col: "
+ str(self.pos.col_offset)
+ " "
+ self.msg
)
| 2.59375
| 3
|
forfun/dice/rolldice.py
|
wy2136/wython
| 1
|
12780233
|
#!/usr/bin/env python
# <NAME> (<EMAIL>)
# Fri Jul 23 16:27:08 EDT 2021
#import xarray as xr, numpy as np, pandas as pd
import os.path
import matplotlib.pyplot as plt
#more imports
#from PIL import Image
import random
from matplotlib import image
#
#
#start from here
dice = range(1,6+1)
idir = os.path.dirname(__file__)
while True:
n = random.choice(dice)
ifile = os.path.join(idir, f'dice-{n}.jpg')
#img = Image.open(ifile)
#img.show()
#with Image.open(ifile) as img:
# img.show()
plt.ion()
plt.imshow(image.imread(ifile))
plt.axis('off')
#plt.show()
print(f'Your number is {n}')
s = input(f'Press Return to continue (or type q and press Return to quit):')
plt.close()
if s == 'q':
break
| 3.15625
| 3
|
day9.py
|
jborlik/AdventOfCode2015
| 0
|
12780234
|
# -*- coding: utf-8 -*-
# mostly from: http://stackoverflow.com/questions/30552656/python-traveling-salesman-greedy-algorithm
# credit to cMinor
import math
import random
import itertools
def indexOrNeg(arrr, myvalue):
try:
return arrr.index(myvalue)
except:
return -1
def printTour(tour, cities):
for icity in tour:
print(cities[icity], end=' ')
def readAdventDatafile(myfilename):
"""Reads in the Advent Of Code datafile, which has the list of
cities, connections, and distances between them.
Will return:
n - number of cities
D - distance matrix
cities - array (of length n) of city names
"""
D = {} # dictionary to hold n times n matrix
cities = []
with open(myfilename) as datafile:
for thisstring in datafile:
thisstring = thisstring.rstrip()
tokens = thisstring.split(' ')
index_city1 = indexOrNeg(cities, tokens[0])
if index_city1 < 0:
cities.append(tokens[0])
index_city1 = len(cities)-1
index_city2 = indexOrNeg(cities, tokens[2])
if index_city2 < 0:
cities.append(tokens[2])
index_city2 = len(cities)-1
D[index_city1, index_city2] = int(tokens[4])
D[index_city2, index_city1] = int(tokens[4])
return len(cities), D, cities
def mk_matrix(coord, dist):
"""Compute a distance matrix for a set of points.
Uses function 'dist' to calculate distance between
any two points. Parameters:
-coord -- list of tuples with coordinates of all points, [(x1,y1),...,(xn,yn)]
-dist -- distance function
"""
n = len(coord)
D = {} # dictionary to hold n times n matrix
for i in range(n-1):
for j in range(i+1,n):
(x1,y1) = coord[i]
(x2,y2) = coord[j]
D[i,j] = dist((x1,y1), (x2,y2))
D[j,i] = D[i,j]
return n,D
def mk_closest(D, n):
"""Compute a sorted list of the distances for each of the nodes.
For each node, the entry is in the form [(d1,i1), (d2,i2), ...]
where each tuple is a pair (distance,node).
"""
C = []
for i in range(n):
dlist = [(D[i,j], j) for j in range(n) if j != i]
dlist.sort()
C.append(dlist)
return C
def length(tour, D):
"""Calculate the length of a tour according to distance matrix 'D'."""
#z = D[tour[-1], tour[0]] # edge from last to first city of the tour
z = 0
for i in range(1,len(tour)):
z += D[tour[i], tour[i-1]] # add length of edge from city i-1 to i
return z
def randtour(n):
"""Construct a random tour of size 'n'."""
sol = list(range(n)) # set solution equal to [0,1,...,n-1]
random.shuffle(sol) # place it in a random order
return sol
def nearest(last, unvisited, D):
"""Return the index of the node which is closest to 'last'."""
near = unvisited[0]
min_dist = D[last, near]
for i in unvisited[1:]:
if D[last,i] < min_dist:
near = i
min_dist = D[last, near]
return near
def nearest_neighbor(n, i, D):
"""Return tour starting from city 'i', using the Nearest Neighbor.
Uses the Nearest Neighbor heuristic to construct a solution:
- start visiting city i
- while there are unvisited cities, follow to the closest one
- return to city i
"""
unvisited = list(range(n))
unvisited.remove(i)
last = i
tour = [i]
while unvisited != []:
next = nearest(last, unvisited, D)
tour.append(next)
unvisited.remove(next)
last = next
return tour
def exchange_cost(tour, i, j, D):
"""Calculate the cost of exchanging two arcs in a tour.
Determine the variation in the tour length if
arcs (i,i+1) and (j,j+1) are removed,
and replaced by (i,j) and (i+1,j+1)
(note the exception for the last arc).
Parameters:
-t -- a tour
-i -- position of the first arc
-j>i -- position of the second arc
"""
n = len(tour)
a,b = tour[i],tour[(i+1)%n]
c,d = tour[j],tour[(j+1)%n]
return (D[a,c] + D[b,d]) - (D[a,b] + D[c,d])
def exchange(tour, tinv, i, j):
"""Exchange arcs (i,i+1) and (j,j+1) with (i,j) and (i+1,j+1).
For the given tour 't', remove the arcs (i,i+1) and (j,j+1) and
insert (i,j) and (i+1,j+1).
This is done by inverting the sublist of cities between i and j.
"""
n = len(tour)
if i>j:
i,j = j,i
assert i>=0 and i<j-1 and j<n
path = tour[i+1:j+1]
path.reverse()
tour[i+1:j+1] = path
for k in range(i+1,j+1):
tinv[tour[k]] = k
def improve(tour, z, D, C):
"""Try to improve tour 't' by exchanging arcs; return improved tour length.
If possible, make a series of local improvements on the solution 'tour',
using a breadth first strategy, until reaching a local optimum.
"""
n = len(tour)
tinv = [0 for i in tour]
for k in range(n):
tinv[tour[k]] = k # position of each city in 't'
for i in range(n):
a,b = tour[i],tour[(i+1)%n]
dist_ab = D[a,b]
improved = False
for dist_ac,c in C[a]:
if dist_ac >= dist_ab:
break
j = tinv[c]
d = tour[(j+1)%n]
dist_cd = D[c,d]
dist_bd = D[b,d]
delta = (dist_ac + dist_bd) - (dist_ab + dist_cd)
if delta < 0: # exchange decreases length
exchange(tour, tinv, i, j);
z += delta
improved = True
break
if improved:
continue
for dist_bd,d in C[b]:
if dist_bd >= dist_ab:
break
j = tinv[d]-1
if j==-1:
j=n-1
c = tour[j]
dist_cd = D[c,d]
dist_ac = D[a,c]
delta = (dist_ac + dist_bd) - (dist_ab + dist_cd)
if delta < 0: # exchange decreases length
exchange(tour, tinv, i, j);
z += delta
break
return z
def localsearch(tour, z, D, C=None):
"""Obtain a local optimum starting from solution t; return solution length.
Parameters:
tour -- initial tour
z -- length of the initial tour
D -- distance matrix
"""
n = len(tour)
if C == None:
C = mk_closest(D, n) # create a sorted list of distances to each node
while 1:
newz = improve(tour, z, D, C)
if newz < z:
z = newz
else:
break
return z
def multistart_localsearch(k, n, D, report=None):
"""Do k iterations of local search, starting from random solutions.
Parameters:
-k -- number of iterations
-D -- distance matrix
-report -- if not None, call it to print verbose output
Returns best solution and its cost.
"""
C = mk_closest(D, n) # create a sorted list of distances to each node
bestt=None
bestz=None
for i in range(0,k):
tour = randtour(n)
z = length(tour, D)
z = localsearch(tour, z, D, C)
if bestz == None or z < bestz:
bestz = z
bestt = list(tour)
if report:
report(z, tour)
return bestt, bestz
def all_permutations(n, D, report=None):
"""Do all of the permutations of tours"""
icount = 0
bestt = None
bestz = None
worstt = None
worstz = None
for thistour in itertools.permutations(range(n)):
icount += 1
z = length(thistour,D)
if bestz == None or z < bestz:
bestz = z
bestt = list(thistour)
if report:
report(z,thistour)
if worstz == None or z > worstz:
worstz = z
worstt = list(thistour)
if report:
report(z,thistour)
return bestt, bestz, worstt, worstz
if __name__ == "__main__":
"""Local search for the Travelling Saleman Problem: sample usage."""
#
# test the functions:
#
# random.seed(1) # uncomment for having always the same behavior
import sys
# read in datafile
n, D, cities = readAdventDatafile('day9.dat')
# at this point, I need:
# n - number of cities
# D - distance matrix
# function for printing best found solution when it is found
    from time import perf_counter as clock  # time.clock was removed in Python 3.8
init = clock()
def report_sol(obj, s=""):
print("cpu:%g\tobj:%g\ttour:%s" % \
(clock(), obj, s))
print("*** travelling salesman problem ***")
    print()
# random construction
print("random construction + local search:")
tour = randtour(n) # create a random tour
z = length(tour, D) # calculate its length
print("random:", tour, z, ' --> ',end='')
z = localsearch(tour, z, D) # local search starting from the random tour
# print(tour, z)
printTour(tour, cities)
print(" cost={0}".format(z))
    print()
# greedy construction
print("greedy construction with nearest neighbor + local search:")
for i in range(n):
tour = nearest_neighbor(n, i, D) # create a greedy tour, visiting city 'i' first
z = length(tour, D)
print("nneigh:", tour, z, ' --> ',end='')
z = localsearch(tour, z, D)
# print(tour, z)
printTour(tour, cities)
print(" cost={0}".format(z))
    print()
# multi-start local search
# print("random start local search:")
# niter = 10000
# tour,z = multistart_localsearch(niter, n, D, report_sol)
# assert z == length(tour, D)
# print("best found solution (%d iterations): z = %g" % (niter, z))
# printTour(tour, cities)
# print(" cost={0}".format(z))
# all the permutations
print("all the permutations!")
tour, z, worsttour, worstz = all_permutations(n, D, report_sol)
assert z == length(tour,D)
print("best found solution: z = %g" % z)
printTour(tour, cities)
print(" cost={0}".format(z))
print("worst found solution: z = %g" % worstz)
printTour(worsttour, cities)
print(" cost={0}".format(worstz))
| 3.859375
| 4
|
solutions/python3/633.py
|
sm2774us/amazon_interview_prep_2021
| 42
|
12780235
|
class Solution:
def judgeSquareSum(self, c: int) -> bool:
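        # c is a sum of two squares iff some i in [0, floor(sqrt(c))] leaves c - i*i with an integer square root;
        # all(...) is falsy exactly when such an i exists, so negating it answers the question.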
return not all(((c - i ** 2) ** 0.5) % 1 for i in range(int(c ** 0.5) + 1))
| 3.078125
| 3
|
tests/test_sign_poet.py
|
HHSIDEAlab/python-poetri
| 1
|
12780236
|
<reponame>HHSIDEAlab/python-poetri
#!/usr/bin/env python
import unittest, os
from poetri.sign_poet import sign_poet
#This key is for testing and the CN is "transparenthealth.org"
test_private_key = """
-----<KEY>"""
class TestSigning(unittest.TestCase):
def test_sign_poet_happy(self):
        """Test POET JWT signing by ensuring the signed output contains exactly two periods."""
        result = sign_poet({"sub":"someapp.foo.com"}, test_private_key, "tranparenthealth.org", 3600)
self.assertEqual(result.count('.'), 2)
if __name__ == '__main__':
unittest.main()
| 2.875
| 3
|
utils.py
|
lonesword/lottery-ticket-experiments
| 6
|
12780237
|
import torch
def get_zero_count(matrix):
# A utility function to count the number of zeroes in a 2-D matrix
return torch.sum(matrix == 0).item()
def apply_mask_dict_to_weight_dict(mask_dict, weight_dict):
# mask_dict - a dictionary where keys are layer names (string) and values are masks (bytetensor) for that layer
# weight_dict - a dictionary where keys are layer names and values are weights (tensor) for that layer
# Applies the mask to the weight for each layer. This is done by simple multiplying the weight by the mask
# (Hadamard product)
# Since every value in the mask is either 0 or 1, this is equivalent to either letting the weight go unchanged or
# setting it as 0
weights_after_masking = dict()
for layer_name, mask in mask_dict.items():
weight = weight_dict[layer_name]
# The mask should be copied to the cpu since `weights_after_masking` dict is always stored in memory, and not the GPU
weights_after_masking[layer_name] = weight * mask.cpu().float()
return weights_after_masking
| 3.328125
| 3
|
main.py
|
Pisun-afk/TOP3
| 0
|
12780238
|
<filename>main.py
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QPixmap
import os
from PIL import Image, ImageFilter
app = QApplication([])
window = QWidget()
window.setWindowTitle("Easy Editor")
btn_levo = QPushButton("Лево")
btn_pravo = QPushButton("Право")
btn_zerkal = QPushButton("Зеркало")
btn_rezko = QPushButton("Резкость")
btn_bw = QPushButton("Ч/Б")
pol = QListWidget()
btn_papka = QPushButton("Папка")
pickcha = QLabel("Картинка")
col_1 = QVBoxLayout()
col_1.addWidget(btn_papka)
col_1.addWidget(pol)
col_2 = QVBoxLayout()
col_2.addWidget(pickcha)
row_1 = QHBoxLayout()
row_1.addWidget(btn_levo)
row_1.addWidget(btn_pravo)
row_1.addWidget(btn_rezko)
row_1.addWidget(btn_zerkal)
row_1.addWidget(btn_bw)
col_2.addLayout(row_1)
row_2 = QHBoxLayout()
row_2.addLayout(col_1,20)
row_2.addLayout(col_2,80)
window.setLayout(row_2)
workdir = ''
def filter(files, extensions):
result = []
for filename in files:
for ext in extensions:
if filename.endswith(ext):
result.append(filename)
return result
def chooseWorkdir():
global workdir
workdir = QFileDialog.getExistingDirectory()
def showFilenameList():
    extensions = ['jpg','jpeg','png','gif','bmp']
    chooseWorkdir()
    filenames = filter(os.listdir(workdir), extensions)
pol.clear()
for filename in filenames:
pol.addItem(filename)
class ImageProcessor():
def __init__(self ):
self.filename=None
self.original=None
self.save_dir=("pap/")
def LoadImage(self,Filename):
self.filename = Filename
way = os.path.join(workdir,self.filename)
self.original=Image.open(way)
def showImage(self,path):
pickcha.hide()
pixmapimage=QPixmap(path)
w, h = pickcha.width() , pickcha.height()
pixmapimage = pixmapimage.scaled(w, h, Qt.KeepAspectRatio)
pickcha.setPixmap(pixmapimage)
pickcha.show()
def do_bw(self):
self.original = self.original.convert("L")
self.saveImage()
image_path = os.path.join(workdir, self.save_dir, self.filename)
self.showImage(image_path)
def saveImage(self):
path = os.path.join(workdir, self.save_dir)
if not(os.path.exists(path) or os.path.isdir(path)):
os.mkdir(path)
image_path = os.path.join(path, self.filename)
self.original.save(image_path)
workimage= ImageProcessor()
def showClosenImage():
if pol.currentRow() >=0:
filename = pol.currentItem().text()
workimage.LoadImage(filename)
image_path = os.path.join(workdir, workimage.filename)
workimage.showImage(image_path)
pol.currentRowChanged.connect(showClosenImage)
btn_papka.clicked.connect(showFilenameList)
btn_bw.clicked.connect(workimage.do_bw)
window.show()
app.exec_()
| 2.734375
| 3
|
prompt/gather.py
|
dyabel/detpro
| 23
|
12780239
|
<gh_stars>10-100
import os, sys
import torch
path = sys.argv[1]
save_name = os.path.join(path, sys.argv[2])
if os.path.exists(save_name):
print('Data: target already exists!')
exit(0)
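# Walk every split directory under `path`, load each saved (feat, label, iou) tuple, and concatenate everything into a single tensor file.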
feats = []
labels = []
ious = []
files = []
for splt in os.listdir(path):
print(splt)
files += [os.path.join(path, splt, f) for f in os.listdir(os.path.join(path, splt))]
print('total', len(files), 'files')
for pth in files:
feat, label, iou = torch.load(pth)
print(len(feat), len(label),len(iou))
iou = torch.cat([iou, iou.new_ones(len(label) - len(iou))]) # fix a bug in collect iou
feats.append(feat)
labels.append(label)
ious.append(iou)
feats = torch.cat(feats)
labels = torch.cat(labels)
ious = torch.cat(ious)
print(feats.shape, labels.shape, ious.shape)
torch.save((feats, labels, ious), save_name)
| 2.171875
| 2
|
yadi/datalog2sql/datalog2sqlconverter.py
|
saltzm/yadi
| 2
|
12780240
|
from .tokens2ast.ast_builder import *
from .parse2tokens.parser import Parser, SyntaxException
from .ast2sql.ast2sqlconverter import Ast2SqlConverter
from .ast2sql.exceptions import *
from ..sql_engine.db_state_tracker import DBStateTracker
from colorama import *
__author__ = 'caioseguin', 'saltzm'
class Datalog2SqlConverter:
def __init__(self, db_state_tracker):
self.db_state_tracker = db_state_tracker
def convertDatalog2Sql(self, datalog_statement, is_assertion = False):
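        # Parse the Datalog statement, build one AST per resulting query, then convert each AST into a SQL query.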
sql_query_list = []
try:
parsed_statement = Parser().parsesentence(datalog_statement).asList()
ast_query_list = ASTBuilder().buildAST(
parsed_statement,
is_assertion
)
for ast_query in ast_query_list:
sql_query = \
Ast2SqlConverter(self.db_state_tracker).convertAst2Sql(ast_query)
sql_query_list.append(sql_query)
except SyntaxException as e:
print (Fore.RED+'SyntaxException: ' + str(e)+Fore.RESET)
except SafetyException as e:
print (Fore.RED+'SafetyException: ' + str(e)+Fore.RESET)
except Exception as e:
import traceback
traceback.print_exc()
return sql_query_list
def trim_assert(self, statement):
return statement[len('/assert '):]
| 2.390625
| 2
|
modules/new_user/cog.py
|
etandinnerman/jct-discord-bot
| 0
|
12780241
|
<gh_stars>0
from modules.new_user.greeter import Greeter
import discord
from utils import utils
from discord.ext import commands
class NewUserCog(commands.Cog, name="New User"):
"""Ask members who join to use the join command"""
def __init__(self, bot: commands.Bot):
self.bot = bot
self.greeter = Greeter(bot)
@commands.Cog.listener()
async def on_member_join(self, member: discord.Member):
"""Ask members who join to use the join command."""
print(f"{member.name} joined the server.")
await self.greeter.give_initial_role(member)
if not member.bot:
await self.greeter.server_greet(member)
await self.greeter.private_greet(member)
# setup functions for bot
def setup(bot):
bot.add_cog(NewUserCog(bot))
| 2.765625
| 3
|
antiqueProjectApp/migrations/0012_auto_20190812_1531.py
|
jennie6151/inwiththeold
| 0
|
12780242
|
# Generated by Django 2.2.3 on 2019-08-12 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('antiqueProjectApp', '0011_auto_20190812_1453'),
]
operations = [
migrations.AlterModelOptions(
name='antiquesale',
options={'permissions': (('can_buy', 'Set antique as purchased'),)},
),
migrations.AlterField(
model_name='antique',
name='AntiqueType',
field=models.ManyToManyField(help_text='Select a type for this antique', to='antiqueProjectApp.AntiqueType'),
),
]
| 1.75
| 2
|
whatsappweb.py
|
aromal17/Whatsapp-online-activity-tracker
| 0
|
12780243
|
<gh_stars>0
from selenium import webdriver
from win10toast import ToastNotifier
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
import time
import os
# XPath selectors
nameSearchField = '/html[1]/body[1]/div[1]/div[1]/div[1]/div[3]/div[1]/div[1]/div[1]/label[1]/div[1]/div[2]'
onlineStatusLabel = '/html[1]/body[1]/div[1]/div[1]/div[1]/div[4]/div[1]/header[1]/div[2]/div[2]'
# Replace below with the list of targets/contacts to be tracked along with their complete contact numbers
TARGETS = {'contact name 1': 'contact number 1','contact name 2': 'contact number 2'}
# Replace below path with the absolute path
browser = webdriver.Chrome(r'enter\path\for\chromedriver.exe')
# Load Whatsapp Web page
browser.get("https://web.whatsapp.com/")
wait = WebDriverWait(browser, 600)
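# Poll each target in a loop: search the contact by number, open the chat, and treat the presence of the status label as "online".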
while True:
# Clear screen
os.system('cls')
# For each target
for target in TARGETS:
tryAgain = True
while tryAgain:
try:
                # Wait until the input text box is visible
input_box = wait.until(EC.presence_of_element_located((By.XPATH,nameSearchField)))
# Write phone number
input_box.send_keys(TARGETS[target])
# Press enter to confirm the phone number
input_box.send_keys(Keys.ENTER)
time.sleep(5)
tryAgain = False
# try:
try:
print(browser.find_element_by_xpath(onlineStatusLabel))
print(target + ' is online')
toaster = ToastNotifier()
toaster.show_toast(target + " is online")
except:
print(target + ' is offline')
toaster = ToastNotifier()
toaster.show_toast(target + " is OFFLINE")
except:
print('Error fetching input box details')
| 2.6875
| 3
|
code/numpy/numpy-tutorial-master/scripts/test.py
|
vicb1/python-reference
| 0
|
12780244
|
<gh_stars>0
Z = [[0,0,0,0,0,0],
[0,0,0,1,0,0],
[0,1,0,1,0,0],
[0,0,1,1,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0]]
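# Conway's Game of Life on a small fixed grid: count each interior cell's live neighbours, then apply the birth/survival rules (border cells stay dead).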
def compute_neighbours(Z):
shape = len(Z), len(Z[0])
N = [[0,]*(shape[0]) for i in range(shape[1])]
for x in range(1,shape[0]-1):
for y in range(1,shape[1]-1):
N[x][y] = Z[x-1][y-1]+Z[x][y-1]+Z[x+1][y-1] \
+ Z[x-1][y] +Z[x+1][y] \
+ Z[x-1][y+1]+Z[x][y+1]+Z[x+1][y+1]
return N
def show(Z):
for l in Z[1:-1]:
print(l[1:-1])
print()
def iterate(Z):
shape = len(Z), len(Z[0])
N = compute_neighbours(Z)
for x in range(1,shape[0]-1):
for y in range(1,shape[1]-1):
if Z[x][y] == 1 and (N[x][y] < 2 or N[x][y] > 3):
Z[x][y] = 0
elif Z[x][y] == 0 and N[x][y] == 3:
Z[x][y] = 1
return Z
show(Z)
for i in range(4):
iterate(Z)
show(Z)
| 3.3125
| 3
|
arjuna/tpi/constant.py
|
bhargavkumar-65/arjuna
| 0
|
12780245
|
# This file is a part of Arjuna
# Copyright 2015-2021 <NAME>
# Website: www.RahulVerma.net
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum, auto
class ArjunaOption(Enum):
'''
Represents all built-in configuration options for Arjuna.
    Any option name which does not correspond to an ArjunaOption enum constant is treated as a user-defined option.
'''
ARJUNA_ROOT_DIR = auto()
'''Root Directory of Arjuna Installed/Imported in a session'''
ARJUNA_EXTERNAL_IMPORTS_DIR = auto()
'''Directory of third party libs directly included in Arjuna.'''
LOG_NAME = auto()
'''Name of Arjuna's log file'''
RUN_ID = auto()
'''An alnum string representing current test run. Default is **mrun**'''
RUN_SESSION_NAME = auto()
'''Current session name.'''
RUN_HOST_OS = auto()
'''Host Operating System type: Windows/Mac/Linux.'''
LOG_FILE_LEVEL = auto()
'''Minimum level for a message to be logged to log file.'''
LOG_CONSOLE_LEVEL = auto()
'''Minimum level for a message to be displayed on console'''
LOG_ALLOWED_CONTEXTS = auto()
'''Allowed context strings for logging (file as well as display). Messages without contexts always get logged.'''
L10N_LOCALE = auto()
'''Default Locale type to be used for Localization call. Values as per arjuna.tpi.constant.Locale'''
L10N_STRICT = auto()
'''Sets Localization mode to strict. Default is False.'''
L10N_DIR = auto()
'''Directory containing Localization files.'''
PROJECT_NAME = auto()
'''Test Project Name'''
PROJECT_ROOT_DIR = auto()
'''Test Project Root Directory'''
CONF_PROJECT_FILE = auto()
'''Project conf file path.'''
CONF_PROJECT_LOCAL_FILE = auto()
'''Local Project conf file path.'''
TESTS_DIR = auto()
'''Directory containing test modules.'''
HOOKS_PACKAGE = auto()
'''Arjuna Hooks Package Import Path.'''
HOOKS_CONFIG_PACKAGE = auto()
'''Arjuna Config Hooks Package Import Path.'''
HOOKS_ENTITY_PACKAGE = auto()
'''Arjuna Data Entity Hooks Package Import Path.'''
HOOKS_RESOURCE_PACKAGE = auto()
'''Arjuna Resource Package Import Path.'''
REPORTS_DIR = auto()
'''Root directory for test reports.'''
REPORT_FORMATS = auto()
'''Formats for Report Generation. XML/HTML'''
REPORT_DIR = auto()
'''Reporting directory for current test run under REPORTS_DIR. Name is generated with RUN_ID and Current Timestamp. With --static-rid CLI switch, timestamp is not appended.'''
REPORT_XML_DIR = auto()
'''Directory containing report.xml for current test run.'''
REPORT_HTML_DIR = auto()
'''Directory containing report.html for current test run.'''
REPORT_GROUP_RENAME = auto()
'''If True, for run-group command, reports are created without session and stage prefixes.'''
REPORT_SCREENSHOTS_ALWAYS = auto()
'''If True, Screenshots are always show in Report, else they are not shown for passed tests. Default is False.'''
REPORT_NETWORK_ALWAYS = auto()
'''If True, Network packets are always show in Report, else they are not shown for passed tests. Default is False.'''
REPORT_NETWORK_FILTER = auto()
'''If True, in reporting, the request/response for static resources like image files, css etc will be excluded. Only HTML/JSON/XML content is included. Default is True'''
LOG_DIR = auto()
'''Directory containing arjuna.log for current test run.'''
SCREENSHOTS_DIR = auto()
'''Directory containing screenshots for current test run.'''
TOOLS_DIR = auto()
'''Directory containing external tool binaries in Arjuna test project.'''
TOOLS_BMPROXY_DIR = auto()
'''Directory containing BrowerMob Proxy binaries.'''
DEPS_DIR = auto()
'''Directory containing external tool binaries in Arjuna test project.'''
DBAUTO_DIR = auto()
'''Directory containing Database interaction automation related input files.'''
DBAUTO_SQL_DIR = auto()
'''Directory containing SQL files for Database interaction automation.'''
TEMP_DIR = auto()
'''Temporary directory for this session.'''
CONF_DIR = auto()
'''Test Project configuration directory'''
CONF_DATA_FILE = auto()
'''File that contains all data configurations.'''
CONF_DATA_LOCAL_FILE = auto()
'''Local File that contains all data configurations.'''
CONF_ENVS_FILE = auto()
'''File that contains all environment configurations.'''
CONF_ENVS_LOCAL_FILE = auto()
'''Local File that contains all environment configurations.'''
CONF_SESSIONS_FILE = auto()
'''File that contains all test session definitions.'''
CONF_SESSIONS_LOCAL_FILE = auto()
'''Local File that contains all test session definitions.'''
CONF_STAGES_FILE = auto()
'''File that contains all test stage definitions.'''
CONF_STAGES_LOCAL_FILE = auto()
'''Local File that contains all test stage definitions.'''
CONF_GROUPS_FILE = auto()
'''File that contains all test group definitions.'''
CONF_GROUPS_LOCAL_FILE = auto()
'''Local File that contains all test group definitions.'''
CONF_WITHX_FILE = auto()
'''withx.yaml file used for writing custom locators for Gui Automation.'''
CONF_WITHX_LOCAL_FILE = auto()
'''Local withx.yaml file used for writing custom locators for Gui Automation.'''
DATA_DIR = auto()
'''Directory containing data files in test project.'''
DATA_SRC_DIR = auto()
'''Directory containing data source files in test project.'''
DATA_REF_DIR = auto()
'''Directory containing contextual data reference files in test project.'''
DATA_REF_CONTEXTUAL_DIR = auto()
'''Directory containing contextual data reference files in test project.'''
DATA_REF_INDEXED_DIR = auto()
'''Directory containing indexed data reference files in test project.'''
DATA_FILE_DIR = auto()
'''Directory containing files used as file data.'''
APP_URL = auto()
'''Base URL for a Web App. Used by launch() method if url is not specified for GuiApp.'''
SOCKET_TIMEOUT = auto()
'''Timeout for socket connections. Default is 60 seconds.'''
HTTP_PROXY_ENABLED = auto()
'''Is a proxy enabled for HTTP requests (GUIAuto as well as HttpAuto)'''
HTTP_PROXY_HOST = auto()
'''IP address/Name of HTTP proxy host. Default is localhost.'''
HTTP_PROXY_PORT = auto()
'''Network Port of HTTP proxy. Default is 8080.'''
HTTPAUTO_DIR = auto()
'''Root directory of all HTTP automation relation directories and files'''
HTTPAUTO_MESSAGE_DIR = auto()
'''Root directory of all HTTP message YAML files.'''
EMAILAUTO_IMAP_HOST = auto()
'''IP address/Name of EmailBox for IMAP Protocol. Default is localhost.'''
EMAILAUTO_IMAP_PORT = auto()
'''Network Port of EmailBox for IMAP Protocol. Default is 993 in SSL Mode and 143 in non-SSL Mode.'''
EMAILAUTO_IMAP_USESSL = auto()
'''Enables/Disables usage of SSL for connecting to EmailBox via IMAP. Default is True.'''
EMAILAUTO_USER = auto()
'''Default Email Address to be used in Arjuna.'''
EMAILAUTO_PASSWORD = auto()
'''Default Email password to be used in Arjuna.'''
EMAILAUTO_MAX_WAIT = auto()
'''Maximum time for selecting a mailbox or reading/parsing emails. Uses Dynamic Wait. Expressed in seconds. Default is 120 seconds.'''
BROWSER_NAME = auto()
'''Browser Name for Gui Automation. Chrome/Firefox. Default is Chrome'''
BROWSER_HEADLESS = auto()
'''Sets headless mode for browser for GUI Automation. Default is False.'''
BROWSER_VERSION = auto()
'''Browser Version for GUI Automation.'''
BROWSER_MAXIMIZE = auto()
'''Browser is maximized in GUI Automation after launch. Default is False.'''
BROWSER_DIM_HEIGHT = auto()
'''Browser Height for GUI Automation. If not set, Arjuna does not change the height of browser.'''
BROWSER_DIM_WIDTH = auto()
'''Browser Width for GUI Automation. If not set, Arjuna does not change the width of browser.'''
BROWSER_BIN_PATH = auto()
'''Path of the Browser binary on test system.'''
BROWSER_NETWORK_RECORDER_ENABLED = auto()
'''If True, Arjuna uses BrowserMob proxy, if available in test project, to capture Network requests made by browser. Default is False.'''
BROWSER_NETWORK_RECORDER_AUTOMATIC = auto()
'''If True, when a browser is launched, Arjuna automatically starts capturing all traffic. Default is False'''
ALLOW_INSECURE_SSL_CERT = auto()
    '''If True, insecure SSL certificates are allowed. Default is True.'''
GUIAUTO_NAME = auto()
'''Engine name. Currently set to Selenium which is the only supported engine.'''
GUIAUTO_DIR = auto()
'''Root directory of all Gui automation relation directories and files'''
GUIAUTO_NAMESPACE_DIR = auto()
'''Root directory of all Gui Namespace (GNS) files.'''
GUIAUTO_DEF_MULTICONTEXT = auto()
'''Sets multi context mode for GNS files. Currently not processed.'''
GUIAUTO_CONTEXT = auto()
'''Gui Automation Context. Currently not processed.'''
SCROLL_PIXELS = auto()
'''Number of pixels for each scroll call in Gui Automation. Default is 100.'''
GUIAUTO_MAX_WAIT = auto()
'''Maximum time for a Gui element locating or waitable interaction to occur. Uses Dynamic Wait. Expressed in seconds. Default is 60.'''
GUIAUTO_SLOMO_ON = auto()
'''Sets slow motion mode for Gui Automation. Default is False.'''
GUIAUTO_SLOMO_INTERVAL = auto()
'''Time Interval between successive Gui Automation actions when Slow Motion mode is ON. Expressed in seconds. Default is 2'''
MOBILE_OS_NAME = auto()
'''Mobile OS Name. iOs/Android. Default is Android.'''
MOBILE_OS_VERSION = auto()
'''Mobile OS Version. No default set.'''
MOBILE_DEVICE_NAME = auto()
'''Mobile Device name. No default set.'''
MOBILE_DEVICE_UDID = auto()
'''Mobile Device UDID. No default set.'''
MOBILE_APP_FILE_PATH = auto()
'''Mobile App path on test system. No default set.'''
SELENIUM_DRIVER_PROP = auto()
'''Selenium Environment variable for browser driver as per chosen browser. Automatically set as per chosen browser. Default is webdriver.chrome.driver'''
SELENIUM_DRIVERS_DIR = auto()
'''Root Directory containing OS specific browser drivers for Selenium. Has an impact only if SELENIUM_DRIVER_DOWNLOAD is set to False.'''
SELENIUM_DRIVER_PATH = auto()
'''Absolute path of Selenium browser driver. Automatically set to WebDriverManager's downloaded driver if SELENIUM_DRIVER_DOWNLOAD is True, else automatically set as per the test project structure, OS and browser.'''
SELENIUM_DRIVER_DOWNLOAD = auto()
'''Instructs Arjuna to automatically download Selenium browser driver for chosen browser. Default is True.'''
SELENIUM_SERVICE_URL = auto()
'''Selenium's Service URL. If set, Arjuna does not launch the browser service and uses this URL as the service URL.'''
APPIUM_SERVICE_URL = auto()
'''Appium Service URL. Currently not processed.'''
APPIUM_AUTO_LAUNCH = auto()
'''Instructs Arjuna to launch Appium programmatically. Default is True. Currently not processed.'''
IMG_COMP_MIN_SCORE = auto()
'''A fraction that represents minimum image comparison score to decide on an image match. Default is 0.7. Currently not processed.'''
class TimeUnit(Enum):
'''
Allowed time unit types.
'''
MILLI_SECONDS = auto()
SECONDS = auto()
MINUTES = auto()
class BrowserName(Enum):
'''
Allowed browser names for Gui Automation.
'''
CHROME = auto()
FIREFOX = auto()
class DomDirection(Enum):
'''
Directions in DOM movement.
'''
UP = auto()
DOWN = auto()
LEFT = auto()
RIGHT = auto()
class DomNodeType(Enum):
'''
    Types of nodes in the DOM.
'''
NODE = auto()
BNODE = auto()
FNODE = auto()
import locale
import re
__locales = [i.upper() for i in locale.locale_alias.keys() if re.match('^[\w_]+$', i)]
Locale = Enum('Locale', dict(zip(__locales, range(len(__locales)))))
Locale.__doc__ = '''Allowed locale names in Arjuna.'''
| 2.140625
| 2
|
URI/1046.py
|
leilaapsilva/BabySteps
| 37
|
12780246
|
<filename>URI/1046.py
start, end = [int(x) for x in input().split(" ")]
time = 0
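# Match duration in whole hours on a 24-hour clock; wrap past midnight when the game ends on the following day (equal times count as 24 hours).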
if(start<end):
time=end-start
else:
time=end+24-start
print("O JOGO DUROU "+str(time)+" HORA(S)")
| 2.953125
| 3
|
sample/summary_and_plot_without_label.py
|
kishiyamat/npbdaa
| 4
|
12780247
|
#%%
from pathlib import Path  # needed for the Path("figures") / Path("summary_files") calls below
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import matplotlib.cm as cm
from tqdm import trange, tqdm
from sklearn.metrics import adjusted_rand_score
from argparse import ArgumentParser
from util.config_parser import ConfigParser_with_eval
#%% parse arguments
def arg_check(value, default):
return value if value else default
default_hypparams_model = "hypparams/model.config"
parser = ArgumentParser()
parser.add_argument("--model", help=f"hyper parameters of model, default is [{default_hypparams_model}]")
args = parser.parse_args()
hypparams_model = arg_check(args.model, default_hypparams_model)
#%%
def load_config(filename):
cp = ConfigParser_with_eval()
cp.read(filename)
return cp
#%%
def get_names():
return np.loadtxt("files.txt", dtype=str)
def get_datas_and_length(names):
datas = [np.loadtxt("DATA/" + name + ".txt") for name in names]
length = [len(d) for d in datas]
return datas, length
def get_results_of_word(names, length):
return _joblib_get_results(names, length, "s")
def get_results_of_letter(names, length):
return _joblib_get_results(names, length, "l")
def get_results_of_duration(names, length):
return _joblib_get_results(names, length, "d")
def _get_results(names, lengths, c):
return [np.loadtxt("results/" + name + "_" + c + ".txt").reshape((-1, l)) for name, l in zip(names, lengths)]
def _joblib_get_results(names, lengths, c):
from joblib import Parallel, delayed
def _component(name, length, c):
return np.loadtxt("results/" + name + "_" + c + ".txt").reshape((-1, length))
return Parallel(n_jobs=-1)([delayed(_component)(n, l, c) for n, l in zip(names, lengths)])
def _plot_discreate_sequence(feature, title, sample_data, cmap=None):
ax = plt.subplot2grid((2, 1), (0, 0))
plt.sca(ax)
ax.plot(feature)
ax.set_xlim((0, feature.shape[0]-1))
plt.ylabel('Feature')
#label matrix
ax = plt.subplot2grid((2, 1), (1, 0))
plt.suptitle(title)
plt.sca(ax)
ax.matshow(sample_data, aspect = 'auto', cmap=cmap)
#write x&y label
plt.xlabel('Frame')
plt.ylabel('Iteration')
plt.xticks(())
Path("figures").mkdir(exist_ok=True)
Path("summary_files").mkdir(exist_ok=True)
#%% config parse
print("Loading model config...")
config_parser = load_config(hypparams_model)
section = config_parser["model"]
word_num = section["word_num"]
letter_num = section["letter_num"]
print("Done!")
#%%
print("Loading results....")
names = get_names()
datas, length = get_datas_and_length(names)
l_results = get_results_of_letter(names, length)
w_results = get_results_of_word(names, length)
d_results = get_results_of_duration(names, length)
log_likelihood = np.loadtxt("summary_files/log_likelihood.txt")
resample_times = np.loadtxt("summary_files/resample_times.txt")
print("Done!")
train_iter = l_results[0].shape[0]
#%%
lcolors = ListedColormap([cm.tab20(float(i)/letter_num) for i in range(letter_num)])
wcolors = ListedColormap([cm.tab20(float(i)/word_num) for i in range(word_num)])
#%%
print("Plot results...")
for i, name in enumerate(tqdm(names)):
    plt.clf()
    _plot_discreate_sequence(datas[i], name + "_l", l_results[i], cmap=lcolors)
    plt.savefig("figures/" + name + "_l.png")
    plt.clf()
    _plot_discreate_sequence(datas[i], name + "_s", w_results[i], cmap=wcolors)
    plt.savefig("figures/" + name + "_s.png")
    plt.clf()
    _plot_discreate_sequence(datas[i], name + "_d", d_results[i], cmap=cm.binary)
    plt.savefig("figures/" + name + "_d.png")
print("Done!")
#%%
plt.clf()
plt.title("Log likelihood")
plt.plot(range(train_iter+1), log_likelihood, ".-")
plt.savefig("figures/Log_likelihood.png")
#%%
plt.clf()
plt.title("Resample times")
plt.plot(range(train_iter), resample_times, ".-")
plt.savefig("figures/Resample_times.png")
#%%
with open("summary_files/Sum_of_resample_times.txt", "w") as f:
f.write(str(np.sum(resample_times)))
| 2.109375
| 2
|
RasPiBaseStation/ExampleData.py
|
jgOhYeah/Farm-PJON-LoRa-network
| 1
|
12780248
|
data = [
bytearray(b'\x00\x00\x00\x00\x00'),
bytearray(b'\xff\x16:Wos\x02\x00C\xbd\x01\x02\xd1hS'),
bytearray(b'\xff\xe6_R\x93\\\x9f\x03h\xc0\x01\xb0af\xf8'),
bytearray(b'\xff&\x0f\x07ZP_\x00a\xae\x01\xd9}q\xeb'),
bytearray(b'\xff&\x0f\x07ZP:\x00a\xb2\x01n\xd8\xd4\x8b'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa2\x01\xcf\x8bf\xfd'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa3\x01\xd6\x90W\xbc'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa4\x01\x99\xd1\xc1{'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa5\x01\x80\xca\xf0:'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa6\x01\xab\xe7\xa3\xf9'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa7\x01\xb2\xfc\x92\xb8'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa8\x015d\x8ew'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa9\x01'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xaa\x01\x07R\xec\xf5'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xab\x01\x1eI\xdd\xb4'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xac\x01Q\x08Ks'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xad\x01H\x13z2'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xaf\x01z%\x18\xb0'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xb5\x01\xca\x08\xe2k'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xb6\x01\xe1%\xb1\xa8'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xbc\x01\x1b\xcaY"'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xbf\x010\xe7\n\xe1'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xc5\x01\xe57\x9b\x9d'),
bytearray(b'\xff&\x0f\x07ZP\x0e\x01a\xb1\x01\xa9\xe8\xfek'),
bytearray(b'\xff&\x0f\x07ZP\x10\x15a\xbe\x01)\xfc;\xa0'),
bytearray(b'\xff&\x0f\x07ZP\x8a\x01a\xae\x01 \xd4\xe6\xa7'),
bytearray(b'\xff&\x0f\x07ZP\x9e\x04a\xa3\x017\xc4Y\x9a'),
bytearray(b'\xff&\x0f\x07ZP\xaf\x00a\xb4\x01!d\xdbm'),
bytearray(b'\xff&\x0f\x07Zp\xbb \xeb\x90\x05_N\x88\xb9'),
bytearray(b'\xff&\x0f\x07ZP\xf5\ra\xa7\x01\xc0B\x95H'),
bytearray(b'\xff&\x0f\x07ZP\xfd\x00a\xa5\x010nd\xd6'),
bytearray(b'\xff&\x0f\x07ZP2\x00a\xa8\x01\xee\x85e\x91'),
bytearray(b'\xff&\x0f\x07ZP4\x00a\xa3\x01\x821I\xfa'),
bytearray(b'\xff&\x0f\x07ZP4\x00a\xa8\x01a\xc5\x901'),
bytearray(b'\xff&\x0f\x07ZP5\x00a\xaa\x01n\x93\xdb\x03'),
bytearray(b'\xff&\x0f\x07ZP5\x00a\xb8\x01\x16g\xab\xd0'),
bytearray(b'\xff&\x0f\x07ZP6\x00a\xb9\x01H\xdc\xe0A'),
bytearray(b'\xff&\x0f\x07ZP7\x00a\xaf\x01i$|&'),
bytearray(b'\xff&\x0f\x07ZP7\x00a\xb3\x01\x8fS!{'),
bytearray(b'\xff&\x0f\x07ZP7\x00a\xb5\x01\xd9\t\x86\xfd'),
bytearray(b'\xff&\x0f\x07ZP8\x00a\xae\x01\xf2o\xda\xb6'),
bytearray(b'\xff&\x0f\x07ZP8\x00a\xb4\x01BB m'),
bytearray(b'\xff&\x0f\x07ZPc\x08a\xa9\x017Y\x9aD'),
bytearray(b'\xff&\x0f\x07ZPm\x00a\xa5\x01\xe1hC\xc6'),
bytearray(b'\xff&\x0f\x07ZPW\x01a\xb5\x01\xf8\x87\x96\xd5'),
bytearray(b'\xff&\x10_JV}\x00F\x01r\x013\xf7\xa7R'),
bytearray(b'\xff&\x10_JV\x7f\x00F\x01r\x01~?\x06Y'),
bytearray(b'\xff&\x10_JV\x80\x00F\x01r\x01\x9a\xfb\xad\x95'),
bytearray(b'\xff&\x10_JV\x81\x00F\x01r\x01Q\xa7~0'),
bytearray(b'\xff&\x10_JV\x82\x00F\x01r\x01\xd73\x0c\x9e'),
bytearray(b'\xff&\x10_JV\x83\x00F\x01r\x01\x1co\xdf;'),
bytearray(b'\xff&\x10_JV\x84\x00F\x01B\x01\x01j\xef\x83'),
bytearray(b'\xff&\x10_JV\x84\x00F\x01r\x01\x01j\xef\x83'),
bytearray(b'\xff&\x10_JV\x85\x00F\x01r\x01\xca6<&'),
bytearray(b'\xff&\x10_JV\x86\x00F\x00r\x01M`$\xbf'),
bytearray(b'\xff&\x10_JV\x86\x00F\x01r\x01L\xa2N\x88'),
bytearray(b'\xff&\x10_JV+\x00F\x01r\x01\xb5c\xd5\xfc'),
bytearray(b'\xff&\x10_JV~\x00F\x01r\x01\xb5c\xd5\xfc'),
bytearray(b'\xff&\x10_JVx\x00F\x01r\x01c:6\xe1'),
bytearray(b'\xff&\x10_JVy\x06F7r\x01~?\x06Y'),
bytearray(b'\xff&\x10UJV~\x00F\x01r\x01\xb5c\xd5\xfc'),
bytearray(b'\xff&\xd5UJV}\x00N\x01ra3\xf7\xa7R'),
bytearray(b'\xff>?\x9aC\x19G\x04-\xbf\x02\xf7\xfb\xff\xe5'),
bytearray(b'\xffF\x0f\x0bZd:\x8ak\x1a\x085\x88W\xb3'),
bytearray(b'J\x06\x07\xff\xff\xf3~'),
bytearray(b'J\x06\x08\x90\xff\xc6\x00\xc6'),
bytearray(b'J\x06\x08\x90\xff\xc6\x01@'),
bytearray(b"\xff&\x0f\x07ZP\x00\x00a\xb1\x01\xaed\'o")
]
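# Illustrative inspection helper (added; not part of the original capture). The
# entries above are raw packets exactly as received, so a quick way to eyeball
# them is to dump each one as hex; no field meanings are assumed here.
if __name__ == "__main__":
    for i, packet in enumerate(data):
        print(f"{i:2d}: {len(packet):2d} bytes  {packet.hex()}")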
| 1.710938
| 2
|
lte/gateway/python/magma/pipelined/tests/old_tests/test_meter.py
|
remo5000/magma
| 3
|
12780249
|
<reponame>remo5000/magma<filename>lte/gateway/python/magma/pipelined/tests/old_tests/test_meter.py
"""
Copyright (c) 2016-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
import time
from test_controller import BaseMagmaTest
class MeterTest(BaseMagmaTest.MagmaControllerTest):
    def setUp(self):
        super(MeterTest, self).setUp()
        self.apps_under_test = ['pipelined.app.meter']

    def _generate_topology(self):
        # import here, after we've checked the environment
        from ovstest import util
        from magma.pkt_tester.topology_builder import TopologyBuilder
        self._topo_builder = TopologyBuilder()
        # set up a simple topology
        bridge = self._topo_builder.create_bridge(self.TEST_BRIDGE)
        self._port_no = {}
        for iface_name, ip_address in self.TEST_IPS.items():
            port = self._topo_builder.bind(iface_name, bridge)
            self._topo_builder.create_interface(iface_name,
                                                ip_address,
                                                self.TEST_NETMASK)
            self._port_no[iface_name] = port.port_no
        self.assertFalse(self._topo_builder.invalid_devices())

    def test_add_meter_flows(self):
        from ovstest import util
        from magma.pkt_tester.topology_builder import OvsException
        self._generate_topology()
        self.controller_thread.start()
        self._wait_for_controller("MeterController")
        # clear out any existing in_blocks and set up for the test
        in_net = self.TEST_NETS[self.SRC_PORT]
        # clear out existing net block to of port mappings
        for k in list(self.mc.IPBLOCK_TO_OFPORT.keys()):
            del self.mc.IPBLOCK_TO_OFPORT[k]
        self.mc.IPBLOCK_TO_OFPORT[in_net] = self._port_no[self.SRC_PORT]
        self._setup_ovs()
        self._wait_for_datapath()
        ret, out, err = util.start_process(["ovs-ofctl", "dump-flows",
                                            self.TEST_BRIDGE])
        dpid = list(self.mc.datapaths.keys())[0]
        self.mc._poll_stats(self.mc.datapaths[dpid])
        time.sleep(0.5)  # give the vswitch some time to respond
        # check if we're tracking usage for each user
        # it should be zero since there's no traffic
        for sid in self.mc.ip_to_sid.values():
            self.assertTrue(sid in self.mc.usage)
            ur = self.mc.usage[sid]
            self.assertTrue(ur.bytes_tx == 0)
            self.assertTrue(ur.bytes_rx == 0)
            self.assertTrue(ur.pkts_tx == 0)
            self.assertTrue(ur.pkts_rx == 0)
| 1.976563
| 2
|
givenergy_modbus/modbus.py
|
zaheerm/givenergy-modbus
| 0
|
12780250
|
from __future__ import annotations
import logging
from pymodbus.client.sync import ModbusTcpClient
from pymodbus.exceptions import ModbusIOException
from givenergy_modbus.decoder import GivEnergyResponseDecoder
from givenergy_modbus.framer import GivEnergyModbusFramer
from givenergy_modbus.model.register import HoldingRegister, InputRegister # type: ignore
from givenergy_modbus.pdu import (
    ModbusPDU,
    ReadHoldingRegistersRequest,
    ReadHoldingRegistersResponse,
    ReadInputRegistersRequest,
    ReadInputRegistersResponse,
    WriteHoldingRegisterRequest,
    WriteHoldingRegisterResponse,
)
from givenergy_modbus.transaction import GivEnergyTransactionManager
_logger = logging.getLogger(__package__)
class GivEnergyModbusTcpClient(ModbusTcpClient):
"""GivEnergy Modbus Client implementation.
This class ties together all the pieces to create a functional client that can converse with a
GivEnergy Modbus implementation over TCP. It exists as a thin wrapper around the ModbusTcpClient
to hot patch in our own Framer and TransactionManager since they are hardcoded classes for Decoder
and TransactionManager throughout constructors up the call chain.
We also provide a few convenience methods to read and write registers.
"""
def __init__(self, **kwargs):
kwargs.setdefault("port", 8899) # GivEnergy default instead of the standard 502
super().__init__(**kwargs)
self.framer = GivEnergyModbusFramer(GivEnergyResponseDecoder(), client=self)
self.transaction = GivEnergyTransactionManager(client=self, **kwargs)
self.timeout = 2
def __repr__(self):
return f"GivEnergyModbusTcpClient({self.host}:{self.port}): timeout={self.timeout})"
def execute(self, request: ModbusPDU = None) -> ModbusPDU | None:
"""Send the given PDU to the remote device and return any PDU returned in response."""
_logger.debug(f'Sending request {request}')
try:
response = super().execute(request)
if isinstance(response, ModbusIOException):
_logger.exception(response)
return response
except ModbusIOException as e:
_logger.exception(e)
self.close()
return None
except Exception as e:
# This seems to help with inverters becoming unresponsive from the portal."""
_logger.exception(e)
self.close()
return None
def read_registers(
self, kind: type[HoldingRegister | InputRegister], base_address: int, register_count: int, **kwargs
) -> dict[int, int]:
"""Read out registers from the correct location depending on type specified."""
# match types of register to their request/response types
t_req, t_res = {
HoldingRegister: (ReadHoldingRegistersRequest, ReadHoldingRegistersResponse),
InputRegister: (ReadInputRegistersRequest, ReadInputRegistersResponse),
}[kind]
request = t_req(base_register=base_address, register_count=register_count, **kwargs)
_logger.debug(
f'Attempting to read {t_req}s #{request.base_register}-'
f'{request.base_register + request.register_count} from device {hex(request.slave_address)}...'
)
response = self.execute(request)
if response and isinstance(response, t_res):
if response.base_register != base_address:
_logger.error(
f'Returned base register ({response.base_register}) '
f'does not match that from request ({base_address}).'
)
return {}
if response.register_count != register_count:
_logger.error(
f'Returned register count ({response.register_count}) '
f'does not match that from request ({register_count}).'
)
return {}
return response.to_dict()
_logger.error(f'Did not receive expected response type: {t_res.__name__} != {response.__class__.__name__}')
# FIXME this contract needs improving
return {}
def read_holding_registers(self, address, count=1, **kwargs) -> dict[int, int]:
"""Convenience method to help read out holding registers."""
return self.read_registers(HoldingRegister, address, count, **kwargs)
def read_input_registers(self, address, count=1, **kwargs) -> dict[int, int]:
"""Convenience method to help read out input registers."""
return self.read_registers(InputRegister, address, count, **kwargs)
def write_holding_register(self, register: HoldingRegister, value: int) -> None:
"""Write a value to a single holding register."""
if not register.write_safe: # type: ignore # shut up mypy
raise ValueError(f'Register {register.name} is not safe to write to')
if value != value & 0xFFFF:
raise ValueError(f'Value {value} must fit in 2 bytes')
_logger.info(f'Attempting to write {value}/{hex(value)} to Holding Register {register.value}/{register.name}')
request = WriteHoldingRegisterRequest(register=register.value, value=value)
result = self.execute(request)
if isinstance(result, WriteHoldingRegisterResponse):
if result.value != value:
raise AssertionError(f'Register read-back value 0x{result.value:04x} != written value 0x{value:04x}')
else:
raise AssertionError(f'Unexpected response from remote end: {result}')
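# Usage sketch (added for illustration; the host, address ranges and the register
# name below are assumptions, not taken from the library's register map):
if __name__ == "__main__":
    client = GivEnergyModbusTcpClient(host="192.168.1.50")  # port defaults to 8899
    holding = client.read_holding_registers(0, 60)  # returns {address: value}
    inputs = client.read_input_registers(0, 60)
    print(holding, inputs)
    # Writes go through write_holding_register() and need a HoldingRegister member
    # that is flagged write_safe, e.g. (hypothetical name):
    # client.write_holding_register(HoldingRegister.SOME_WRITE_SAFE_REGISTER, 1)
    client.close()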
| 2.28125
| 2
|