| column | dtype | lengths / values |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 2–616 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–69 |
| license_type | string | 2 classes |
| repo_name | string | length 5–118 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | length 4–63 |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 2.91k–686M, nullable (⌀) |
| star_events_count | int64 | 0–209k |
| fork_events_count | int64 | 0–110k |
| gha_license_id | string | 23 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 213 classes |
| src_encoding | string | 30 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 2–10.3M |
| extension | string | 246 classes |
| content | string | length 2–10.3M |
| authors | list | length 1–1 |
| author_id | string | length 0–212 |
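Each row below is one file-level record following this schema, with the full source text in `content`. As a quick, non-authoritative illustration of working with such records, here is a minimal filtering sketch; the filename `code_files.parquet` and the 100 kB size cut-off are assumptions, and only the column names plus the `permissive` / `Python` values are taken from the schema and the rows shown.

```python
import pandas as pd

# Hypothetical local materialization of the rows shown below.
df = pd.read_parquet("code_files.parquet")

# Keep permissively licensed, non-vendored, non-generated Python files
# under an arbitrary 100 kB size cut-off.
mask = (
    (df["license_type"] == "permissive")
    & (df["language"] == "Python")
    & ~df["is_vendor"]
    & ~df["is_generated"]
    & (df["length_bytes"] < 100_000)
)

for _, row in df[mask].iterrows():
    print(row["repo_name"], row["path"], row["length_bytes"])
```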
a8f1083d0034cf0ea0ac5869d94510ae7f8a3773
|
337d17b845f5fdd7f32f6a0607e494eed488a601
|
/leetcode/047-permutations-2.py
|
3f0e2b6bd2fe0d3f2338eb7e108827b6fcdfe67f
|
[] |
no_license
|
karsibali/solutions
|
e6130abe026a26558434239cde39c6a14a9712ba
|
4ba5d7ac41fecc87491cae2c88293bd798db31fd
|
refs/heads/master
| 2020-04-29T00:13:34.168323
| 2018-12-27T15:43:26
| 2018-12-27T15:43:26
| 175,686,183
| 1
| 0
| null | 2019-03-14T19:27:00
| 2019-03-14T19:27:00
| null |
UTF-8
|
Python
| false
| false
| 445
|
py
|
class Solution(object):
def permuteUnique(self, nums):
results, seen = [], set()
for i, n in enumerate(nums):
if n not in seen:
results.extend([n] + sub for sub in self.permuteUnique(nums[:i] + nums[i+1:]) or [[]])
seen.add(n)
return results
if __name__ == '__main__':
    assert Solution().permuteUnique([1, 1, 2]) == [
[1, 1, 2],
[1, 2, 1],
[2, 1, 1]
]
|
[
"ozan.onay@gmail.com"
] |
ozan.onay@gmail.com
|
940ddedb1a7b5c356b8dfafbda260579a472ce85
|
ffacb9d209701729e95304b421d0d4944350ca17
|
/ppline/utils/__init__.py
|
9b6c2b714c095afb7dc82119a2295f08643fc690
|
[
"MIT"
] |
permissive
|
5x12/ppline
|
fc13d26bee90c8bfedfbd085c6fb9b3300b7b4b0
|
a4f7bd9aae0752a8abe7c4580c808792bb044ff6
|
refs/heads/main
| 2023-07-17T23:19:48.418388
| 2021-09-03T12:01:07
| 2021-09-03T12:01:07
| 347,499,398
| 9
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 515
|
py
|
from collections.abc import Mapping  # Mapping lives in collections.abc; the bare `collections` alias was removed in Python 3.10
def deep_update(source, overrides):
"""Update a nested dictionary or similar mapping. Modify `source` in place."""
for key, value in overrides.items():
if isinstance(value, Mapping) and value:
# if key not in source:
# source[key] = {}
# s_c = source[key]
returned = deep_update(source.get(key, {}), value)
source[key] = returned
else:
source[key] = overrides[key]
return source
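# Hedged usage sketch, added for illustration (not part of the original module):
# deep_update recursively merges `overrides` into `source`, mutating and
# returning `source`; non-mapping values are overwritten wholesale.
if __name__ == '__main__':
    config = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
    deep_update(config, {'db': {'port': 5433}, 'debug': True})
    assert config == {'db': {'host': 'localhost', 'port': 5433}, 'debug': True}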
|
[
"andrew.wolf@ru.ey.com"
] |
andrew.wolf@ru.ey.com
|
6dbd2b65e546059c8b29a553876c2040d6df0aff
|
c7c348ea93c5172d53508ace6fe5d0f18e874e1c
|
/plot.py
|
abb4324f8fa4a8f5ecdf0f1fd67934c3a4bb90e5
|
[] |
no_license
|
Droggelbecher/mtg-analysis
|
dec203d7f6df20c2d3af1e84b2c1aae1908ba134
|
b076317016d0e0d40b8973a418957289cdc4e55a
|
refs/heads/master
| 2022-10-26T23:29:06.746146
| 2019-10-03T10:16:04
| 2019-10-03T10:16:04
| 89,160,835
| 0
| 0
| null | 2022-09-23T22:33:29
| 2017-04-23T17:47:34
|
Python
|
UTF-8
|
Python
| false
| false
| 4,134
|
py
|
from matplotlib import pyplot as plt
from matplotlib import cm
from matplotlib.patches import FancyArrowPatch
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d import proj3d
from numpy import linalg as LA
import numpy as np
from plot_annotator import PlotAnnotator
def plot_pca_multi_2d(Xtrans, pca_components, filename_base, dimensions = 3, labels = None, pointlabels = None):
for i, c in enumerate(np.linspace(0, 1, dimensions)):
plot_pca_2d(
Xtrans[:, i:(i+2)],
pca_components[i:(i+2)],
filename_base + str(i),
labels = labels,
pointlabels = pointlabels,
            color = cm.nipy_spectral(c)  # the 'spectral' colormap was renamed 'nipy_spectral' in newer Matplotlib
)
def plot_pca_2d(Xtrans, pca_components, filename_base, labels = None, pointlabels = None, color = 'blue'):
"""
Xtrans: matrix transformed by the PCA
pca_components: PCA vectors (pca.components_)
"""
arrow_scale = 10
    # We'll only plot the first two components
Xtrans = Xtrans[:, :2]
if labels is None:
        labels = ['x{}'.format(i) for i in range(pca_components.shape[1])]
plt.clf()
plt.gcf().set_size_inches(20, 15)
plt.scatter(Xtrans[:, 0], Xtrans[:, 1], marker = 'o', s = 0.5, color = color)
ax = plt.gca()
# Now draw the nice arrows
# rows of pca.components_ are the individual components, columns
# are features
for feature, label in zip(pca_components.T * arrow_scale, labels):
#print(LA.norm(feature[:2]))
if LA.norm(feature[:2]) < (arrow_scale / 10.0 ):
continue
plt.arrow(0, 0, feature[0], feature[1],
color = 'k', head_width = arrow_scale * 0.01,
head_length = arrow_scale * 0.01, alpha = 0.5)
sp0 = ax.transData.transform_point((0, 0))
sp = ax.transData.transform_point(feature[:2])
angle = np.arctan2(sp[1] - sp0[1], sp[0] - sp0[0]) * 180.0 / np.pi
s = 1.5
plt.text(feature[0] * s, feature[1] * s, label,
color = 'k', ha = 'left', va = 'bottom',
rotation = angle, rotation_mode = 'anchor')
plt.xlabel('PC0')
plt.ylabel('PC1')
plt.axis('off')
print('creating: {}'.format(filename_base + '.png'))
plt.savefig(filename_base + '.png', bbox_inches = 'tight')
#plt.show()
class Arrow3D(FancyArrowPatch):
def __init__(self, xs, ys, zs, *args, **kwargs):
FancyArrowPatch.__init__(self, (0,0), (0,0), *args, **kwargs)
self._verts3d = xs, ys, zs
def draw(self, renderer):
xs3d, ys3d, zs3d = self._verts3d
xs, ys, zs = proj3d.proj_transform(xs3d, ys3d, zs3d, renderer.M)
self.set_positions((xs[0],ys[0]),(xs[1],ys[1]))
FancyArrowPatch.draw(self, renderer)
def plot_pca_3d(Xtrans, pca_components, labels = None, pointlabels = None):
"""
Xtrans: matrix transformed by the PCA
pca_components: PCA vectors (pca.components_)
"""
arrow_scale = 10
    # We'll only plot the first three components
Xtrans = Xtrans[:, :3]
if labels is None:
        labels = ['x{}'.format(i) for i in range(pca_components.shape[1])]
fig = plt.figure()
plt.clf()
ax = Axes3D(fig, elev = 48, azim = 134)
plt.cla()
colors = np.random.rand(Xtrans.shape[0])
ax.scatter(Xtrans[:, 0], Xtrans[:, 1], Xtrans[:, 2], c = colors, alpha = 0.5, gid = np.arange(Xtrans.shape[0]), picker = True)
# Now draw the nice arrows
# rows of pca.components_ are the individual components, columns
# are features
for feature, label in zip(pca_components.T * arrow_scale, labels):
if LA.norm(feature[:2]) < (arrow_scale / 10.0 ):
continue
a = Arrow3D(
[0, feature[0]], [0, feature[1]], [0, feature[2]],
color = 'k',
arrowstyle = '->',
mutation_scale = 20,
alpha = 0.5)
ax.add_artist(a)
ax.text(feature[0] * 1.15, feature[1] * 1.15, feature[2] * 1.15, label, color = 'k', ha = 'center', va = 'center')
annotator = PlotAnnotator(ax, pointlabels)
fig.canvas.mpl_connect('pick_event', annotator)
plt.show()
|
[
"henning@hasemail.de"
] |
henning@hasemail.de
|
6ba613d9fccfb8ff74b47aaeacc8495eba851cb8
|
1c3eb97aefdb9b9a4c0786469fa771f5b9c715f0
|
/exchange/providers/__init__.py
|
5be50455c8c88c835c7434a019fcd01715380265
|
[] |
no_license
|
badprogrammist/exchange
|
34acd5e8bd98e1ed701e2efc9367e34f31063883
|
74eabeaa99f0b5f420f32bdb173de3c386563c72
|
refs/heads/master
| 2021-09-23T10:36:32.792276
| 2020-03-10T17:23:22
| 2020-03-10T17:23:22
| 245,658,475
| 0
| 0
| null | 2021-09-22T18:43:03
| 2020-03-07T15:35:25
|
Python
|
UTF-8
|
Python
| false
| false
| 104
|
py
|
from .factory import create_provider
from .provider import ExchangeRatesProvider, ExchangeRateLoadError
|
[
"badprogrammist@gmail.com"
] |
badprogrammist@gmail.com
|
c2e55d5b74527b2b8bbc2239cc6363069aae721a
|
ceb494c7473fcee8b3268e594348be941169492d
|
/mysite/users/forms.py
|
9bbe8c87282ee8a1415e2e922450780179bd4dd5
|
[] |
no_license
|
smurf123444/DjangoPersonal
|
b212d3221ecfe129c9cbdfc41ed950db0913b017
|
2425369c7662aa7204040db0484b5bb7937018bc
|
refs/heads/master
| 2020-04-04T09:24:40.727714
| 2018-11-14T00:09:47
| 2018-11-14T00:09:47
| 155,816,812
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 380
|
py
|
from django import forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import CustomUser
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm):
model = CustomUser
fields = ('username', 'email')
class CustomUserChangeForm(UserChangeForm):
class Meta:
model = CustomUser
fields = UserChangeForm.Meta.fields
|
[
"chris@Chriss-iMac.fios-router.home"
] |
chris@Chriss-iMac.fios-router.home
|
27ce06a0c72c7720eb38dc815b3e31a8feda6ac9
|
79f96860a68df26d54e51b9b1deb885f9fc45ce2
|
/week11/Blog/Blog/urls.py
|
f8d2f8606cb180e76a33636f0bb39c322c487a58
|
[] |
no_license
|
sbashkeyeva/BFDjango
|
fc3cd867cc79e20b866240f4a29544e161f3a0f9
|
08d1ea5a7a858d7397f575a4116d233ddf349610
|
refs/heads/master
| 2020-03-28T13:54:20.816607
| 2018-11-10T08:41:56
| 2018-11-10T08:41:56
| 148,440,026
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 956
|
py
|
"""Blog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('auth/', include('auth_.urls')),
path('', include('main.urls')),
path('api1/', include('api1.urls')),
path('api2/', include('api2.urls')),
path('api3/', include('api3.urls')),
]
|
[
"sbashkeyeva@gmail.com"
] |
sbashkeyeva@gmail.com
|
41aa7c8dfeb54afec88d4ea43f8a41b9b2d91fff
|
26f6313772161851b3b28b32a4f8d255499b3974
|
/Python/LinkedListinBinaryTree.py
|
bd1d5c8f035ba5eb54509a9bbf3fabf861bb754c
|
[] |
no_license
|
here0009/LeetCode
|
693e634a3096d929e5c842c5c5b989fa388e0fcd
|
f96a2273c6831a8035e1adacfa452f73c599ae16
|
refs/heads/master
| 2023-06-30T19:07:23.645941
| 2021-07-31T03:38:51
| 2021-07-31T03:38:51
| 266,287,834
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,582
|
py
|
"""
Given a binary tree root and a linked list with head as the first node.
Return True if all the elements in the linked list starting from the head correspond to some downward path connected in the binary tree otherwise return False.
In this context downward path means a path that starts at some node and goes downwards.
Example 1:
Input: head = [4,2,8], root = [1,4,4,null,2,2,null,1,null,6,8,null,null,null,null,1,3]
Output: true
Explanation: Nodes in blue form a subpath in the binary Tree.
Example 2:
Input: head = [1,4,2,6], root = [1,4,4,null,2,2,null,1,null,6,8,null,null,null,null,1,3]
Output: true
Example 3:
Input: head = [1,4,2,6,8], root = [1,4,4,null,2,2,null,1,null,6,8,null,null,null,null,1,3]
Output: false
Explanation: There is no path in the binary tree that contains all the elements of the linked list from head.
Constraints:
1 <= node.val <= 100 for each node in the linked list and binary tree.
The given linked list will contain between 1 and 100 nodes.
The given binary tree will contain between 1 and 2500 nodes.
"""
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
#Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def isSubPath(self, head: ListNode, root: TreeNode) -> bool:
def dfs(node,target):
if not target:
return True
if not node:
return False
return node.val == target.val and (dfs(node.left, target.next) or dfs(node.right, target.next))
if not head:
return True
if not root:
return False
return dfs(root,head) or self.isSubPath(head, root.left) or self.isSubPath(head, root.right)
class Solution:
def isSubPath(self, head: ListNode, root: TreeNode) -> bool:
def dfs(head,root):
if not head:
return True
if not root:
return False
return head.val == root.val and (dfs(head.next, root.left) or dfs(head.next, root.right))
if not head:
return True
if not root:
return False
return dfs(head, root) or self.isSubPath(head, root.left) or self.isSubPath(head, root.right)
# Example inputs from the problem statement:
# head = [4,2,8],     root = [1,4,4,null,2,2,null,1,null,6,8,null,null,null,null,1,3]
# head = [1,4,2,6],   root = [1,4,4,null,2,2,null,1,null,6,8,null,null,null,null,1,3]
# head = [1,4,2,6,8], root = [1,4,4,null,2,2,null,1,null,6,8,null,null,null,null,1,3]
|
[
"here0009@163.com"
] |
here0009@163.com
|
275303fa77363fee01b4e91b1fe16e3bb510088f
|
79e827b9159344b92716990083fa552f3ce466fa
|
/Others/python-raw-input.py
|
58849c329cf560b712bbc0562c29b0109d23dcc1
|
[
"MIT"
] |
permissive
|
PratikSaha198/Hackerrank
|
55faf6a13cc21cf4cb7a5eebc6b57c1fda9ca9e2
|
5ba7fb2a881e925a7a02d5c588923eb188dfc998
|
refs/heads/master
| 2020-08-04T21:45:24.658703
| 2019-05-11T08:40:10
| 2019-05-11T08:40:10
| 212,288,694
| 2
| 0
|
MIT
| 2019-10-02T08:21:22
| 2019-10-02T08:21:21
| null |
UTF-8
|
Python
| false
| false
| 41
|
py
|
def read():
s = input()
return s
|
[
"rajatsri94@gmail.com"
] |
rajatsri94@gmail.com
|
1280ea5538b7606979adb304851d00d11e5cbaad
|
34aa999d3699eb8bfb45ca507367952270455d81
|
/script.py
|
60f936d1bcd68acf0019c1f8cc2c53814932ea46
|
[] |
no_license
|
sametormanli/numeric-matrix-processor
|
1878fb38d2a1fc6b36f9445a35c80a3eef82a452
|
2568669165a9cce1cb126d487e4d2e51b6e85eb0
|
refs/heads/master
| 2023-04-17T20:06:24.720874
| 2021-05-01T15:24:38
| 2021-05-01T15:24:38
| 357,673,402
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,163
|
py
|
def add_matrices():
row_A, col_A = input('Enter size of the first matrix: ').split()
print('Enter the first matrix:')
A = [input().split() for _ in range(int(row_A))]
row_B, col_B = input('Enter size of the second matrix: ').split()
print('Enter the second matrix:')
B = [input().split() for _ in range(int(row_B))]
if row_A != row_B or col_A != col_B:
print('The operation cannot be performed.')
else:
A = [[float(num) for num in row] for row in A]
B = [[float(num) for num in row] for row in B]
result = []
for row in range(int(row_A)):
result.append([])
for col in range(int(col_A)):
result[row].append(A[row][col] + B[row][col])
print('The result is:')
[print(*row) for row in result]
def multiply_constant():
row_A, col_A = input('Enter size of the matrix: ').split()
print('Enter the matrix:')
A = [input().split() for _ in range(int(row_A))]
const = float(input('Enter the constant: '))
result = []
for i, row in enumerate(A):
result.append([])
for col in row:
result[i].append(float(col) * const)
print('The result is:')
[print(*row) for row in result]
def multiply_matrices():
row_A, col_A = input('Enter size of the first matrix: ').split()
print('Enter the first matrix:')
A = [input().split() for _ in range(int(row_A))]
row_B, col_B = input('Enter size of the second matrix: ').split()
print('Enter the second matrix:')
B = [input().split() for _ in range(int(row_B))]
if col_A != row_B:
print('The operation cannot be performed.')
else:
A = [[float(num) for num in row] for row in A]
B = [[float(num) for num in row] for row in B]
result = []
for i in range(int(row_A)):
result.append([])
for j in range(int(col_B)):
result[i].append(dot_product(A, B, i, j))
print('The result is:')
[print(*row) for row in result]
def dot_product(A, B, m, n):
vector_A = A[m]
vector_B = [sub[n] for sub in B]
total = 0
for i in range(len(A[m])):
total += vector_A[i] * vector_B[i]
return total
def transpose():
while True:
print('1. Main diagonal\n'
'2. Side diagonal\n'
'3. Vertical line\n'
              '4. Horizontal line')
entry = input('Select a method: ')
if entry in ('1', '2', '3', '4'):
break
print('Invalid entry. Try again!')
row, col = [int(num) for num in input('Enter the matrix size: ').split()]
print('Enter the matrix:')
matrix = [[float(num) for num in input().split()] for _ in range(row)]
result = [[None for _ in range(col)] for __ in range(row)]
for i in range(row):
for j in range(col):
if entry == '1':
result[i][j] = matrix[j][i]
if entry == '2':
result[i][j] = matrix[col - 1 - j][row - 1 - i]
if entry == '3':
result[i][j] = matrix[i][col - 1 - j]
if entry == '4':
result[i][j] = matrix[row - 1 - i][j]
print('The result is:')
[print(*row) for row in result]
def determinant():
row, col = [int(num) for num in input('Enter the matrix size: ').split()]
if row != col:
print('The operation cannot be performed.')
return
print('Enter the matrix:')
matrix = [[float(num) for num in input().split()] for _ in range(row)]
print('The result is:')
print(calculate_det(matrix, row))
def calculate_det(matrix, size):
if size == 1:
return matrix[0][0]
elif size == 2:
return matrix[0][0] * matrix[1][1] - matrix[0][1] * matrix[1][0]
elif size == 3:
return matrix[0][0] * matrix[1][1] * matrix[2][2] \
+ matrix[0][1] * matrix[1][2] * matrix[2][0] \
+ matrix[1][0] * matrix[2][1] * matrix[0][2] \
- matrix[0][2] * matrix[1][1] * matrix[2][0] \
- matrix[0][1] * matrix[1][0] * matrix[2][2] \
- matrix[1][2] * matrix[2][1] * matrix[0][0]
else:
total = 0
for i in range(size):
total += matrix[0][i] * ((-1) ** i) * calculate_det([[row[j] for j in range(size) if j != i] for row in matrix[1:]], size - 1)
return total
def inverse():
row, col = [int(num) for num in input('Enter the matrix size: ').split()]
if row != col:
print('The operation cannot be performed.')
return
print('Enter the matrix:')
matrix = [[float(num) for num in input().split()] for _ in range(row)]
det = calculate_det(matrix, row)
if det == 0:
print('This matrix does not have an inverse.')
else:
print('The result is:')
adj = adjoint(matrix, row)
result = adj[:]
for i in range(len(adj)):
for j in range(len(adj)):
result[i][j] = adj[i][j] / det
[print(*row) for row in result]
def adjoint(matrix, size):
process = [row[:] for row in matrix]
result = [row[:] for row in matrix]
for i in range(size):
for j in range(size):
result[j][i] = ((-1) ** (i + j)) * calculate_det([[row[k] for k in range(size) if k != j] for row in (process[:i] + process[i + 1:])], size - 1)
return result
def main():
while True:
print('\n'
'1. Add matrices\n'
'2. Multiply matrix by a constant\n'
'3. Multiply matrices\n'
'4. Transpose matrix\n'
'5. Calculate determinant\n'
'6. Inverse matrix\n'
'0. Exit\n')
entry = input()
if entry == '1':
add_matrices()
elif entry == '2':
multiply_constant()
elif entry == '3':
multiply_matrices()
elif entry == '4':
transpose()
elif entry == '5':
determinant()
elif entry == '6':
inverse()
elif entry == '0':
break
else:
print('Invalid entry.')
main()
|
[
"65285753+sametormanli@users.noreply.github.com"
] |
65285753+sametormanli@users.noreply.github.com
|
f7d484587c443c062e7843d6f4d6d3e0a3f868da
|
1e381f2b974bc82cd2f0bd0cc5029cbda1baedb2
|
/Hackerearth/shopsense-hiring-challenge/Oz and his game-board.py
|
bfb0fb972d3a6921d3a3fc32ffd0d540ec73ac57
|
[] |
no_license
|
rajat189/Competetive_programming
|
7655678935d40cada5a3d39ed400ee430f0311db
|
709065b3527eceb3923c13091608c174ae3a5d64
|
refs/heads/master
| 2021-01-19T05:53:35.790236
| 2016-04-12T19:12:34
| 2016-04-12T19:12:34
| 38,609,439
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 181
|
py
|
t=input()
while t>0:
t-=1
n,m,a,b,c=map(int,raw_input().split())
if 2*c>=a+b:
if n<=m:
ans = 2*n*c+(m-n)*b
else :
ans = 2*m*c+(n-m)*a
else:
ans = n*a+m*b
print ans
|
[
"coolrajatsharma18@gmail.com"
] |
coolrajatsharma18@gmail.com
|
b7dab92c03a23d7788cd6b1dd58126f32a322ebc
|
339bebc67f573f52db5fbdb19175dfefe797fcd8
|
/Server/cps_sec/cau/migrations/0010_auto_20200609_2135.py
|
894cf3494939f357e8a2fef1e4017cd257987248
|
[] |
no_license
|
BornDosirak/CPS
|
5888f07c52117365befcc9b1f89ae07e3ae669ea
|
a0b0795c390cffe4c6da796226cb921fc0b490ac
|
refs/heads/master
| 2022-10-30T12:22:09.216977
| 2020-06-14T15:54:25
| 2020-06-14T15:54:25
| 267,052,019
| 1
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 527
|
py
|
# Generated by Django 3.0.7 on 2020-06-09 12:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cau', '0009_auto_20200609_2003'),
]
operations = [
migrations.AlterField(
model_name='room',
name='door',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='room',
name='motion',
field=models.IntegerField(default=0),
),
]
|
[
"wlq7410@naver.com"
] |
wlq7410@naver.com
|
407adba72a22608b5d5662a40b3d6144e1c42712
|
2955c7c6982c58959c41e93e84353c90345b7f26
|
/partify/modules/instagram/filters.py
|
8227ba6f9eccc5c4b3dbcb9baf051cb3b7dbe3c2
|
[
"MIT"
] |
permissive
|
Shokesu/Partify
|
44f2c57d8121bf39b42e030ccb24adf2f33681b4
|
579718e0a9294365ee82860155eb1d373e9938ff
|
refs/heads/master
| 2021-03-30T21:14:58.766298
| 2014-08-08T10:15:33
| 2014-08-08T10:15:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,063
|
py
|
def filter_instagram_medias(medias):
"""
    Takes a list of Media objects (Instagram-python) and keeps only the fields you want to see.
:param medias: List of Media objects.
:return: list of dictionaries
"""
filtered_media = []
for media in medias:
filtered_media.append(filter_instagram_media(media))
return filtered_media
def filter_instagram_media(media):
"""
    Takes a Media object (Instagram-python) and keeps only the fields you want to see.
:param media: Media objects.
:return: dictionary
"""
# Extract user info
ret = {
"media": {
"low_resolution": media.get_thumbnail_url(),
"standard_resolution": media.get_standard_resolution_url(),
},
"user": {
"username": media.user.username,
"profile_picture": media.user.profile_picture
}
}
if media.caption:
ret["caption"] = {"text": media.caption.text, "user": media.caption.user.username}
else:
ret["caption"] = ""
return ret
|
[
"haukur@hauxi.is"
] |
haukur@hauxi.is
|
08332de2ae029001a90674cc93c98c084235c7ce
|
000b23276e74534db1587473b3e360c5952f664d
|
/code/FrameCapture.py
|
0447dc43f3a0c077897961ead768f5e7657ec7c2
|
[] |
no_license
|
yzy1421/ppp
|
55f946867aaf37b6085c345f71de0c194553ae67
|
176b8baf4af6342b94588fdd73115ed335287705
|
refs/heads/master
| 2020-06-28T14:59:31.291769
| 2019-08-14T12:13:47
| 2019-08-14T12:13:47
| 200,261,747
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,397
|
py
|
import os
import cv2
def videoname(path):
name=path.split('.')[0].split('/')[-1]
#print('videoname:%s'%name)
return name
# Function to extract frames
def FrameCapture01(path):
    # Path to video file
vidObj = cv2.VideoCapture(path)
#c=1
# Used as counter variable
count = 0
# checks whether frames were extracted
success = 1
    timeF = 100  # the interval (in frames) between saved frames
while success:
# vidObj object calls read
# function extract frames
success, image = vidObj.read()
if(count%timeF == 0):
# Saves the frames with frame-count
cv2.imwrite("%s_time%d.jpg" % (videoname(path),count), image)
count += 1
cv2.waitKey(1)
# Function to put all frames of each video into their own file
def CreateFrameFile(path,finalpath):
os.chdir(path)
folder = os.walk(path)
files = list(folder)[0][2]
for file in files:
#file=files[i]
filepath = path + '/' + file
filename = file.split('.')[0]
os.chdir(finalpath)
#os.mkdir(filename)
#os.chdir(finalpath + '/' + filename)
# Driver Code
if __name__ == '__main__':
# Calling the function
FrameCapture01(filepath)
os.chdir(path)
# path='/home/zy3/Documents/'+'FrameData'
# finalpath='/home/zy3/Documents'
# CreateFrameFile(path,finalpath,1)
# path='/home/zy3/Project_SC'
#
# video_path='/home/zy3/Documents/FINALDATA/VIDS'
#
#
#
# CreateFrameFile(video_path,os.path.join(path,'scene_classification_oringin'),1,3)
print('Successfully Create Files!')
#Function of timeF according to the duration of video
def get_Framenumber(file_path):
# videoname(path)
cap = cv2.VideoCapture(file_path)
if cap.isOpened():
cap.isOpened()
rate = cap.get(5) #frame rate
FrameNumber = cap.get(7) #number of frames
duration = FrameNumber/rate / 60 #time of video
return FrameNumber, rate,duration
#path='/home/zy3/Documents/'+'FrameData/20160211_083022_2018-03-20.mp4'
# path='/home/zy3/Documents/FINALDATA/VIDS/20160211_083309_2018-03-27.mp4'
# finalpath='/home/zy3/Documents'
# print(get_Framenumber(path))
# print(get_Framenumber(path1))
path='/home/zy3/Documents/FINALDATA/VIDS'
finalpath='/home/zy3/Project_SC/scene_classification_oringin'
CreateFrameFile(path,finalpath)
|
[
"zy3@csg25-05.ucc.ie"
] |
zy3@csg25-05.ucc.ie
|
16422538aa8332d0ab7d587c9c27c5731f45a232
|
5c97df625cce644e066a96e4c4688b6ee0e38c6a
|
/TMDetecter/TMDetectTest.py
|
3bab377908668d835fe00b466aadec2fdd78520a
|
[] |
no_license
|
ResearchingDexter/TinyMind-RMB-Recognition
|
f8139f28de0cf5cf4e225b90ec35237d3f5ef13b
|
d27a6a639762cfac7c67a48bc70552aca167851c
|
refs/heads/master
| 2020-06-07T03:20:02.457003
| 2020-01-11T15:19:35
| 2020-01-11T15:19:35
| 192,909,196
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,521
|
py
|
import torch
from torch.utils.data import DataLoader
import torch.nn as nn
import pdb
from typing import List
from torch.autograd import Variable
from IPython.display import clear_output
from datetime import datetime
from torchvision.transforms import transforms
from TMDetecter.TMDetectDataSet import FBDataSet,img_transform,load_img
from Logging import *
from TMDetecter.TMDetectUtils import fovea2boxes,box_nms
from TMUtils import TMcrop_img
from TMDetecter.TMRPN import TMRPN
from TMDetecter.TMDetectConfigure import *
@torch.no_grad()
def test(model=TMRPN,cfg=TMcfg):
if cfg.DEVICE[:4]=='cuda':
if torch.cuda.is_available()==False:
logging.info("can't find a GPU device")
pdb.set_trace()
model=model(cfg.NUM_CLASS)#transforms.ColorJitter(brightness=0.5,contrast=0.5,saturation=0.5,hue=0.3),
model.load_state_dict(torch.load(cfg.MODEL_PATH+cfg.MODEL_NAME,map_location=cfg.DEVICE))
dataset=FBDataSet(cfg.IMAGE_PATH,expected_img_size=cfg.EXPECTED_IMG_SIZE,
img_transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))]),
cfg=cfg,train=False)
dataloader=DataLoader(dataset,batch_size=cfg.BATCH_SIZE,num_workers=4,collate_fn=dataset.collate)
if cfg.DISTRIBUTED==True:
model=nn.DataParallel(module=model,device_ids=cfg.DEVICE_ID).cuda().eval()
else:
model.to(cfg.DEVICE).eval()
length=len(dataloader)
start_time=datetime.now()
empty_num=0
f=open('cannot_detect.txt','w')
multiscale_imgs_name=[]
for step,data in enumerate(dataloader):
step_time=datetime.now()
imgs,imgs_name,min_ratioes,images=data
#keep_images=list(range(imgs.size(0)))
imgs = Variable(imgs, requires_grad=False).to(cfg.DEVICE)
batch_size=imgs.size(0)
h,w=imgs.size(2),imgs.size(3)
loc_preds,cls_preds=model(imgs)
batch_cls,batch_score,batch_coordinate=fovea2boxes(loc_preds.cpu(),cls_preds.cpu(),torch.Tensor([w,h]),tuple_l=cfg.TUPLE_L)
logging.debug("length:{}|step:{}|imgs_name:{}".format(length,step,imgs_name))
for b in range(batch_size):
cls,score,coordinate=batch_cls[b],batch_score[b],batch_coordinate[b]
cls_list,score_list,coordinate_list=[],[],[]
for num_class in range(1,cfg.NUM_CLASS+1):
num_class_index=(cls==num_class)
cls_class,score_class,coordinate_class=cls[num_class_index],score[num_class_index],coordinate[num_class_index]
keep=box_nms(coordinate_class,score_class,threshold=0.3)
cls_class,score_class,coordinate_class=cls_class[keep],score_class[keep],coordinate_class[keep]
coordinate_class/=min_ratioes[b]
coordinate_class[:,[0,1]].floor_()
coordinate_class[:,[2,3]].ceil_()
if len(score_class)==0:
multiscale_imgs_name.append(imgs_name[b])
empty_num+=1
f.write(imgs_name[b]+'\n')
continue
#else:
#keep_images.pop(b)
_,index=score_class.max(0)
TMcrop_img(images[b],coordinate_class[index].tolist(),img_name=imgs_name[b],path=cfg.CROP_PATH)
if step%200==0:
clear_output(wait=True)
logging.debug("step_time cost :{}".format(datetime.now()-step_time))
multiscale_test(multiscale_imgs_name,model,cfg=cfg)
f.close()
logging.info("finshed and total cost of time is :{}|number of empty:{}".format(datetime.now()-start_time,empty_num))
@torch.no_grad()
def multiscale_test(imgs_name:List,model:nn.Module,cfg=TMcfg):#148KLEW0:fliped
img2tensor = transforms.Compose([transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))])
f=open('multi_cannot_detect.txt','w')
logging.info("starting multiscale test")
for i,img_name in enumerate(imgs_name):
try:
image = load_img(cfg.IMAGE_PATH + img_name)
except FileNotFoundError:
break
flag = False
break_time=0
for j,scale in enumerate(cfg.MULTISCALE_SIZE):
if flag==True:
break
#image = load_img(cfg.IMAGE_PATH + img_name)
logging.debug("length:{}|step:{}|img_name:{}|scale:{}".format(len(imgs_name),i,img_name,scale))
img,min_ratio=img_transform(image,scale)
#w,h=img.size
#pad=transforms.Pad(padding=(0,0,scale[0]-w,scale[1]-h),fill=0)
#img=pad(img_)
img_tensor=img2tensor(img)
h,w=img_tensor.size(1),img_tensor.size(2)
loc_preds,cls_preds=model(img_tensor.unsqueeze(0).to(cfg.DEVICE))
batch_cls,batch_score,batch_coordinate=fovea2boxes(loc_preds.cpu(),cls_preds.cpu(),torch.Tensor([w,h]),tuple_l=cfg.TUPLE_L)
cls,score,coordinate=batch_cls[0],batch_score[0],batch_coordinate[0]
for num_class in range(1,cfg.NUM_CLASS+1):
num_class_index=(cls==num_class)
cls_class,score_class,coordinate_class=cls[num_class_index],score[num_class_index],coordinate[num_class_index]
if len(cls_class)==0:
break_time+=1
continue
print('cls_class:{}'.format(cls_class))
keep=box_nms(coordinate_class,score_class,threshold=0.3)
cls_class,score_class,coordinate_class=cls_class[keep],score_class[keep],coordinate_class[keep]
print('coordinate:{}'.format(coordinate_class))
coordinate_class/=min_ratio
coordinate_class[:,[0,1]]=coordinate_class[:,[0,1]].floor()
coordinate_class[:,[2,3]]=coordinate_class[:,[2,3]].ceil()
print('coordinate:{}'.format(coordinate_class))
if len(score_class) == 0:
break_time+=1
break
_, index = score_class.max(0)
print('index:{}|coordinate_classs:{}'.format(index,coordinate_class[index]))
#TMcrop_img(image,temp,img_name=img_name,path=cfg.CROP_PATH)
TMcrop_img(image,coordinate_class[index],img_name=img_name,path=cfg.CROP_PATH_COMPLEMENT)
flag=True
if break_time==len(cfg.MULTISCALE_SIZE):
f.write(img_name + '\n')
#image.show(title='test')
#break
logging.info("flag:{}".format(break_time))
f.close()
def write_submission(cls_class:List,score_class:List,coordinate:List,img_name:str)->None:
f=open(img_name+'.txt','a+')
for i in range(len(cls_class)):
coordinate_i=coordinate[i]
coordinate_i=list(map(str,coordinate_i))
f.write(','.join(coordinate_i)+','+str(cls_class[i])+','+str(score_class[i]))
f.close()
if __name__=='__main__':
#test(TMRPN,TMcfg)
with open('cannot_detect.txt','r') as f:
imgs_name=f.read()
#print(imgs_name.split('\n'))
model = TMRPN(TMcfg.NUM_CLASS) # transforms.ColorJitter(brightness=0.5,contrast=0.5,saturation=0.5,hue=0.3),
model.load_state_dict(torch.load(TMcfg.MODEL_PATH + TMcfg.MODEL_NAME, map_location=TMcfg.DEVICE))
model.eval()
multiscale_test(imgs_name.split('\n'),model.to(TMcfg.DEVICE))
|
[
"yingsunwangjian@gmail.com"
] |
yingsunwangjian@gmail.com
|
ff5018bf66bd87cd7a7db9b1e21fcc3c7b630983
|
c6c40e6f11cb7dc57ec6bd51fe677a868f0197ae
|
/cw1/zad1.py
|
888f5624d36e7c9524742cfaea6eb347705d3f70
|
[] |
no_license
|
Lukkai/Embedded-Systems
|
4b6ed7dc4108f44e4b49150dfb49e102a21fae30
|
1fa0feeac287477d60d9c7ad1e5a6d9b9584521a
|
refs/heads/master
| 2023-05-31T12:37:52.021898
| 2021-06-18T10:26:48
| 2021-06-18T10:26:48
| 367,486,480
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 643
|
py
|
''' A program that finds the roots of a quadratic
trinomial for the real-valued cases
'''
import math
while True:
    a = input('a=').upper()  # Read a
if a == "KONIEC":
break
a = float(a)
b = float(input('b='))
c = float(input('c='))
delta = b**2 - 4*a*c
if delta > 0:
x1 = (-1*b + math.sqrt(delta))/(2*a)
x2 = (-1*b - math.sqrt(delta))/(2*a)
print('Dwa pierwiastki',x1,',',x2)
elif delta == 0:
x = -1*b/(2*a)
print('Jeden pierwiastek',x)
else:
        print('MicroPython nie osbługuje liczb zepolonych')  # "MicroPython does not handle complex numbers"; CPython does
print('Koniec')
|
[
"lukaszchh@gmail.com"
] |
lukaszchh@gmail.com
|
c43314e6b1a3c8bc088a5afdd5d4e7885bec44d0
|
c5767b5277035eed91305b2dd74f1c8916cd0ee8
|
/code/chapter5/5.5-address_matching.py
|
604f34c8d06f6eb3e7f31b61dea6c188fc09b158
|
[] |
no_license
|
pkxpp/tensorflow_cookbook_for_tf_1.12
|
8eae4f6a0989df27218249669fad34c8bc5e80ce
|
e22ab4232b9ce118144991558588b4607e2c207f
|
refs/heads/master
| 2020-06-04T06:01:40.757274
| 2019-06-22T03:14:17
| 2019-06-22T03:14:17
| 191,897,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,816
|
py
|
# Address Matching with k-Nearest Neighbors
#----------------------------------
#
# This function illustrates a way to perform
# address matching between two data sets.
#
# For each test address, we will return the
# closest reference address to it.
#
# We will consider two distance functions:
# 1) Edit distance for street number/name and
# 2) Euclidian distance (L2) for the zip codes
import random
import string
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
ops.reset_default_graph()
# Create a computation graph session
sess = tf.Session()
# Load the data
n = 10
street_names = ['abbey', 'baker', 'canal', 'donner', 'elm']
street_types = ['rd', 'st', 'ln', 'pass', 'ave']
rand_zips = [random.randint(65000,65999) for i in range(5)]
# Function to randomly create one typo in a string w/ a probability
def create_typo(s, prob=0.75):
if random.uniform(0,1) < prob:
rand_ind = random.choice(range(len(s)))
s_list = list(s)
s_list[rand_ind]=random.choice(string.ascii_lowercase)
s = ''.join(s_list)
return(s)
# Generate the reference dataset
numbers = [random.randint(1, 9999) for i in range(n)]
streets = [random.choice(street_names) for i in range(n)]
street_suffs = [random.choice(street_types) for i in range(n)]
zips = [random.choice(rand_zips) for i in range(n)]
full_streets = [str(x) + ' ' + y + ' ' + z for x,y,z in zip(numbers, streets, street_suffs)]
reference_data = [list(x) for x in zip(full_streets, zips)]
print(full_streets)
print(reference_data)
# Generate test dataset with some typos
typo_streets = [create_typo(x) for x in streets]
typo_full_streets = [str(x) + ' ' + y + ' ' + z for x,y,z in zip(numbers, typo_streets, street_suffs)]
test_data = [list(x) for x in zip(typo_full_streets, zips)]
test_address = tf.sparse_placeholder(dtype=tf.string)
test_zip = tf.placeholder(shape=[None, 1], dtype=tf.float32)
ref_address = tf.sparse_placeholder(dtype=tf.string)
ref_zip = tf.placeholder(shape=[None, n], dtype=tf.float32)
zip_dist = tf.square(tf.subtract(ref_zip, test_zip))
address_dist = tf.edit_distance(test_address, ref_address, normalize=True)
zip_max = tf.gather(tf.squeeze(zip_dist), tf.argmax(zip_dist, 1))
zip_min = tf.gather(tf.squeeze(zip_dist), tf.argmin(zip_dist, 1))
zip_sim = tf.div(tf.subtract(zip_max, zip_dist), tf.subtract(zip_max, zip_min))
address_sim = tf.subtract(1., address_dist)
address_weight = 0.5
zip_weight = 1. - address_weight
weighted_sim = tf.add(tf.transpose(tf.multiply(address_weight, address_sim)), tf.multiply(zip_weight, zip_sim))
top_match_index = tf.argmax(weighted_sim, 1)
def sparse_from_word_vec(word_vec):
num_words = len(word_vec)
indices = [[xi, 0, yi] for xi,x in enumerate(word_vec) for yi,y in enumerate(x)]
chars = list(''.join(word_vec))
return(tf.SparseTensorValue(indices, chars, [num_words, 1, 1]))
reference_addresses = [x[0] for x in reference_data]
reference_zips = np.array([[x[1] for x in reference_data]])
sparse_ref_set = sparse_from_word_vec(reference_addresses)
for i in range(n):
test_address_entry = test_data[i][0]
test_zip_entry = [[test_data[i][1]]]
# Create sparse address vectors
test_address_repeated = [test_address_entry] * n
sparse_test_set = sparse_from_word_vec(test_address_repeated)
feeddict={test_address: sparse_test_set,
test_zip: test_zip_entry,
ref_address: sparse_ref_set,
ref_zip: reference_zips}
best_match = sess.run(top_match_index, feed_dict=feeddict)
best_street = reference_addresses[best_match[0]]
[best_zip] = reference_zips[0][best_match]
[[test_zip_]] = test_zip_entry
print('Address: ' + str(test_address_entry) + ', ' + str(test_zip_))
print('Match : ' + str(best_street) + ', ' + str(best_zip))
|
[
"pkxpp@126.com"
] |
pkxpp@126.com
|
bf0181e1651f9909702210b03cb90cf2db570f52
|
a7e11a29e23bc1674afbe98ebeb110f499ee8586
|
/venv/Scripts/django-admin.py
|
e2386b4c9ccdfe91f489178d783d9d0a0900439d
|
[] |
no_license
|
Keicee32/Kotlin
|
1c9d7a5fba4551b9849841dceb199f52a8e8afd8
|
98e20e6e8a3fccedf1ade98b9174e6824249d911
|
refs/heads/master
| 2022-11-06T15:18:59.667822
| 2020-06-11T01:58:55
| 2020-06-11T01:58:55
| 256,632,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 173
|
py
|
#!C:\Users\Keicee32\PycharmProjects\Django\venv\Scripts\python.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
|
[
"kelss451@gmail.com"
] |
kelss451@gmail.com
|
070c3610c2ae6875098dcec622589c78087d23ea
|
0cb01a55d18a20e20e913a99b9976c300313d60f
|
/venv/Scripts/pip-script.py
|
eba510cd7c9dfec2959afaad77f30da4d6c67a60
|
[] |
no_license
|
jcosta191/borabora
|
5bb3d3ffe28bd4e26e3dc694ce43365b7b1105f1
|
485927c12d3c2e34185b1fff77a4a114194760fa
|
refs/heads/master
| 2020-05-14T09:07:59.284946
| 2019-04-16T17:21:01
| 2019-04-16T17:21:01
| 181,734,762
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 417
|
py
|
#!C:\Users\Utilizador\PycharmProjects\untitled\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
)
|
[
"48892223+jcosta191@users.noreply.github.com"
] |
48892223+jcosta191@users.noreply.github.com
|
e759a79b83f0502d1390ed516f8e5cfb6aa62ed9
|
9a6e38e72be5b76ad136cd0ecf354019ab1f11ca
|
/ssig_site/metrics/models.py
|
0397536c5ae01f8a001c91e65279b322446ec15f
|
[
"MIT"
] |
permissive
|
LeoMcA/103P_2018_team51
|
dd5474fdb9bd63f74081426c4cfad7ebea43ac16
|
cca9e022456b1e2653f0b69420ea914661c39b27
|
refs/heads/master
| 2021-05-01T18:48:40.689418
| 2018-04-28T00:17:59
| 2018-04-28T00:17:59
| 121,009,540
| 0
| 0
| null | 2018-02-10T11:26:52
| 2018-02-10T11:26:52
| null |
UTF-8
|
Python
| false
| false
| 330
|
py
|
from django.db import models
from django.utils.timezone import now
from django.contrib.postgres.fields import JSONField
class Metric(models.Model):
name = models.CharField(max_length=200)
datetime = models.DateTimeField(default=now)
increment = models.SmallIntegerField(default=1)
data = JSONField(default=dict)
|
[
"leo.mcardle.17@ucl.ac.uk"
] |
leo.mcardle.17@ucl.ac.uk
|
bc77030b6316760f8c2337921c655be3d16e06b6
|
864580fb04b763b154c79ecded78439478f0e3f8
|
/Push.py
|
94ec39f2b7b0ad9da33149541caba1f855a6898c
|
[] |
no_license
|
technofications/ableton_push
|
4dd976024b93c4d3aa3a69f6e9b9924fb06fb4b2
|
89ae7b59461fdc1070af96c59f7d7473bb4a9589
|
refs/heads/master
| 2020-12-03T05:20:42.762587
| 2013-07-17T00:01:03
| 2013-07-17T00:01:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 57,323
|
py
|
#Embedded file name: /Users/versonator/Jenkins/live/Projects/AppLive/Resources/MIDI Remote Scripts/Push/Push.py
from __future__ import with_statement
import Live
from contextlib import contextmanager
from functools import partial
from _Framework.Dependency import inject
from _Framework.ControlSurface import ControlSurface
from _Framework.InputControlElement import MIDI_CC_TYPE, MIDI_NOTE_TYPE, MIDI_CC_STATUS, MIDI_NOTE_ON_STATUS
from _Framework.ButtonMatrixElement import ButtonMatrixElement
from _Framework.ModesComponent import AddLayerMode, MultiEntryMode, ModesComponent, SetAttributeMode, CancellableBehaviour, AlternativeBehaviour, ReenterBehaviour, DynamicBehaviourMixin, ExcludingBehaviourMixin
from _Framework.SysexValueControl import SysexValueControl
from _Framework.Layer import Layer
from _Framework.Resource import PrioritizedResource
from _Framework.DeviceBankRegistry import DeviceBankRegistry
from _Framework.SubjectSlot import subject_slot, subject_slot_group
from _Framework.Util import find_if, clamp, nop, mixin, const
from _Framework.Defaults import TIMER_DELAY
from OptionalElement import OptionalElement
from ComboElement import ComboElement
from HandshakeComponent import HandshakeComponent, make_dongle_message
from ValueComponent import ValueComponent, ParameterValueComponent
from ConfigurableButtonElement import ConfigurableButtonElement, PadButtonElement
from SpecialSessionComponent import SpecialSessionComponent, SpecialSessionZoomingComponent
from SpecialMixerComponent import SpecialMixerComponent
from SpecialTransportComponent import SpecialTransportComponent
from SpecialPhysicalDisplay import SpecialPhysicalDisplay
from InstrumentComponent import InstrumentComponent
from StepSeqComponent import StepSeqComponent
from LoopSelectorComponent import LoopSelectorComponent
from ViewControlComponent import ViewControlComponent
from ClipControlComponent import ClipControlComponent
from DisplayingDeviceComponent import DisplayingDeviceComponent
from DeviceNavigationComponent import DeviceNavigationComponent
from SessionRecordingComponent import SessionRecordingComponent
from NoteRepeatComponent import NoteRepeatComponent
from ClipCreator import ClipCreator
from MatrixMaps import PAD_TRANSLATIONS, FEEDBACK_CHANNELS
from BackgroundComponent import BackgroundComponent, ModifierBackgroundComponent
from BrowserComponent import BrowserComponent
from BrowserModes import BrowserHotswapMode
from Actions import CreateInstrumentTrackComponent, CreateDefaultTrackComponent, CaptureAndInsertSceneComponent, DuplicateLoopComponent, SelectComponent, DeleteComponent, DeleteSelectedClipComponent, DeleteSelectedSceneComponent, CreateDeviceComponent
from M4LInterfaceComponent import M4LInterfaceComponent
from UserSettingsComponent import UserComponent
from MessageBoxComponent import DialogComponent, NotificationComponent
from TouchEncoderElement import TouchEncoderElement
from TouchStripElement import TouchStripElement
from TouchStripController import TouchStripControllerComponent, TouchStripEncoderConnection
from Selection import L9CSelection
from AccentComponent import AccentComponent
from AutoArmComponent import AutoArmComponent
from WithPriority import WithPriority
from Settings import make_pad_parameters, SETTING_WORKFLOW, SETTING_THRESHOLD, SETTING_CURVE
from PadSensitivity import PadUpdateComponent, pad_parameter_sender
import Skin
import consts
import Colors
import Sysex
import Settings
GLOBAL_MAP_MODE = Live.MidiMap.MapMode.relative_smooth_two_compliment
class Push(ControlSurface):
"""
Push controller script.
Disclaimer: Any use of the Push control surface code (the "Code")
or parts thereof for commercial purposes or in a commercial context
is not allowed. Though we do not grant a license for non-commercial
use of the Code, you may use it in this regard but should be aware that
(1) we reserve the right to deny the future use any time and
(2) you need to check whether the use is allowed under the national law
applicable to your use.
"""
def __init__(self, c_instance):
super(Push, self).__init__(c_instance)
injecting = inject(expect_dialog=const(self.expect_dialog), show_notification=const(self.show_notification), selection=lambda : L9CSelection(application=self.application(), device_component=self._device, navigation_component=self._device_navigation))
self._push_injector = injecting.everywhere()
with self.component_guard():
self._suppress_sysex = False
self._skin = Skin.make_default_skin()
self._clip_creator = ClipCreator()
self._device_selection_follows_track_selection = True
self._create_pad_sensitivity_update()
self._create_controls()
self._init_settings()
self._init_message_box()
self._init_background()
self._init_touch_strip_controller()
self._init_accent()
self._init_transport_and_recording()
self._init_global_actions()
self._init_mixer()
self._init_session()
self._init_step_sequencer()
self._init_instrument()
self._init_note_repeat()
self._init_user()
self._init_matrix_modes()
self._init_track_modes()
self._init_device()
self._init_browser()
self._init_clip_settings()
self._init_main_modes()
self._init_m4l_interface()
self._init_handshake()
self.set_pad_translations(PAD_TRANSLATIONS)
self._on_selected_track_changed()
self._on_session_record_changed.subject = self.song()
self._on_session_record_changed()
self.set_highlighting_session_component(self._session)
self.set_feedback_channels(FEEDBACK_CHANNELS)
self.log_message('Push script loaded')
self._send_midi(Sysex.WELCOME_MESSAGE)
def disconnect(self):
self._pre_serialize()
with self.component_guard():
self._user.mode = Sysex.USER_MODE
super(Push, self).disconnect()
self._send_midi(Sysex.GOOD_BYE_MESSAGE)
self.log_message('Push script unloaded')
@contextmanager
def component_guard(self):
with super(Push, self).component_guard():
with self._push_injector:
song_view = self.song().view
old_selected_track = song_view.selected_track
yield
if song_view.selected_track != old_selected_track:
self._track_selection_changed_by_action()
def _track_selection_changed_by_action(self):
if self._matrix_modes.selected_mode == 'note':
self._session_recording.deactivate_recording()
if self._auto_arm.needs_restore_auto_arm:
self._auto_arm.restore_auto_arm()
def refresh_state(self):
super(Push, self).refresh_state()
if self._user.mode == Sysex.LIVE_MODE:
self.schedule_message(5, self._start_handshake)
def _pre_serialize(self):
"""
This will pre-serialize all settings, as a later access to
Push's objects might cause problems with Pickle
"""
from pickle import dumps
from encodings import ascii
nop(ascii)
preferences = self._c_instance.preferences('Push')
dump = dumps(self._pref_dict)
preferences.set_serializer(lambda : dump)
def _init_settings(self):
from pickle import loads, dumps
from encodings import ascii
nop(ascii)
preferences = self._c_instance.preferences('Push')
self._pref_dict = {}
try:
self._pref_dict = loads(str(preferences))
except Exception:
pass
pref_dict = self._pref_dict
preferences.set_serializer(lambda : dumps(pref_dict))
self._settings = Settings.create_settings(preferences=self._pref_dict)
self._on_pad_curve.subject = self._settings[SETTING_CURVE]
self._on_pad_threshold.subject = self._settings[SETTING_THRESHOLD]
def _init_handshake(self):
dongle_message, dongle = make_dongle_message(Sysex.DONGLE_ENQUIRY_PREFIX)
identity_control = SysexValueControl(Sysex.IDENTITY_PREFIX, Sysex.IDENTITY_ENQUIRY)
dongle_control = SysexValueControl(Sysex.DONGLE_PREFIX, dongle_message)
presentation_control = SysexValueControl(Sysex.DONGLE_PREFIX, Sysex.make_presentation_message(self.application()))
self._handshake = HandshakeComponent(identity_control=identity_control, dongle_control=dongle_control, presentation_control=presentation_control, dongle=dongle)
self._on_handshake_success.subject = self._handshake
self._on_handshake_failure.subject = self._handshake
def _start_handshake(self):
for control in self.controls:
receive_value_backup = getattr(control, '_receive_value_backup', nop)
if receive_value_backup != nop:
control.receive_value = receive_value_backup
send_midi_backup = getattr(control, '_send_midi_backup', nop)
if send_midi_backup != nop:
control.send_midi = send_midi_backup
self._handshake._start_handshake()
self.update()
def update(self):
self._on_session_record_changed()
self._on_note_repeat_mode_changed(self._note_repeat.selected_mode)
self.set_feedback_channels(FEEDBACK_CHANNELS)
self._update_calibration()
super(Push, self).update()
@subject_slot('success')
def _on_handshake_success(self):
        self.log_message('Handshake succeeded!')
self.update()
@subject_slot('failure')
def _on_handshake_failure(self):
self.log_message('Handshake failed, performing harakiri!')
for control in self.controls:
receive_value_backup = getattr(control, 'receive_value', nop)
if receive_value_backup != nop:
control._receive_value_backup = receive_value_backup
send_midi_backup = getattr(control, 'send_midi', nop)
if send_midi_backup != nop:
control._send_midi_backup = send_midi_backup
control.receive_value = nop
control.send_midi = nop
def _update_calibration(self):
self._send_midi(Sysex.CALIBRATION_SET)
def _create_pad_sensitivity_update(self):
all_pad_sysex_control = SysexValueControl(Sysex.ALL_PADS_SENSITIVITY_PREFIX)
pad_sysex_control = SysexValueControl(Sysex.PAD_SENSITIVITY_PREFIX)
sensitivity_sender = pad_parameter_sender(all_pad_sysex_control, pad_sysex_control)
self._pad_sensitivity_update = PadUpdateComponent(all_pads=range(64), parameter_sender=sensitivity_sender, default_profile=Settings.action_pad_sensitivity, update_delay=TIMER_DELAY)
def _create_controls(self):
is_momentary = True
def create_button(note, name, **k):
button = ConfigurableButtonElement(is_momentary, MIDI_CC_TYPE, 0, note, name=name, skin=self._skin, **k)
return button
def create_modifier_button(note, name, **k):
button = create_button(note, name, resource_type=PrioritizedResource, **k)
return button
undo_handler = self.song()
self._nav_up_button = create_button(46, 'Up_Arrow')
self._nav_down_button = create_button(47, 'Down_Arrow')
self._nav_left_button = create_button(44, 'Left_Arrow')
self._nav_right_button = create_button(45, 'Right_Arrow')
self._nav_up_button.default_states = consts.SCROLL_SIDE_BUTTON_STATES
self._nav_down_button.default_states = consts.SCROLL_SIDE_BUTTON_STATES
self._nav_left_button.default_states = consts.SCROLL_SIDE_BUTTON_STATES
self._nav_right_button.default_states = consts.SCROLL_SIDE_BUTTON_STATES
self._shift_button = create_modifier_button(49, 'Shift_Button')
self._select_button = create_modifier_button(48, 'Select_Button')
self._delete_button = create_modifier_button(118, 'Delete_Button', undo_step_handler=undo_handler)
self._duplicate_button = create_modifier_button(88, 'Duplicate_Button', undo_step_handler=undo_handler)
self._quantize_button = create_modifier_button(116, 'Quantization_Button', undo_step_handler=undo_handler)
self._accent_button = create_modifier_button(57, 'Accent_Button')
self._in_button = create_button(62, 'In_Button')
self._out_button = create_button(63, 'Out_Button')
self._master_select_button = create_button(28, 'Master_Select_Button')
self._octave_down_button = create_button(54, 'Octave_Down_Button')
self._octave_up_button = create_button(55, 'Octave_Up_Button')
self._repeat_button = create_button(56, 'Repeat_Button')
self._octave_up_button.default_states = consts.SCROLL_SIDE_BUTTON_STATES
self._octave_down_button.default_states = consts.SCROLL_SIDE_BUTTON_STATES
self._global_mute_button = create_modifier_button(60, 'Global_Mute_Button')
self._global_solo_button = create_modifier_button(61, 'Global_Solo_Button')
self._global_track_stop_button = create_modifier_button(29, 'Track_Stop_Button')
self._scale_presets_button = create_button(58, 'Scale_Presets_Button')
self._vol_mix_mode_button = create_button(114, 'Vol_Mix_Mode_Button')
self._device_mode_button = create_button(110, 'Device_Mode_Button')
self._clip_mode_button = create_button(113, 'Clip_Mode_Button')
self._browse_mode_button = create_button(111, 'Browse_Mode_Button')
self._single_track_mix_mode_button = create_button(112, 'Single_Track_Mode_Button')
self._pan_send_mix_mode_button = create_button(115, 'Pan_Send_Mode_Button', resource_type=PrioritizedResource)
self._note_mode_button = create_button(50, 'Note_Mode_Button')
self._session_mode_button = create_button(51, 'Session_Mode_Button')
self._play_button = create_button(85, 'Play_Button')
self._new_button = create_button(87, 'New_Button')
self._automation_button = create_button(89, 'Automation_Button')
self._tap_tempo_button = create_button(3, 'Tap_Tempo_Button')
self._metronome_button = create_button(9, 'Metronome_Button')
self._fixed_length_button = create_button(90, 'Fixed_Length_Button')
self._record_button = create_button(86, 'Record_Button')
self._undo_button = create_button(119, 'Undo_Button')
self._create_device_button = create_button(52, 'Create_Device_Button', undo_step_handler=undo_handler)
self._create_track_button = create_button(53, 'Create_Track_Button', undo_step_handler=undo_handler)
self._double_button = create_button(117, 'Double_Button', undo_step_handler=undo_handler)
self._user_button = create_button(59, 'User_Button', undo_step_handler=undo_handler)
self._select_buttons = ButtonMatrixElement(name='Track_Select_Buttons', rows=[[ create_button(20 + idx, 'Track_Select_Button' + str(idx)) for idx in xrange(8) ]])
self._track_state_buttons = ButtonMatrixElement(name='Track_State_Buttons', rows=[[ create_button(102 + idx, 'Track_State_Button' + str(idx), is_rgb=True) for idx in xrange(8) ]])
self._side_buttons = ButtonMatrixElement(name='Scene_Launch_Buttons', rows=[[ create_button(36 + idx, 'Scene_Launch_Button' + str(idx), default_states={True: 'Session.SceneSelected',
False: 'Session.SceneUnselected'}) for idx in reversed(xrange(8)) ]])
def create_display_line(clear_cmd, write_cmd, index):
line = SpecialPhysicalDisplay(consts.DISPLAY_LENGTH, 1)
line.set_clear_all_message(clear_cmd)
line.set_message_parts(write_cmd, (247,))
line.name = 'Display_Line_%d' % index
line.reset()
return line
self._display_line1 = create_display_line(Sysex.CLEAR_LINE1, Sysex.WRITE_LINE1, 0)
self._display_line2 = create_display_line(Sysex.CLEAR_LINE2, Sysex.WRITE_LINE2, 1)
self._display_line3 = create_display_line(Sysex.CLEAR_LINE3, Sysex.WRITE_LINE3, 2)
self._display_line4 = create_display_line(Sysex.CLEAR_LINE4, Sysex.WRITE_LINE4, 3)
self._display_lines = [self._display_line1,
self._display_line2,
self._display_line3,
self._display_line4]
def create_pad_button(pad_id, name, **k):
return PadButtonElement(pad_id, self._pad_sensitivity_update, is_momentary, MIDI_NOTE_TYPE, 0, (36 + pad_id), skin=self._skin, name=name, **k)
self._matrix = ButtonMatrixElement(name='Button_Matrix', rows=[ [ create_pad_button((7 - row) * 8 + column, str(column) + '_Clip_' + str(row) + '_Button', is_rgb=True, default_states={True: 'DefaultMatrix.On',
False: 'DefaultMatrix.Off'}) for column in xrange(8) ] for row in xrange(8) ])
def create_note_button(note, name, **k):
return ConfigurableButtonElement(is_momentary, MIDI_NOTE_TYPE, 0, note, skin=self._skin, name=name, **k)
self._touch_strip_tap = create_note_button(12, 'Touch_Strip_Tap')
self._touch_strip_control = TouchStripElement(name='Touch_Strip_Control', touch_button=self._touch_strip_tap)
self._touch_strip_control.set_feedback_delay(-1)
self._touch_strip_control.set_needs_takeover(False)
self._touch_strip_control.set_touch_button(self._touch_strip_tap)
class Deleter(object):
@property
def is_deleting(_):
return self._delete_component.is_deleting
def delete_clip_envelope(_, param):
return self._delete_component.delete_clip_envelope(param)
deleter = Deleter()
self._tempo_control_tap = create_note_button(10, 'Tempo_Control_Tap')
self._tempo_control = TouchEncoderElement(MIDI_CC_TYPE, 0, 14, GLOBAL_MAP_MODE, name='Tempo_Control', undo_step_handler=self.song(), delete_handler=deleter, encoder_sensitivity=consts.ENCODER_SENSITIVITY, touch_button=self._tempo_control_tap)
self._swing_control_tap = create_note_button(9, 'Swing_Control_Tap')
self._swing_control = TouchEncoderElement(MIDI_CC_TYPE, 0, 15, GLOBAL_MAP_MODE, name='Swing_Control', undo_step_handler=self.song(), delete_handler=deleter, encoder_sensitivity=consts.ENCODER_SENSITIVITY, touch_button=self._swing_control_tap)
self._master_volume_control_tap = create_note_button(8, 'Master_Volume_Tap')
self._master_volume_control = TouchEncoderElement(MIDI_CC_TYPE, 0, 79, GLOBAL_MAP_MODE, undo_step_handler=self.song(), delete_handler=deleter, name='Master_Volume_Control', encoder_sensitivity=consts.ENCODER_SENSITIVITY, touch_button=self._master_volume_control_tap)
self._global_param_touch_buttons = ButtonMatrixElement(name='Track_Control_Touches', rows=[[ create_note_button(index, 'Track_Control_Touch_' + str(index)) for index in range(8) ]])
self._global_param_controls = ButtonMatrixElement(name='Track_Controls', rows=[[ TouchEncoderElement(MIDI_CC_TYPE, 0, 71 + index, GLOBAL_MAP_MODE, undo_step_handler=self.song(), delete_handler=deleter, encoder_sensitivity=consts.ENCODER_SENSITIVITY, name='Track_Control_' + str(index), touch_button=self._global_param_touch_buttons[index]) for index in xrange(8) ]])
self._on_param_encoder_touched.replace_subjects(self._global_param_touch_buttons)
def _init_background(self):
self._background = BackgroundComponent()
self._background.layer = Layer(display_line1=self._display_line1, display_line2=self._display_line2, display_line3=self._display_line3, display_line4=self._display_line4, top_buttons=self._select_buttons, bottom_buttons=self._track_state_buttons, scales_button=self._scale_presets_button, octave_up=self._octave_up_button, octave_down=self._octave_down_button, side_buttons=self._side_buttons, repeat_button=self._repeat_button, accent_button=self._accent_button, in_button=self._in_button, out_button=self._out_button, param_controls=self._global_param_controls, param_touch=self._global_param_touch_buttons, tempo_control_tap=self._tempo_control_tap, master_control_tap=self._master_volume_control_tap, touch_strip=self._touch_strip_control, touch_strip_tap=self._touch_strip_tap, nav_up_button=self._nav_up_button, nav_down_button=self._nav_down_button, nav_left_button=self._nav_left_button, nav_right_button=self._nav_right_button, _notification=self._notification.use_single_line(2))
self._background.layer.priority = consts.BACKGROUND_PRIORITY
self._matrix_background = BackgroundComponent()
self._matrix_background.set_enabled(False)
self._matrix_background.layer = Layer(matrix=self._matrix)
self._mod_background = ModifierBackgroundComponent()
self._mod_background.layer = Layer(shift_button=self._shift_button, select_button=self._select_button, delete_button=self._delete_button, duplicate_button=self._duplicate_button, quantize_button=self._quantize_button)
def _can_auto_arm_track(self, track):
routing = track.current_input_routing
return routing == 'Ext: All Ins' or routing == 'All Ins' or routing.startswith('Push Input')
def _init_touch_strip_controller(self):
strip_controller = TouchStripControllerComponent()
strip_controller.set_enabled(False)
strip_controller.layer = Layer(touch_strip=self._touch_strip_control)
strip_controller.layer.priority = consts.MODAL_DIALOG_PRIORITY
self._strip_connection = TouchStripEncoderConnection(strip_controller, self._touch_strip_tap)
self._tempo_control.set_observer(self._strip_connection)
self._swing_control.set_observer(self._strip_connection)
self._master_volume_control.set_observer(self._strip_connection)
for encoder in self._global_param_controls:
encoder.set_observer(self._strip_connection)
def _init_matrix_modes(self):
self._auto_arm = AutoArmComponent(name='Auto_Arm')
self._auto_arm.can_auto_arm_track = self._can_auto_arm_track
self._auto_arm.notification_layer = Layer(display_line1=self._display_line3)
self._auto_arm.notification_layer.priority = consts.NOTIFICATION_PRIORITY
self._note_modes = ModesComponent(name='Note_Modes')
self._note_modes.add_mode('sequencer', [self._note_repeat, self._accent_component, self._step_sequencer])
self._note_modes.add_mode('looper', self._audio_loop if consts.PROTO_AUDIO_NOTE_MODE else self._matrix_background)
self._note_modes.add_mode('instrument', [self._note_repeat, self._accent_component, self._instrument])
self._note_modes.add_mode('disabled', self._matrix_background)
self._note_modes.selected_mode = 'disabled'
self._note_modes.set_enabled(False)
self._matrix_modes = ModesComponent(name='Matrix_Modes')
self._matrix_modes.add_mode('session', [(self._zooming, self._zooming_layer), (self._session, self._session_layer), AddLayerMode(self._session, self._restricted_session_layer)])
self._matrix_modes.add_mode('note', [self._view_control,
self._note_modes,
self._delete_clip,
(self._session, self._restricted_session_layer)], behaviour=self._auto_arm.auto_arm_restore_behaviour)
self._matrix_modes.selected_mode = 'note'
self._matrix_modes.layer = Layer(session_button=self._session_mode_button, note_button=self._note_mode_button)
self._on_matrix_mode_changed.subject = self._matrix_modes
self._matrix_modes.selected_mode = 'note'
def _init_accent(self):
self._accent_component = AccentComponent(self._c_instance.full_velocity)
self._accent_component.set_enabled(False)
self._accent_component.layer = Layer(toggle_button=self._accent_button)
self._on_accent_mode_changed.subject = self._accent_component
def _init_user(self):
sysex_control = SysexValueControl(Sysex.MODE_CHANGE)
self._user = UserComponent(value_control=sysex_control)
self._user.layer = Layer(action_button=self._user_button)
self._user.settings_layer = Layer(display_line1=self._display_line1, display_line2=self._display_line2, display_line3=self._display_line3, display_line4=self._display_line4, encoders=self._global_param_controls)
self._user.settings_layer.priority = consts.DIALOG_PRIORITY
self._user.settings = self._settings
self._on_hardware_mode_changed.subject = self._user
self._on_before_hardware_mode_sent.subject = self._user
self._on_after_hardware_mode_sent.subject = self._user
self._update_pad_params()
def _init_session(self):
self._session = SpecialSessionComponent(8, 8, name='Session_Control')
self._session.set_mixer(self._mixer)
for scene_index in xrange(8):
scene = self._session.scene(scene_index)
scene.set_triggered_value(self._skin['Session.SceneTriggered'])
scene.name = 'Scene_' + str(scene_index)
scene.layer = Layer(select_button=self._select_button, delete_button=self._delete_button)
scene._do_select_scene = self._selector.on_select_scene
for track_index in xrange(8):
clip_slot = scene.clip_slot(track_index)
clip_slot.name = str(track_index) + '_Clip_Slot_' + str(scene_index)
clip_slot.set_triggered_to_play_value('Session.ClipTriggeredPlay')
clip_slot.set_triggered_to_record_value('Session.ClipTriggeredRecord')
clip_slot.set_clip_palette(Colors.CLIP_COLOR_TABLE)
clip_slot.set_clip_rgb_table(Colors.RGB_COLOR_TABLE)
clip_slot.set_record_button_value('Session.RecordButton')
clip_slot.set_started_value('Session.ClipStarted')
clip_slot.set_recording_value('Session.ClipRecording')
clip_slot._do_select_clip = self._selector.on_select_clip
clip_slot.layer = Layer(delete_button=self._delete_button, select_button=self._select_button, duplicate_button=self._duplicate_button)
self._session_layer = Layer(page_up_button=self._octave_up_button, page_down_button=self._octave_down_button, track_bank_left_button=self._nav_left_button, track_bank_right_button=self._nav_right_button, scene_bank_up_button=self._nav_up_button, scene_bank_down_button=self._nav_down_button, clip_launch_buttons=self._matrix, scene_launch_buttons=self._side_buttons, duplicate_button=self._duplicate_button)
self._restricted_session_layer = Layer(stop_all_clips_button=ComboElement((self._shift_button,), self._global_track_stop_button))
self._session_stop_track_layer = Layer(stop_track_clip_buttons=self._track_state_buttons)
self._session.duplicate_layer = Layer(scene_buttons=self._side_buttons)
self._zooming = SpecialSessionZoomingComponent(self._session, name='Session_Overview')
self._zooming.set_enabled(False)
self._zooming.set_stopped_value(self._skin['Zooming.Stopped'])
self._zooming.set_selected_value(self._skin['Zooming.Selected'])
self._zooming.set_playing_value(self._skin['Zooming.Playing'])
self._zooming.set_empty_value(self._skin['Zooming.Empty'])
self._zooming_layer = Layer(button_matrix=self._matrix, zoom_button=self._shift_button, nav_up_button=self._nav_up_button, nav_down_button=self._nav_down_button, nav_left_button=self._nav_left_button, nav_right_button=self._nav_right_button)
self._session.set_enabled(True)
self._zooming.set_enabled(True)
def _init_track_modes(self):
self._track_modes = ModesComponent(name='Track_Modes')
self._track_modes.set_enabled(False)
self._track_modes.add_mode('stop', AddLayerMode(self._session, self._session_stop_track_layer))
self._track_modes.add_mode('solo', AddLayerMode(self._mixer, self._mixer_solo_layer))
self._track_modes.add_mode('mute', AddLayerMode(self._mixer, self._mixer_mute_layer))
self._track_modes.layer = Layer(stop_button=self._global_track_stop_button, mute_button=self._global_mute_button, solo_button=self._global_solo_button, shift_button=self._shift_button)
self._track_modes.selected_mode = 'mute'
def _init_main_modes(self):
enable_stop_mute_solo_as_modifiers = AddLayerMode(self._mod_background, Layer(stop=self._global_track_stop_button, mute=self._global_mute_button, solo=self._global_solo_button))
self._main_modes = ModesComponent()
self._main_modes.add_mode('volumes', [self._track_modes, (self._mixer, self._mixer_volume_layer)])
self._main_modes.add_mode('pan_sends', [self._track_modes, (self._mixer, self._mixer_pan_send_layer)])
self._main_modes.add_mode('track', [self._track_modes, (self._mixer, self._mixer_track_layer)])
self._main_modes.add_mode('clip', [self._track_modes,
partial(self._view_control.show_view, 'Detail/Clip'),
(self._mixer, self._mixer_layer),
self._clip_control])
self._main_modes.add_mode('device', [enable_stop_mute_solo_as_modifiers,
partial(self._view_control.show_view, 'Detail/DeviceChain'),
self._device,
self._device_navigation], behaviour=ReenterBehaviour(self._device_navigation.back_to_top))
self._main_modes.add_mode('browse', [enable_stop_mute_solo_as_modifiers,
partial(self._view_control.show_view, 'Browser'),
self._browser.back_to_top,
self._browser_hotswap_mode,
self._browser_mode,
self._browser.reset_load_memory], groups=['add_effect', 'add_track', 'browse'], behaviour=mixin(DynamicBehaviourMixin, CancellableBehaviour)(lambda : not self._browser_hotswap_mode._mode.can_hotswap() and 'add_effect_left'))
self._main_modes.add_mode('add_effect_right', [enable_stop_mute_solo_as_modifiers, self._browser.back_to_top, self._create_device_right], behaviour=mixin(ExcludingBehaviourMixin, CancellableBehaviour)(['add_track', 'browse']), groups=['add_effect'])
self._main_modes.add_mode('add_effect_left', [enable_stop_mute_solo_as_modifiers, self._browser.back_to_top, self._create_device_left], behaviour=mixin(ExcludingBehaviourMixin, CancellableBehaviour)(['add_track', 'browse']), groups=['add_effect'])
self._main_modes.add_mode('add_instrument_track', [enable_stop_mute_solo_as_modifiers, self._browser.back_to_top, self._create_instrument_track], behaviour=mixin(ExcludingBehaviourMixin, AlternativeBehaviour)(excluded_groups=['browse', 'add_effect'], alternative_mode='add_default_track'), groups=['add_track'])
self._main_modes.add_mode('add_default_track', [enable_stop_mute_solo_as_modifiers, self._browser.back_to_top, self._create_default_track], groups=['add_track'])
self._main_modes.selected_mode = 'device'
self._main_modes.layer = Layer(volumes_button=self._vol_mix_mode_button, pan_sends_button=self._pan_send_mix_mode_button, track_button=self._single_track_mix_mode_button, clip_button=self._clip_mode_button, device_button=self._device_mode_button, browse_button=self._browse_mode_button, add_effect_right_button=self._create_device_button, add_effect_left_button=ComboElement((self._shift_button,), self._create_device_button), add_instrument_track_button=self._create_track_button)
self._on_main_mode_button_value.replace_subjects([self._vol_mix_mode_button,
self._pan_send_mix_mode_button,
self._single_track_mix_mode_button,
self._clip_mode_button,
self._device_mode_button,
self._browse_mode_button])
@subject_slot_group('value')
def _on_main_mode_button_value(self, value, sender):
if value:
self._instrument._scales_modes.selected_mode = 'disabled'
def _init_mixer(self):
self._mixer = SpecialMixerComponent(self._matrix.width())
self._mixer.set_enabled(False)
self._mixer.name = 'Mixer'
self._mixer_layer = Layer(track_names_display=self._display_line4, track_select_buttons=self._select_buttons)
self._mixer_pan_send_layer = Layer(track_names_display=self._display_line4, track_select_buttons=self._select_buttons, pan_send_toggle=self._pan_send_mix_mode_button, pan_send_controls=self._global_param_controls, pan_send_touch_buttons=self._global_param_touch_buttons, pan_send_names_display=self._display_line1, pan_send_graphics_display=self._display_line2, pan_send_alt_display=self._display_line3)
self._mixer_volume_layer = Layer(track_names_display=self._display_line4, track_select_buttons=self._select_buttons, volume_controls=self._global_param_controls, volume_touch_buttons=self._global_param_touch_buttons, volume_names_display=self._display_line1, volume_graphics_display=self._display_line2, volume_alt_display=self._display_line3)
self._mixer_track_layer = Layer(track_names_display=self._display_line4, track_select_buttons=self._select_buttons, selected_controls=self._global_param_controls, track_mix_touch_buttons=self._global_param_touch_buttons, selected_names_display=self._display_line1, selected_graphics_display=self._display_line2, track_mix_alt_display=self._display_line3)
self._mixer_solo_layer = Layer(solo_buttons=self._track_state_buttons)
self._mixer_mute_layer = Layer(mute_buttons=self._track_state_buttons)
self._mixer.layer = self._mixer_layer
for track in xrange(self._matrix.width()):
strip = self._mixer.channel_strip(track)
strip.name = 'Channel_Strip_' + str(track)
strip.set_invert_mute_feedback(True)
strip._do_select_track = self._selector.on_select_track
strip.layer = Layer(shift_button=self._shift_button, delete_button=self._delete_button, duplicate_button=self._duplicate_button, selector_button=self._select_button)
self._mixer.selected_strip().name = 'Selected_Channel_strip'
self._mixer.master_strip().name = 'Master_Channel_strip'
self._mixer.master_strip()._do_select_track = self._selector.on_select_track
self._mixer.master_strip().layer = Layer(volume_control=self._master_volume_control, cue_volume_control=ComboElement((self._shift_button,), self._master_volume_control), select_button=self._master_select_button, selector_button=self._select_button)
self._mixer.set_enabled(True)
def _init_device(self):
self._device_bank_registry = DeviceBankRegistry()
self._device = DisplayingDeviceComponent(device_bank_registry=self._device_bank_registry, name='DeviceComponent')
self._device.set_enabled(False)
self.set_device_component(self._device)
self._device.layer = Layer(parameter_controls=self._global_param_controls, encoder_touch_buttons=self._global_param_touch_buttons, name_display_line=self._display_line1, value_display_line=self._display_line2, alternating_display=self._display_line3)
self._device_navigation = DeviceNavigationComponent(self._device_bank_registry)
self._device_navigation.set_enabled(False)
self._device_navigation.layer = Layer(enter_button=self._in_button, delete_button=self._delete_button, exit_button=self._out_button, select_buttons=self._select_buttons, state_buttons=self._track_state_buttons, display_line=self._display_line4)
self._device_navigation.info_layer = Layer(display_line1=self._display_line1, display_line2=self._display_line2, display_line3=self._display_line3, display_line4=self._display_line4, _notification=self._notification.use_full_display(2))
self._device_navigation.info_layer.priority = consts.MODAL_DIALOG_PRIORITY
def _init_transport_and_recording(self):
self._view_control = ViewControlComponent(name='View_Control')
self._view_control.set_enabled(False)
self._view_control.layer = Layer(prev_track_button=self._nav_left_button, next_track_button=self._nav_right_button, prev_scene_button=OptionalElement(self._nav_up_button, self._settings[SETTING_WORKFLOW], False), next_scene_button=OptionalElement(self._nav_down_button, self._settings[SETTING_WORKFLOW], False), prev_scene_list_button=OptionalElement(self._nav_up_button, self._settings[SETTING_WORKFLOW], True), next_scene_list_button=OptionalElement(self._nav_down_button, self._settings[SETTING_WORKFLOW], True))
self._session_recording = SessionRecordingComponent(self._clip_creator, self._view_control, name='Session_Recording')
self._session_recording.layer = Layer(new_button=OptionalElement(self._new_button, self._settings[SETTING_WORKFLOW], False), scene_list_new_button=OptionalElement(self._new_button, self._settings[SETTING_WORKFLOW], True), record_button=self._record_button, automation_button=self._automation_button, new_scene_button=ComboElement((self._shift_button,), self._new_button), re_enable_automation_button=ComboElement((self._shift_button,), self._automation_button), delete_automation_button=ComboElement((self._delete_button,), self._automation_button), length_button=self._fixed_length_button)
self._session_recording.length_layer = Layer(display_line=self._display_line4, label_display_line=self._display_line3, blank_display_line2=self._display_line2, blank_display_line1=self._display_line1, select_buttons=self._select_buttons, state_buttons=self._track_state_buttons, _notification=self._notification.use_single_line(1))
self._session_recording.length_layer.priority = consts.DIALOG_PRIORITY
self._transport = SpecialTransportComponent(name='Transport')
self._transport.layer = Layer(shift_button=self._shift_button, play_button=self._play_button, tap_tempo_button=self._tap_tempo_button, metronome_button=self._metronome_button, quantization_button=self._quantize_button, tempo_encoder=self._tempo_control, undo_button=self._undo_button)
self._transport.quantization_layer = Layer(encoder_controls=self._global_param_controls, display_line1=self._display_line1, display_line2=self._display_line2, display_line3=self._display_line3, display_line4=self._display_line4, select_buttons=self._select_buttons, state_buttons=self._track_state_buttons)
self._transport.quantization_layer.priority = consts.DIALOG_PRIORITY
def _init_clip_settings(self):
self._clip_control = ClipControlComponent(name='Clip_Control')
self._clip_control.set_enabled(False)
self._clip_control.layer = Layer(controls=self._global_param_controls, param_display=self._display_line1, value_display=self._display_line2, clip_name_display=self._display_line3, shift_button=self._shift_button)
def _init_browser(self):
self._browser_hotswap_mode = MultiEntryMode(BrowserHotswapMode(application_view=self.application().view))
self._browser = BrowserComponent(name='Browser')
self._browser.set_enabled(False)
self._browser.layer = Layer(encoder_controls=self._global_param_controls, display_line1=self._display_line1, display_line2=self._display_line2, display_line3=self._display_line3, display_line4=self._display_line4, select_buttons=self._select_buttons, state_buttons=self._track_state_buttons, enter_button=self._in_button, exit_button=self._out_button, shift_button=WithPriority(consts.SHARED_PRIORITY, self._shift_button), _notification=self._notification.use_full_display(2))
self._browser.layer.priority = consts.BROWSER_PRIORITY
self._browser_mode = MultiEntryMode([SetAttributeMode(self._instrument._scales, 'release_info_display_with_encoders', False), self._browser])
self._browser_dialog_mode = MultiEntryMode([SetAttributeMode(self._browser.layer, 'priority', consts.MODAL_DIALOG_PRIORITY), self._browser_mode])
self._create_device_right = CreateDeviceComponent(name='Create_Device_Right', browser_component=self._browser, browser_mode=self._browser_dialog_mode, browser_hotswap_mode=self._browser_hotswap_mode, insert_left=False)
self._create_device_right.set_enabled(False)
        self._create_device_left = CreateDeviceComponent(name='Create_Device_Left', browser_component=self._browser, browser_mode=self._browser_dialog_mode, browser_hotswap_mode=self._browser_hotswap_mode, insert_left=True)
self._create_device_left.set_enabled(False)
self._create_default_track = CreateDefaultTrackComponent(name='Create_Default_Track')
self._create_default_track.options.layer = Layer(display_line=self._display_line4, label_display_line=self._display_line3, blank_display_line2=self._display_line2, blank_display_line1=self._display_line1, select_buttons=self._select_buttons, state_buttons=self._track_state_buttons)
self._create_default_track.options.layer.priority = consts.MODAL_DIALOG_PRIORITY
self._create_default_track.set_enabled(False)
self._create_instrument_track = CreateInstrumentTrackComponent(name='Create_Instrument_Track', browser_component=self._browser, browser_mode=self._browser_dialog_mode, browser_hotswap_mode=self._browser_hotswap_mode)
self._create_instrument_track.set_enabled(False)
self._on_browse_mode_changed.subject = self.application().view
@subject_slot('browse_mode')
def _on_browse_mode_changed(self):
if not self.application().browser.hotswap_target:
if self._main_modes.selected_mode == 'browse' or self._browser_hotswap_mode.is_entered:
self._main_modes.selected_mode = 'device'
def _init_instrument(self):
self._instrument = InstrumentComponent(name='Instrument_Component')
self._instrument.set_enabled(False)
self._instrument.layer = Layer(matrix=self._matrix, touch_strip=self._touch_strip_control, scales_toggle_button=self._scale_presets_button, presets_toggle_button=self._shift_button, octave_up_button=self._octave_up_button, octave_down_button=self._octave_down_button)
self._instrument.scales_layer = Layer(top_display_line=self._display_line3, bottom_display_line=self._display_line4, top_buttons=self._select_buttons, bottom_buttons=self._track_state_buttons, encoder_touch_buttons=self._global_param_touch_buttons, _notification=self._notification.use_single_line(1))
self._instrument.scales_layer.priority = consts.MODAL_DIALOG_PRIORITY
self._instrument._scales.presets_layer = Layer(top_display_line=self._display_line3, bottom_display_line=self._display_line4, top_buttons=self._select_buttons, bottom_buttons=self._track_state_buttons)
self._instrument._scales.presets_layer.priority = consts.DIALOG_PRIORITY
self._instrument._scales.scales_info_layer = Layer(info_line=self._display_line1, blank_line=self._display_line2)
self._instrument._scales.scales_info_layer.priority = consts.MODAL_DIALOG_PRIORITY
def _init_step_sequencer(self):
self._step_sequencer = StepSeqComponent(self._clip_creator, self._c_instance.playhead, self._skin, name='Step_Sequencer')
self._step_sequencer._drum_group._do_select_drum_pad = self._selector.on_select_drum_pad
self._step_sequencer._drum_group._do_quantize_pitch = self._transport._quantization.quantize_pitch
self._step_sequencer.set_enabled(False)
self._step_sequencer.layer = Layer(button_matrix=self._matrix.submatrix[:8, :4], drum_matrix=self._matrix.submatrix[:4, 4:8], loop_selector_matrix=self._matrix.submatrix[4:8, 4:8], touch_strip=self._touch_strip_control, quantization_buttons=self._side_buttons, mute_button=self._global_mute_button, solo_button=self._global_solo_button, select_button=self._select_button, delete_button=self._delete_button, shift_button=self._shift_button, drum_bank_up_button=self._octave_up_button, drum_bank_down_button=self._octave_down_button, quantize_button=self._quantize_button)
self._step_sequencer.note_settings_layer = Layer(top_display_line=self._display_line1, bottom_display_line=self._display_line2, clear_display_line1=self._display_line3, clear_display_line2=self._display_line4, encoder_controls=self._global_param_controls, encoder_touch_buttons=self._global_param_touch_buttons, full_velocity_button=self._accent_button)
self._step_sequencer.note_settings_layer.priority = consts.MODAL_DIALOG_PRIORITY
self._audio_loop = LoopSelectorComponent(follow_detail_clip=True, measure_length=1.0, name='Loop_Selector')
self._audio_loop.set_enabled(False)
self._audio_loop.layer = Layer(loop_selector_matrix=self._matrix)
def _init_note_repeat(self):
self._note_repeat = NoteRepeatComponent(self._c_instance.note_repeat, name='Note_Repeat')
self._note_repeat.set_enabled(False)
self._note_repeat.layer = Layer(toggle_button=self._repeat_button)
self._note_repeat.options_layer = Layer(select_buttons=self._side_buttons)
self._note_repeat.options_layer.priority = consts.DIALOG_PRIORITY
self._on_note_repeat_mode_changed.subject = self._note_repeat
def _init_message_box(self):
self._notification = NotificationComponent(display_lines=self._display_lines)
self._notification.set_enabled(True)
self._dialog = DialogComponent()
self._dialog.message_box_layer = Layer(display_line1=self._display_line1, display_line2=self._display_line2, display_line3=self._display_line3, display_line4=self._display_line4, top_buttons=self._select_buttons, bottom_buttons=self._track_state_buttons, scales_button=self._scale_presets_button, octave_up=self._octave_up_button, octave_down=self._octave_down_button, side_buttons=self._side_buttons, repeat_button=self._repeat_button, accent_button=self._accent_button, in_button=self._in_button, out_button=self._out_button, param_controls=self._global_param_controls, param_touch=self._global_param_touch_buttons, tempo_control_tap=self._tempo_control_tap, master_control_tap=self._master_volume_control_tap, touch_strip=self._touch_strip_control, touch_strip_tap=self._touch_strip_tap, matrix=self._matrix, nav_up_button=self._nav_up_button, nav_down_button=self._nav_down_button, nav_left_button=self._nav_left_button, nav_right_button=self._nav_right_button, shift_button=self._shift_button, select_button=self._select_button, delete_button=self._delete_button, duplicate_button=self._duplicate_button, double_button=self._double_button, quantize_button=self._quantize_button, play_button=self._play_button, new_button=self._new_button, automation_button=self._automation_button, tap_tempo_button=self._tap_tempo_button, metronome_button=self._metronome_button, fixed_length_button=self._fixed_length_button, record_button=self._record_button, undo_button=self._undo_button, tempo_control=self._tempo_control, swing_control=self._swing_control, master_volume_control=self._master_volume_control, global_param_controls=self._global_param_controls, swing_control_tap=self._swing_control_tap, master_volume_tap=self._master_volume_control_tap, global_param_tap=self._global_param_touch_buttons, volumes_button=self._vol_mix_mode_button, pan_sends_button=self._pan_send_mix_mode_button, track_button=self._single_track_mix_mode_button, clip_button=self._clip_mode_button, device_button=self._device_mode_button, browse_button=self._browse_mode_button, user_button=self._user_button, master_select_button=self._master_select_button, create_device_button=self._create_device_button, create_track_button=self._create_track_button, global_track_stop_button=self._global_track_stop_button, global_mute_button=self._global_mute_button, global_solo_button=self._global_solo_button, note_mode_button=self._note_mode_button, session_mode_button=self._session_mode_button)
self._dialog.message_box_layer.priority = consts.MESSAGE_BOX_PRIORITY
self._dialog.set_enabled(True)
def _init_global_actions(self):
self._capture_and_insert_scene = CaptureAndInsertSceneComponent(name='Capture_And_Insert_Scene')
self._capture_and_insert_scene.set_enabled(True)
self._capture_and_insert_scene.layer = Layer(action_button=self._duplicate_button)
self._duplicate_loop = DuplicateLoopComponent(name='Duplicate_Loop')
self._duplicate_loop.layer = Layer(action_button=self._double_button)
self._duplicate_loop.set_enabled(True)
self._selector = SelectComponent(name='Selector')
self._selector.layer = Layer(select_button=self._select_button)
self._selector.selection_display_layer = Layer(display_line=self._display_line3)
self._selector.selection_display_layer.priority = consts.DIALOG_PRIORITY
self._swing_amount = ValueComponent('swing_amount', self.song(), display_label='Swing Amount:', display_format='%d%%', model_transform=lambda x: clamp(x / 200.0, 0.0, 0.5), view_transform=lambda x: x * 200.0, encoder_factor=100.0, encoder=self._swing_control)
self._swing_amount.layer = Layer(button=self._swing_control_tap, encoder=self._swing_control)
self._swing_amount.display_layer = Layer(label_display=self._display_line1, value_display=self._display_line3, graphic_display=self._display_line2, clear_display1=self._display_line4)
self._swing_amount.display_layer.priority = consts.DIALOG_PRIORITY
self._tempo = ValueComponent('tempo', self.song(), display_label='Tempo:', display_format='%0.2f BPM', encoder=self._tempo_control)
self._tempo.layer = Layer(button=self._tempo_control_tap)
self._tempo.display_layer = Layer(label_display=self._display_line1, value_display=self._display_line2, clear_display1=self._display_line3, clear_display2=self._display_line4)
self._tempo.display_layer.priority = consts.DIALOG_PRIORITY
self._master_vol = ParameterValueComponent(self.song().master_track.mixer_device.volume, display_label='Master Volume:', display_seg_start=3, name='Master_Volume_Display', encoder=self._master_volume_control)
self._master_vol.layer = Layer(button=self._master_volume_control_tap)
self._master_vol.display_layer = Layer(label_display=self._display_line1, value_display=self._display_line3, graphic_display=self._display_line2, clear_display2=self._display_line4)
self._master_vol.display_layer.priority = consts.DIALOG_PRIORITY
self._master_cue_vol = ParameterValueComponent(self.song().master_track.mixer_device.cue_volume, display_label='Cue Volume:', display_seg_start=3, name='Cue_Volume_Display', encoder=ComboElement((self._shift_button,), self._master_volume_control))
self._master_cue_vol.layer = Layer(button=ComboElement((self._shift_button,), self._master_volume_control_tap))
self._master_cue_vol.display_layer = Layer(label_display=self._display_line1, value_display=self._display_line3, graphic_display=self._display_line2, clear_display2=self._display_line4)
self._master_cue_vol.display_layer.priority = consts.DIALOG_PRIORITY
self._value_components = [self._swing_amount,
self._tempo,
self._master_vol,
self._master_cue_vol]
self._delete_component = DeleteComponent(name='Deleter')
self._delete_component.layer = Layer(delete_button=self._delete_button)
self._delete_clip = DeleteSelectedClipComponent(name='Selected_Clip_Deleter')
self._delete_clip.layer = Layer(action_button=self._delete_button)
self._delete_scene = DeleteSelectedSceneComponent(name='Selected_Scene_Deleter')
self._delete_scene.layer = Layer(action_button=ComboElement((self._shift_button,), self._delete_button))
def _init_m4l_interface(self):
self._m4l_interface = M4LInterfaceComponent(self.controls)
self.get_control_names = self._m4l_interface.get_control_names
self.get_control = self._m4l_interface.get_control
self.grab_control = self._m4l_interface.grab_control
self.release_control = self._m4l_interface.release_control
def _on_selected_track_changed(self):
super(Push, self)._on_selected_track_changed()
self.set_controlled_track(self.song().view.selected_track)
self._on_devices_changed.subject = self.song().view.selected_track
self._select_note_mode()
self._main_modes.pop_groups(['add_effect'])
self._note_repeat.selected_mode = 'disabled'
def _send_midi(self, midi_event_bytes, optimized = True):
if not self._suppress_sysex or not self.is_sysex_message(midi_event_bytes):
return super(Push, self)._send_midi(midi_event_bytes, optimized)
@subject_slot('devices')
def _on_devices_changed(self):
self._select_note_mode()
@subject_slot('session_record')
def _on_session_record_changed(self):
status = self.song().session_record
playhead_color = int(self._skin['NoteEditor.PlayheadRecord'] if status else self._skin['NoteEditor.Playhead'])
feedback_color = int(self._skin['Instrument.FeedbackRecord'] if status else self._skin['Instrument.Feedback'])
self._c_instance.playhead.velocity = playhead_color
self._c_instance.set_feedback_velocity(feedback_color)
@subject_slot('selected_mode')
def _on_note_repeat_mode_changed(self, mode_name):
aftertouch_mode = 0 if mode_name == 'enabled' else 1
self._send_midi(Sysex.SET_AFTERTOUCH_MODE + (aftertouch_mode, 247))
@subject_slot('selected_mode')
def _on_accent_mode_changed(self, mode_name):
accent_is_active = mode_name == 'enabled'
self._step_sequencer.set_full_velocity(accent_is_active)
@subject_slot('value')
def _on_pad_threshold(self, setting):
self._user.set_settings_info_text('' if setting.value >= consts.CRITICAL_THRESHOLD_LIMIT else consts.MessageBoxText.STUCK_PAD_WARNING)
self._update_pad_params()
@subject_slot('value')
def _on_pad_curve(self, setting):
self._update_pad_params()
def _update_pad_params(self):
new_pad_parameters = make_pad_parameters(self._settings[SETTING_CURVE].value, self._settings[SETTING_THRESHOLD].value)
self._pad_sensitivity_update.set_profile('instrument', new_pad_parameters)
self._pad_sensitivity_update.set_profile('drums', new_pad_parameters)
@subject_slot('before_mode_sent')
def _on_before_hardware_mode_sent(self, mode):
self._suppress_sysex = False
@subject_slot('after_mode_sent')
def _on_after_hardware_mode_sent(self, mode):
if mode == Sysex.USER_MODE:
self._suppress_sysex = True
@subject_slot('mode')
def _on_hardware_mode_changed(self, mode):
if mode == Sysex.LIVE_MODE:
self.update()
elif mode == Sysex.USER_MODE:
self._suppress_sysex = True
self._update_auto_arm()
@subject_slot('selected_mode')
def _on_matrix_mode_changed(self, mode):
self._update_auto_arm(selected_mode=mode)
def _update_auto_arm(self, selected_mode = None):
        self._auto_arm.set_enabled(self._user.mode == Sysex.LIVE_MODE and (selected_mode or self._matrix_modes.selected_mode) == 'note')
def _select_note_mode(self):
"""
        Select which note mode to use depending on the kind of the
        currently selected track and its device chain.
"""
track = self.song().view.selected_track
drum_device = find_if(lambda d: d.can_have_drum_pads, track.devices)
        if "drum_rack" in track.name.lower():
            # If the track title contains "drum_rack", force the sequencer configuration for this track.
            drum_device = True
self._step_sequencer.set_drum_group_device(drum_device)
        if track is None or track.is_foldable or track in self.song().return_tracks or track == self.song().master_track:
self._note_modes.selected_mode = 'disabled'
elif track and track.has_audio_input:
self._note_modes.selected_mode = 'looper'
elif drum_device:
self._note_modes.selected_mode = 'sequencer'
else:
self._note_modes.selected_mode = 'instrument'
def _on_toggle_encoder(self, value):
pass
@subject_slot_group('value')
def _on_param_encoder_touched(self, value, encoder):
"""
When using the parameter encoders, other encoders around it are often accidentally
touched and will take over the screen. By putting all ValueComponents into timer
based displaying mode while touching a parameter, this noise is prevented.
"""
        param_encoder_touched = find_if(lambda button: button.is_pressed(), self._global_param_touch_buttons) is not None
new_display_mode = ValueComponent.TIMER_BASED if param_encoder_touched else ValueComponent.TOUCH_BASED
for value_component in self._value_components:
value_component.display_mode = new_display_mode
def expect_dialog(self, message):
self.schedule_message(1, partial(self._dialog.expect_dialog, message))
def show_notification(self, message):
self._notification.show_notification(message)
def handle_nonsysex(self, midi_bytes):
status, _, value = midi_bytes
if (status & MIDI_CC_STATUS or status & MIDI_NOTE_ON_STATUS) and value != 0:
self._notification.hide_notification()
super(Push, self).handle_nonsysex(midi_bytes)
|
[
"parabuzzle@gmail.com"
] |
parabuzzle@gmail.com
|
0531d5b55f094c2aaf796d394fa1daa6d38cb3fd
|
694df92026911544a83df9a1f3c2c6b321e86916
|
/Python/Dictionary/Test.py
|
d502859587f6d4e0928749399878d0e29f1905a0
|
[
"MIT"
] |
permissive
|
taku-xhift/labo
|
f485ae87f01c2f45e4ef1a2a919cda7e571e3f13
|
89dc28fdb602c7992c6f31920714225f83a11218
|
refs/heads/main
| 2021-12-10T21:19:29.152175
| 2021-08-14T21:08:51
| 2021-08-14T21:08:51
| 81,219,052
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 44
|
py
|
dic = {
"subdic" : {
"value" : 4
}
}
|
[
"shishido_takuya@xhift.com"
] |
shishido_takuya@xhift.com
|
6f9e5befc43928b8bcf75d75f1cf8eb17da8c5a0
|
f693c9c487d31a677f009afcdf922b4e7f7d1af0
|
/biomixer-venv/lib/python3.9/site-packages/docutils/languages/__init__.py
|
54e16bee96880c82e32f65c0fbd2b3bbfa6a61fa
|
[
"MIT"
] |
permissive
|
Shellowb/BioMixer
|
9048b6c07fa30b83c87402284f0cebd11a58e772
|
1939261589fe8d6584a942a99f0308e898a28c1c
|
refs/heads/master
| 2022-10-05T08:16:11.236866
| 2021-06-29T17:20:45
| 2021-06-29T17:20:45
| 164,722,008
| 1
| 3
|
MIT
| 2022-09-30T20:23:34
| 2019-01-08T19:52:12
|
Python
|
UTF-8
|
Python
| false
| false
| 2,902
|
py
|
# $Id: __init__.py 8467 2020-01-26 21:23:42Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
# Internationalization details are documented in
# <http://docutils.sf.net/docs/howto/i18n.html>.
"""
This package contains modules for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
import sys
from importlib import import_module
from docutils.utils import normalize_language_tag
class LanguageImporter(object):
"""Import language modules.
When called with a BCP 47 language tag, instances return a module
with localisations from `docutils.languages` or the PYTHONPATH.
If there is no matching module, warn (if a `reporter` is passed)
and fall back to English.
"""
packages = ('docutils.languages.', '')
warn_msg = ('Language "%s" not supported: '
'Docutils-generated text will be in English.')
fallback = 'en'
    # TODO: use a dummy module returning empty strings? Make this configurable?
def __init__(self):
self.cache = {}
def import_from_packages(self, name, reporter=None):
"""Try loading module `name` from `self.packages`."""
module = None
for package in self.packages:
try:
module = import_module(package+name)
self.check_content(module)
except (ImportError, AttributeError):
if reporter and module:
                    reporter.info('%s is not a complete Docutils language module.'
                                  % module)
elif reporter:
reporter.info('Module "%s" not found.'%(package+name))
continue
break
return module
def check_content(self, module):
"""Check if we got a Docutils language module."""
if not (isinstance(module.labels, dict)
and isinstance(module.bibliographic_fields, dict)
and isinstance(module.author_separators, list)):
raise ImportError
def __call__(self, language_code, reporter=None):
try:
return self.cache[language_code]
except KeyError:
pass
for tag in normalize_language_tag(language_code):
tag = tag.replace('-', '_') # '-' not valid in module names
module = self.import_from_packages(tag, reporter)
if module is not None:
break
else:
if reporter:
reporter.warning(self.warn_msg % language_code)
if self.fallback:
module = self.import_from_packages(self.fallback)
if reporter and (language_code != 'en'):
reporter.info('Using %s for language "%s".'%(module, language_code))
self.cache[language_code] = module
return module
get_language = LanguageImporter()
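# Illustrative usage sketch (an editorial addition, not part of the upstream file):
# when this module is executed directly, look up the English localisation and print
# two of the attributes that `check_content` above guarantees every language module
# to provide. Replace 'en' with any other supported BCP 47 tag.
if __name__ == '__main__':
    english = get_language('en')
    print(english.labels.get('contents'))
    print(english.author_separators)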
|
[
"marcelo.becerra@ug.uchile.cl"
] |
marcelo.becerra@ug.uchile.cl
|
dd2e47e0904e7628fedcb1c767a0adf44403dbf4
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/Hdthzjmr5fRqEX93E_10.py
|
a97dfd845d7c14b80201bf511f538ba573ba5d9a
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 66
|
py
|
def get_sequence(low, high):
return list(range(low,high + 1))
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
818da250390ddfa46fdc100c049e6d5ee24570dd
|
a8459cf79465f3587708721ac4185268af39b3db
|
/Score_Analysis/rnn_cell_backward.py
|
afef0b8ff43064b4c112590d7974f329b92c87c6
|
[] |
no_license
|
mousecpn/MMYZ_score_analysis
|
a5131cc8c8bf1764aff00ea2225a3b35b3c54c35
|
0aa9e20537ae24becb200688470e8a0dee32783a
|
refs/heads/master
| 2023-08-26T11:13:39.918372
| 2021-10-11T06:56:45
| 2021-10-11T06:56:45
| 415,800,058
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 498
|
py
|
import numpy as np
def rnn_cell_backward(da_next, cache):
    """Backward pass for a single vanilla RNN cell, where a_next = tanh(Wax.xt + Waa.a_prev + ba)."""
    (a_next, a_prev, xt, parameters) = cache
    Wax = parameters["Wax"]
    Waa = parameters["Waa"]
    ba = parameters["ba"]
    # Backpropagate through tanh: d(tanh(z))/dz = 1 - tanh(z)**2 and a_next = tanh(z).
    dtanh = (1 - a_next ** 2) * da_next
    # Gradients with respect to the input, the weights and the previous hidden state.
    dxt = np.dot(Wax.T, dtanh)
    dWax = np.dot(dtanh, xt.T)
    da_prev = np.dot(Waa.T, dtanh)
    dWaa = np.dot(dtanh, a_prev.T)
    # Bias gradient: sum over the batch dimension (the last axis).
    dba = np.sum(dtanh, keepdims=True, axis=-1)
    gradients = {"dxt": dxt, "da_prev": da_prev, "dWax": dWax, "dWaa": dWaa, "dba": dba}
    return gradients
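# Illustrative usage sketch (an editorial addition, not part of the original file):
# the shapes below (n_x=3 input features, n_a=5 hidden units, m=10 examples) are
# arbitrary assumptions, chosen only to smoke-test the backward pass above.
if __name__ == "__main__":
    np.random.seed(0)
    n_x, n_a, m = 3, 5, 10
    xt = np.random.randn(n_x, m)
    a_prev = np.random.randn(n_a, m)
    parameters = {
        "Wax": np.random.randn(n_a, n_x),
        "Waa": np.random.randn(n_a, n_a),
        "ba": np.random.randn(n_a, 1),
    }
    # Recompute the forward step so the cache matches what rnn_cell_backward expects.
    a_next = np.tanh(np.dot(parameters["Wax"], xt)
                     + np.dot(parameters["Waa"], a_prev)
                     + parameters["ba"])
    cache = (a_next, a_prev, xt, parameters)
    gradients = rnn_cell_backward(np.random.randn(n_a, m), cache)
    print({name: grad.shape for name, grad in gradients.items()})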
|
[
"609731730@qq.com"
] |
609731730@qq.com
|
322fa68de103b1e30184dc10a00a1943c82407e4
|
eb9110e91fdc78acbe4e34fb2966523de65536b4
|
/carts/views.py
|
aed3f5b2b53e803a63fa6b1cc21cdfbb48b89482
|
[] |
no_license
|
NaveenMulaga/HealthyHarvest-sdp-2
|
050b7694f9345449c0a10a993560936504ff49b1
|
a785e29b979c6abda4fb2b30843134f765b53129
|
refs/heads/main
| 2023-04-24T07:15:40.781190
| 2021-05-18T12:46:52
| 2021-05-18T12:46:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,062
|
py
|
from django.shortcuts import render,redirect, get_object_or_404
from store.models import Product, Variation
from .models import Cart, CartItem
from django.core.exceptions import ObjectDoesNotExist
# Create your views here
from django.http import HttpResponse
def _cart_id(request):
cart = request.session.session_key
if not cart:
cart = request.session.create()
return cart
def add_cart(request, product_id):
product = Product.objects.get(id=product_id) #to get the product
product_variation = []
if request.method == 'POST':
for item in request.POST:
key = item
value = request.POST[key]
try:
variation = Variation.objects.get(product=product, variation_category__iexact=key, variation_value__iexact=value)
product_variation.append(variation)
except:
pass
try:
cart = Cart.objects.get(cart_id=_cart_id(request)) #get the cart using the cart_id present in session
except Cart.DoesNotExist:
cart = Cart.objects.create(
cart_id = _cart_id(request)
)
cart.save()
is_cart_item_exists = CartItem.objects.filter(product=product, cart=cart).exists()
if is_cart_item_exists:
cart_item = CartItem.objects.filter(product=product, cart=cart)
        # existing_variations ---> what is already stored in the database
        # current_variations  ---> product_variation built from the request
        # item_id              ---> ids of the matching CartItem rows in the database
ex_var_list = []
id = []
for item in cart_item:
existing_variation = item.variations.all()
ex_var_list.append(list(existing_variation))
id.append(item.id)
print(ex_var_list)
if product_variation in ex_var_list:
# increase the cart item quantity
index = ex_var_list.index(product_variation)
item_id = id[index]
item = CartItem.objects.get(product=product, id=item_id)
item.quantity +=1
item.save()
else:
            item = CartItem.objects.create(product=product, quantity=1, cart=cart)
if len(product_variation) > 0:
item.variations.clear()
item.variations.add(*product_variation)
item.save()
else:
cart_item = CartItem.objects.create(
product = product ,
quantity = 1,
cart = cart,
)
if len(product_variation) > 0:
cart_item.variations.clear()
cart_item.variations.add(*product_variation)
cart_item.save()
return redirect('cart')
def remove_cart(request, product_id, cart_item_id):
cart = Cart.objects.get(cart_id=_cart_id(request))
product = get_object_or_404(Product, id=product_id)
try:
cart_item = CartItem.objects.get(product=product, cart=cart, id=cart_item_id)
if cart_item.quantity > 1:
cart_item.quantity -= 1
cart_item.save()
else:
cart_item.delete()
except:
pass
return redirect('cart')
def remove_cart_item(request, product_id, cart_item_id):
    cart = Cart.objects.get(cart_id=_cart_id(request))
product = get_object_or_404(Product, id=product_id)
cart_item = CartItem.objects.get(product=product, cart=cart, id=cart_item_id)
cart_item.delete()
return redirect('cart')
def cart(request, total=0,quantity=0, cart_items=None):
try:
tax = 0
grand_total = 0
cart = Cart.objects.get(cart_id=_cart_id(request))
cart_items = CartItem.objects.filter(cart=cart, is_active=True)
for cart_item in cart_items:
total += (cart_item.product.price * cart_item.quantity)
quantity += cart_item.quantity
tax = (3 * total)/100
grand_total = total + tax
except ObjectDoesNotExist:
pass #just ignore
context = {
'total': total,
'quantity': quantity,
'cart_items':cart_items,
'tax': tax,
'grand_total': grand_total
}
return render(request, 'store/cart.html', context)
|
[
"mnaveeb1105@gmail.com"
] |
mnaveeb1105@gmail.com
|
7cbdc883ce8ca7ea66a6d0e9657f3bd72a968692
|
ac6e4102dfb49a4e49de0e2766feb6e80ab0b5c2
|
/test/test_iam_project_sa_credential_patch.py
|
87ad3f69653ff0ad4b56860156c9eb0f861a0e95
|
[
"MIT"
] |
permissive
|
hyperonecom/h1-client-python
|
df01f05ad295121e3dd391a3274c41e2f5b88e53
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
refs/heads/master
| 2023-04-05T01:51:31.637002
| 2021-03-29T00:05:41
| 2021-03-29T00:05:41
| 319,309,525
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 757
|
py
|
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import h1
from h1.model.iam_project_sa_credential_patch import IamProjectSaCredentialPatch
class TestIamProjectSaCredentialPatch(unittest.TestCase):
"""IamProjectSaCredentialPatch unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testIamProjectSaCredentialPatch(self):
"""Test IamProjectSaCredentialPatch"""
# FIXME: construct object with mandatory attributes with example values
# model = IamProjectSaCredentialPatch() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"action@github.com"
] |
action@github.com
|
c95fc8ac09ec08d225a4f6bf53d3d08e2d14b97f
|
a8a4e919584a5c9448c3b8314003d61604c89cc6
|
/src/pages/migrations/0003_remove_movie_info.py
|
61ff51f859e2a037956719c387c5aaecca2f5619
|
[
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-other-copyleft",
"GPL-1.0-or-later",
"bzip2-1.0.6",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-newlib-historical",
"OpenSSL",
"Python-2.0",
"TCL",
"LicenseRef-scancode-python-cwi",
"MIT"
] |
permissive
|
brandongallagher1999/CrypTorrents
|
ed898ca8687bf6837f22237306e9d5bb132e66ad
|
b0f354d0a4bd4dc5f7c0290ddacb67359f2dbcc5
|
refs/heads/master
| 2023-05-29T17:12:06.316885
| 2022-12-08T20:29:13
| 2022-12-08T20:29:13
| 196,754,204
| 1
| 1
|
MIT
| 2023-05-22T22:29:07
| 2019-07-13T18:33:04
|
Python
|
UTF-8
|
Python
| false
| false
| 323
|
py
|
# Generated by Django 2.2.2 on 2019-07-13 17:58
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('pages', '0002_auto_20190713_1317'),
]
operations = [
migrations.RemoveField(
model_name='movie',
name='info',
),
]
|
[
"brandonegallagher@gmail.com"
] |
brandonegallagher@gmail.com
|
1c062643ff13824d8a11e31aa01542b690aa9df9
|
dd139c577d0acb4accc690e880a359bd4f463f71
|
/PyFunceble/__init__.py
|
f585494b6dfb32497c42d3b37a321ca928b532a6
|
[
"MIT"
] |
permissive
|
ActorExpose/PyFunceble
|
8a9edbe337f7afb11eb7514a2e023d288e9fc55c
|
e3cc494319e08df3ddfb11b5a4a0e6cb2ad3727f
|
refs/heads/master
| 2020-09-15T08:47:08.481147
| 2019-06-24T20:00:59
| 2019-06-24T20:00:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 54,143
|
py
|
# pylint:disable=line-too-long, too-many-lines, invalid-name, cyclic-import
"""
The tool to check the availability or syntax of domains, IPv4 or URL.
::
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
This submodule is the main entry of PyFunceble.
Author:
Nissar Chababy, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/special-thanks.html
Contributors:
https://pyfunceble.github.io/contributors.html
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/master/
Project homepage:
https://pyfunceble.github.io/
License:
::
MIT License
Copyright (c) 2017, 2018, 2019 Nissar Chababy
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# pylint: enable=line-too-long
import argparse
import socket
import warnings
from collections import OrderedDict
from inspect import getsourcefile
from os import environ, getcwd, mkdir, path, rename
from os import sep as directory_separator
from os import walk
from platform import system
from shutil import copy, rmtree
from time import mktime, sleep, strftime, strptime, time
import requests
from colorama import Back, Fore, Style
from colorama import init as initiate_colorama
from dotenv import load_dotenv
from PyFunceble.api_core import APICore
from PyFunceble.check import Check
from PyFunceble.clean import Clean
from PyFunceble.cli_core import CLICore
from PyFunceble.config import Load, Merge, Version
from PyFunceble.directory_structure import DirectoryStructure
from PyFunceble.dispatcher import Dispatcher
from PyFunceble.dns_lookup import DNSLookup
from PyFunceble.iana import IANA
from PyFunceble.preset import Preset
from PyFunceble.production import Production
from PyFunceble.publicsuffix import PublicSuffix
from PyFunceble.whois_lookup import WhoisLookup
# We set our project name.
NAME = "PyFunceble"
# We set our project version.
VERSION = "2.2.0. (Green Galago: Skitterbug)"
# We set the list of windows "platforms"
WINDOWS_PLATFORMS = ["windows", "cygwin", "cygwin_nt-10.0"]
if "PYFUNCEBLE_CONFIG_DIR" in environ: # pragma: no cover
    # We handle the case that the `PYFUNCEBLE_CONFIG_DIR` environment variable is set.
CONFIG_DIRECTORY = environ["PYFUNCEBLE_CONFIG_DIR"]
elif "PYFUNCEBLE_OUTPUT_DIR" in environ: # pragma: no cover
    # We handle the retro-compatibility.
CONFIG_DIRECTORY = environ["PYFUNCEBLE_OUTPUT_DIR"]
elif Version(True).is_cloned(): # pragma: no cover
    # We handle the case that we are in a cloned repository.
CONFIG_DIRECTORY = getcwd() + directory_separator
elif "TRAVIS_BUILD_DIR" in environ: # pragma: no cover
# We handle the case that we are under Travis CI.
CONFIG_DIRECTORY = getcwd() + directory_separator
else: # pragma: no cover
    # We handle all other cases and distribution-specific cases.
if system().lower() == "linux" or system().lower() == "darwin":
        # We are under a Linux distribution or macOS.
# We set the default configuration location path.
config_dir_path = (
path.expanduser("~" + directory_separator + ".config") + directory_separator
)
if path.isdir(config_dir_path):
# Everything went right:
# * `~/.config` exists.
# We set our configuration location path as the directory we are working with.
CONFIG_DIRECTORY = config_dir_path
elif path.isdir(path.expanduser("~")):
# Something went wrong:
            # * `~/.config` does not exist.
# * `~` exists.
# We set `~/` as the directory we are working with.
#
# Note: The `.` at the end is because we want to hide the directory we are
# going to create.
CONFIG_DIRECTORY = (
path.expanduser("~") + directory_separator + "."
) # pylint: disable=line-too-long
else:
# Everything went wrong:
            # * `~/.config` does not exist.
            # * `~` does not exist.
# We set the current directory as the directory we are working with.
CONFIG_DIRECTORY = getcwd() + directory_separator
elif system().lower() in WINDOWS_PLATFORMS:
# We are under Windows or CygWin.
if "APPDATA" in environ:
# Everything went right:
            # * `APPDATA` is in the environment variables.
# We set it as the directory we are working with.
CONFIG_DIRECTORY = environ["APPDATA"]
else:
# Everything went wrong:
            # * `APPDATA` is not in the environment variables.
# We set the current directory as the directory we are working with.
CONFIG_DIRECTORY = getcwd() + directory_separator
if not CONFIG_DIRECTORY.endswith(directory_separator):
# If the directory we are working with does not ends with the directory
# separator, we append it to the end.
CONFIG_DIRECTORY += directory_separator
# We append the name of the project to the directory we are working with.
CONFIG_DIRECTORY += NAME + directory_separator
if not path.isdir(CONFIG_DIRECTORY):
# If the directory does not exist we create it.
mkdir(CONFIG_DIRECTORY)
if not CONFIG_DIRECTORY.endswith(directory_separator): # pragma: no cover
# Again for safety, if the directory we are working with does not ends with
# the directory separator, we append it to the end.
CONFIG_DIRECTORY += directory_separator
# We set the location of the `output` directory which should always be in the current
# directory.
OUTPUT_DIRECTORY = getcwd() + directory_separator
# We set the filename of the default configuration file.
DEFAULT_CONFIGURATION_FILENAME = ".PyFunceble_production.yaml"
# We set the filename of the configuration file we are actually using.
CONFIGURATION_FILENAME = ".PyFunceble.yaml"
# We set the filename of our env file.
ENV_FILENAME = ".pyfunceble-env"
# We set the current time (the time at startup) in a specific format.
CURRENT_TIME = strftime("%a %d %b %H:%m:%S %Z %Y")
# We initiate the location where we are going to save our whole configuration content.
CONFIGURATION = {}
# We initiate the location where we are going to get all statuses.
STATUS = {}
# We initiate the location where we are going to get all outputs.
OUTPUTS = {}
# We initiate the location where we are going to get the map of the classification
# of each status code for the analytic part.
HTTP_CODE = {}
# We initiate the location where we are going to get all links.
LINKS = {}
# We initiate a location which will have all internal data.
INTERN = {
"counter": {
"number": {"down": 0, "invalid": 0, "tested": 0, "up": 0},
"percentage": {"down": 0, "invalid": 0, "up": 0},
}
}
load_dotenv()
load_dotenv(CONFIG_DIRECTORY + ".env")
load_dotenv(CONFIG_DIRECTORY + ENV_FILENAME)
# We initiate the CLI logo of PyFunceble.
ASCII_PYFUNCEBLE = """
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
"""
def test(subject, complete=False, config=None): # pragma: no cover
"""
Test the availability of the given subject (domain or IP).
:param subject: The subject (IP or domain) to test.
:type subject: str|list
:param bool complete:
Activate the return of a dict with some significant data from
the test.
:param dict config:
A dict with the configuration index (from .PyFunceble.yaml) to update.
    :return: The status or the information about the domain.
:rtype: str|dict
.. note::
        If :code:`config` is given, the given :code:`dict` overwrites
        the last value of the given indexes in the configuration.
        It is actually something like the following:
::
pyfunceble.configuration.update(config_given_by_user)
.. note::
If :code:`complete` is set to :code:`True`, we return the following indexes.
::
{
"_status_source": None,
"_status": None,
"domain_syntax_validation": None,
"expiration_date": None,
"http_status_code": None,
"ip4_syntax_validation": None,
"dns_lookup": [],
"status_source": None,
"status": None,
"tested": None,
"url_syntax_validation": None,
"whois_record": None,
"whois_server": None,
}
"""
if subject:
# The subject is not empty nor None.
# We return the status of the given subject.
return APICore(subject, complete=complete, configuration=config).domain_and_ip()
# We return None, there is nothing to test.
return None
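# Illustrative usage sketch (an editorial addition, not part of the upstream module):
# exercising the helper defined just above. "example.com" is only a placeholder
# subject, and the call performs real DNS/WHOIS/HTTP lookups, so the exact result
# depends on the network and on the configuration loaded further up in this file.
if __name__ == "__main__":  # pragma: no cover
    print(test("example.com"))
    print(test("example.com", complete=True)["status"])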
def url_test(subject, complete=False, config=None): # pragma: no cover
"""
Test the availability of the given subject (URL).
:param subject: The subject (URL) to test.
:type subject: str|list
:param bool complete:
Activate the return of a dict with some significant data from
the test.
:param dict config:
A dict with the configuration index (from .PyFunceble.yaml) to update.
    :return: The status or the information about the URL.
:rtype: str|dict
.. note::
        If :code:`config` is given, the given :code:`dict` overwrites
        the last value of the given indexes in the configuration.
        It is actually something like the following:
::
pyfunceble.configuration.update(config_given_by_user)
"""
if subject:
# The given URL is not empty nor None.
        # We return the status of the given URL.
return APICore(subject, complete=complete, configuration=config).url()
# We return None, there is nothing to test.
return None
def dns_lookup(subject, dns_server=None, complete=False): # pragma: no cover
"""
Make a DNS lookup of the given subject.
:param str subject: The subject we are working with.
:param dns_server: A (or list of) DNS server to use while resolving.
:type dns_server: str|int
:param bool complete:
Tell us to look for everything instead of :code:`NS` only.
:return:
        A dict with the following indexes if the given subject is not registered in the
        given DNS server (more likely local subjects).
::
{
"hostname": "",
"aliases": [],
"ips": []
}
        A dict with the following indexes for everything else (if found).
::
{
"A": [],
"AAAA": [],
"CNAME": [],
"MX": [],
"NS": [],
"TXT": [],
"PTR": []
}
:rtype: dict
"""
if subject:
# The subject is not empty nor None.
# We return the lookup.
return DNSLookup(subject, dns_server=dns_server, complete=complete).request()
# We return None, there is nothing to work with.
return None
def whois(subject, server=None, timeout=3): # pragma: no cover
"""
Request the WHOIS record of the given subject.
:param str subject: The subject we are working with.
:param str server:
The WHOIS server to communicate with.
.. note::
If :code:`None` is given, we look for the best one.
:param int timeout: The timeout to apply to the request.
:return: None or the WHOIS record.
:rtype: None|str
"""
if subject:
# The subject is not empty nor None.
# We return the whois record.
return WhoisLookup(subject, server=server, timeout=timeout).request()
# We return None, there is nothing to work with.
return None
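# Illustrative sketch (assumption): fetching a WHOIS record with a longer timeout.
#
#     record = PyFunceble.whois("example.org", timeout=10)
#     if record:
#         print(record.splitlines()[0])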
def syntax_check(domain): # pragma: no cover
"""
Check the syntax of the given domain.
:param domain: The domain to check the syntax from.
:type domain: str|list
:return: The syntax validity.
:rtype: bool|dict
.. warning::
This method will be deprecated one day in the future.
Please report to :func:`~PyFunceble.is_domain`.
"""
warnings.warn(
"`PyFunceble.syntax_check` will be deprecated in future version. "
"Please use `PyFunceble.is_domain` instead.",
DeprecationWarning,
)
return is_domain(domain)
def is_domain(subject): # pragma: no cover
"""
Check if the given subject is a syntactically valid domain.
:param subject: The subject to check the syntax from.
:type subject: str|list
:return: The syntax validity.
:rtype: bool|dict
"""
if subject:
# The given subject is not empty nor None.
        # We return the validity of the given subject.
return APICore(subject).domain_syntax()
# We return None, there is nothing to check.
return None
def is_subdomain(subject): # pragma: no cover
"""
Check if the given subject is a syntactically valid subdomain.
:param subject: The subject to check the syntax from.
:type subject: str|list
:return: The syntax validity.
:rtype: bool|dict
"""
if subject:
# The given subject is not empty nor None.
        # We return the validity of the given subject.
return APICore(subject).subdomain_syntax()
# We return None, there is nothing to check.
return None
def ipv4_syntax_check(ip): # pragma: no cover
"""
Check the syntax of the given IPv4.
:param ip: The IPv4 to check the syntax for.
:type ip: str|list
:return: The syntax validity.
:rtype: bool|dict
.. warning::
This method will be deprecated one day in the future.
Please report to :func:`~PyFunceble.is_ipv4`.
"""
warnings.warn(
"`PyFunceble.ipv4_syntax_check` will be deprecated in future version. "
"Please use `PyFunceble.is_ipv4` instead.",
DeprecationWarning,
)
return is_ipv4(ip)
def is_ipv4(subject): # pragma: no cover
"""
Check if the given subject is a syntactically valid IPv4.
:param subject: The subject to check the syntax from.
:type subject: str|list
:return: The syntax validity.
:rtype: bool|dict
"""
if subject:
# The given subject is not empty nor None.
# We return the validity of the given subject.
return APICore(subject).ipv4_syntax()
# We return None, there is nothing to check.
return None
def is_ipv4_range(subject): # pragma: no cover
"""
Check if the given subject is a syntactically valid IPv4 range.
:param subject: The subject to check the syntax from.
:type subject: str|list
:return: The IPv4 range state.
:rtype: bool|dict
"""
if subject:
# The given subject is not empty nor None.
# We return the validity of the given subject.
return APICore(subject).ipv4_range_syntax()
# We return None, there is nothing to check.
return None
def url_syntax_check(url): # pragma: no cover
"""
Check the syntax of the given URL.
:param url: The URL to check the syntax for.
:type url: str|list
:return: The syntax validity.
:rtype: bool|dict
.. warning::
This method will be deprecated one day in the future.
Please report to :func:`~PyFunceble.is_url`.
"""
warnings.warn(
"`PyFunceble.url_syntax_check` will be deprecated in future version. "
"Please use `PyFunceble.is_url` instead.",
DeprecationWarning,
)
return is_url(url)
def is_url(subject): # pragma: no cover
"""
Check if the given subject is a syntactically valid URL.
:param subject: The subject to check the syntax from.
:type subject: str|list
:return: The syntax validity.
:rtype: bool|dict
"""
if subject:
# The given subject is not empty nor None.
# We return the validity of the given subject.
return APICore(subject).url_syntax()
# We return None, there is nothing to check.
return None
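# Illustrative sketch (assumption): the syntax helpers above all follow the same
# pattern and can be combined to route a subject to the right test.
#
#     subject = "https://example.org"
#     if PyFunceble.is_url(subject):
#         status = PyFunceble.url_test(subject)
#     elif PyFunceble.is_domain(subject) or PyFunceble.is_ipv4(subject):
#         status = PyFunceble.test(subject)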
def load_config(generate_directory_structure=False, custom=None): # pragma: no cover
"""
Load the configuration.
:param bool generate_directory_structure:
Tell us if we generate the directory structure
along with loading the configuration file.
:param dict custom:
A dict with the configuration index (from .PyFunceble.yaml) to update.
.. note::
If :code:`config` is given, the given :code:`dict` overwrite
the last value of the given indexes in the configuration.
        It's actually something like the following:
::
pyfunceble.configuration.update(config_given_by_user)
"""
if "config_loaded" not in INTERN:
# The configuration was not already loaded.
        # We load and download the different configuration files if they
        # do not exist yet.
Load(CONFIG_DIRECTORY)
if generate_directory_structure:
            # If we are not under test, which means that we want to save information,
# we initiate the directory structure.
DirectoryStructure()
# We save that the configuration was loaded.
INTERN.update({"config_loaded": True})
if custom and isinstance(custom, dict):
# The given configuration is not None or empty.
# and
# It is a dict.
# We update the configuration index.
CONFIGURATION.update(custom)
        # We save the fact that the custom configuration was loaded.
INTERN.update({"custom_loaded": True, "custom_config_loaded": custom})
def _command_line(): # pragma: no cover pylint: disable=too-many-branches,too-many-statements
"""
Provide the command line interface.
"""
if __name__ == "PyFunceble":
# We initiate the end of the coloration at the end of each line.
initiate_colorama(autoreset=True)
try:
# The following handle the command line argument.
try:
# We load the configuration.
load_config(generate_directory_structure=False)
PARSER = argparse.ArgumentParser(
description="The tool to check the availability or syntax of domains, IPv4 or URL.", # pylint: disable=line-too-long
epilog="Crafted with %s by %s"
% (
Fore.RED + "♥" + Fore.RESET,
Style.BRIGHT
+ Fore.CYAN
+ "Nissar Chababy (Funilrys) "
+ Style.RESET_ALL
+ "with the help of "
+ Style.BRIGHT
+ Fore.GREEN
+ "https://pyfunceble.github.io/contributors.html "
+ Style.RESET_ALL
+ "&& "
+ Style.BRIGHT
+ Fore.GREEN
+ "https://pyfunceble.github.io/special-thanks.html",
),
add_help=False,
)
CURRENT_VALUE_FORMAT = (
Fore.YELLOW + Style.BRIGHT + "Configured value: " + Fore.BLUE
)
PARSER.add_argument(
"-ad",
"--adblock",
action="store_true",
help="Switch the decoding of the adblock format. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["adblock"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-a",
"--all",
action="store_false",
help="Output all available information on the screen. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["less"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"" "-c",
"--auto-continue",
"--continue",
action="store_true",
help="Switch the value of the auto continue mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["auto_continue"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--autosave-minutes",
type=int,
help="Update the minimum of minutes before we start "
"committing to upstream under Travis CI. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["travis_autosave_minutes"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--clean",
action="store_true",
help="Clean all files under the output directory.",
)
PARSER.add_argument(
"--clean-all",
action="store_true",
help="Clean all files under the output directory "
"along with all file generated by PyFunceble.",
)
PARSER.add_argument(
"--cmd",
type=str,
help="Pass a command to run before each commit "
"(except the final one) under the Travis mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["command_before_end"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--cmd-before-end",
type=str,
help="Pass a command to run before the results "
"(final) commit under the Travis mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["command_before_end"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--commit-autosave-message",
type=str,
help="Replace the default autosave commit message. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["travis_autosave_commit"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--commit-results-message",
type=str,
help="Replace the default results (final) commit message. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["travis_autosave_final_commit"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--complements",
action="store_true",
help="Switch the value of the generation and test of the complements. "
"A complement is for example `example.org` if `www.example.org` "
"is given and vice-versa. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["generate_complements"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-d", "--domain", type=str, help="Set and test the given domain."
)
PARSER.add_argument(
"-db",
"--database",
action="store_true",
help="Switch the value of the usage of a database to store "
"inactive domains of the currently tested list. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["inactive_database"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--database-type",
type=str,
help="Tell us the type of database to use. "
"You can choose between the following: `json|mariadb|mysql|sqlite` %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["db_type"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-dbr",
"--days-between-db-retest",
type=int,
help="Set the numbers of days between each retest of domains present "
"into inactive-db.json. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["days_between_db_retest"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--debug", action="store_true", help=argparse.SUPPRESS
)
PARSER.add_argument(
"--directory-structure",
action="store_true",
help="Generate the directory and files that are needed and which does "
"not exist in the current directory.",
)
PARSER.add_argument(
"--dns",
nargs="+",
help="Set the DNS server(s) we have to work with. "
"Multiple space separated DNS server can be given. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(", ".join(CONFIGURATION["dns_server"]))
if CONFIGURATION["dns_server"]
else CURRENT_VALUE_FORMAT + "Follow OS DNS" + Style.RESET_ALL
),
)
PARSER.add_argument(
"-ex",
"--execution",
action="store_true",
help="Switch the default value of the execution time showing. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["show_execution_time"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-f",
"--file",
type=str,
help="Read the given file and test all domains inside it. "
"If a URL is given we download and test the content of the given URL.", # pylint: disable=line-too-long
)
PARSER.add_argument(
"--filter", type=str, help="Domain to filter (regex)."
)
PARSER.add_argument(
"--help",
action="help",
default=argparse.SUPPRESS,
help="Show this help message and exit.",
)
PARSER.add_argument(
"--hierarchical",
action="store_true",
help="Switch the value of the hierarchical sorting of the tested file. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["hierarchical_sorting"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-h",
"--host",
action="store_true",
help="Switch the value of the generation of hosts file. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["generate_hosts"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--http",
action="store_true",
help="Switch the value of the usage of HTTP code. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(HTTP_CODE["active"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--iana",
action="store_true",
help="Update/Generate `iana-domains-db.json`.",
)
PARSER.add_argument(
"--idna",
action="store_true",
help="Switch the value of the IDNA conversion. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["idna_conversion"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-ip",
type=str,
help="Change the IP to print in the hosts files with the given one. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["custom_ip"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--json",
action="store_true",
help="Switch the value of the generation "
"of the JSON formatted list of domains. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["generate_json"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--less",
action="store_true",
help="Output less informations on screen. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(Preset().switch("less"))
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--local",
action="store_true",
help="Switch the value of the local network testing. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(Preset().switch("local"))
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--link", type=str, help="Download and test the given file."
)
PARSER.add_argument(
"--mining",
action="store_true",
help="Switch the value of the mining subsystem usage. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["mining"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-m",
"--multiprocess",
action="store_true",
help="Switch the value of the usage of multiple process. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["multiprocess"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-n",
"--no-files",
action="store_true",
help="Switch the value of the production of output files. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["no_files"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-nl",
"--no-logs",
action="store_true",
help="Switch the value of the production of logs files "
"in the case we encounter some errors. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(not CONFIGURATION["logs"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-ns",
"--no-special",
action="store_true",
help="Switch the value of the usage of the SPECIAL rules. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["no_special"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-nu",
"--no-unified",
action="store_true",
help="Switch the value of the production unified logs "
"under the output directory. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["unified"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-nw",
"--no-whois",
action="store_true",
help="Switch the value the usage of whois to test domain's status. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["no_whois"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--percentage",
action="store_true",
help="Switch the value of the percentage output mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["show_percentage"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--plain",
action="store_true",
help="Switch the value of the generation "
"of the plain list of domains. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["plain_list_domain"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-p",
"--processes",
type=int,
help="Set the number of simultaneous processes to use while "
"using multiple processes. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["maximal_processes"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--production", action="store_true", help=argparse.SUPPRESS
)
PARSER.add_argument(
"-psl",
"--public-suffix",
action="store_true",
help="Update/Generate `public-suffix.json`.",
)
PARSER.add_argument(
"-q",
"--quiet",
action="store_true",
help="Run the script in quiet mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["quiet"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--share-logs",
action="store_true",
help="Switch the value of the sharing of logs. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["share_logs"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-s",
"--simple",
action="store_true",
help="Switch the value of the simple output mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["simple"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--split",
action="store_true",
help="Switch the value of the split of the generated output files. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["inactive_database"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--syntax",
action="store_true",
help="Switch the value of the syntax test mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["syntax"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-t",
"--timeout",
type=int,
default=3,
help="Switch the value of the timeout. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["seconds_before_http_timeout"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--travis",
action="store_true",
help="Switch the value of the Travis mode. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["travis"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"--travis-branch",
type=str,
default="master",
help="Switch the branch name where we are going to push. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["travis_branch"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-u", "--url", type=str, help="Set and test the given URL."
)
PARSER.add_argument(
"-uf",
"--url-file",
type=str,
help="Read and test the list of URL of the given file. "
"If a URL is given we download and test the list (of URL) of the given URL content.", # pylint: disable=line-too-long
)
PARSER.add_argument(
"-ua",
"--user-agent",
type=str,
help="Set the user-agent to use and set every time we "
"interact with everything which is not our logs sharing system.", # pylint: disable=line-too-long
)
PARSER.add_argument(
"-v",
"--version",
help="Show the version of PyFunceble and exit.",
action="version",
version="%(prog)s " + VERSION,
)
PARSER.add_argument(
"-vsc",
"--verify-ssl-certificate",
action="store_true",
help="Switch the value of the verification of the "
"SSL/TLS certificate when testing for URL. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["verify_ssl_certificate"])
+ Style.RESET_ALL
),
)
PARSER.add_argument(
"-wdb",
"--whois-database",
action="store_true",
help="Switch the value of the usage of a database to store "
"whois data in order to avoid whois servers rate limit. %s"
% (
CURRENT_VALUE_FORMAT
+ repr(CONFIGURATION["whois_database"])
+ Style.RESET_ALL
),
)
ARGS = PARSER.parse_args()
if ARGS.less:
CONFIGURATION.update({"less": ARGS.less})
elif not ARGS.all:
CONFIGURATION.update({"less": ARGS.all})
if ARGS.adblock:
CONFIGURATION.update({"adblock": Preset().switch("adblock")})
if ARGS.auto_continue:
CONFIGURATION.update(
{"auto_continue": Preset().switch("auto_continue")}
)
if ARGS.autosave_minutes:
CONFIGURATION.update(
{"travis_autosave_minutes": ARGS.autosave_minutes}
)
if ARGS.cmd:
CONFIGURATION.update({"command": ARGS.cmd})
if ARGS.cmd_before_end:
CONFIGURATION.update({"command_before_end": ARGS.cmd_before_end})
if ARGS.commit_autosave_message:
CONFIGURATION.update(
{"travis_autosave_commit": ARGS.commit_autosave_message}
)
if ARGS.commit_results_message:
CONFIGURATION.update(
{"travis_autosave_final_commit": ARGS.commit_results_message}
)
if ARGS.complements:
CONFIGURATION.update(
{
"generate_complements": Preset().switch(
"generate_complements"
)
}
)
if ARGS.database:
CONFIGURATION.update(
{"inactive_database": Preset().switch("inactive_database")}
)
if ARGS.database_type:
if ARGS.database_type.lower() in [
"json",
"sqlite",
"mariadb",
"mysql",
]:
CONFIGURATION.update({"db_type": ARGS.database_type.lower()})
else:
print(
Style.BRIGHT
+ Fore.RED
+ "Unknown database type: {0}".format(
repr(ARGS.database_type)
)
)
exit(1)
if ARGS.days_between_db_retest:
CONFIGURATION.update(
{"days_between_db_retest": ARGS.days_between_db_retest}
)
if ARGS.debug:
CONFIGURATION.update({"debug": Preset().switch("debug")})
if ARGS.dns:
CONFIGURATION.update({"dns_server": ARGS.dns})
if ARGS.execution:
CONFIGURATION.update(
{"show_execution_time": Preset().switch("show_execution_time")}
)
if ARGS.filter:
CONFIGURATION.update({"filter": ARGS.filter})
if ARGS.hierarchical:
CONFIGURATION.update(
{
"hierarchical_sorting": Preset().switch(
"hierarchical_sorting"
)
}
)
if ARGS.host:
CONFIGURATION.update(
{"generate_hosts": Preset().switch("generate_hosts")}
)
if ARGS.http:
HTTP_CODE.update(
{"active": Preset().switch(HTTP_CODE["active"], True)}
)
if ARGS.idna:
CONFIGURATION.update(
{"idna_conversion": Preset().switch("idna_conversion")}
)
if ARGS.ip:
CONFIGURATION.update({"custom_ip": ARGS.ip})
if ARGS.json:
CONFIGURATION.update(
{"generate_json": Preset().switch("generate_json")}
)
if ARGS.local:
CONFIGURATION.update({"local": Preset().switch("local")})
if ARGS.mining:
CONFIGURATION.update({"mining": Preset().switch("mining")})
if ARGS.multiprocess:
CONFIGURATION.update(
{"multiprocess": Preset().switch("multiprocess")}
)
if ARGS.no_files:
CONFIGURATION.update({"no_files": Preset().switch("no_files")})
if ARGS.no_logs:
CONFIGURATION.update({"logs": Preset().switch("logs")})
if ARGS.no_special:
CONFIGURATION.update({"no_special": Preset().switch("no_special")})
if ARGS.no_unified:
CONFIGURATION.update({"unified": Preset().switch("unified")})
if ARGS.no_whois:
CONFIGURATION.update({"no_whois": Preset().switch("no_whois")})
if ARGS.percentage:
CONFIGURATION.update(
{"show_percentage": Preset().switch("show_percentage")}
)
if ARGS.plain:
CONFIGURATION.update(
{"plain_list_domain": Preset().switch("plain_list_domain")}
)
if ARGS.processes and ARGS.processes >= 2:
CONFIGURATION.update({"maximal_processes": ARGS.processes})
if ARGS.quiet:
CONFIGURATION.update({"quiet": Preset().switch("quiet")})
if ARGS.share_logs:
CONFIGURATION.update({"share_logs": Preset().switch("share_logs")})
if ARGS.simple:
CONFIGURATION.update(
{"simple": Preset().switch("simple"), "quiet": True}
)
if ARGS.split:
CONFIGURATION.update({"split": Preset().switch("split")})
if ARGS.syntax:
CONFIGURATION.update({"syntax": Preset().switch("syntax")})
if ARGS.timeout and ARGS.timeout % 3 == 0:
CONFIGURATION.update({"seconds_before_http_timeout": ARGS.timeout})
if ARGS.travis:
CONFIGURATION.update({"travis": Preset().switch("travis")})
if ARGS.travis_branch:
CONFIGURATION.update({"travis_branch": ARGS.travis_branch})
if ARGS.user_agent:
CONFIGURATION.update({"user_agent": ARGS.user_agent})
if ARGS.verify_ssl_certificate:
CONFIGURATION.update(
{"verify_ssl_certificate": ARGS.verify_ssl_certificate}
)
if ARGS.whois_database:
CONFIGURATION.update(
{"whois_database": Preset().switch("whois_database")}
)
if not CONFIGURATION["quiet"]:
CLICore.colorify_logo(home=True)
if ARGS.clean:
Clean(None)
if ARGS.clean_all:
Clean(None, ARGS.clean_all)
if ARGS.directory_structure:
DirectoryStructure()
if ARGS.iana:
IANA().update()
if ARGS.production:
Production()
if ARGS.public_suffix:
PublicSuffix().update()
            # We compare the upstream and local versions.
Version().compare()
            # We call our Core which will handle all cases depending on the configuration or
            # the given command line arguments.
Dispatcher(
domain_or_ip=ARGS.domain,
file_path=ARGS.file,
url_to_test=ARGS.url,
url_file_path=ARGS.url_file,
link_to_test=ARGS.link,
)
except KeyError as e:
if not Version(True).is_cloned():
# We are not into the cloned version.
# We merge the local with the upstream configuration.
Merge(CONFIG_DIRECTORY)
else:
# We are in the cloned version.
# We raise the exception.
#
# Note: The purpose of this is to avoid having
# to search for a mistake while developing.
raise e
except KeyboardInterrupt:
CLICore.stay_safe()
|
[
"contact@funilrys.com"
] |
contact@funilrys.com
|
c670140169e945574a3535bbeef8af42ca7b75f3
|
523a3857a2f31f442d6550d00ef8d534cfa37457
|
/liteeth/phy/s6rgmii.py
|
5f4c70a9e40cfc532f306ecf09f36369a140abe2
|
[
"BSD-2-Clause"
] |
permissive
|
zsipos/liteeth
|
ed7d31b16028b76a52162adb87cbbf4e3ab1c89b
|
fb478537e7d45512567b9b35b5a69c536cb588b2
|
refs/heads/master
| 2021-06-18T11:02:11.104258
| 2020-03-25T11:40:02
| 2020-03-25T11:40:02
| 215,765,402
| 0
| 0
|
NOASSERTION
| 2019-10-17T10:25:59
| 2019-10-17T10:25:59
| null |
UTF-8
|
Python
| false
| false
| 8,880
|
py
|
# This file is Copyright (c) 2019-2020 Florent Kermarrec <florent@enjoy-digital.fr>
# License: BSD
# RGMII PHY for Spartan6 Xilinx FPGA
from migen import *
from migen.genlib.resetsync import AsyncResetSynchronizer
from liteeth.common import *
from liteeth.phy.common import *
class LiteEthPHYRGMIITX(Module):
def __init__(self, pads):
self.sink = sink = stream.Endpoint(eth_phy_description(8))
# # #
tx_ctl_obuf = Signal()
tx_data_obuf = Signal(4)
self.specials += [
Instance("ODDR2",
p_DDR_ALIGNMENT = "C0",
p_SRTYPE = "ASYNC",
o_Q = tx_ctl_obuf,
i_C0 = ClockSignal("eth_tx"),
i_C1 = ~ClockSignal("eth_tx"),
i_CE = 1,
i_D0 = sink.valid,
i_D1 = sink.valid,
i_R = ResetSignal("eth_tx"),
i_S = 0
),
Instance("IODELAY2",
p_IDELAY_TYPE = "FIXED",
p_ODELAY_VALUE = 0,
p_DELAY_SRC = "ODATAIN",
o_DOUT = pads.tx_ctl,
i_CAL = 0,
i_CE = 0,
i_CLK = 0,
i_IDATAIN = 0,
i_INC = 0,
i_IOCLK0 = 0,
i_IOCLK1 = 0 ,
i_ODATAIN = tx_ctl_obuf,
i_RST = 0,
i_T = 0,
)
]
for i in range(4):
self.specials += [
Instance("ODDR2",
p_DDR_ALIGNMENT = "C0",
p_SRTYPE = "ASYNC",
o_Q = tx_data_obuf[i],
i_C0 = ClockSignal("eth_tx"),
i_C1 = ~ClockSignal("eth_tx"),
i_CE = 1,
i_D0 = sink.data[i],
i_D1 = sink.data[4+i],
i_R = ResetSignal("eth_tx"),
i_S = 0,
),
Instance("IODELAY2",
p_IDELAY_TYPE = "FIXED",
p_ODELAY_VALUE = 0,
p_DELAY_SRC = "ODATAIN",
o_DOUT = pads.tx_data[i],
i_CAL = 0,
i_CE = 0,
i_CLK = 0,
i_IDATAIN = 0,
i_INC = 0,
i_IOCLK0 = 0,
i_IOCLK1 = 0,
i_ODATAIN = tx_data_obuf[i],
i_RST = 0,
i_T = 0,
)
]
self.comb += sink.ready.eq(1)
class LiteEthPHYRGMIIRX(Module):
def __init__(self, pads, rx_delay=2e-9):
self.source = source = stream.Endpoint(eth_phy_description(8))
# # #
rx_delay_taps = int(rx_delay/50e-12) # 50ps per tap
assert rx_delay_taps < 256
rx_ctl_ibuf = Signal()
rx_ctl_idelay = Signal()
rx_ctl = Signal()
rx_ctl_reg = Signal()
rx_data_ibuf = Signal(4)
rx_data_idelay = Signal(4)
rx_data = Signal(8)
rx_data_reg = Signal(8)
self.specials += [
Instance("IBUF",
i_I = pads.rx_ctl,
o_O = rx_ctl_ibuf,
),
Instance("IODELAY2",
p_IDELAY_TYPE = "FIXED",
p_ODELAY_VALUE = rx_delay_taps,
p_DELAY_SRC = "IDATAIN",
o_DATAOUT = rx_ctl_idelay,
i_CAL = 0,
i_CE = 0,
i_CLK = 0,
i_IDATAIN = rx_ctl_ibuf,
i_INC = 0,
i_IOCLK0 = 0,
i_IOCLK1 = 0,
i_ODATAIN = 0,
i_RST = 0,
i_T = 1,
),
Instance("IDDR2",
p_DDR_ALIGNMENT = "C0",
o_Q0 = rx_ctl,
i_C0 = ClockSignal("eth_rx"),
i_C1 = ~ClockSignal("eth_rx"),
i_CE = 1,
i_D = rx_ctl_idelay,
i_R = 0,
i_S = 0,
)
]
self.sync += rx_ctl_reg.eq(rx_ctl)
for i in range(4):
self.specials += [
Instance("IBUF",
i_I = pads.rx_data[i],
o_O = rx_data_ibuf[i],
),
Instance("IODELAY2",
p_IDELAY_TYPE = "FIXED",
p_ODELAY_VALUE = rx_delay_taps,
p_DELAY_SRC = "IDATAIN",
o_DATAOUT = rx_data_idelay[i],
i_CAL = 0,
i_CE = 0,
i_CLK = 0,
i_IDATAIN = rx_data_ibuf[i],
i_INC = 0,
i_IOCLK0 = 0,
i_IOCLK1 = 0,
i_ODATAIN = 0,
i_RST = 0,
i_T = 1,
),
Instance("IDDR2",
p_DDR_ALIGNMENT = "C0",
o_Q0 = rx_data[i],
o_Q1 = rx_data[i+4],
i_C0 = ClockSignal("eth_rx"),
i_C1 = ~ClockSignal("eth_rx"),
i_CE = 1,
i_D = rx_data_idelay[i],
i_R = 0,
i_S = 0,
)
]
self.sync += rx_data_reg.eq(rx_data)
rx_ctl_reg_d = Signal()
self.sync += rx_ctl_reg_d.eq(rx_ctl_reg)
last = Signal()
self.comb += last.eq(~rx_ctl_reg & rx_ctl_reg_d)
self.sync += [
source.valid.eq(rx_ctl_reg),
source.data.eq(Cat(rx_data_reg[:4], rx_data[4:]))
]
self.comb += source.last.eq(last)
class LiteEthPHYRGMIICRG(Module, AutoCSR):
def __init__(self, clock_pads, pads, with_hw_init_reset, tx_delay=2e-9):
self._reset = CSRStorage()
# # #
self.clock_domains.cd_eth_rx = ClockDomain()
self.clock_domains.cd_eth_tx = ClockDomain()
self.comb += self.cd_eth_tx.clk.eq(self.cd_eth_rx.clk)
# RX
eth_rx_clk_ibuf = Signal()
self.specials += [
Instance("IBUF",
i_I = clock_pads.rx,
o_O = eth_rx_clk_ibuf,
),
Instance("BUFG",
i_I = eth_rx_clk_ibuf,
o_O = self.cd_eth_rx.clk,
),
]
# TX
tx_delay_taps = int(tx_delay/50e-12) # 50ps per tap
assert tx_delay_taps < 256
eth_tx_clk_o = Signal()
self.specials += [
Instance("ODDR2",
p_DDR_ALIGNMENT = "C0",
p_SRTYPE = "ASYNC",
o_Q = eth_tx_clk_o,
i_C0 = ClockSignal("eth_tx"),
i_C1 = ~ClockSignal("eth_tx"),
i_CE = 1,
i_D0 = 1,
i_D1 = 0,
i_R = ResetSignal("eth_tx"),
i_S = 0,
),
Instance("IODELAY2",
p_IDELAY_TYPE = "FIXED",
p_ODELAY_VALUE = tx_delay_taps,
p_DELAY_SRC = "ODATAIN",
o_DOUT = clock_pads.tx,
i_CAL = 0,
i_CE = 0,
i_CLK = 0,
i_IDATAIN = 0,
i_INC = 0,
i_IOCLK0 = 0,
i_IOCLK1 = 0,
i_ODATAIN = eth_tx_clk_o,
i_RST = 0,
i_T = 0,
)
]
# Reset
self.reset = reset = Signal()
if with_hw_init_reset:
self.submodules.hw_reset = LiteEthPHYHWReset()
self.comb += reset.eq(self._reset.storage | self.hw_reset.reset)
else:
self.comb += reset.eq(self._reset.storage)
if hasattr(pads, "rst_n"):
self.comb += pads.rst_n.eq(~reset)
self.specials += [
AsyncResetSynchronizer(self.cd_eth_tx, reset),
AsyncResetSynchronizer(self.cd_eth_rx, reset),
]
class LiteEthPHYRGMII(Module, AutoCSR):
dw = 8
tx_clk_freq = 125e6
rx_clk_freq = 125e6
def __init__(self, clock_pads, pads, with_hw_init_reset=True, tx_delay=2e-9, rx_delay=2e-9):
self.submodules.crg = LiteEthPHYRGMIICRG(clock_pads, pads, with_hw_init_reset, tx_delay)
self.submodules.tx = ClockDomainsRenamer("eth_tx")(LiteEthPHYRGMIITX(pads))
self.submodules.rx = ClockDomainsRenamer("eth_rx")(LiteEthPHYRGMIIRX(pads, rx_delay))
self.sink, self.source = self.tx.sink, self.rx.source
if hasattr(pads, "mdc"):
self.submodules.mdio = LiteEthPHYMDIO(pads)
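# Illustrative instantiation sketch (an assumption, not part of the original file):
# inside a LiteX SoC targeting a Spartan-6 board, this PHY would typically be
# created from the platform's Ethernet pads, e.g.:
#
#     self.submodules.ethphy = LiteEthPHYRGMII(
#         clock_pads = platform.request("eth_clocks"),
#         pads       = platform.request("eth"),
#         tx_delay   = 2e-9,
#         rx_delay   = 2e-9,
#     )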
|
[
"florent@enjoy-digital.fr"
] |
florent@enjoy-digital.fr
|
878153f223e114a2296af881155377a48ad010bf
|
fbbe424559f64e9a94116a07eaaa555a01b0a7bb
|
/Sklearn_x86/source/scipy/sparse/linalg/isolve/tests/test_gcrotmk.py
|
5c6fe30aff5e23067d4e3111d12b909f53e4774c
|
[
"MIT",
"Python-2.0",
"Qhull",
"BSL-1.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"GPL-3.0-or-later",
"GPL-3.0-only",
"BSD-3-Clause-Open-MPI",
"GCC-exception-3.1"
] |
permissive
|
ryfeus/lambda-packs
|
6544adb4dec19b8e71d75c24d8ed789b785b0369
|
cabf6e4f1970dc14302f87414f170de19944bac2
|
refs/heads/master
| 2022-12-07T16:18:52.475504
| 2022-11-29T13:35:35
| 2022-11-29T13:35:35
| 71,386,735
| 1,283
| 263
|
MIT
| 2022-11-26T05:02:14
| 2016-10-19T18:22:39
|
Python
|
UTF-8
|
Python
| false
| false
| 5,405
|
py
|
#!/usr/bin/env python
"""Tests for the linalg.isolve.gcrotmk module
"""
from numpy.testing import (assert_, assert_allclose, assert_equal,
suppress_warnings)
import numpy as np
from numpy import zeros, array, allclose
from scipy.linalg import norm
from scipy.sparse import csr_matrix, eye, rand
from scipy.sparse.linalg.interface import LinearOperator
from scipy.sparse.linalg import splu
from scipy.sparse.linalg.isolve import gcrotmk, gmres
Am = csr_matrix(array([[-2,1,0,0,0,9],
[1,-2,1,0,5,0],
[0,1,-2,1,0,0],
[0,0,1,-2,1,0],
[0,3,0,1,-2,1],
[1,0,0,0,1,-2]]))
b = array([1,2,3,4,5,6])
count = [0]
def matvec(v):
count[0] += 1
return Am*v
A = LinearOperator(matvec=matvec, shape=Am.shape, dtype=Am.dtype)
def do_solve(**kw):
count[0] = 0
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x0, flag = gcrotmk(A, b, x0=zeros(A.shape[0]), tol=1e-14, **kw)
count_0 = count[0]
assert_(allclose(A*x0, b, rtol=1e-12, atol=1e-12), norm(A*x0-b))
return x0, count_0
class TestGCROTMK:
def test_preconditioner(self):
# Check that preconditioning works
pc = splu(Am.tocsc())
M = LinearOperator(matvec=pc.solve, shape=A.shape, dtype=A.dtype)
x0, count_0 = do_solve()
x1, count_1 = do_solve(M=M)
assert_equal(count_1, 3)
assert_(count_1 < count_0/2)
assert_(allclose(x1, x0, rtol=1e-14))
def test_arnoldi(self):
np.random.seed(1)
A = eye(2000) + rand(2000, 2000, density=5e-4)
b = np.random.rand(2000)
# The inner arnoldi should be equivalent to gmres
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x0, flag0 = gcrotmk(A, b, x0=zeros(A.shape[0]), m=15, k=0, maxiter=1)
x1, flag1 = gmres(A, b, x0=zeros(A.shape[0]), restart=15, maxiter=1)
assert_equal(flag0, 1)
assert_equal(flag1, 1)
assert np.linalg.norm(A.dot(x0) - b) > 1e-3
assert_allclose(x0, x1)
def test_cornercase(self):
np.random.seed(1234)
# Rounding error may prevent convergence with tol=0 --- ensure
# that the return values in this case are correct, and no
# exceptions are raised
for n in [3, 5, 10, 100]:
A = 2*eye(n)
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
b = np.ones(n)
x, info = gcrotmk(A, b, maxiter=10)
assert_equal(info, 0)
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
x, info = gcrotmk(A, b, tol=0, maxiter=10)
if info == 0:
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
b = np.random.rand(n)
x, info = gcrotmk(A, b, maxiter=10)
assert_equal(info, 0)
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
x, info = gcrotmk(A, b, tol=0, maxiter=10)
if info == 0:
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
def test_nans(self):
A = eye(3, format='lil')
A[1,1] = np.nan
b = np.ones(3)
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x, info = gcrotmk(A, b, tol=0, maxiter=10)
assert_equal(info, 1)
def test_truncate(self):
np.random.seed(1234)
A = np.random.rand(30, 30) + np.eye(30)
b = np.random.rand(30)
for truncate in ['oldest', 'smallest']:
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x, info = gcrotmk(A, b, m=10, k=10, truncate=truncate, tol=1e-4,
maxiter=200)
assert_equal(info, 0)
assert_allclose(A.dot(x) - b, 0, atol=1e-3)
def test_CU(self):
for discard_C in (True, False):
# Check that C,U behave as expected
CU = []
x0, count_0 = do_solve(CU=CU, discard_C=discard_C)
assert_(len(CU) > 0)
assert_(len(CU) <= 6)
if discard_C:
for c, u in CU:
assert_(c is None)
# should converge immediately
x1, count_1 = do_solve(CU=CU, discard_C=discard_C)
if discard_C:
assert_equal(count_1, 2 + len(CU))
else:
assert_equal(count_1, 3)
assert_(count_1 <= count_0/2)
assert_allclose(x1, x0, atol=1e-14)
def test_denormals(self):
# Check that no warnings are emitted if the matrix contains
# numbers for which 1/x has no float representation, and that
# the solver behaves properly.
A = np.array([[1, 2], [3, 4]], dtype=float)
A *= 100 * np.nextafter(0, 1)
b = np.array([1, 1])
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
xp, info = gcrotmk(A, b)
if info == 0:
assert_allclose(A.dot(xp), b)
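# Minimal direct-usage sketch (added for illustration; it mirrors do_solve above and
# is not part of the original test suite):
if __name__ == "__main__":
    x, info = gcrotmk(A, b, x0=zeros(A.shape[0]), tol=1e-14)
    # info == 0 means the solver reported convergence.
    print("converged" if info == 0 else "not converged", np.linalg.norm(A * x - b))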
|
[
"ryfeus@gmail.com"
] |
ryfeus@gmail.com
|
4c9b0d1ebfbd32b61c22ada01520bfa080ebb0a4
|
80e9a64396414636d4d5984b873696baccdf0d31
|
/app/__init__.py
|
559f15b5799b6e0372bddf3ce62aad9e39ecdfe3
|
[
"MIT"
] |
permissive
|
syemoore/info3180-Project2
|
a14f1c890b2f650e744bc847cc73fdf248bf1aba
|
bae0163693264d25aed39ef95025e5dbb8b03828
|
refs/heads/master
| 2020-03-14T09:05:46.456979
| 2018-04-29T23:34:51
| 2018-04-29T23:34:51
| 131,538,566
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,024
|
py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import os, psycopg2
from flask_login import LoginManager
from flask_wtf.csrf import CSRFProtect
app = Flask(__name__)
app.config.from_object(__name__)
csrf = CSRFProtect(app)
# Flask-Login login manager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
login_manager.session_protection = "strong"
UPLOAD_FOLDER = './app/static/uploads'
DATABASE_URL = os.environ['DATABASE_URL'] = 'postgresql://admin:adminonly@localhost/photogram'
TOKEN_SECRET = 'Thisissecret'
app.config['SECRET_KEY'] = 'pH0t 0Gr@l^l'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
PROFILE_IMG_UPLOAD_FOLDER = os.path.join("static/uploads", "profile_photos")
POST_IMG_UPLOAD_FOLDER = os.path.join("static/uploads", "posts")
app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URL
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['TOKEN_SECRET'] = csrf
conn = psycopg2.connect(DATABASE_URL)
db = SQLAlchemy(app)
from app import views
|
[
"sy.moore@mymona.uwi.edu"
] |
sy.moore@mymona.uwi.edu
|
d1c6dd7b2d80edb1db31b1ae2ed6619e5a39e30a
|
240ab4629d469c20450d9298ec1996a96d093692
|
/python_inf/tabliczka.py
|
7901f602d2a451df4dd013d628663471abc8b51f
|
[] |
no_license
|
pnoga190401/projekt
|
badeec7a4f3d28de908e9b3817c6fab3242bb933
|
ec5797372296923a5571a11f6a60f73214cd4aa6
|
refs/heads/master
| 2021-06-01T22:43:52.544544
| 2020-02-25T07:43:54
| 2020-02-25T07:43:54
| 105,510,454
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 345
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# tabliczka.py
#
def tabliczka():
for k in range(1, 11):
for w in range(1, 11):
print("{:>3} ".format(k * w), end='')
print()
def main(args):
tabliczka()
return 0
if __name__ == '__main__':
import sys
sys.exit(main(sys.argv))
|
[
"paulinanoga55@gmail.com"
] |
paulinanoga55@gmail.com
|
ceef3c88fb67390e973d5302e78c10df5de03030
|
ff53bf82b492ed815d95a3b069e3a16d027eac7a
|
/Snake.py
|
a1668c754e9db24e077c9242d85c50e8ab5fd18f
|
[] |
no_license
|
yogivaleja/Snakes
|
a5874f2c78238338aac3446030acebebdbfc56e4
|
a1bfabf7cb92067209e7e1b399ed9d9f4c3e5e97
|
refs/heads/master
| 2023-01-08T05:09:25.106625
| 2020-11-02T19:22:08
| 2020-11-02T19:22:08
| 309,473,243
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,690
|
py
|
import pygame
import random
import os
pygame.init()
pygame.mixer.init()
#Screen Window
screen_width = 1000
screen_height = 600
game_window = pygame.display.set_mode((screen_width,screen_height))
#Title
pygame.display.set_caption('Snake Game')
#Colors
white = (255,255,255)
black = (0,0,0)
red = (255,0,0)
green = (0,255,0)
blue = (0,0,255)
cyan = (0,255,255)
# Clock
fps = 60
clock = pygame.time.Clock()
#Music
def background_music():
pygame.mixer.music.load(os.path.join('assets','background.mp3'))
pygame.mixer.music.play()
def game_over_music():
# pygame.mixer.music.load('gameover.mp3')
pygame.mixer.music.load(os.path.join('assets','gameover.mp3'))
pygame.mixer.music.play()
#Background Image
# image = pygame.image.load('abc.jpg')
image = pygame.image.load(os.path.join('assets','abc.jpg'))
image = pygame.transform.scale(image, (screen_width, screen_height))
#Text Display on Screen
font = pygame.font.SysFont(None,55)
def text_on_screen(text, color, x, y):
screen_text = font.render(text, True, color)
game_window.blit(screen_text, [x,y])
def plot_snake(snake_list, size):
for x, y in snake_list :
pygame.draw.rect(game_window, black, [x, y, size, size])
def welcome_screen():
exit_game = False
while not exit_game:
game_window.fill(white)
game_window.blit(image, (0,0))
text_on_screen('Welcome to Snakes', blue, 340, 210)
text_on_screen('Press Space Bar to Continue', blue, 270, 260)
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit_game = True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_SPACE:
background_music()
game_loop()
pygame.display.update()
clock.tick(fps)
def game_loop():
# Game Specific Variables
exit_game = False
game_over = False
'''
    Here a flag is used for the snake entering the boundary:
    before entering the boundary the snake can only move in the down direction,
    and after entering the boundary the game starts.
'''
#Before Entering the Boundary
flag = False
# Snake Variables
snake_x = 100
snake_y = 30
size = 12
velocity_x = 0
velocity_y = 0
velocity = 4
    brick_counter = 0  # For creating bricks
# Food Variables
# Food Size = Snake Size
food_x = random.randrange(100, 900)
food_y = random.randrange(100, 500)
no_of_food_eaten = 0
# Snake Size Increament Variables
snake_list = []
snake_length = 1
#File DataBase for Hiscore Count
    # If the hiscore file doesn't exist, create a text document named hiscore.txt
if(not os.path.exists('hiscore.txt')):
with open('hiscore.txt','w') as file:
file.write('0')
#Score Variables
score = 0
    highscore_on_file = 0
#Check hiscore text from file
with open('hiscore.txt', 'r') as file :
highscore_on_file = file.read()
while not exit_game:
if game_over:
with open('hiscore.txt', 'w') as file:
file.write(str(highscore_on_file))
game_window.fill(white)
text_on_screen('Game Over!!!', red, 380,230)
text_on_screen('Press Enter to Restart', red, 350,270)
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit_game = True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
background_music()
game_loop()
else:
game_window.fill(white)
if not flag:
pygame.draw.line(game_window, black, (30, 60), (80, 60), 5)
pygame.draw.line(game_window, black, (130, 60), (970, 60), 5)
pygame.draw.line(game_window, black, (30, 60), (30, 570), 5)
pygame.draw.line(game_window, black, (30, 570), (970, 570), 5)
pygame.draw.line(game_window, black, (970, 60), (970, 570), 5)
pygame.draw.rect(game_window, black, [snake_x, snake_y, size, size])
text_on_screen('Press Down Key to Continue', green, 270, 230)
text_on_screen('Food', cyan, 360, 310)
text_on_screen('Poison', red, 360, 370)
pygame.draw.rect(game_window, cyan, [270, 320, 15, 15])
pygame.draw.rect(game_window, red, [270, 380, 15, 15])
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit_game = True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_DOWN:
velocity_y = velocity
velocity_x = 0
snake_x += velocity_x
snake_y += velocity_y
if snake_y > 60:
flag = True
if flag:
for event in pygame.event.get():
# print(event)
if event.type == pygame.QUIT:
exit_game = True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RIGHT:
velocity_x = velocity
velocity_y = 0
if event.key == pygame.K_LEFT:
velocity_x = -velocity
velocity_y = 0
if event.key == pygame.K_UP:
velocity_y = -velocity
velocity_x = 0
if event.key == pygame.K_DOWN:
velocity_y = velocity
velocity_x = 0
# Cheat Code
if event.key == pygame.K_q:
score += 10
snake_x += velocity_x
snake_y += velocity_y
if snake_x < 30 or snake_x > 970 or snake_y < 60 or snake_y > 570 :
game_over = True
game_over_music()
if abs(snake_x - food_x) < 10 and abs(snake_y - food_y) < 10:
score += 10
brick_counter += 1
food_x = random.randrange(50, 950)
food_y = random.randrange(80, 550)
brick_x = random.randrange(80, 550)
brick_y = random.randrange(80, 550)
snake_length += 5
no_of_food_eaten += 1
if no_of_food_eaten % 5 == 0 :
velocity += 1
if score > int(highscore_on_file) :
highscore_on_file = score
# print(brick_counter)
game_window.fill(white)
text_on_screen(' Score: ' + str(score) + ' Hiscore: ' + str(highscore_on_file), green, 10, 15)
head = []
head.append(snake_x)
head.append(snake_y)
snake_list.append(head)
if len(snake_list) > snake_length:
del snake_list[0]
if head in snake_list[:-1]:
game_over = True
game_over_music()
pygame.draw.line(game_window, black, (30,60),(970,60), 5)
pygame.draw.line(game_window, black, (30,60),(30,570), 5)
pygame.draw.line(game_window, black, (30,570),(970,570), 5)
pygame.draw.line(game_window, black, (970,60),(970,570), 5)
plot_snake(snake_list, size)
pygame.draw.rect(game_window, cyan, [food_x, food_y, size, size])
if brick_counter > 8:
if abs(brick_x - food_x) > 50 or abs(brick_y - food_y) > 50:
pygame.draw.rect(game_window, red , [brick_x, brick_y, 15, 15])
print(f'snake_x:{snake_x}')
print(f'brick_x:{brick_x}')
print(f'snake_y:{snake_y}')
print(f'brick_y:{brick_y}')
print(f'abs X:{abs(snake_x - brick_x)}')
print(f'abs Y:{abs(snake_y - brick_y)}')
if abs(snake_x - brick_x) < 10 and abs(snake_y - brick_y) < 10:
game_over = True
game_over_music()
pygame.display.update()
clock.tick(fps)
pygame.quit()
# quit()
if __name__ == '__main__':
welcome_screen()
|
[
"valejayogesh@gmail.com"
] |
valejayogesh@gmail.com
|
316995c8714b536120e04ed78d65a9596ad88d95
|
51da71a26628a3c6d1814e6da38f5c48f3101d9b
|
/uri/2349.py
|
991c2a0e576be7e61c94c10e7d12560498528cb4
|
[] |
no_license
|
da-ferreira/uri-online-judge
|
279156249a1b0be49a7b29e6dbce85a293a47df1
|
6ec97122df3cb453ea26e0c9f9206a2e470ba37d
|
refs/heads/main
| 2023-03-30T11:47:05.120388
| 2021-04-02T19:45:25
| 2021-04-02T19:45:25
| 309,744,456
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 500
|
py
|
qtd_estacoes, commands, devastada = map(int, input().split())
comandos = list(map(int, input().split()))
vezes = 0
robo = 1
for i in range(commands):
if robo == devastada:
vezes += 1
if comandos[i] == 1:
if robo < qtd_estacoes:
robo += 1
else:
robo = 1
else:
if robo == 1:
robo = qtd_estacoes
else:
robo -= 1
if robo == devastada:
vezes += 1
print(vezes)
|
[
"noreply@github.com"
] |
da-ferreira.noreply@github.com
|
d73f3959f8899709e54de43cea48267c483b46ae
|
74717c23d061ae4d2df887e27a7fcf9cba013324
|
/Python/PropagatedBodies.py
|
6d31dd2fa2e7a57790dfc72c375e9022b87bce39
|
[] |
no_license
|
RensZ/thesis2
|
e338c400b6cd5ca47551f699b3ce77566a9d6798
|
2ae43a6c65ec07ce0d2693a62de0b2ec80f9aa1f
|
refs/heads/master
| 2023-01-10T11:44:24.209292
| 2020-11-10T16:39:50
| 2020-11-10T16:39:50
| 257,527,828
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,938
|
py
|
"""
Created by Rens van der Zwaard on 2020-3-2
Purpose: to plot the propagation of the bodies to check whether it went alright
"""
def f(dir_output, dir_plots, body, no_arcs):
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
AU = 149597870700.0 #m
filename = "StatePropagationHistory"+body+".dat"
data = np.genfromtxt(dir_output+filename,delimiter=',')
t = data[:,0]
i_sorted = t.argsort()
t_sorted = t[i_sorted]
dt = t_sorted[1:-1]-t_sorted[0:-2]
gaps = np.concatenate([[0],np.where(dt>60.0*60.0*24.0)[0],[-1]])
print(" minimum step size in entire simulation:", np.min(dt), "seconds")
x = data[:,1]/AU
y = data[:,2]/AU
z = data[:,3]/AU
x_sorted = x[i_sorted]
y_sorted = y[i_sorted]
z_sorted = z[i_sorted]
axmin = np.min([x,y,z])
axmax = np.max([x,y,z])
fig = plt.figure(figsize=(10, 10))
ax = fig.gca(projection='3d')
legend = []
for j in range(1, no_arcs + 1):
start = gaps[j-1] + 1
end = gaps[j]
# dt_arc = dt[start:end]
# print("minimum step size for arc", str(j), "is", str(np.min(dt_arc)), "seconds" )
ax.plot(x[start:end],y[start:end],z[start:end],linewidth=0.75)
legend.append("arc" + str(j))
ax.set_xlabel('x [AU]')
ax.set_ylabel('y [AU]')
ax.set_zlabel('z [AU]')
ax.set_xlim([axmin, axmax])
ax.set_ylim([axmin, axmax])
ax.set_zlim([axmin, axmax])
plt.legend(legend, loc='upper left')
# #plot mercury for reference
# u = np.linspace(0, 2 * np.pi, 100)
# r_mercury = 1.63 * (10**-5) #AU
# x = r_mercury * np.outer(np.cos(u), np.sin(u))
# y = r_mercury * np.outer(np.sin(u), np.sin(u))
# z = r_mercury * np.outer(np.ones(np.size(u)), np.cos(u))
# ax.plot_surface(x, y, z, color='black')
plt.tight_layout()
plt.savefig(dir_plots + body + '_state_history.png')
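# Illustrative call sketch (the directory names and arc count below are assumptions,
# not values from the original project):
#
#     f(dir_output="output/", dir_plots="plots/", body="Mercury", no_arcs=4)
#
# which reads StatePropagationHistoryMercury.dat and writes plots/Mercury_state_history.png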
|
[
"rensvanderzwaard@gmail.com"
] |
rensvanderzwaard@gmail.com
|
70c65f15fa044a4fd22f5b1cd1aa72a6ad1f2aff
|
23d096a2c207eff63f0c604825d4b2ea1a5474d9
|
/TTAnalysis/EgammaClusterProducers/python/ecalDeadTTowerKillerSequence_cff.py
|
1766a7fc8f95b8b26e0782edd077c8741dc29698
|
[] |
no_license
|
martinamalberti/BicoccaUserCode
|
40d8272c31dfb4ecd5a5d7ba1b1d4baf90cc8939
|
35a89ba88412fb05f31996bd269d44b1c6dd42d3
|
refs/heads/master
| 2021-01-18T09:15:13.790891
| 2013-08-07T17:08:48
| 2013-08-07T17:08:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 181
|
py
|
import FWCore.ParameterSet.Config as cms
from TTAnalysis.EgammaClusterProducers.ecalDeadTTowerKiller_cfi import *
ecalDeadTTowerKillerSequence = cms.Sequence(EcalTTowerKiller)
|
[
""
] | |
8bf0486700ea28d4e93393b81c5e4501d8ea59a3
|
b007d88e6726452ffa8fe80300614f311ae5b318
|
/LeetCode/facebook/top_facebook_questions/convert_bst_to_sorted_doubly_linked_list.py
|
9a63bd3236d43558c430769e6f6a823f41afb113
|
[] |
no_license
|
jinurajan/Datastructures
|
ec332b12b8395f42cb769e771da3642f25ba7e7f
|
647fea5d2c8122468a1c018c6829b1c08717d86a
|
refs/heads/master
| 2023-07-06T14:42:55.168795
| 2023-07-04T13:23:22
| 2023-07-04T13:23:22
| 76,943,162
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,681
|
py
|
"""
Convert Binary Search Tree to Sorted Doubly Linked List
Convert a Binary Search Tree to a sorted Circular Doubly-Linked List in place.
You can think of the left and right pointers as synonymous to the predecessor and successor pointers in a doubly-linked list. For a circular doubly linked list, the predecessor of the first element is the last element, and the successor of the last element is the first element.
We want to do the transformation in place. After the transformation, the left pointer of the tree node should point to its predecessor, and the right pointer should point to its successor. You should return the pointer to the smallest element of the linked list.
Constraints:
-1000 <= Node.val <= 1000
Node.left.val < Node.val < Node.right.val
All values of Node.val are unique.
0 <= Number of Nodes <= 2000
"""
"""
# Definition for a Node.
class Node:
def __init__(self, val, left=None, right=None):
self.val = val
self.left = left
self.right = right
"""
class Solution:
def treeToDoublyList(self, root: 'Node') -> 'Node':
if not root:
return root
        head = Node(0)  # dummy head node; its value is never used
last = head
node = root
while node:
if node.left:
predecessor = node.left
while predecessor.right and predecessor.right != node:
predecessor = predecessor.right
if not predecessor.right:
predecessor.right = node
node = node.left
else:
last.right = node
node.left = last
last = node
node = node.right
else:
last.right = node
node.left = last
last = node
node = node.right
last.right = head.right
head.right.left = last
return head.right
"""
# Definition for a Node.
class Node:
def __init__(self, val, left=None, right=None):
self.val = val
self.left = left
self.right = right
"""
class Solution:
def treeToDoublyList(self, root: 'Node') -> 'Node':
if not root:
return root
def dfs(node):
            nonlocal first, last
if node:
dfs(node.left)
if last:
node.left = last
last.right = node
else:
first = node
last = node
dfs(node.right)
first, last = None, None
dfs(root)
last.right = first
first.left = last
return first
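# Small self-contained demo (added for illustration). LeetCode supplies the Node
# class at runtime; a minimal stand-in is defined here only so the sketch runs.
if __name__ == "__main__":
    class Node:
        def __init__(self, val, left=None, right=None):
            self.val = val
            self.left = left
            self.right = right

    #   2
    #  / \
    # 1   3
    root = Node(2, Node(1), Node(3))
    head = Solution().treeToDoublyList(root)
    node, values = head, []
    for _ in range(3):
        values.append(node.val)
        node = node.right
    print(values)  # expected: [1, 2, 3]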
|
[
"jinu.p.r@gmail.com"
] |
jinu.p.r@gmail.com
|
18cfae7413bf79f8f691805d57a11e88b052688a
|
48cca5ec318fe173cbc8b8c3cd56be524938000d
|
/Schedule/proxyRefreshSchedule.py
|
b3d0c4166dad1264a41e48d700af9457209b7753
|
[] |
no_license
|
GrootPank/ProxyPool
|
87029d582433d0cd40d55ca2006d769d85c1ad02
|
2f91783fd4f3631b8600ad76627c96d4e9ce30b9
|
refs/heads/master
| 2021-01-01T04:03:16.343482
| 2017-07-14T08:37:20
| 2017-07-14T08:37:20
| 97,111,679
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,489
|
py
|
import threading
from apscheduler.schedulers.blocking import BlockingScheduler
from Manager.proxyManager import ProxyManager
from Util.logHandler import LogHandler
from Util.utilFunction import validProxy
class ProxyRefreshSchedule(ProxyManager):
def __init__(self):
ProxyManager.__init__(self)
self.logger = LogHandler("refresh_shedule")
def valid_Proxy(self):
"""
        Validate the proxies in the original proxy pool and put the useful ones into the useful proxy pool.
:return:
"""
proxy = self.pop()
self.logger.info("*************Start Valid proxies************* ")
while proxy:
if validProxy(proxy.decode("utf-8")):
self.put(proxy)
self.logger.info("{0} validation pass".format(proxy))
else:
self.logger.info("{0} validation failed".format(proxy))
proxy = self.pop()
self.logger.info("##############Valid proxies is complete ################ ")
def doValidProxy():
sh = ProxyRefreshSchedule()
sh.valid_Proxy()
def main(threadMun=5):
sh = ProxyRefreshSchedule()
sh.refresh()
threads = [threading.Thread(target=doValidProxy) for item in range(threadMun)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
def run():
schedule = BlockingScheduler()
schedule.add_job(main,'interval',hours=1)
schedule.start()
if __name__ == '__main__':
main()
|
[
"843310964@qq.com"
] |
843310964@qq.com
|
3e205cbf4d16776c34e99cabceabc616d2f77b52
|
55b57d64ec547869835334318f3059fbb507558c
|
/Fred2/Data/pssms/tepitopepan/mat/DRB1_1487_9.py
|
6f0bee31aa8412e5546a3d450eca4d7c1593e249
|
[
"BSD-3-Clause"
] |
permissive
|
FRED-2/Fred2
|
9845f6678d4011cb746c7a5a6f283eea68077a02
|
b3e54c8c4ed12b780b61f74672e9667245a7bb78
|
refs/heads/master
| 2021-07-12T05:05:54.515427
| 2020-05-25T06:56:25
| 2020-05-25T06:56:25
| 16,275,425
| 42
| 35
| null | 2021-07-07T12:05:11
| 2014-01-27T10:08:11
|
Python
|
UTF-8
|
Python
| false
| false
| 2,174
|
py
|
DRB1_1487_9 = {0: {'A': -999.0, 'E': -999.0, 'D': -999.0, 'G': -999.0, 'F': -0.98558, 'I': -0.014418, 'H': -999.0, 'K': -999.0, 'M': -0.014418, 'L': -0.014418, 'N': -999.0, 'Q': -999.0, 'P': -999.0, 'S': -999.0, 'R': -999.0, 'T': -999.0, 'W': -0.98558, 'V': -0.014418, 'Y': -0.98558}, 1: {'A': 0.0, 'E': 0.1, 'D': -1.3, 'G': 0.5, 'F': 0.8, 'I': 1.1, 'H': 0.8, 'K': 1.1, 'M': 1.1, 'L': 1.0, 'N': 0.8, 'Q': 1.2, 'P': -0.5, 'S': -0.3, 'R': 2.2, 'T': 0.0, 'W': -0.1, 'V': 2.1, 'Y': 0.9}, 2: {'A': 0.0, 'E': -1.2, 'D': -1.3, 'G': 0.2, 'F': 0.8, 'I': 1.5, 'H': 0.2, 'K': 0.0, 'M': 1.4, 'L': 1.0, 'N': 0.5, 'Q': 0.0, 'P': 0.3, 'S': 0.2, 'R': 0.7, 'T': 0.0, 'W': 0.0, 'V': 0.5, 'Y': 0.8}, 3: {'A': 0.0, 'E': -1.413, 'D': -0.59541, 'G': -1.0962, 'F': 0.089501, 'I': 0.63965, 'H': -0.27703, 'K': -0.40522, 'M': 0.75757, 'L': 0.73443, 'N': 0.051289, 'Q': -0.2262, 'P': -1.4623, 'S': -0.34089, 'R': -0.59039, 'T': -0.76496, 'W': -0.25713, 'V': 0.17454, 'Y': -0.54047}, 4: {'A': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 5: {'A': 0.0, 'E': -1.4124, 'D': -2.3809, 'G': -0.70879, 'F': -1.3948, 'I': 0.68888, 'H': -0.11719, 'K': 1.2505, 'M': -0.90135, 'L': 0.18327, 'N': -0.57408, 'Q': -0.31604, 'P': 0.49269, 'S': -0.085018, 'R': 0.9563, 'T': 0.81302, 'W': -1.3936, 'V': 1.1943, 'Y': -1.3991}, 6: {'A': 0.0, 'E': -0.79277, 'D': -1.2459, 'G': -0.7096, 'F': -0.15733, 'I': 0.066354, 'H': -0.47376, 'K': -0.82466, 'M': 0.67126, 'L': 0.33385, 'N': 0.0045172, 'Q': -0.361, 'P': -0.45654, 'S': -0.19575, 'R': -0.74293, 'T': -0.43948, 'W': -0.75274, 'V': -0.18667, 'Y': -0.43394}, 7: {'A': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 8: {'A': 0.0, 'E': -0.24345, 'D': -0.39245, 'G': -0.35253, 'F': -0.53237, 'I': -0.18595, 'H': 0.64856, 'K': -0.63126, 'M': 0.15453, 'L': -0.5039, 'N': -0.43168, 'Q': 0.86605, 'P': -1.089, 'S': 0.70805, 'R': -0.96918, 'T': -0.7571, 'W': -0.57158, 'V': -0.53639, 'Y': -0.44963}}
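# Illustrative scoring sketch (added for clarity; this is an assumption about usage,
# the surrounding FRED2/TEPITOPEpan code applies its own machinery to this matrix).
# A 9-mer is scored by summing the position-specific values:
def _example_score(peptide, pssm=DRB1_1487_9):
    """Return the PSSM score of a 9-mer of one-letter amino-acid codes."""
    return sum(pssm[position][residue] for position, residue in enumerate(peptide))

if __name__ == "__main__":
    print(_example_score("FVKQNAAAL"))  # position 0 entries of -999.0 mark disallowed anchor residues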
|
[
"schubert@informatik.uni-tuebingen.de"
] |
schubert@informatik.uni-tuebingen.de
|
57f811805d7d8ef926a10b1e2ba283d1c071b9ad
|
e36b63987149a043f79a2f6a85ea5a90546ebacd
|
/python/Lib/site-packages/GitHubCommitsDownloader/wait.py
|
a62892f110ccf09940864fd604fa4ccbd845c5a0
|
[] |
no_license
|
truongsang0801/Test
|
9bdc5fe54e63e18b98b066f4addca88e59a9d6b9
|
0394758e1dea0b4cb80bf2f8bb2b0a21fb60dd97
|
refs/heads/main
| 2023-05-12T13:45:51.349020
| 2021-06-05T16:17:48
| 2021-06-05T16:17:48
| 374,144,285
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,063
|
py
|
def wait():
import termios, fcntl, sys, os
fd = sys.stdin.fileno()
flags_save = fcntl.fcntl(fd, fcntl.F_GETFL)
attrs_save = termios.tcgetattr(fd)
attrs = list(attrs_save)
attrs[0] &= ~(termios.IGNBRK | termios.BRKINT | termios.PARMRK
| termios.ISTRIP | termios.INLCR | termios. IGNCR
| termios.ICRNL | termios.IXON )
attrs[1] &= ~termios.OPOST
attrs[2] &= ~(termios.CSIZE | termios. PARENB)
attrs[2] |= termios.CS8
attrs[3] &= ~(termios.ECHONL | termios.ECHO | termios.ICANON
| termios.ISIG | termios.IEXTEN)
termios.tcsetattr(fd, termios.TCSANOW, attrs)
fcntl.fcntl(fd, fcntl.F_SETFL, flags_save & ~os.O_NONBLOCK)
ret = []
try:
ret.append(sys.stdin.read(1))
fcntl.fcntl(fd, fcntl.F_SETFL, flags_save | os.O_NONBLOCK)
c = sys.stdin.read(1)
while len(c) > 0:
ret.append(c)
c = sys.stdin.read(1)
except KeyboardInterrupt:
ret.append('\x03')
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, attrs_save)
fcntl.fcntl(fd, fcntl.F_SETFL, flags_save)
return tuple(ret)
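# Minimal usage sketch (added for illustration; Unix-only, since wait() relies on
# termios/fcntl):
if __name__ == "__main__":
    print("Press any key...")
    print(wait())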
|
[
"kimseohunhq@gmail.com"
] |
kimseohunhq@gmail.com
|
16cea2bd4e9dc58fbc3704311c14050073dffe3e
|
afb9991cbc58d6966de44bd19d31307032a6230e
|
/1. This is Ludo (dice simulator).py
|
754e759afe7fb9835e8bc1016addd86fadf631a3
|
[] |
no_license
|
sam-kumar-sah/python-project
|
d87803c673c3058085cb2798221253ba6743786f
|
93068b6fa43e99352137b32212d324212593a1e7
|
refs/heads/master
| 2022-09-08T19:05:51.111510
| 2020-05-28T09:32:13
| 2020-05-28T09:32:13
| 267,398,338
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,196
|
py
|
# This is Ludo (dice simulator)
import random
print("This is Ludo (dice simulator)")
t='p'
while t=='p':
x=random.randint(1,6)
if x==1:
print("-------------")
print("| |")
print("| 0 |")
print("| |")
print("-------------")
elif x==2:
print("-------------")
print("| |")
print("| 0 0 |")
print("| |")
print("-------------")
elif x==3:
print("-------------")
print("| 0 |")
print("| 0 0 |")
print("| |")
print("-------------")
elif x==4:
print("-------------")
print("| 0 0 |")
print("| |")
print("| 0 0 |")
print("-------------")
elif x==5:
print("-------------")
print("| 0 0 |")
print("| 0 |")
print("| 0 0 |")
print("-------------")
elif x==6:
print("-------------")
print("| 0 0 0 |")
print("| |")
print("| 0 0 0 |")
print("-------------")
x=input("press P to roll dice again")
|
[
"samsahp9@gmail.com"
] |
samsahp9@gmail.com
|
48c15a6f09dce9b9f099128d9bd66bba77f4d97c
|
acb8e84e3b9c987fcab341f799f41d5a5ec4d587
|
/langs/8/uag.py
|
edc7726348da9432c2bf8662fcd1eab53646893c
|
[] |
no_license
|
G4te-Keep3r/HowdyHackers
|
46bfad63eafe5ac515da363e1c75fa6f4b9bca32
|
fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2
|
refs/heads/master
| 2020-08-01T12:08:10.782018
| 2016-11-13T20:45:50
| 2016-11-13T20:45:50
| 73,624,224
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'uAG':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1])
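# Hypothetical example input (an assumption inferred from the parser above, not part of
# the original repo): printFunction expects the quote marks to be stand-alone tokens,
# so a file containing the line
#   uAG " hello world "
# makes `python uag.py <file>` print: hello world
# Any line whose first token is not 'uAG' prints ERROR and stops processing.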
|
[
"juliettaylorswift@gmail.com"
] |
juliettaylorswift@gmail.com
|
bccca9ceafcf18310a6864bfdcfe1637211fbfc3
|
6ececdfaf893837c02c9450551e8313676c01fc0
|
/spencer_tracking_msgs/msg/_TrackedPersons2d.py
|
8dc47e350b126a329fff34be2906c0f0ba2fa2ab
|
[] |
no_license
|
noskill/spencer_messages
|
d5bfe0fd8578cf134f3197d2c76f5cb614a888d2
|
a301b3d7e2dd9847ef2ff777357b6091ff723898
|
refs/heads/master
| 2023-02-04T08:12:27.324206
| 2020-12-25T10:40:36
| 2020-12-25T10:40:36
| 324,342,636
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,390
|
py
|
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from spencer_tracking_msgs/TrackedPersons2d.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import spencer_tracking_msgs.msg
import std_msgs.msg
class TrackedPersons2d(genpy.Message):
_md5sum = "972b7d693ce31c3b18f092f43387621c"
_type = "spencer_tracking_msgs/TrackedPersons2d"
_has_header = True # flag to mark the presence of a Header object
_full_text = """# Message with all 2d bbox in image of currently tracked persons
#
Header header # Header containing timestamp etc. of this message
TrackedPerson2d[] boxes # All persons that are currently being tracked (2d image bbox)
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: spencer_tracking_msgs/TrackedPerson2d
# Message defining a 2d image bbox of a tracked person
#
uint64 track_id # unique identifier of the target, consistent over time
float32 person_height # 3d height of person in m
int32 x # top left corner x of 2d image bbox
int32 y # top left corner y of 2d image bbox
uint32 w # width of 2d image bbox
uint32 h # height of 2d image bbox
float32 depth # distance from the camera in m
"""
__slots__ = ['header','boxes']
_slot_types = ['std_msgs/Header','spencer_tracking_msgs/TrackedPerson2d[]']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,boxes
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(TrackedPersons2d, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.boxes is None:
self.boxes = []
else:
self.header = std_msgs.msg.Header()
self.boxes = []
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
length = len(self.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.boxes:
_x = val1
buff.write(_get_struct_Qf2i2If().pack(_x.track_id, _x.person_height, _x.x, _x.y, _x.w, _x.h, _x.depth))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.boxes is None:
self.boxes = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.boxes = []
for i in range(0, length):
val1 = spencer_tracking_msgs.msg.TrackedPerson2d()
_x = val1
start = end
end += 32
(_x.track_id, _x.person_height, _x.x, _x.y, _x.w, _x.h, _x.depth,) = _get_struct_Qf2i2If().unpack(str[start:end])
self.boxes.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
length = len(self.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.boxes:
_x = val1
buff.write(_get_struct_Qf2i2If().pack(_x.track_id, _x.person_height, _x.x, _x.y, _x.w, _x.h, _x.depth))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.boxes is None:
self.boxes = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.boxes = []
for i in range(0, length):
val1 = spencer_tracking_msgs.msg.TrackedPerson2d()
_x = val1
start = end
end += 32
(_x.track_id, _x.person_height, _x.x, _x.y, _x.w, _x.h, _x.depth,) = _get_struct_Qf2i2If().unpack(str[start:end])
self.boxes.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_Qf2i2If = None
def _get_struct_Qf2i2If():
global _struct_Qf2i2If
if _struct_Qf2i2If is None:
_struct_Qf2i2If = struct.Struct("<Qf2i2If")
return _struct_Qf2i2If
|
[
"abelikov@singularitynet.io"
] |
abelikov@singularitynet.io
|
5ce5acddc90424bf3e98b10c3156ad868247ae7a
|
0307becacfb38247892d8615512d7b330ec72cff
|
/mwt_detection.py
|
39a1eaf6cf171a0dd478212dc993b6547a7eead3
|
[] |
no_license
|
MarvinMartin24/multiplewavetracking_py
|
fcbde842323c8c5fa192daa22c37501c413375b8
|
486704c8683598bd30e51b53c7d8d7bd41ebf508
|
refs/heads/master
| 2021-06-23T04:47:10.695323
| 2017-08-31T22:50:12
| 2017-08-31T22:50:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,702
|
py
|
##
## Near-shore Wave Tracking
## mwt_detection.py
##
## Created by Justin Fung on 9/1/17.
## Copyright 2017 justin fung. All rights reserved.
##
## ========================================================
"""Routine for detecting potential waves.
Method for detecting is:
-1. detect contours
-2. filter contours
-3. create list of wave objects from filtered contours
"""
from __future__ import division
import math
import cv2
from mwt_objects import Section
# Boolean flag to filter blobs by area:
FLAGS_FILTER_BY_AREA = True
# Boolean flag to filter blobs by inertia (shape):
FLAGS_FILTER_BY_INERTIA = True
# Minimum area threshold for contour:
MINIMUM_AREA = 100
# Maximum area threshold for contour:
MAXIMUM_AREA = 1e7
# Minimum inertia threshold for contour:
MINIMUM_INERTIA_RATIO = 0.0
# Maximum inertia threshold for contour:
MAXIMUM_INERTIA_RATIO = 0.1
def find_contours(frame):
"""Contour finding function utilizing OpenCV.
Args:
frame: A frame from a cv2.video_reader object to process.
Returns:
contours: An array of contours, each represented by an array of
points.
"""
_, contours, hierarchy = cv2.findContours(
image=frame,
mode=cv2.RETR_EXTERNAL,
method=cv2.CHAIN_APPROX_NONE,
hierarchy=None,
offset=None)
return contours
def keep_contour(contour,
area=FLAGS_FILTER_BY_AREA,
inertia=FLAGS_FILTER_BY_INERTIA,
min_area=MINIMUM_AREA,
max_area=MAXIMUM_AREA,
min_inertia_ratio=MINIMUM_INERTIA_RATIO,
max_inertia_ratio=MAXIMUM_INERTIA_RATIO):
"""Contour filtering function utilizing OpenCV. In our case,
we are looking for oblong shapes that exceed a user-defined area.
Args:
contour: A contour from an array of contours
area: boolean flag to filter contour by area
inertia: boolean flag to filter contour by inertia
min_area: minimum area threshold for contour
max_area: maximum area threshold for contour
min_inertia_ratio: minimum inertia threshold for contour
max_inertia_ratio: maximum inertia threshold for contour
Returns:
ret: A boolean TRUE if contour meets conditions, else FALSE
"""
# Initialize the return value.
ret = True
# Obtain contour moments.
moments = cv2.moments(contour)
# Filter Contours By Area.
if area is True and ret is True:
area = cv2.contourArea(contour)
if area < min_area or area >= max_area:
ret = False
# Filter contours by inertia.
if inertia is True and ret is True:
denominator = math.sqrt((2*moments['m11'])**2
+ (moments['m20']-moments['m02'])**2)
epsilon = 0.01
ratio = 0.0
if denominator > epsilon:
cosmin = (moments['m20']-moments['m02']) / denominator
sinmin = 2*moments['m11'] / denominator
cosmax = -cosmin
sinmax = -sinmin
imin = (0.5*(moments['m20']+moments['m02'])
- 0.5*(moments['m20']-moments['m02'])*cosmin
- moments['m11']*sinmin)
imax = (0.5*(moments['m20']+moments['m02'])
- 0.5*(moments['m20']-moments['m02'])*cosmax
- moments['m11']*sinmax)
ratio = imin / imax
else:
ratio = 1
if ratio < min_inertia_ratio or ratio >= max_inertia_ratio:
ret = False
#center.confidence = ratio * ratio;
return ret
## ========================================================
def detect_sections(frame, frame_number):
"""Finds sections that meet the user-defined criteria.
Args:
frame: a frame from a cv2.video_reader object
frame_number: number of the frame in the video sequence
Returns:
sections: a list of Section objects
"""
# Convert to single channel for blob detection if necessary.
if len(frame.shape) > 2:
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# Initiate and empty list of sections.
sections = []
# 1. Find the contours.
contours = find_contours(frame)
# 2. Filter the contours.
for contour in contours:
if keep_contour(contour) is False:
continue
# If contour passes thresholds, convert it to a Section.
section = Section(points=contour, birth=frame_number)
# 3. Add the section to sections list.
sections.append(section)
return sections
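# Minimal usage sketch (appended as an assumption, not part of the original module):
# run the detector on a synthetic binary frame containing one thin, bright streak,
# which passes both the area and the low-inertia (oblong shape) filters above.
if __name__ == '__main__':
    import numpy as np
    test_frame = np.zeros((240, 320), dtype=np.uint8)
    test_frame[100:105, 40:280] = 255  # a 5 x 240 px horizontal streak
    found = detect_sections(test_frame, frame_number=0)
    print("detected %d candidate section(s)" % len(found))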
|
[
"citrusvanilla@gmail.com"
] |
citrusvanilla@gmail.com
|
736edcbb3ecab2c689398ce64245242a76f51cc6
|
b024bcd4c8363a45fb0cb884e03701dcaf3c3a89
|
/Music Program Only/P4.py
|
70f072792fb6dc4d4f43b5e0d922e5dc6020b730
|
[] |
no_license
|
KylerM-23/LED-Music-Commission
|
f42768bf7d70e5494672e962c532aef7f67022c4
|
841459a962e3836940b728e75c6c69977ae6dbfe
|
refs/heads/main
| 2023-03-28T01:31:59.712748
| 2021-03-18T04:56:32
| 2021-03-18T04:56:32
| 348,921,901
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,173
|
py
|
import pygame
import time
import datetime as d
import random as R
from tkinter import *
from timeit import default_timer as timer
global NoMusic, NoLight, Retro, Background_Color
Background_Color = '#ffe603'
class Song:
def __init__(self,Song,AlbumArt = "1"):
self.Song = Song
self.AlbumArt = AlbumArt
def updateArt (self, Art):
self.AlbumArt = Art
def getSong(self):
return self.Song
def getArt(self):
return self.AlbumArt
def thedelay(fps):
x = timer()
while timer() - x < fps:
pass
return
def playSong(Song):
pygame.mixer.music.load(Song)
pygame.mixer.music.play()
def songPick(TrackList,ArtList):
Track = []
for i in TrackList:
x=Song(i.rstrip())
Track.append(x)
j = 0
for i in ArtList:
y = Track[j]
j = j + 1
y.updateArt(i.rstrip())
return Track
def RiseJukeBox(F,G,x):
TrackList = open(F,"r")
ArtList = open(G,"r")
Track = songPick(TrackList,ArtList)
sn= R.randint(0,len(Track)-1)
Song = Track[sn]
playSong(Song.getSong())
TrackList.close()
titileSong = Song.getSong()[x:-4]
titileSong = titileSong.replace("_"," ")
return Song.getArt(), titileSong
def pickFrameYK(i):
delay = .041
if i<=295:
j=i
if (j<10):
fileName = "Pictures/MidnightChannels/Yukiko/1/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Yukiko/1/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Yukiko/1/frame_"+ str(j)+"_delay-0.05s.png"
elif 557>=i>=296:
j=i-296
if (j<10):
fileName = "Pictures/MidnightChannels/Yukiko/2/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Yukiko/2/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Yukiko/2/frame_"+ str(j)+"_delay-0.05s.png"
elif i>=558:
j=i-558
if (j<10):
fileName = "Pictures/MidnightChannels/Yukiko/3/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Yukiko/3/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Yukiko/3/frame_"+ str(j)+"_delay-0.05s.png"
return fileName,delay
def pickFrameR(i):
delay = .03
if i<=239:
j=i
#frame_000_delay-0.04s
if (j<10):
fileName = "Pictures/MidnightChannels/Rise/1/frame_00"+ str(i)+"_delay-0.04s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Rise/1/frame_0"+ str(i)+"_delay-0.04s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Rise/1/frame_"+ str(i)+"_delay-0.04s.png"
elif 485>=i>239:
j=i-240
if j == 0:
delay = .03
#frame_000_delay-0.04s
if (j<10):
fileName = "Pictures/MidnightChannels/Rise/2/frame_00"+ str(j)+"_delay-0.04s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Rise/2/frame_0"+ str(j)+"_delay-0.04s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Rise/2/frame_"+ str(j)+"_delay-0.04s.png"
elif 700>=i>485:
j=i-486
if j == 0:
delay = .03
#frame_000_delay-0.04s
if (j<10):
fileName = "Pictures/MidnightChannels/Rise/3/frame_00"+ str(j)+"_delay-0.04s.gif"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Rise/3/frame_0"+ str(j)+"_delay-0.04s.gif"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Rise/3/frame_"+ str(j)+"_delay-0.04s.gif"
elif i>700:
j=i-701
#frame_000_delay-0.04s
if (j<10):
fileName = "Pictures/MidnightChannels/Rise/4/frame_00"+ str(j)+"_delay-0.04s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Rise/4/frame_0"+ str(j)+"_delay-0.04s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Rise/4/frame_"+ str(j)+"_delay-0.04s.png"
return fileName,delay
def pickFrameK(i):
delay = .04
if i<=270:
j=i
#frame_000_delay-0.04s
if (j<10):
fileName = "Pictures/MidnightChannels/Kanji/1/frame_00"+ str(i)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Kanji/1/frame_0"+ str(i)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Kanji/1/frame_"+ str(i)+"_delay-0.05s.png"
elif 521>=i>270:
j=i-271
#frame_000_delay-0.04s
if (j<10):
fileName = "Pictures/MidnightChannels/Kanji/2/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Kanji/2/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Kanji/2/frame_"+ str(j)+"_delay-0.05s.png"
elif i>521:
j=i-522
#frame_000_delay-0.04s
if (j<10):
fileName = "Pictures/MidnightChannels/Kanji/3/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Kanji/3/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Kanji/3/frame_"+ str(j)+"_delay-0.05s.png"
return fileName,delay
def pickFrameN(i):
delay = .041
if i<=282:
j=i
if (j<10):
fileName = "Pictures/MidnightChannels/Naoto/1/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Naoto/1/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Naoto/1/frame_"+ str(j)+"_delay-0.05s.png"
elif 576>=i>282:
j=i-282
if (j<10):
fileName = "Pictures/MidnightChannels/Naoto/2/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Naoto/2/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Naoto/2/frame_"+ str(j)+"_delay-0.05s.png"
elif i>576:
j=i-576
if (j<10):
fileName = "Pictures/MidnightChannels/Naoto/3/frame_00"+ str(j)+"_delay-0.05s.png"
elif (100>j>=10):
fileName = "Pictures/MidnightChannels/Naoto/3/frame_0"+ str(j)+"_delay-0.05s.png"
elif (j>=100):
fileName = "Pictures/MidnightChannels/Naoto/3/frame_"+ str(j)+"_delay-0.05s.png"
return fileName,delay
def playYukikoMC():
global SongPlayer, AlbumArt, fileName, window
playSong("P4/Yukiko.wav")
i = 0
while i < (677):
fileName, delay = pickFrameYK(i)
AlbumArt = PhotoImage(file = fileName)
SongPlayer.configure(image = AlbumArt)
window.update_idletasks()
thedelay(delay)
i = i +1
def playKanjiMC():
global SongPlayer, AlbumArt, fileName, window
playSong("P4/Kanji.wav")
i = 1
while i < (763):
fileName, delay = pickFrameK(i)
AlbumArt = PhotoImage(file = fileName)
SongPlayer.configure(image = AlbumArt)
window.update_idletasks()
thedelay(delay)
i = i +1
def playRiseMC():
global SongPlayer, AlbumArt, fileName, window
playSong("P4/Rise.wav")
i = 1
while i < (878):
fileName, delay = pickFrameR(i)
AlbumArt = PhotoImage(file = fileName)
SongPlayer.configure(image = AlbumArt)
window.update_idletasks()
thedelay(delay)
i = i +1
def playNaotoMC():
global SongPlayer, AlbumArt, fileName, window
playSong("P4/Naoto.wav")
i = 1
while i < (874):
fileName, delay = pickFrameN(i)
AlbumArt = PhotoImage(file = fileName)
SongPlayer.configure(image = AlbumArt)
window.update_idletasks()
thedelay(delay)
i = i +1
def OpeningMC():
    x = R.randint(0, 3)  # pick one of the four Midnight Channel intro animations
    x = 0  # debug override left in place: always play the Yukiko intro
if x==0:
playYukikoMC()
elif x==1:
playKanjiMC()
elif x==2:
playRiseMC()
elif x==3:
playNaotoMC()
def MidnightChannel(strip):
global MCGIF
if NoLight == False:
colorWipe(strip)
if MCGIF == False:
OpeningMC()
        MCGIF = True  # assignment (the original '==' comparison had no effect)
def P4(NoMusic1, NoLight1, Retro1, LL):
global NoMusic, NoLight, Retro, window, AlbumArt, IsDarkHour, MusicLabel, Background_Color, SongPlayer, fps,MCGIF
NoMusic = NoMusic1
NoLight = NoLight1
Retro = Retro1
strip =0
if NoMusic == False:
window= Tk()
window.geometry('600x660')
window.title("Music Player")
window['background']= Background_Color
AlbumArt = PhotoImage(file = r"Pictures/AlbumArt/Default_Album_Art.png")
SongPlayer = Button(window, image=AlbumArt , compound="top")
SongPlayer.place(x = 0 , y =0 )
MusicLabel = Label(window, text = "Temp Text",font=("Helvetica", 25),anchor=W, justify=LEFT)
MusicLabel['background']= Background_Color
MusicLabel.place(x = 0 , y =615)
print ('Press Ctrl-C to quit.')
pygame.mixer.init()
try:
n= 0
x = 3
F = r"P4/P4Songs.txt"
G = r"P4/P4Art.txt"
if Retro:
x = 9
F = r"P4/Retro/P4Songs.txt"
G = r"P4/Retro/P4Art.txt"
while(True):
weather = 'Rain'
if pygame.mixer.music.get_busy() == False and NoMusic != True:
Art, SongPlaying = RiseJukeBox(F,G,x)
img = Art
other =PhotoImage(file = img)
SongPlayer.configure(image = other)
if len(SongPlaying) <=36:
MusicLabel.configure(font=("Helvetica", 25))
if len(SongPlaying) >36:
MusicLabel.configure(font=("Helvetica", 20))
MusicLabel.configure(text = SongPlaying)
window.update_idletasks()
dt = str(d.datetime.now())
if dt[11:19] == "00:00:00" and weather == 'Rain':
window.geometry('600x340')
MCGIF =False
pygame.mixer.music.fadeout(500)
MidnightChannel(strip)
window.geometry('600x660')
if NoMusic == False:
window.update()
except KeyboardInterrupt:
return
|
[
"noreply@github.com"
] |
KylerM-23.noreply@github.com
|
3a571a7922f6154a27732321583724b7d99e2e7c
|
d2c00f4aa714a8dcc516a5adb3818ce3762e4d7b
|
/docs/conf.py
|
38847adf1240bf30490160a43724ac78124f17be
|
[] |
permissive
|
SeppPenner/job-runner
|
3889547111d9896c698f84d2497dfa7b817efd60
|
427a29b21be98e8dfadc400c03d4bb3422922a46
|
refs/heads/master
| 2021-01-13T05:28:48.589912
| 2020-03-08T16:52:28
| 2020-03-08T16:52:28
| 86,578,978
| 0
| 0
|
BSD-3-Clause
| 2020-03-08T16:52:29
| 2017-03-29T12:23:42
|
Python
|
UTF-8
|
Python
| false
| false
| 8,072
|
py
|
# -*- coding: utf-8 -*-
#
# Job-Runner documentation build configuration file, created by
# sphinx-quickstart on Mon Nov 5 10:14:03 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
os.environ['DJANGO_SETTINGS_MODULE'] = 'job_runner.settings.env.testing'
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Job-Runner'
copyright = u'2012, Spil Games'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
import job_runner
# The short X.Y version.
version = job_runner.__version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
html_theme = 'default'
else:
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Job-Runnerdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Job-Runner.tex', u'Job-Runner Documentation',
u'Spil Games', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'job-runner', u'Job-Runner Documentation',
[u'Spil Games'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Job-Runner', u'Job-Runner Documentation',
u'Spil Games', 'Job-Runner', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
intersphinx_mapping = {
'http://docs.python.org/': None,
}
|
[
"info@brocaar.com"
] |
info@brocaar.com
|
22439d9bd7b7bcc61cc6fd3bb4fa157fc2df4732
|
35e515305b4a16e86a486b55fc0ba4cece7a51b8
|
/site/application.py
|
90cc16c1e55ffe05a0448077c0a584149efd2415
|
[] |
no_license
|
SamKChang/ML_server
|
f07df229516e3b4b49c31a979c0c891f2a576208
|
7a4a92cdbe06ec16660190fb05797b53908293b9
|
refs/heads/master
| 2021-01-10T15:34:19.149530
| 2016-10-24T22:18:27
| 2016-10-24T22:18:27
| 46,265,289
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,401
|
py
|
#!/usr/bin/python
from flask import Flask
from flask import request
from flask import render_template
app = Flask(__name__)
import ml_engine.converter as converter
import ml_engine.interface as interface
import ml_engine.dynamic_plot as pt
from ml_engine.ml_core.List2Energies import loadMachine
m = loadMachine(True)
@app.route('/')
def home():
return render_template("home.html", mode=0)
@app.route('/', methods=['POST'])
def inpProcessor():
if 'crystal' in request.form:
mode = 0
# input processing
cyl = converter.stringConverter(request.form['crystal'])
# output processing
out = interface.outputString([cyl], m)
elif 'target' not in request.form:
mode = 1
# input processing
atoms = []
strList = ['a1', 'a2', 'a3', 'a4']
for i in range(4):
flag = strList[i]
elist = list(set(
converter.stringConverter(request.form[flag])))
if len(elist) > 0:
atoms.append(
list(set(converter.stringConverter(request.form[flag])))
)
cylList = converter.groupConstructor(atoms)
# output processing
out = interface.outputString(cylList, m)
else:
out = [['\nEvery atom site must be specified.']]
else:
mode = 2
# input processing
atoms = []
strList = ['a1', 'a2', 'a3', 'a4']
for i in range(4):
flag = strList[i]
atoms.append(
list(set(converter.stringConverter(request.form[flag])))
)
cylList = atoms
target = request.form['target']
popsize = request.form['ml_pop']
step = request.form['ml_step']
out = interface.optimizer(target, popsize, cylList, step, m)
out = [['\nOptimizing crystals with\n' +\
'target value: %.2f\n' % float(target) +\
'total generations: %d\n' % int(step) +\
'population size: %d\n' % int(popsize)]]
return render_template("home.html", result=out, mode=mode)
from ml_engine.dynamic_plot import entries as entries
import json
from flask import Response
@app.route('/data', methods=['GET', 'OPTIONS', 'POST'])
@pt.crossdomain(origin="*", methods=['GET', 'POST'])
def hello_world():
#entries
#from ml_engine.dynamic_plot import entries as entries
try:
modified_since = float(
request.headers.get('If-Modified-Since'))
except TypeError:
modified_since = 0
new_entries = [e for e in entries\
if e.creation > modified_since]
js = json.dumps({'x':[e.x for e in new_entries],
'y':[e.y for e in new_entries],
'cylStr':[tuple(e.cylStr) for e in new_entries]})
resp = Response(js, status=200, mimetype='application/json')
print "resp:",
print resp
if new_entries:
resp.headers['Last-Modified'] = new_entries[-1].creation
elif modified_since:
resp.headers['Last-Modified'] = modified_since
return resp
@app.route('/done')
def done(out_cylStrLIst):
print " yoyo from done"
@app.route('/date')
def date():
return render_template('date.html')
@app.route('/add')
def test():
return render_template('add.html')
@app.route('/ml_front')
def ml():
return render_template('ml_server.html')
@app.route('/material_project')
def material():
return render_template('materialsProject.html')
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0', port=8080)
|
[
"samio54321@gmail.com"
] |
samio54321@gmail.com
|
b8c9b84a7ae7edcf1671719803b809fb5ae1e4b2
|
a4534cf894b050f546cfd7b303ed8a41778162f5
|
/devel/.private/ur_driver/lib/python2.7/dist-packages/ur_driver/__init__.py
|
a4ae47df663867a10d95227448ff4ea6fae5accb
|
[] |
no_license
|
AdamRest/ur5_motion_test
|
5acccf961c22d0702ae20c9be86af51491d63d3b
|
7a63af0a00473cdcb4188f747fb26a122a27f712
|
refs/heads/master
| 2020-05-31T13:59:47.044352
| 2019-06-05T03:22:46
| 2019-06-05T03:22:46
| 190,317,890
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,031
|
py
|
# -*- coding: utf-8 -*-
# generated from catkin/cmake/template/__init__.py.in
# keep symbol table as clean as possible by deleting all unnecessary symbols
from os import path as os_path
from sys import path as sys_path
from pkgutil import extend_path
__extended_path = "/home/amr/ur_ws/src/universal_robot/ur_driver/src".split(";")
for p in reversed(__extended_path):
sys_path.insert(0, p)
del p
del sys_path
__path__ = extend_path(__path__, __name__)
del extend_path
__execfiles = []
for p in __extended_path:
src_init_file = os_path.join(p, __name__ + '.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
else:
src_init_file = os_path.join(p, __name__, '__init__.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
del src_init_file
del p
del os_path
del __extended_path
for __execfile in __execfiles:
with open(__execfile, 'r') as __fh:
exec(__fh.read())
del __fh
del __execfile
del __execfiles
|
[
"adam.rest@gmail.com"
] |
adam.rest@gmail.com
|
398d3fd1f82fef844c5ae4315dd6a37ac56bc1e9
|
cef8d6a42c4baa4344ce805636249a2038257659
|
/Game/extended_groups.py
|
18675a36f5ed5974381de017a771ed251869f0be
|
[] |
no_license
|
Denis9619/2D-Game
|
47453f7ca5fc7c0b50b462e749806733ddda45f5
|
d289e79be50386c356e3e3b8f3dff7c555a394f9
|
refs/heads/master
| 2022-11-27T12:45:03.889880
| 2020-08-06T09:26:15
| 2020-08-06T09:26:15
| 285,528,130
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 897
|
py
|
from pygame.sprite import LayeredUpdates as _LayeredUpdates
from pygame.sprite import Sprite as _Sprite
class LayeredUpdates(_LayeredUpdates):
def update(self, *args, **kwargs):
for s in self.sprites():
s.update(*args, **kwargs)
class Sprite(_Sprite):
def update(self, *args, **kwargs):
pass
class MetaLayeredUpdates(LayeredUpdates):
def addLU(self, other, start_layer):
for layer in other.layers():
self.add(*other.get_sprites_from_layer(layer),
layer = start_layer + layer )
try:
other.on_added_to_LU
except AttributeError:
return
other.on_added_to_LU(self)
addLA = addLU
def removeLU(self, other):
self.remove( other.sprites() )
def on_added_to_LU(self, where):
pass
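# Minimal usage sketch (appended as an assumption, not part of the original module):
# merge one layered group into a MetaLayeredUpdates group starting at layer 10.
if __name__ == '__main__':
    hud = LayeredUpdates()
    hud.add(Sprite(), layer=0)
    scene = MetaLayeredUpdates()
    scene.addLU(hud, start_layer=10)
    print(scene.layers())  # -> [10]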
|
[
"noreply@github.com"
] |
Denis9619.noreply@github.com
|
169e861874f7a1c08928383d276e5fc99f0d2da3
|
8f41891ad2923ba794403e7a43fbe8ca117f8e88
|
/2018/trouble_sort.py
|
89167ecf0dc5b0dd56b5027fdae321faf7360d66
|
[] |
no_license
|
ASHIJANKEN/Google-Code-Jam
|
eda883d172aba3dd586e640950a39428402835bb
|
b25045c651e0699c0ee1f10c935498655d5feb4b
|
refs/heads/master
| 2020-03-09T02:28:18.299563
| 2018-04-07T15:28:43
| 2018-04-07T15:28:43
| 128,540,515
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 718
|
py
|
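# Trouble Sort, as implemented below, scans consecutive triples and swaps the first and
# last elements whenever they are out of order (reversing the triple leaves the middle
# element in place). Since every swap moves values exactly two positions, the even- and
# odd-indexed subsequences get sorted independently, and detect_error() then reports the
# index of the first adjacent pair that is still out of order, or 'OK' if none is.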
def detect_error(list):
error = 'OK';
for i in range(len(list) - 1):
if list[i] > list[i + 1]:
error = i
break;
return error
t = int(input('')) # read a line with a single integer
cases = []
for case in range(t):
cases.append([int(input('')), input('')])
cases[case][1] = [int(s) for s in cases[case][1].split(" ")]
for case in range(len(cases)):
done = False
while not done:
done = True
for i in range(cases[case][0]-2):
if cases[case][1][i] > cases[case][1][i+2]:
done = False
cases[case][1][i], cases[case][1][i+2] = cases[case][1][i+2], cases[case][1][i]
print("Case #{}: {}".format(case+1, detect_error(cases[case][1])))
|
[
"24617794+ASHIJANKEN@users.noreply.github.com"
] |
24617794+ASHIJANKEN@users.noreply.github.com
|
f4609cd0ac288396c4feccbdcaedb629f9f7282f
|
768cc956563df5020d52860b2ebff368cef9cc9e
|
/exercicio_05.py
|
79ad1d10554c44d5d38217e708af028a72644818
|
[] |
no_license
|
diegolisboadev/Curso_Selenium_Python
|
9ef365d779afad0117c1d777a34c8b7848a89db4
|
f485dc32548ea9d21c9d63efd39b260f06ccfb16
|
refs/heads/master
| 2023-02-09T19:16:54.425059
| 2021-01-11T03:00:09
| 2021-01-11T03:00:09
| 296,327,837
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,016
|
py
|
from selenium.webdriver import Firefox
from time import sleep
browser = Firefox(executable_path='./geckodriver')
url = "https://selenium.dunossauro.live/exercicio_05.html"
browser.get(url)
def preenche_forms(form, nome, senha):
    """Dynamically fill in one of the four forms on the page.
    Args:
        form: identifier of the form block to fill in (e.g. 'l0c0')
        nome: user name typed into the 'nome' input
        senha: password typed into the 'senha' input
"""
browser.find_element_by_css_selector(f'.form-{form} input[name="nome"]').send_keys(nome)
browser.find_element_by_css_selector(f'.form-{form} input[name="senha"]').send_keys(senha)
browser.find_element_by_css_selector(f'.form-{form} input[name="{form}"]').click()
# Code
sleep(3)
# Fill in the first form
preenche_forms('l0c0', 'diego', '123')
# Fill in the second form
preenche_forms('l0c1', 'lucio', '345')
# Fill in the third form
preenche_forms('l1c0', 'mario', '678')
# Fill in the fourth form
preenche_forms('l1c1', 'paula', '91011')
|
[
"diego.lisboa.pires@hotmail.com"
] |
diego.lisboa.pires@hotmail.com
|
a85a3b68631c00ceb1ac07854805960de3e1cb94
|
23ba3fb157c70e5f0a139ce6378e979272f01fc3
|
/Extracurricular.py
|
67322ea1e5ddcb226e8b3dfc4576c481559ccd0b
|
[] |
no_license
|
NguyenHuuHieuThien/ERP-Nhom5
|
a90caa30294296fbc6bb2c8a9d50a84cb79da0ff
|
99cf060553482cd36df31b29572aee383a24193c
|
refs/heads/main
| 2023-09-04T10:07:34.609415
| 2021-11-05T07:16:04
| 2021-11-05T07:16:04
| 419,258,048
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 977
|
py
|
from odoo import fields, models,api,_
class educate(models.Model):
_name = 'mylib.extracurricular'
_description = 'Extracurricular'
# id = fields.Char(string='Mã Dao Tao', required=True, copy=False, readonly=True,
# default=lambda seft: _('New'))
name = fields.Text("Tên Hoạt Động")
time_happend = fields.Date("Thời Gian Diễn Ra")
state = fields.Selection(
[('sapdienra','Sắp diễn ra'), ('dangdienra','Đang diễn ra'), ('dadienra','Đã diễn ra')],"Trạng Thái")
description = fields.Text()
address = fields.Text("Địa điểm")
person = fields.Many2many('hr.employee', string="Nhân viên tham gia")
# @api.model
# def create(self, vals):
# if vals.get('id', ('New')) == ('New'):
# vals['id'] = self.env['ir.sequence'].next_by_code('educate.id') or _('New')
# res = super(educate, self).create(vals)
# return res
|
[
"noreply@github.com"
] |
NguyenHuuHieuThien.noreply@github.com
|
612abedc5a77af6f29bb30fa353f5514070a1da6
|
14c585862b15968f68d7e5563e3c25335e232b9b
|
/item_functions.py
|
bcde0763a78344b85ec9451bfde3204ec8b0c516
|
[] |
no_license
|
LeoJMaro/Joellike
|
210cfaaaccfeaab82fe72294486c44dcf5c3e540
|
337c18b2fddef5e899b3421bae07ceab7ed97b14
|
refs/heads/master
| 2020-04-10T02:42:13.889889
| 2018-12-21T00:04:28
| 2019-01-04T00:25:11
| 160,750,451
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,561
|
py
|
import libtcodpy as libtcod
from components.ai import ConfusedMonster
from game_messages import Message
def heal(*args, **kwargs):
entity = args[0]
amount = kwargs.get('amount')
results = []
if entity.fighter.hp == entity.fighter.max_hp:
results.append({'consumed': False, 'message': Message('You are already at full health', libtcod.yellow)})
else:
entity.fighter.heal(amount)
results.append({'consumed': True, 'message': Message('Your wounds start to feel better!', libtcod.green)})
return results
def cast_lightning(*args, **kwargs):
caster = args[0]
entities = kwargs.get('entities')
fov_map = kwargs.get('fov_map')
damage = kwargs.get('damage')
maximum_range = kwargs.get('maximum_range')
results = []
target = None
closest_distance = maximum_range + 1
for entity in entities:
if entity.fighter and entity != caster and libtcod.map_is_in_fov(fov_map, entity.x, entity.y):
distance = caster.distance_to(entity)
if distance < closest_distance:
target = entity
closest_distance = distance
if target:
results.append({'consumed': True, 'target': target, 'message': Message('A lighting bolt strikes the {0} with a loud thunder! The damage is {1}'.format(target.name, damage))})
results.extend(target.fighter.take_damage(damage))
else:
results.append({'consumed': False, 'target': None, 'message': Message('No enemy is close enough to strike.', libtcod.red)})
return results
def cast_fireball(*args, **kwargs):
entities = kwargs.get('entities')
fov_map = kwargs.get('fov_map')
damage = kwargs.get('damage')
radius = kwargs.get('radius')
target_x = kwargs.get('target_x')
target_y = kwargs.get('target_y')
results = []
if not libtcod.map_is_in_fov(fov_map, target_x, target_y):
results.append({'consumed': False, 'message': Message('You cannot target a tile outside your field of view.', libtcod.yellow)})
return results
results.append({'consumed': True, 'message': Message('The fireball explodes, burning everything within {0} tiles!'.format(radius), libtcod.orange)})
for entity in entities:
if entity.distance(target_x, target_y) <= radius and entity.fighter:
results.append({'message': Message('The {0} gets burned for {1} hit points.'.format(entity.name, damage), libtcod.orange)})
results.extend(entity.fighter.take_damage(damage))
return results
def cast_confuse(*args, **kwargs):
entities = kwargs.get('entities')
fov_map = kwargs.get('fov_map')
target_x = kwargs.get('target_x')
target_y = kwargs.get('target_y')
results = []
if not libtcod.map_is_in_fov(fov_map, target_x, target_y):
results.append({'consumed': False, 'message': Message('You cannot target a tile outside your field of view.', libtcod.yellow)})
return results
for entity in entities:
if entity.x == target_x and entity.y == target_y and entity.ai:
confused_ai = ConfusedMonster(entity.ai, 10)
confused_ai.owner = entity
entity.ai = confused_ai
results.append({'consumed': True, 'message': Message('The eyes of the {0} look vacant, as he starts to stumble around!'.format(entity.name), libtcod.light_green)})
break
else:
results.append({'consumed': False, 'message': Message('There is no targetable enemy at that location.', libtcod.yellow)})
return results
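# Minimal usage sketch (a comment-only assumption, not part of the tutorial file): each
# function above returns a list of result dicts that the caller is expected to consume,
# along the lines of
#   results = heal(player, amount=4)
#   for result in results:
#       message_log.add_message(result.get('message'))
# where `player` and `message_log` are hypothetical objects from the surrounding game loop.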
|
[
"leojmaro@gmail.com"
] |
leojmaro@gmail.com
|
5c37acd6d27ded34dbc88264ec5fab86c6dc457e
|
86226fd15a55a9ceded9ec641eddd0f20bc61017
|
/env/bin/wheel
|
559d11371af46dcce33bab7f17224bd1f8d65966
|
[] |
no_license
|
zeetec20/DjangoChannels-Chat
|
c0deb3bff8d9a8cbd28e5a576679c3cbea479634
|
7c781b3f74b1bffe5aef04658090ad1a754a087b
|
refs/heads/master
| 2020-09-21T05:37:31.211455
| 2019-11-28T17:17:46
| 2019-11-28T17:17:46
| 224,695,498
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 238
|
#!/home/zeetec/django/djangoChat/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"jusles363@gmail.com"
] |
jusles363@gmail.com
|
|
a71b7414eea9b212150062d871507aaf69ad3949
|
f725139a611f58a34e8838a5c3a26f443c9f4725
|
/python_lxml/lxml_xpath.py
|
ba4628a71ab73b634b4a2680ab795ff7a48118b7
|
[] |
no_license
|
zuiwoshachangjunmoxiao/pythonStudy_spider
|
46045ad2d62af0310c2fa35b911465abf9316f09
|
215adc0c3bda7c0f9fa39b2a78896c39f31d612d
|
refs/heads/master
| 2020-03-26T16:29:14.335156
| 2018-10-23T01:59:13
| 2018-10-23T01:59:13
| 145,104,632
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,006
|
py
|
# -*- coding: utf-8 -*-
# @Time : 2018/10/22 14:57
# @Author : 银河以北
# @Email : smilegks@163.com
# @Introduction :
'''
A python + lxml crawler. Using Douban Books as the example, it scrapes book information from Douban.
Python 3.6.5
Installing the lxml library is a bit involved (and can go wrong). I use Anaconda3, which ships with
lxml 4.3.1, so there is no need to install it yourself; recommended.
'''
import requests
from lxml import etree
# The URL to crawl
url = "https://book.douban.com/tag/%E5%8E%86%E5%8F%B2"
# 1. Download the page
# Download the page with requests, then parse it with lxml.
r = requests.get(url)
# Note: use r.content here; using r.text would raise an error. r.text returns Unicode data, while r.content returns bytes.
response = r.content
# 2. Parse the page
'''
etree.fromstring() # parses a string
etree.HTML() # parses an HTML document
etree.XML() # parses an XML document
etree.parse() # parses a file-like object
'''
htmlElement = etree.HTML(response) # the return value is an Element object
# Build the DOM tree. Use tostring and decode to format the html source.
html = etree.tostring(htmlElement, encoding='utf-8').decode('utf-8') # the return value is a str
# 3. Locate the data
# List of book entries
bookinfo_list = htmlElement.xpath('//*[@id="subject_list"]/ul/li') # returns a list whose elements are Element objects
'''
str.strip() removes leading and trailing whitespace, including spaces, \t, \r, \n
str.lstrip() removes leading (left-side) whitespace, including spaces, \t, \r, \n
str.rstrip() removes trailing (right-side) whitespace, including spaces, \t, \r, \n
'''
for book in bookinfo_list:
    # Book title
    # The title has a main title and a subtitle; showing both together needs a little extra handling.
    #book_name = book.xpath('.//div[2]/h2/a/text()')[0].strip()
book_a = book.xpath('.//div[2]/h2/a')
children = book_a[0].getchildren()
if len(children):
        # If the a tag has a child node, append the subtitle found in that child to the book title
book_name = book_a[0].text.strip() + children[0].text.strip()
else:
book_name = book_a[0].text.strip()
    # URL of the book detail page
bookinfo_url = book.xpath('.//div[2]/h2/a/@href')[0]
    # Basic information
book_baseinfo = book.xpath('.//div[2]/div[1]/text()')[0].strip()
    # Rating
rating = book.xpath('.//div[2]/div[2]/span[2]/text()')[0]
    # Number of ratings (the Chinese suffix and brackets are stripped below)
rate_nums = book.xpath('.//div[2]/div[2]/span[3]/text()')[0].strip().replace('人评价','').replace('(','').replace(')','')
    # Cover image URL
fengmian_url = book.xpath('.//div[1]/a/img/@src')[0]
    # Book synopsis
    #book_instr = book.xpath('.//div[2]/p/text()')[0]
    # Print the scraped information
print("{},{},{},{},{},{}".format(book_name, bookinfo_url, book_baseinfo, rating, rate_nums, fengmian_url))
    # Append the record to a csv file
with open("data\douban_book.csv", "a", encoding='utf-8') as f:
f.write("{},{},{},{},{},{}\n".format(book_name, bookinfo_url, book_baseinfo, rating, rate_nums, fengmian_url))
|
[
"smilegks@163.com"
] |
smilegks@163.com
|
e685a14784fe36cb845908888c5f98bf5088f329
|
7b3b09bf28350ec4afa8c4dbbd2ed6adac12370b
|
/learnPythonExample/pythonGrammar/08-通用装饰器.py
|
12959935611489967ad5d6cfb9ce5985a826d4cc
|
[] |
no_license
|
Armyz/learnPython
|
068046e38ce7b9887e01acea3c47156f75e06487
|
88a529c5117219af57e8af491d1702676c218191
|
refs/heads/master
| 2021-05-23T05:09:14.301237
| 2017-12-11T15:10:31
| 2017-12-11T15:10:31
| 95,230,350
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,134
|
py
|
# -*- coding:utf-8 -*-
"""
A decorator: writing @xxx above a function passes that function object, as an argument,
to the decorator function. For example:
@DecoratorFunc
def func2():
    xxx
is equivalent to func2 = DecoratorFunc(func2)
"""
from time import (ctime,sleep)
def DecoratorFunc(funcName):
print("执行装饰器")
    #To support decorated functions that take arguments, use *args and **kwargs to capture them
    #Below is a general-purpose decorator that forwards both the arguments and the return value
def inner_func(*args,**kwargs):
print("%s函数调用时间为:%s"%(funcName.__name__,ctime()))
ret = funcName(*args,**kwargs)
        return ret #return the original function's return value
print("装饰器执行完毕")
return inner_func
@DecoratorFunc
def func1():
print("这是无参数的函数")
@DecoratorFunc
def func2(a,b):
print("含有两个参数:%d %d"%(a,b))
@DecoratorFunc
def func3(a,b,c,d):
print("含有多个个参数:%d %d %d %d"%(a,b,c,d))
@DecoratorFunc
def func4(a):
print("参数:%d"%a)
return "这是一个函数返回测试"
func1()
func2(11,22)
func3(11,22,33,44)
ret =func4(100)
print(ret)
|
[
"armyz@foxmail.com"
] |
armyz@foxmail.com
|
ff0cadfe61595fb01066cd48cbc555c020961a96
|
935195fdf6cc1ef7252d096994a32bd449cb0635
|
/ask_parse.py
|
0e50b1c3131b12bae447a04120ddf596d77c0bb6
|
[] |
no_license
|
ZZWENG/WikiQA
|
d8bc8b1063b8ffaf0ef64552dc66ed8989af31fa
|
c8c238cd3d77336791b2eb4fabcbaf882628e2e7
|
refs/heads/master
| 2021-05-31T04:29:49.573041
| 2016-04-19T04:46:26
| 2016-04-19T04:46:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,605
|
py
|
__author__ = 'laceyliu'
import re
import stanford_utils
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.corpus import wordnet as wn
import tree_parser
bes = ["am", "are", "was", "were", "have", "has", "is"]
# get basic form for verb (set pos = 'v' for verb)
def basicForm(word, pos):
exceptions = wn._exception_map[pos]
if word in exceptions:
return exceptions[word][0]
else:
return WordNetLemmatizer().lemmatize(word, pos)
ner_tagger = stanford_utils.new_NERtagger()
def get_howmany(tree):
num = ""
obj = ""
question = get_binary(tree)
nps = tree_parser.get_phrases(tree, "NP")
for np in nps:
for i in xrange(len(np)):
if i+1 < len(np) and np[i].label() == "CD" and np[i+1].label() == "NNS":
num = " ".join(np[i].leaves())
obj = " ".join(np[i+1].leaves())
break
if len(num) > 0 and len(obj)>0:
break
return "How many "+obj+ " "+question.replace(num+" ", "").replace(obj, "").strip().rstrip(',').rstrip('.') +"?"
def get_where(tree):
where = ""
question = get_binary(tree)
pps = tree_parser.get_phrases(tree, "PP")
for pp in pps:
ner_pp = ner_tagger.tag(pp.leaves())
for (word, tag) in ner_pp:
if tag == "LOCATION" or tag == "ORGANIZATION":
where = " ".join(pp.leaves())
break
return "where "+question.replace(where, "").strip().rstrip(',').rstrip('.') +"?"
def get_when(tree):
when = ""
question = get_binary(tree)
pps = tree_parser.get_phrases(tree, "PP", sort=True, reversed = True)
for pp in pps:
ner_pp = ner_tagger.tag(pp.leaves())
for (word, tag) in ner_pp:
if tag == "TIME" or tag == "DATE":
when = " ".join(pp.leaves())
break
return "when "+question.replace(when, "").rstrip(',').rstrip('.') + "?"
def get_binary(tree):
question = ""
verbP = re.compile("^V[BP].{0,1}$")
md = ""
mbody = ""
for node in tree:
if node.label() == "NP" and len(mbody) == 0:
for sub in node:
if sub.label() == "DT" or sub.label() == "PRP" or sub.label() == "IN":
first = " ".join(sub.leaves()).lower()
if first == "i":
first = first.upper()
mbody += first
else:
mbody += " "+ " ".join(sub.leaves())
mbody += " "
# mbody = " ".join([leave.lower() if node.label() == "DT" else leave for leave in node.leaves()])+" "
elif node.label() == "VP":
for i in xrange(len(node)):
if node[i].label() == "MD" or (node[i][0] in bes):
# if node[i].label() == "MD" or node[i][0] == "is" or (node[i][0] == "has" and i+1<len(node) and verbP.match(node[i+1].label())):
md = " ".join(node[i].leaves())
question = md + mbody + question[question.find(" "):]
break
elif node[i].label() == "VBZ":
question = "does " + mbody.lstrip() + question[question.find(" "):]
for j in xrange(i, len(node)):
if node[j].label() == "VBZ":
node[j][0] = basicForm(node[j][0], 'v')
break
elif node[i].label() == "VBP":
question = "do " + mbody.lstrip() + question[question.find(" "):]
for j in xrange(i, len(node)):
if node[j].label() == "VBD":
node[j][0] = basicForm(node[j][0], 'v')
elif node[i].label() == "VBD":
if node[i][0] != "was" and node[i][0] != "were" and not (node[i][0] == "had" and i+1<len(node) and verbP.match(node[i+1].label())):
question = "did " + mbody.lstrip() + " " +question[question.find(" "):]
for j in xrange(i, len(node)):
if node[j].label() == "VBD":
node[j][0] = basicForm(node[j][0], 'v')
if md != "":
question += " ".join([leave for leave in node.leaves()]).replace(md, "")
else:
question += " ".join([leave for leave in node.leaves()])
return question.rstrip(',').rstrip('.')
# generate who question
def get_who(tree):
question = "who "
verbP = re.compile("^V[BP].{0,1}$")
for node in tree:
if node.label() != "NP":
if node.label() == "VP":
for i in xrange(len(node)):
if node[i].label() == "MD" or (node[i][0] in bes ):
break
elif node[i].label() == "VBZ":
# if node[i][0] != "is" and not (node[i][0] == "has" and i+1<len(node) and verbP.match(node[i+1].label())):
question = "who" + question[question.find(" "):]
# for j in xrange(i, len(node)):
# if node[j].label() == "VBZ":
# node[j][0] = basicForm(node[j][0], 'v')
break
elif node[i].label() == "VBD":
if node[i][0] != "was" and node[i][0] != "were" and not (node[i][0] == "had" and i+1<len(node) and verbP.match(node[i+1].label())):
question = "who" + question[question.find(" "):]
# for j in xrange(i, len(node)):
# if node[j].label() == "VBD":
# node[j][0] = basicForm(node[j][0], 'v')
question += " ".join([leave for leave in node.leaves()]) + " "
question = question[:len(question)-3]
return question.rstrip(',').rstrip('.') + "?"
# generate what question
def get_what(tree):
question = "what "
verbP = re.compile("^V[BP].{0,1}$")
for node in tree:
if node.label() != "NP":
if node.label() == "VP":
for i in xrange(len(node)):
if node[i].label() == "MD" or (node[i][0] in bes ):
break
elif node[i].label() == "VBZ":
#if node[i][0] != "is" and not (node[i][0] == "has" and i+1<len(node) and verbP.match(node[i+1].label())):
question = "what" + question[question.find(" "):]
# for j in xrange(i, len(node)):
# if node[j].label() == "VBZ":
# node[j][0] = basicForm(node[j][0], 'v')
# break
break
elif node[i].label() == "VBD":
if node[i][0] != "was" and node[i][0] != "were" and not (node[i][0] == "had" and i+1<len(node) and verbP.match(node[i+1].label())):
question = "what" + question[question.find(" "):]
# for j in xrange(i, len(node)):
# if node[j].label() == "VBD":
# node[j][0] = basicForm(node[j][0], 'v')
question += " ".join([leave for leave in node.leaves()]) + " "
question = question[:len(question)-3]
return question.rstrip(',').rstrip('.') + "?"
#
# import tree_parser
# test = "the 2nd century AD astronomer Ptolemy described 48 constellations."
# tree = tree_parser.sent_to_tree(test)
# print get_howmany(tree)
|
[
"xiliuhk@gmail.com"
] |
xiliuhk@gmail.com
|
c64d2d3e90b12fa6f541b0e91472aae542fb7c2a
|
920d6c6f23a6aa004c3a353c2dd1fffaff757c3d
|
/visualization.py
|
d8ac98e836f3ac9b3fc4ffe43d30796c80fd942f
|
[] |
no_license
|
bhakti-10/cognifront-capstone-project
|
13974dd21b8cfffab9dd52a46607b4d764008766
|
ebf9efcc1abd2772e1e45856522f44e23cd83732
|
refs/heads/master
| 2022-12-05T17:29:41.873192
| 2020-08-30T11:12:53
| 2020-08-30T11:12:53
| 291,452,254
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 788
|
py
|
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
def Plot(df):
# Creating the bar plot
df['online_order'].value_counts().plot(kind='bar')
plt.title('Does restaurant take online order?')
plt.show()
df['book_table'].value_counts().plot(kind='bar')
plt.title('Allowed to book a table?')
plt.show()
df['location'].value_counts().plot(kind='bar')
plt.title('Restaurant located at')
plt.show()
#Count Plot
sns.countplot(x="type", hue="city", data=df)
plt.show()
#Box Plot
sns.boxplot(x=df["cost"])
plt.show()
#Pie Chart
df['type'].value_counts().plot(kind='pie')
plt.show()
df['rest_type'].value_counts().plot(kind='pie')
plt.show()
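# Minimal usage sketch (an assumption, not part of the original file): Plot expects a
# DataFrame with online_order, book_table, location, type, city, cost and rest_type
# columns, e.g.
#   df = pd.read_csv('zomato_cleaned.csv')  # hypothetical cleaned dataset
#   Plot(df)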
|
[
"noreply@github.com"
] |
bhakti-10.noreply@github.com
|
dbf2529558a71d575f0da2b41d8365f734bfea8f
|
c5b9f0fabffb6b2d13c6e350c8187a922709ac60
|
/build/pmb2_rgbd_sensors/catkin_generated/generate_cached_setup.py
|
374ff6169f89261aed91836157f562dc2d36ca5d
|
[] |
no_license
|
MohamedEhabHafez/Sorting_Aruco_Markers
|
cae079fdce4a14561f5e092051771d299b06e789
|
0f820921c9f42b39867565441ed6ea108663ef6c
|
refs/heads/master
| 2020-12-09T02:43:00.731223
| 2020-01-15T17:31:29
| 2020-01-15T17:31:29
| 233,154,293
| 0
| 0
| null | 2020-10-13T18:46:44
| 2020-01-11T00:41:38
|
Makefile
|
UTF-8
|
Python
| false
| false
| 1,369
|
py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import stat
import sys
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/kinetic/share/catkin/cmake', 'catkinConfig.cmake.in')):
sys.path.insert(0, os.path.join('/opt/ros/kinetic/share/catkin/cmake', '..', 'python'))
try:
from catkin.environment_cache import generate_environment_script
except ImportError:
# search for catkin package in all workspaces and prepend to path
for workspace in "/home/mohamed/tiago_public_ws/devel;/opt/ros/kinetic".split(';'):
python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
if os.path.isdir(os.path.join(python_path, 'catkin')):
sys.path.insert(0, python_path)
break
from catkin.environment_cache import generate_environment_script
code = generate_environment_script('/home/mohamed/tiago_public_ws/devel/.private/pmb2_rgbd_sensors/env.sh')
output_filename = '/home/mohamed/tiago_public_ws/build/pmb2_rgbd_sensors/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
#print('Generate script for cached setup "%s"' % output_filename)
f.write('\n'.join(code))
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
|
[
"mohamed@radiirobotics.com"
] |
mohamed@radiirobotics.com
|
44e45baa28dfabad770130906a9edcbed40c5526
|
16c4173006036ff05646a81236991b112a3a48f9
|
/fcsrf/wsgi.py
|
2ed4028e05dd68af67d91bab66e4cea1b091df4b
|
[] |
no_license
|
reshan9b11/fcscp
|
a862d007607b97da25126e74ca60734257221542
|
0a9376b8256170f64cc3956c6e1fdc9576fe9641
|
refs/heads/master
| 2020-08-29T18:50:01.778649
| 2019-10-30T12:12:23
| 2019-10-30T12:12:23
| 218,137,035
| 0
| 0
| null | 2019-10-30T14:42:31
| 2019-10-28T20:16:15
|
Python
|
UTF-8
|
Python
| false
| false
| 387
|
py
|
"""
WSGI config for fcsrf project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'fcsrf.settings')
application = get_wsgi_application()
|
[
"reshanofficial@gmail.com"
] |
reshanofficial@gmail.com
|
737e7ce4a2fd2b4aabe9f882affa65840be075de
|
1345397c014d757daf99e4d782d76b502c3c6e2a
|
/coursera_algorithmic toolbox/divide_and_conquer/majority_element.py
|
2d55b4782e59ea6b4d1ed7c1c81015f10b94b4f7
|
[] |
no_license
|
eobrie16/courses
|
637dc912c949ac00509b590b65a8b213fe6c5298
|
f660ca2a2791ef115e84230cceea90dba8f802bd
|
refs/heads/master
| 2023-02-25T11:21:37.465227
| 2022-02-11T04:19:18
| 2022-02-11T04:19:18
| 85,152,794
| 0
| 0
| null | 2023-02-16T02:48:20
| 2017-03-16T04:30:50
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,837
|
py
|
# Uses python3
# The goal in this code problem is to check whether an input sequence
# contains a majority element
# Algorithmic Toolbox, Week3.2 (Divide and Conquer)
import sys, random
def swap(a, i, j):
temp = a[i]
a[i] = a[j]
a[j] = temp
def partition(a, left, right):
x = a[left]
j = left
k = left
for i in range(left+1, right+1):
if a[i] <= x:
k += 1
swap(a, i, k)
if a[k] != x:
j += 1
swap(a, j, k)
a[left] = a[j]
a[j] = x
return j, k
def get_majority_element(a, left, right):
if len(a) == 1:
return a[0]
if left >= right:
return -1
pivot = random.randint(left,right)
#print (pivot)
swap(a, left, pivot)
m, n = partition(a, left, right)
#print (a)
if n-m+1 > len(a) // 2:
return a[n]
if m-left > len(a) // 2:
return get_majority_element(a, left, m-1)
if right-n > len(a) // 2:
return get_majority_element(a, n+1, right)
return -1
if __name__ == '__main__':
'''while 1:
x=random.randint(1,10e9)
n=random.randint(1,10e5)
a = [x if i%2==0 else 0 for i in range(n)]
import time
start = time.clock()
num = get_majority_element(a, 0, len(a)-1)
print ("time = %f" % (time.clock()-start))
if n%2:
expected = x
else:
expected = -1
if num != expected:
print (a)
print (n)
print ("Should get %s" % expected)
print ("But got %s instead" % num)
break
else:
print ("OK")'''
n = int(sys.stdin.readline())
a = list(map(int, sys.stdin.readline().split()))
if get_majority_element(a, 0, n-1) != -1:
print(1)
else:
print(0)
|
[
"late.for.lunch@gmail.com"
] |
late.for.lunch@gmail.com
|
eb853026c953d0ce0e7575ba3df55475b0c4da4b
|
a0d61a10b1791f7b4ef20de5dd1c4375b9b33596
|
/monkeybook/generators/signals/posts_signal.py
|
142bef48595bad9caa47f7928a2f08b4cb082cdb
|
[] |
no_license
|
bcattle/monkeybook3
|
e04cb477fe9e14cae63f6aaed75ae5c28bc0b140
|
e5c60d88833c6940071e9dca45494a47b649749d
|
refs/heads/master
| 2016-09-05T20:39:21.975209
| 2013-04-27T00:14:41
| 2013-04-27T00:14:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 572
|
py
|
from monkeybook.generators.signals import Signal
from monkeybook.data_connnectors.results import ResultField
from monkeybook.data_connnectors.facebook.connectors import friends
class TopPostsResult(friends.FriendsResult):
score = ResultField()
class TopPostsSignal(Signal):
def __init__(self, *args, **kwargs):
pass
def process(self, data):
pass
class BirthdayPostsSignal(Signal):
def __init__(self, *args, **kwargs):
pass
def process(self, data):
pass
TopPostsSignal.register()
BirthdayPostsSignal.register()
|
[
"iotasquared@gmail.com"
] |
iotasquared@gmail.com
|
98df9a9e0a968c92a25dad6123757ca231261354
|
ccbfc7818c0b75929a1dfae41dc061d5e0b78519
|
/aliyun-openapi-python-sdk-master/aliyun-python-sdk-airec/aliyunsdkairec/request/v20181012/DescribeDiversifyRequest.py
|
7e8eb2b99070adb40c1b39818922dfe0ebf97b6e
|
[
"Apache-2.0"
] |
permissive
|
P79N6A/dysms_python
|
44b634ffb2856b81d5f79f65889bfd5232a9b546
|
f44877b35817e103eed469a637813efffa1be3e4
|
refs/heads/master
| 2020-04-28T15:25:00.368913
| 2019-03-13T07:52:34
| 2019-03-13T07:52:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,376
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
class DescribeDiversifyRequest(RoaRequest):
def __init__(self):
RoaRequest.__init__(self, 'Airec', '2018-10-12', 'DescribeDiversify','airec')
self.set_uri_pattern('/openapi/instances/[InstanceId]/diversifies/[Name]')
self.set_method('GET')
def get_InstanceId(self):
return self.get_path_params().get('InstanceId')
def set_InstanceId(self,InstanceId):
self.add_path_param('InstanceId',InstanceId)
def get_Name(self):
return self.get_path_params().get('Name')
def set_Name(self,Name):
self.add_path_param('Name',Name)
|
[
"1478458905@qq.com"
] |
1478458905@qq.com
|
45c0b53cddd5f4a3d0d346f0b13ce1dd91ec0d58
|
0a0619950cb96dac087e78e633a64b5756abd95b
|
/Neural_Network/plot.py
|
99c06ff72f794b948bff834109142f2b087e20a3
|
[] |
no_license
|
jsjtzyy/Pattern_Recognition
|
cb96b1ea6fcb53d2bd0be1614dac561271e3b92f
|
6e527478c7b9c21407fff696d4a7192db26feeea
|
refs/heads/master
| 2021-01-12T06:36:49.328240
| 2016-12-26T21:36:21
| 2016-12-26T21:36:21
| 77,396,153
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 900
|
py
|
# This code from scratch part is completed by ghe10, yingyiz2 and gjin7
import matplotlib.pyplot as plt
import numpy as np
def plot_confusion_matrix_train(cm, classes, title='Confusion matrix of Training, stacked RBM', cmap=plt.cm.Blues):
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes)
plt.yticks(tick_marks, classes)
plt.ylabel('True label')
plt.xlabel('Predicted label')
def plot_confusion_matrix_eval(cm, classes, title='Confusion matrix of Testing, stacked RBM', cmap=plt.cm.Blues):
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes)
plt.yticks(tick_marks, classes)
plt.ylabel('True label')
plt.xlabel('Predicted label')
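
# Minimal usage sketch (illustrative values only, not from the original project):
# both helpers expect a square confusion matrix `cm` and a list of class labels.
if __name__ == '__main__':
    example_cm = np.array([[50,  2,  1],
                           [ 4, 45,  3],
                           [ 0,  5, 47]])
    example_classes = ['0', '1', '2']
    plot_confusion_matrix_train(example_cm, example_classes)
    plt.show()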
|
[
"zhangyingyi1014@163.com"
] |
zhangyingyi1014@163.com
|
2b685efe2710a931b3ee1302bfc3ace91286f6c1
|
09793adc4b5d4bce7b57ff30cbe7ce4b5a1dcd6b
|
/lppydsmc/systems/creator.py
|
1a72c87dd2ab111aa52a4ed2b93844d4c635cdd6
|
[
"MIT"
] |
permissive
|
Quettle/lppydsmc
|
625c746928d2011af83bec2bfdaf6bf8ac2669e9
|
37290792e845086f7ea182d81f284d68b6cdcbea
|
refs/heads/main
| 2023-08-11T17:40:39.554494
| 2021-10-07T07:59:45
| 2021-10-07T07:59:45
| 411,302,985
| 0
| 0
|
MIT
| 2021-09-28T13:54:41
| 2021-09-28T13:54:40
| null |
UTF-8
|
Python
| false
| false
| 3,924
|
py
|
import numpy as np
class SystemCreator(object):
"""
Represents a system with boundaries.
Example :
Boundary : type = ndarray ; value = [x1,y1,x2,y2]
"""
def __init__(self, segments, idx_out_segments = None):
""" Initialize a system from a list of segments (2D-ndarray). The segments have to be given in a clock-wise compared to the inside of the system.
It is also the case for the extremities of a segment : (x1,y1)<(x2,y2) clock-wise.
Args:
segments (2D-ndarray): the list containing all the segments of the system.
idx_out_segments (list, optional): list of the indexes of the out boundaries in segments. Default to None.
"""
self.segments, self.a, self.n = self._init_segments(segments)
self.min_x, self.max_x, self.min_y, self.max_y = self._init_extremal_values()
self.idx_out_segments = idx_out_segments
def _init_segments(self, segments):
""" Initialize useful arrays for the simulations : directing vectors for the segments (such that x1 =< x2 and if x1=x2, then y1 =< y2)
and also define the inward normal vectors.
Args:
            segments (np.ndarray): 2D array of size (number of segments x 4), a segment = [x1,y1,x2,y2]. The segments (and their extremities) are in clock-wise order.
Returns:
np.ndarray, np.ndarray, np.ndarray: the segments, with segment = [x1,y1,x2,y2] such that x1 =< x2 and if x1=x2, then y1 =< y2, the directing vectors and the normal inward vectors (normalized).
"""
segments_ = []
a = np.zeros((segments.shape[0], 3))
        normal = [] # normal vectors facing inward (that is why we cannot use a to get the normal vectors, but we can use the initial segments)
# defined in a counter-clock wise manner
for k, segment in enumerate(segments):
x1, y1, x2, y2 = segment
normal.append([y2-y1, x1-x2])
a[k, 2] = np.linalg.norm(segment[2:]-segment[:2])
assert((x1!=x2) or (y1!=y2))
if(x1>x2 or (x1==x2 and y1>y2)):
segments_.append([x2, y2, x1, y1])
a[k, :2] = np.array([x1-x2, y1-y2])/a[k, 2]
else :
segments_.append([x1, y1, x2, y2])
a[k, :2] = np.array([x2-x1, y2-y1])/a[k, 2]
normal = np.array(normal)
norm = np.linalg.norm(normal, axis = 1)
return np.array(segments_), a, normal/np.expand_dims(norm, axis = 1)
def _init_extremal_values(self):
segment_x_list = []
segment_y_list = []
for segment in self.segments:
x1, y1, x2, y2 = segment
segment_x_list.append(x1)
segment_x_list.append(x2)
segment_y_list.append(y1)
segment_y_list.append(y2)
max_x, min_x = max(segment_x_list), min(segment_x_list)
max_y, min_y = max(segment_y_list), min(segment_y_list)
return min_x, max_x, min_y, max_y
# -------------------------- Getter / Setter --------------- #
def get_shape(self):
return np.array([self.max_x - self.min_x, self.max_y - self.min_y])
def get_extremal_values(self):
return {
'min_x' : self.min_x,
'max_x' : self.max_x,
'min_y' : self.min_y,
'max_y' : self.max_y
}
def get_segments(self):
return self.segments
def get_offsets(self):
return np.array([self.min_x, self.min_y])
def get_dir_vects(self):
return self.a
def get_normal_vectors(self):
return self.n
def get_idx_out_segments(self):
return self.idx_out_segments
def __str__(self) -> str:
return f'System : shape = {self.get_shape()} m - offsets = {self.get_offsets()} - {len(self.segments)} segments of which {len(self.idx_out_segments)} are exits'
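
# Minimal usage sketch; the unit-square segments below are illustrative values
# chosen to follow the clock-wise convention described in the docstrings above.
if __name__ == '__main__':
    unit_square = np.array([
        [0., 0., 0., 1.],   # left wall
        [0., 1., 1., 1.],   # top wall
        [1., 1., 1., 0.],   # right wall, treated as the outlet here
        [1., 0., 0., 0.],   # bottom wall
    ])
    system = SystemCreator(unit_square, idx_out_segments=[2])
    print(system)                        # shape, offsets and number of exits
    print(system.get_normal_vectors())   # inward-facing unit normals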
|
[
"paul.calot@hotmail.fr"
] |
paul.calot@hotmail.fr
|
f26665d37c4e8a81c6c050dcc58187024404a27e
|
78ba0fe28958b1d57563670448a788fe61fe100f
|
/homework/hw2/hw22_dev.py
|
6dfa1c4a8a8766bc03fb3e01db256cccabdca4fb
|
[] |
no_license
|
egleizer/High-Performance-Computing-2016
|
7ce9ea3f57142ba7dc69d1c944c0fd9c5e534436
|
c41127070ec41f381ba5932b64ddc7969abb5b25
|
refs/heads/master
| 2020-03-28T19:40:40.327423
| 2018-09-16T14:13:11
| 2018-09-16T14:13:11
| 148,999,028
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,888
|
py
|
"""Homework 2, part 2"""
"""Evgeniia Gleizer 00948999"""
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import odeint
def solveFlu(T,NT,a,b0,b1,g,k,y0):
#giving the parameters for the differential equation
def RHS(y,t):
#initial values
s=y[0]
e=y[1]
c=y[2]
r=y[3]
#equations
b=b0+b1*(1+np.cos(2*np.pi*t))
ds = k*(1-s)-b*c*s
de = b*c*s-(k+a)*e
dc = a*e-(g+k)*c
dr = g*c-k*r
dy = ds,de,dc,dr
return dy
    #timespan
t=np.linspace(0,T,NT)
#integrating ODE specified by RHS
Y = odeint(RHS,y0,t)
return Y[:,0],Y[:,1],Y[:,2],Y[:,3],t
def displaySolutions(t,y):
#plotting the first figure
plt.figure()
#obtaining only such t's that t>1
t1=t[t>1]
#as C increases, we take elements starting from (len t - len t1)
d=len(t) - len(t1)
    plt.plot(t1,y[2,d:])
plt.legend(['C to t'])
plt.xlabel('t')
plt.ylabel('C')
plt.title('Evgeniia Gleizer. Created by displaySolution.')
plt.show()
#plotting the second figure
plt.figure()
    #as we already know the gap from the left, we amend C and E straight away
    plt.plot(y[2,d:],y[1,d:])
plt.legend(['E to C'])
plt.xlabel('C')
plt.ylabel('E')
plt.title('Evgeniia Gleizer. Created by displaySolution.')
plt.show()
def linearFit(b0=5):
#input all the given data
a,b1,k,g=1.0,0.2,0.1,0.2
S,E,C,R,t=solveFlu(10,100,a,b0,b1,g,k,[0.997,0.001,0.001,0.001])
    #amend C so that it only keeps elements that are less than 0.1
C1=C[C<0.1]
#find the length of C1
k=len(C1)
    #amend t according to the length of C1
t1=t[0:k]
#use polyfit to find the coefficients of the approximation polynomial
m=np.polyfit(t1,np.log(C1),1)
plt.figure()
    #plotting the log function against time
plt.plot(t,np.log(C))
#plotting the approximation polynomial
plt.plot(t,t*m[0]+m[1])
    plt.legend(['log(C)', 'linear fit'])
    plt.xlabel('t')
    plt.ylabel('log(C)')
plt.title('Evgeniia Gleizer. Created by linearFit. Comparison of solution and expected exponential growth')
plt.show()
print C1, t1,m
return
def fluStats(t,y,i1,i2):
#checking the initial conditions
if i1<i2:
#calculating appropriate yn
yn=y[i1-1:i2-1,:]
        #obtaining variance
v=np.array([np.var(yn[:,0]),np.var(yn[:,1]),np.var(yn[:,2]),np.var(yn[:,3])])
#obtaining mean
k=np.array([np.mean(yn[:,0]),np.mean(yn[:,1]),np.mean(yn[:,2]),np.mean(yn[:,3])])
    else:
        print 'error: i1 must be smaller than i2'
        return None, None
    return v,k
if __name__ == '__main__':
S,E,C,R,t=solveFlu(5, 1000, 45.6,750.0,1000.0,73.0,1.0,[0.1,0.05,0.05,0.8])
Y=np.array([S,E,C,R])
displaySolutions(t,Y)
Nt=len(t)
fluStats(t,Y,int(Nt/2),Nt)
|
[
"evgeniiagleizer@dyn1222-197.wlan.ic.ac.uk"
] |
evgeniiagleizer@dyn1222-197.wlan.ic.ac.uk
|
bce5c20325e09aa0a5da7db92fbfd50bbaba6361
|
0049ca3ab97f3753d28dbdaa0475f472106de613
|
/week2/day2/parantheses.py
|
80522c93323b57a84f029e8916672d57918691ba
|
[] |
no_license
|
pavani1205/Competitive-Programming
|
559bc9c9fc440fe29448428c119c2caae26e79ab
|
c06dca16ee67f4cf001943a3a790e13830a42a55
|
refs/heads/master
| 2020-03-21T13:10:04.241631
| 2018-07-21T09:58:00
| 2018-07-21T09:58:00
| 138,590,332
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,012
|
py
|
import unittest
def get_closing_paren(sentence, opening_paren_index):
open1 = 0
for position in range(opening_paren_index + 1, len(sentence)):
char = sentence[position]
if char == '(':
open1 += 1
elif char == ')':
if open1 == 0:
return position
else:
open1 -= 1
raise Exception("No closing parenthesis :)")
# Tests
class Test(unittest.TestCase):
def test_all_openers_then_closers(self):
actual = get_closing_paren('((((()))))', 2)
expected = 7
self.assertEqual(actual, expected)
def test_mixed_openers_and_closers(self):
actual = get_closing_paren('()()((()()))', 5)
expected = 10
self.assertEqual(actual, expected)
def test_no_matching_closer(self):
with self.assertRaises(Exception):
get_closing_paren('()(()', 2)
unittest.main(verbosity=2)
|
[
"noreply@github.com"
] |
pavani1205.noreply@github.com
|
7e5e24d059de75d022a6fa3d641c68c15e102ec4
|
4fa4a8c4b3cb1713d85942d7b3dac797ae3cb089
|
/src/zojax/persistentresource/configlet.py
|
cf09562d353cdc575e18580fc579d5cb340edb45
|
[
"ZPL-2.1"
] |
permissive
|
Zojax/zojax.persistentresource
|
2c9b3f01dd564021e719f44e0c8da7ed660e1041
|
b926bcfc5d15e4ede98393a6aa9fa701a61f21f0
|
refs/heads/master
| 2021-01-19T07:49:26.001052
| 2014-01-29T11:32:59
| 2014-01-29T11:32:59
| 2,026,017
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 966
|
py
|
##############################################################################
#
# Copyright (c) 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
$Id$
"""
from zope import interface
from zojax.content.type.configlet import ContentContainerConfiglet
from interfaces import _, IPersistentResources, IResourcesContainer
class PersistentResources(ContentContainerConfiglet):
interface.implements(IPersistentResources, IResourcesContainer)
title = _(u'Resources')
|
[
"andrey.fedoseev@gmail.com"
] |
andrey.fedoseev@gmail.com
|
d021e21b93f474e16b830e182b2b542e352153fc
|
dfeeb6f8a691c104898eee7b9ecefe8015d40f7c
|
/Pyhton tutorial /131Loop_grade_sum_average_variance_std_deviation_.py
|
5bd8d9de2501fc6e4c35dd492e6af4affff6bf7d
|
[] |
no_license
|
narendra-ism/Python_tutorial_basic_
|
9277926dbfc707a761abe2ddebafb0855249fb68
|
29c2ebd5e7095bfda02d8c03d0afb65a85efe05d
|
refs/heads/master
| 2021-03-30T20:46:17.444715
| 2018-03-12T05:29:16
| 2018-03-12T05:29:16
| 124,831,659
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 920
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 11 17:52:00 2018
@author: narendra
"""
grades = [100, 100, 90, 40, 80, 100, 85, 70, 90, 65, 90, 85, 50.5]
def print_grades(grades_input):
for grade in grades_input:
print (grade)
print(print_grades(grades))
def grades_sum(scores):
total = 0
for score in scores:
total += score
return total
print (grades_sum(grades))
def grades_average(grades_input):
sum_of_grades = grades_sum(grades_input)
average = sum_of_grades / float(len(grades_input))
return average
print (grades_average(grades))
def grades_variance(scores):
average = grades_average(scores)
variance = 0
for score in scores:
variance += (average - score) ** 2
return variance / len(scores)
print (grades_variance(grades))
def grades_std_deviation(variance):
return variance**0.5
variance=grades_variance(grades)
print(grades_std_deviation(variance))
|
[
"narendra11d@gmail.com"
] |
narendra11d@gmail.com
|
f3fc81bb805886431d55cc89ebaa87e0ecdd43d2
|
618dae24f1d1449cc7e9ab842e96892fd2f0413d
|
/db_client.py
|
542ceb97b60ac3a36733bdd3626f86ac6a614ff4
|
[] |
no_license
|
Aehrlich98/fu_dbs_project_schehrlmar
|
ea023a18188cd6993ac153939b60460a359e3889
|
f8048f618f2f88cb5849c06971889c758496a093
|
refs/heads/main
| 2023-06-14T04:37:01.870725
| 2021-07-08T23:12:43
| 2021-07-08T23:12:43
| 382,481,280
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,938
|
py
|
#!/usr/bin/env python3
# DB client by Aaron and Pirin:
# To run this successfully, you'll need the psycopg2 module for python:
# install with: pip install psycopg2
# you might also need the binary wheel: pip install psycopg2-binary ; or the libpq-dev system package
import sys
import psycopg2
from psycopg2 import sql
def connect():
#allow user to enter a single string containing all login data separated by spaces TODO allow save to file for auto login.
connect_data=None
connect_data=input("Enter Database Data to connect to:\n-- Format for input --\n-- host database_name user password\n")
if(connect_data.casefold() == "quit"):
end_it(None)#QUIT
conArray=connect_data.split(' ')
connection=psycopg2.connect(
host=conArray[0],
database=conArray[1],
user=conArray[2],
password=conArray[3]
)
return connection
def end_it(connection=None):
if(connection is not None):
connection.close()
print("Connection closed.\n")
print("Quitting...")
sys.exit()
def main():
print("Starting up connection to PostgreSQL...\n ...To quit enter 'quit'\n'")
#Connect to DB via psycopg2
connection = connect()
cursor = connection.cursor()
while(True):
#wait for user input
statement= input("Please enter command:\n")
if(statement.casefold() == "quit"):
print("Quitting...")
break
if(statement.casefold() == "select version;"):
cursor.execute("select version;")
#handle input to make it usable for the DB
#Query DB
print("Executing statement:")
cursor.execute(statement) #query)
db_reply = cursor.fetchall() #retrieve all data
#manipulate reply: simply reverse the string reply, because why not ;)
db_reply = db_reply[::-1]
print("PostgreSQL says: ...\n")
print(db_reply)
#END while
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
Aehrlich98.noreply@github.com
|
d77884e0ed4c8762eabaf1cb4ba40a475d9ae3e9
|
57f5bc8259589db28292cd13c399a66acdfe8031
|
/tests/test_do_transform.py
|
e7050573a297535709e04082f8de80de080c8115
|
[
"MIT"
] |
permissive
|
bbelderbos/spreadsheet_etl
|
29ef5fc802e40a3d8059c6e70ad0b07b2f8cea82
|
3e705029c7f47c76335af4e83010bb937313594e
|
refs/heads/master
| 2021-02-12T21:59:46.175582
| 2020-02-25T02:29:04
| 2020-02-25T02:29:04
| 244,635,651
| 2
| 0
|
MIT
| 2020-03-03T12:53:55
| 2020-03-03T12:53:54
| null |
UTF-8
|
Python
| false
| false
| 2,463
|
py
|
# file: tests/test_do_transform.py
# andrew jarcho
# 2017-03-15
from tests.file_access_wrappers import FakeFileReadWrapper
from src.transform.do_transform import Transform
def test_read_blank_line_does_not_change_state():
file_wrapper = FakeFileReadWrapper('\n')
my_transform = Transform(file_wrapper)
my_transform.read_each_line()
assert my_transform.last_date == ''
assert my_transform.last_sleep_time == ''
def test_read_week_of_line_does_not_change_state():
file_wrapper = FakeFileReadWrapper('Week of Sunday, 2016-12-04:\n')
my_transform = Transform(file_wrapper)
my_transform.read_each_line()
assert my_transform.last_date == ''
assert my_transform.last_sleep_time == ''
def test_read_line_of_equals_signs_does_not_change_state():
file_wrapper = FakeFileReadWrapper('===========================\n')
my_transform = Transform(file_wrapper)
my_transform.read_each_line()
assert my_transform.last_date == ''
assert my_transform.last_sleep_time == ''
def test_read_good_date_sets_last_date():
file_wrapper = FakeFileReadWrapper(' 2017-01-02\n')
my_transform = Transform(file_wrapper)
my_transform.read_each_line()
assert my_transform.last_date == '2017-01-02'
def test_read_bad_date_does_not_set_last_date():
file_wrapper = FakeFileReadWrapper(' 2017-01-02\n') # missing a space
my_transform = Transform(file_wrapper)
my_transform.read_each_line()
assert not my_transform.last_date
def test_read_date_b_date_w_sets_last_sleep_time():
file_wrapper = FakeFileReadWrapper(' 2016-12-07\n'
'action: b, time: 23:45\n'
' 2016-12-08\n'
'action: w, time: 3:45, hours: 4.00\n'
)
my_transform = Transform(file_wrapper)
my_transform.read_each_line()
assert my_transform.last_sleep_time == '23:45'
def test_read_date_b_date_w_sets_last_date_to_2nd_date():
file_wrapper = FakeFileReadWrapper(' 2016-12-07\n'
'action: b, time: 23:45\n'
' 2016-12-08\n'
'action: w, time: 3:45, hours: 4.00\n'
)
my_transform = Transform(file_wrapper)
my_transform.read_each_line()
assert my_transform.last_date == '2016-12-08'
|
[
"andrew.jarcho@gmail.com"
] |
andrew.jarcho@gmail.com
|
5b2e367e2953a85e91e7440a1aa0d6843497d5ac
|
e418ee670b6456a4b8555153894e97fd32358048
|
/firebirdsql/srp.py
|
dff32807ec56e42f0bcae265ce463d91aa43994e
|
[] |
no_license
|
tuksik/pyfirebirdsql
|
1f3bb10d2edb99fe58fddf323629a26b9123836b
|
c061a135f16eb06a7e00021fea36f745d6581742
|
refs/heads/master
| 2020-07-22T16:58:25.397202
| 2016-11-03T00:26:16
| 2016-11-03T00:26:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,676
|
py
|
##############################################################################
# Copyright (c) 2014-2015 Hajime Nakagami<nakagami@gmail.com>
# All rights reserved.
# Licensed under the New BSD License
# (http://www.freebsd.org/copyright/freebsd-license.html)
#
# Python DB-API 2.0 module for Firebird.
##############################################################################
# This SRP implementation is in reference to
'''
Following document was copied from <http://srp.stanford.edu/design.html>.
-----
SRP Protocol Design
SRP is the newest addition to a new class of strong authentication protocols
that resist all the well-known passive and active attacks over the network. SRP
borrows some elements from other key-exchange and identification protocols and
adds some subtle modifications and refinements. The result is a protocol that
preserves the strength and efficiency of the EKE family protocols while fixing
some of their shortcomings.
The following is a description of SRP-6 and 6a, the latest versions of SRP:
N A large safe prime (N = 2q+1, where q is prime)
All arithmetic is done modulo N.
g A generator modulo N
k Multiplier parameter (k = H(N, g) in SRP-6a, k = 3 for legacy SRP-6)
s User's salt
I Username
p Cleartext Password
H() One-way hash function
^ (Modular) Exponentiation
u Random scrambling parameter
a,b Secret ephemeral values
A,B Public ephemeral values
x Private key (derived from p and s)
v Password verifier
The host stores passwords using the following formula:
x = H(s, p) (s is chosen randomly)
v = g^x (computes password verifier)
The host then keeps {I, s, v} in its password database. The authentication
protocol itself goes as follows:
User -> Host: I, A = g^a (identifies self, a = random number)
Host -> User: s, B = kv + g^b (sends salt, b = random number)
Both: u = H(A, B)
User: x = H(s, p) (user enters password)
User: S = (B - kg^x) ^ (a + ux) (computes session key)
User: K = H(S)
Host: S = (Av^u) ^ b (computes session key)
Host: K = H(S)
Now the two parties have a shared, strong session key K. To complete
authentication, they need to prove to each other that their keys match.
One possible way:
User -> Host: M = H(H(N) xor H(g), H(I), s, A, B, K)
Host -> User: H(A, M, K)
The two parties also employ the following safeguards:
1. The user will abort if he receives B == 0 (mod N) or u == 0.
2. The host will abort if it detects that A == 0 (mod N).
3. The user must show his proof of K first. If the server detects that the user's proof is incorrect, it must abort without showing its own proof of K.
See http://srp.stanford.edu/ for more information.
'''
from __future__ import print_function
import sys
import hashlib
import random
import binascii
DEBUG = False
DEBUG_PRINT = False
if DEBUG:
DEBUG_PRIVATE_KEY = 0x60975527035CF2AD1989806F0407210BC81EDC04E2762A56AFD529DDDA2D4393
PYTHON_MAJOR_VER = sys.version_info[0]
if PYTHON_MAJOR_VER == 3:
def ord(c):
return c
SRP_KEY_SIZE = 128
SRP_SALT_SIZE = 32
def get_prime():
N = 0xE67D2E994B2F900C3F41F08F5BB2627ED0D49EE1FE767A52EFCD565CD6E768812C3E1E9CE8F0A8BEA6CB13CD29DDEBF7A96D4A93B55D488DF099A15C89DCB0640738EB2CBDD9A8F7BAB561AB1B0DC1C6CDABF303264A08D1BCA932D1F1EE428B619D970F342ABA9A65793B8B2F041AE5364350C16F735F56ECBCA87BD57B29E7
g = 2
#k = bytes2long(sha1(pad(N, SRP_KEY_SIZE), pad(g, SRP_KEY_SIZE)))
k = 1277432915985975349439481660349303019122249719989
return N, g, k
def bytes2long(s):
n = 0
for c in s:
n <<= 8
n += ord(c)
return n
def long2bytes(n):
s = []
while n > 0:
s.insert(0, n & 255)
n >>= 8
if PYTHON_MAJOR_VER == 3:
return bytes(s)
else:
return b''.join([chr(c) for c in s])
def sha1(*args):
sha1 = hashlib.sha1()
for v in args:
if not isinstance(v, bytes):
v = long2bytes(v)
sha1.update(v)
return sha1.digest()
def pad(n):
s = []
for x in range(SRP_KEY_SIZE):
s.insert(0, n & 255)
n >>= 8
if n == 0:
break
if PYTHON_MAJOR_VER == 3:
return bytes(s)
else:
return b''.join([chr(c) for c in s])
def get_scramble(x, y):
return bytes2long(sha1(pad(x), pad(y)))
def getUserHash(salt, user, password):
assert isinstance(user, bytes)
assert isinstance(password, bytes)
hash1 = sha1(user, b':', password)
hash2 = sha1(salt, hash1)
rc = bytes2long(hash2)
return rc
def client_seed():
"""
A: Client public key
a: Client private key
"""
N, g, k = get_prime()
a = random.randrange(0, 1 << SRP_KEY_SIZE)
A = pow(g, a, N)
if DEBUG:
a = DEBUG_PRIVATE_KEY
A = pow(g, a, N)
if DEBUG_PRINT:
print('A=', binascii.b2a_hex(long2bytes(A)), end='\n')
print('a=', binascii.b2a_hex(long2bytes(a)), end='\n')
return A, a
def server_seed(v):
"""
B: Server public key
b: Server private key
"""
N, g, k = get_prime()
b = random.randrange(0, 1 << SRP_KEY_SIZE)
gb = pow(g, b, N)
kv = (k * v) % N
B = (kv + gb) % N
if DEBUG:
b = DEBUG_PRIVATE_KEY
gb = pow(g, b, N)
kv = (k * v) % N
B = (kv + gb) % N
if DEBUG_PRINT:
print("v", v, end='\n')
print('b=', binascii.b2a_hex(long2bytes(b)), end='\n')
print("gb", gb, end='\n')
print("kv", kv, end='\n')
print('B=', binascii.b2a_hex(long2bytes(B)), end='\n')
return B, b
def client_session(user, password, salt, A, B, a):
"""
Client session secret
Both: u = H(A, B)
User: x = H(s, p) (user enters password)
User: S = (B - kg^x) ^ (a + ux) (computes session key)
User: K = H(S)
"""
N, g, k = get_prime()
u = get_scramble(A, B)
x = getUserHash(salt, user, password) # x
gx = pow(g, x, N) # g^x
kgx = (k * gx) % N # kg^x
diff = (B - kgx) % N # B - kg^x
ux = (u * x) % N
aux = (a + ux) % N
session_secret = pow(diff, aux, N) # (B - kg^x) ^ (a + ux)
K = sha1(session_secret)
return K
def server_session(user, password, salt, A, B, b):
"""
Server session secret
Both: u = H(A, B)
Host: S = (Av^u) ^ b (computes session key)
Host: K = H(S)
"""
N, g, k = get_prime()
u = get_scramble(A, B)
v = get_verifier(user, password, salt)
vu = pow(v, u, N) # v^u
Avu = (A * vu) % N # Av^u
session_secret = pow(Avu, b, N) # (Av^u) ^ b
K = sha1(session_secret)
if DEBUG_PRINT:
print('server session_secret=', binascii.b2a_hex(long2bytes(session_secret)), end='\n')
print('server session hash K=', binascii.b2a_hex(K))
return K
def client_proof(user, password, salt, A, B, a):
"""
M = H(H(N) xor H(g), H(I), s, A, B, K)
"""
N, g, k = get_prime()
K = client_session(user, password, salt, A, B, a)
n1 = bytes2long(sha1(N))
n2 = bytes2long(sha1(g))
M = sha1(pow(n1, n2, N), sha1(user), salt, A, B, K)
if DEBUG_PRINT:
print('client_proof:M=', binascii.b2a_hex(M), end='\n')
print('client_proof:K=', binascii.b2a_hex(K), end='\n')
return M, K
def get_salt():
if DEBUG:
salt = b'\00' * SRP_SALT_SIZE
else:
if PYTHON_MAJOR_VER == 3:
salt = bytes([random.randrange(0, 256) for x in range(SRP_SALT_SIZE)])
else:
salt = b''.join([chr(random.randrange(0, 256)) for x in range(SRP_SALT_SIZE)])
if DEBUG_PRINT:
print('salt=', binascii.b2a_hex(salt), end='\n')
return salt
def get_verifier(user, password, salt):
N, g, k = get_prime()
x = getUserHash(salt, user, password)
return pow(g, x, N)
if __name__ == '__main__':
"""
A, a, B, b are long.
salt, M are bytes.
"""
# Both
user = b'SYSDBA'
password = b'masterkey'
# Client send A to Server
A, a = client_seed()
# Server send B, salt to Client
salt = get_salt()
v = get_verifier(user, password, salt)
B, b = server_seed(v)
serverKey = server_session(user, password, salt, A, B, b)
# Client send M to Server
M, clientKey = client_proof(user, password, salt, A, B, a)
# Client and Server has same key
assert clientKey == serverKey
|
[
"nakagami@gmail.com"
] |
nakagami@gmail.com
|
7554a55643233f24097ee42698e538afdfe3d67b
|
45932ccaca48ec80cbca57ba3983522f9137d293
|
/my_project/python/function_key.py
|
5df5723ef2514f06bee3aad6bb8633257714f9e8
|
[] |
no_license
|
writetonewhorizon/device_driver_devlopment
|
dc00ab9e91518e2f56fcfcaf67c686bfd5a5b06b
|
a6429f69336b4bdd7867fddfe575e7f118d02d18
|
refs/heads/master
| 2021-01-21T07:25:40.229313
| 2016-08-30T11:43:50
| 2016-08-30T11:43:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 207
|
py
|
#! /usr/bin/python
def func(a=10, b=10, c=30):
print 'a is', a ,' and b is ', b ,' and c is ', c
if a > b:
return a
else:
return b
x=func(3,7)
print x
x=func(25,c=12)
print x
x=func(c=50,a=100)
print x
|
[
"dharmender.sureshchander@in.bosch.com"
] |
dharmender.sureshchander@in.bosch.com
|
bd44d091d10277509cb4e2440e95c35d97a262e2
|
f41ca194f68b60c6b8d37f67d908310044b8485f
|
/whetherornot/forms.py
|
9234da6019f2b7ee90a278f6fcfbbfcd7a88f414
|
[] |
no_license
|
cbo2/whetherornot
|
db238474d173375b898893388b8fc7438e92d9d1
|
82bd4df13c8524d473c18fd2a26c5d9619a52dd3
|
refs/heads/master
| 2020-05-18T20:26:17.691810
| 2019-10-08T11:06:49
| 2019-10-08T11:06:49
| 184,632,313
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 544
|
py
|
from django import forms
from django.views.generic.edit import FormView
# from .models import CustomUser
class CustomLocationForm(forms.Form):
location = forms.CharField(label='Location', max_length=100)
date = forms.DateField(label='Desired Date')
# def got_it(self):
# print('--------------- got it called ------------')
# print('date is: ', self.cleaned_data['date'])
# print('--------------- got it called ------------')
# class Meta():
# # model = CustomUser
# fields = ('location')
|
[
"talk2cbo@gmail.com"
] |
talk2cbo@gmail.com
|
caaea7a38bae69afd366f96a2d336bcf8fdbafec
|
1b49a60d578b6ae4bfc2d1421143f676c9d3128d
|
/src/core/src/tortuga/exceptions/volumeNotMapped.py
|
0647aeee7afee21c51662b112a843c45c206cd2a
|
[
"Apache-2.0"
] |
permissive
|
UnivaCorporation/tortuga
|
aad2c00a1b09fba67298b1dd7543125a26b64c51
|
56d808d7836cd15d6c6748cbf704cdea4407fef6
|
refs/heads/master
| 2021-03-24T13:31:33.060593
| 2020-11-23T19:57:59
| 2020-11-23T19:57:59
| 123,297,582
| 33
| 25
|
NOASSERTION
| 2020-11-23T19:58:27
| 2018-02-28T14:38:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,031
|
py
|
# Copyright 2008-2018 Univa Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tortuga.exceptions.tortugaException import TortugaException
from tortuga.utility import tortugaStatus
class VolumeNotMapped(TortugaException):
"""
Volume not mapped error class. It can be used in the same
way as the base TortugaException class.
"""
def __init__(self, error="", **kwargs):
TortugaException.__init__(
self, error, tortugaStatus.TORTUGA_VOLUME_NOT_MAPPED_ERROR,
**kwargs)
|
[
"mfrisch@univa.com"
] |
mfrisch@univa.com
|
bd49d5a4286fbe4e3e99c8b77c829bb26d482069
|
21bc558dc0875614158a0125ea446d6389c392d1
|
/python/ledecky_wr.py
|
55446eabf2fb14e58355e76450435b44f1ee08be
|
[
"LicenseRef-scancode-proprietary-license",
"MIT"
] |
permissive
|
nickloomis/loomsci-examples
|
b0d5beb81d5e969c7167918bb4b12225eb4c9c5f
|
1225e5b33804a1501b5cef6c3a793434a5ed50f9
|
refs/heads/master
| 2020-12-21T09:36:57.312142
| 2019-06-16T18:22:05
| 2019-06-16T18:22:05
| 41,226,116
| 3
| 0
|
MIT
| 2019-02-03T05:03:29
| 2015-08-22T22:05:11
|
Python
|
UTF-8
|
Python
| false
| false
| 2,794
|
py
|
"""
Fits an exponential curve to Katie Ledecky's world record swims in the 800m
freestyle through the 2016 Rio Olympics.
Change log:
2016/08/13 -- module started; nloomis@gmail.com
2016/08/14 -- documentation added; nloomis@gmail.com
"""
__authors__ = ('nloomis@gmail.com',)
import datetime
import matplotlib.pyplot as plt
import numpy
from scipy.optimize import curve_fit
def WorldRecordData():
"""Returns Kate Ledecky's world record times for the 800m freestyle."""
wr = {}
wr[datetime.date(2013, 8, 3).toordinal()] = datetime.time(0, 8, 13, 860000)
wr[datetime.date(2014, 6, 22).toordinal()] = datetime.time(0, 8, 11, 0)
wr[datetime.date(2015, 8, 8).toordinal()] = datetime.time(0, 8, 7, 390000)
wr[datetime.date(2016, 1, 17).toordinal()] = datetime.time(0, 8, 6, 680000)
wr[datetime.date(2016, 8, 12).toordinal()] = datetime.time(0, 8, 4, 790000)
return wr
def TimeToSeconds(time_object):
"""Returns the number of seconds since 0h0m0s for a datetime.time object."""
return time_object.hour * 3600 + time_object.minute * 60 +\
time_object.second + time_object.microsecond / 1000000.
def SecondsToTime(sec):
"""Returns a time object representing the number of seconds since 0h0m0s."""
hour = int(numpy.floor(sec / 3600.))
sec_rem = sec - hour * 3600
minute = int(numpy.floor(sec_rem / 60.))
sec_rem = sec - minute * 60
second = int(numpy.floor(sec_rem))
microsecond = int(numpy.round((sec_rem - second) * 1000000))
return datetime.time(hour, minute, second, microsecond)
def ExponentialWithOffset(x, amp, tau, offset):
"""Exponential with an offset."""
return amp * numpy.exp(-x / tau) + offset
def Fit():
"""Fit Ledecky's times to an exponential curve w an offset and plot the fit.
Returns (p_opt, p_cov), the optimized parameter values and covariance
matrix. The parameters are in the order of (a, tau, offset) and are the best-
fit to ExponentialWithOffset(). The initial data point at day=0 is the first
world record. a and offset are in seconds; tau is in days."""
    wr = WorldRecordData()
times = numpy.array([TimeToSeconds(t) for t in wr.values()])
    days = numpy.array(list(wr.keys()))
amp_init = numpy.max(times) - numpy.min(times)
offset_init = numpy.min(times)
delta_days = numpy.max(days) - numpy.min(days)
tau_init = delta_days / amp_init
ig = [amp_init, tau_init, offset_init]
popt, pcov = curve_fit(ExponentialWithOffset, days - min(days), times, p0=ig)
test_days = numpy.linspace(0, 2 * delta_days, 200)
test_times = ExponentialWithOffset(test_days, *popt)
plt.plot(days - min(days), times, 'o')
plt.plot(test_days, test_times)
plt.xlabel('days since first wr')
plt.ylabel('world record time (s)')
plt.title('Katie Ledecky 800m freestyle WR times')
plt.show()
return popt, pcov
|
[
"nloomis@gmail.com"
] |
nloomis@gmail.com
|
6b8ab0bccedc92e85d92cf5775f938f3b3b6de16
|
4d259f441632f5c45b94e8d816fc31a4f022af3c
|
/wechat/prequest.py
|
a935eacdfbbd28a091c716a921b9faba3a34a411
|
[] |
no_license
|
xiaoruiguo/lab
|
c37224fd4eb604aa2b39fe18ba64e93b7159a1eb
|
ec99f51b498244c414b025d7dae91fdad2f8ef46
|
refs/heads/master
| 2020-05-25T01:37:42.070770
| 2016-05-16T23:24:26
| 2016-05-16T23:24:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,596
|
py
|
import httplib
import json
def sendMessage(token = None, message=None):
    print '------------ send wechat message ------------'
#params = '{"touser": "@all","toparty": "@all","totag":"@all","msgtype":"text","agentid": "2","text":{ "content": %s},"safe":"0"}'%message
params = '{"touser": "@all","toparty": "@all","totag":"@all","msgtype":"text","agentid": "2","text":{ "content": \"%s\"},"safe":"0"}'%message
#params =urllib.urlencode({"server":{"name":"instance1", "imageRef": "6e09b461-0ab2-40fb-a182-a3d910adc54e", "flavorRef": "1", "max_count": 1, "min_count": 1}})
print 'params %s'%params
headers = {"Content-Type": "application/json"}
method = "POST"
path = "/cgi-bin/message/send?access_token=%s"%token
print path
http_port = '443' # '5000'
http_ip = "https://qyapi.weixin.qq.com"
status, resp = _httpRequest(method, path, params, http_ip, http_port)
    print ' ---------------- show send message result -------------------'
print resp
print status
return status
def _httpRequest(method, path, params, http_ip, http_port):
headers = {"Content-Type": "application/json"}
conn = httplib.HTTPSConnection("qyapi.weixin.qq.com","443")
conn.request(method, path, params, headers)
response = conn.getresponse()
print response
print 'show response'
data = response.read()
verify_services = json.loads(data)
conn.close()
return response.status, verify_services
token = '6jH7PJLNukazh0mRaH2gu1MoNRwgSdh4TX0G4oNii0lUEJW54nFzZp7NgJdnSvKVWko9oGw4Sym1Cme81mTM6Q'
message = 'test123'
sendMessage(token, message)
|
[
"junmein@junmeindeMacBook-Pro.local"
] |
junmein@junmeindeMacBook-Pro.local
|
030348a0cb51c3f9ae69fa7c0049045a4eebe627
|
6863b381ae7ff4614280cdd6d1a3e8eb22daaa47
|
/rnafeatures/utils/argparser.py
|
3105147c96e95de1e6f4f80715a3e3ae2536f823
|
[
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
Lewsey-Lab/rna-features
|
4a797937fcb8f34bd1372b5fac498f7217d1f14b
|
003441d3b772a5a666ad6ce49ec9ff7487dbd340
|
refs/heads/main
| 2023-08-25T12:05:06.978013
| 2021-10-21T02:08:16
| 2021-10-21T02:08:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,271
|
py
|
import argparse
import logging
from pathlib import Path
logger = logging.getLogger(__name__)
#
# Type functions for parser
#
def p_val_type(arg):
"""
Checks if p_val is a float within the range {0, 1}.
"""
try:
p_val = float(arg)
except ValueError:
msg = f"P-value '{arg}' is not a floating point number."
raise argparse.ArgumentTypeError(msg)
if not (0 < p_val < 1):
msg = f"P-value '{p_val}' is outside the range (0 < p-value < 1)"
raise argparse.ArgumentTypeError(msg)
return p_val
def path_type(arg):
"""
    Checks that supplied paths exist and contain .csv files.
"""
try:
dir = Path(arg)
except TypeError as e:
logger.exception(e)
raise
if not dir.is_dir():
msg = f"'{dir}' is not a valid directory"
raise argparse.ArgumentTypeError(msg)
elif not list(dir.glob("*.csv")):
msg = f"'{dir}' does not contain any *.csv files."
raise argparse.ArgumentTypeError(msg)
elif not list(dir.glob("*tpm.tsv")):
msg = f"'{dir}' does not contain a *tpm.tsv file."
raise argparse.ArgumentTypeError(msg)
elif len(list(dir.glob("*tpm.tsv"))) > 1:
msg = f"'{dir}' contains more than one *tpm.tsv file:\n {list(dir.glob('*tpm.tsv'))}"
raise argparse.ArgumentTypeError(msg)
else:
return dir
#
# Instantiate parser
#
desc = (
"Generates machine-learning features from RNAseq data.\n\n"
"Takes a list of directories containing DESeq2 contrast files (.csv) "
"and a 'tpm.tsv' file (containing a matrix of tpm values of genes against "
"sample) returning a 'feature_matrix.csv' containing gene expression "
"breadth and log2fc/tpm mad, max and median for each gene."
)
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
"-p",
default=0.05,
metavar="p-value",
type=p_val_type,
help="p-value cutoff for filtering log2fc values [default: 0.05]",
)
parser.add_argument(
"dir",
nargs="+",
type=path_type,
help="Dataset directories containing DESeq2 contrast files (.csv) and a 'tpm.tsv' matrix file.",
)
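
# Minimal usage sketch; the command line shown in the comment is a placeholder and
# the directories must contain real *.csv contrast files and a single *tpm.tsv file.
if __name__ == "__main__":
    # e.g. `python argparser.py -p 0.01 data/exp1 data/exp2`
    args = parser.parse_args()
    print(f"p-value cutoff: {args.p}")
    print(f"dataset directories: {args.dir}")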
|
[
"74215773+SpikyClip@users.noreply.github.com"
] |
74215773+SpikyClip@users.noreply.github.com
|
04e66e4ce10ae4663e2c81fbea4a34494a487eb3
|
7de526b33e5015cd548d7d4ae1abe42d802ade83
|
/python/run_q5.py
|
03bc75c01540c75f60c8a182270bec94054f65b5
|
[] |
no_license
|
ZhihaoZhu/Advanced-Neural-Networks-for-Recognition
|
637816324a30bb09a1e638331e532b0489ff044b
|
8b367636dda29a67e37b7d0554b075306e3cf8d0
|
refs/heads/master
| 2020-04-07T10:33:04.927871
| 2018-11-25T21:10:44
| 2018-11-25T21:10:44
| 158,290,930
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,635
|
py
|
import numpy as np
import scipy.io
from nn import *
from collections import Counter
train_data = scipy.io.loadmat('../data/nist36_train.mat')
valid_data = scipy.io.loadmat('../data/nist36_valid.mat')
# we don't need labels now!
train_x = train_data['train_data']
valid_x = valid_data['valid_data']
print(valid_x.shape)
max_iters = 100
# pick a batch size, learning rate
batch_size = 36
learning_rate = 3e-5
hidden_size = 32
lr_rate = 20
batches = get_random_batches(train_x,np.ones((train_x.shape[0],1)),batch_size)
batch_num = len(batches)
params = Counter()
M_params = Counter()
# initialize layers here
initialize_weights(1024,hidden_size,params,'layer1')
initialize_weights(hidden_size,hidden_size,params,'hidden')
initialize_weights(hidden_size,hidden_size,params,'hidden2')
initialize_weights(hidden_size,1024,params,'output')
initialize_Momentum_weights(1024,hidden_size,M_params,'layer1')
initialize_Momentum_weights(hidden_size,hidden_size,M_params,'hidden')
initialize_Momentum_weights(hidden_size,hidden_size,M_params,'hidden2')
initialize_Momentum_weights(hidden_size,1024,M_params,'output')
loss_plot = []
# should look like your previous training loops
for itr in range(max_iters):
total_loss = 0
for xb,_ in batches:
h1 = forward(xb, params, 'layer1', relu)
h2 = forward(h1, params, 'hidden', relu)
h3 = forward(h2, params, 'hidden2', relu)
output = forward(h3, params, 'output', sigmoid)
# loss
# be sure to add loss and accuracy to epoch totals
loss = np.sum((output-xb)**2)
total_loss += loss
# backward
delta = 2*(output-xb)
delta1 = backwards(delta, params, name='output', activation_deriv=sigmoid_deriv)
delta2 = backwards(delta1, params, name='hidden2', activation_deriv=relu_deriv)
delta3 = backwards(delta2, params, name='hidden', activation_deriv=relu_deriv)
backwards(delta3, params, name='layer1', activation_deriv=relu_deriv)
# apply gradient
for k, v in params.items():
if 'grad' in k:
name = k.split('_')[1]
M_params[name] = 0.9*M_params[name] - learning_rate * v
params[name] += M_params[name]
loss_plot.append(total_loss)
if itr % 2 == 0:
print("itr: {:02d} \t loss: {:.2f}".format(itr,total_loss))
if itr % lr_rate == lr_rate-1:
learning_rate *= 0.9
'''
Print Loss
'''
time_seq = np.arange(max_iters)
import matplotlib.pyplot as plt
plt.figure(2)
ax = plt.gca()
ax.set_xlabel('epoch')
ax.set_ylabel('loss')
ax.plot(time_seq, loss_plot, color='r', linewidth=1, alpha=0.6)
plt.pause(1500)
plt.close()
'''
Save Parameters
'''
import pickle
saved_params = {k:v for k,v in params.items() if '_' not in k}
with open('q5_weights.pickle', 'wb') as handle:
pickle.dump(saved_params, handle, protocol=pickle.HIGHEST_PROTOCOL)
# visualize some results
# Q5.3.1
import matplotlib.pyplot as plt
import pickle
with open('q5_weights.pickle', 'rb') as handle:
params = pickle.load(handle)
h1 = forward(valid_x,params,'layer1',relu)
h2 = forward(h1,params,'hidden',relu)
h3 = forward(h2,params,'hidden2',relu)
out = forward(h3,params,'output',sigmoid)
for i in range(904,910):
plt.subplot(2,1,1)
plt.imshow(valid_x[i].reshape(32,32).T)
plt.subplot(2,1,2)
plt.imshow(out[i].reshape(32,32).T)
plt.show()
# evaluate PSNR
# Q5.3.2
from skimage.measure import compare_psnr as psnr
psnr_sum = 0
for i in range(valid_x.shape[0]):
psnri = psnr(valid_x[i], out[i])
psnr_sum += psnri
psnr_avg = psnr_sum / valid_x.shape[0]
print(psnr_avg)
|
[
"704242527zzh@gmail.com"
] |
704242527zzh@gmail.com
|
e1d0797e7fcd7aa7369ef9da9997aeb02dcfa524
|
065acd70109d206c4021954e68c960a631a6c5e3
|
/shot_detector/charts/event/trend/__init__.py
|
15bd2dac2180be3b7cc4a71b37ee456d5879c914
|
[] |
permissive
|
w495/python-video-shot-detector
|
bf2e3cc8175687c73cd01cf89441efc349f58d4d
|
617ff45c9c3c96bbd9a975aef15f1b2697282b9c
|
refs/heads/master
| 2022-12-12T02:29:24.771610
| 2017-05-15T00:38:22
| 2017-05-15T00:38:22
| 37,352,923
| 20
| 3
|
BSD-3-Clause
| 2022-11-22T01:15:45
| 2015-06-13T01:33:27
|
Python
|
UTF-8
|
Python
| false
| false
| 357
|
py
|
# -*- coding: utf8 -*-
"""
...
"""
from __future__ import absolute_import, division, print_function
from .mean_abs_diff_event_chart import MeanAbsDiffEventChart
from .mean_atan_diff_event_chart import MeanAtanDiffEventChart
from .mean_atan_vote_event_chart import MeanAtanVoteEventChart
from .mean_sign_diff_event_chart import MeanSignDiffEventChart
|
[
"w@w-495.ru"
] |
w@w-495.ru
|
7baa37d309bda5e9063a2058730e93e7ccb6b4e2
|
6fcfb638fa725b6d21083ec54e3609fc1b287d9e
|
/python/baohaojun_system-config/system-config-master/bin/windows/Imap4Monitor.py
|
7d5f014713909282d565804717534de509f9686b
|
[] |
no_license
|
LiuFang816/SALSTM_py_data
|
6db258e51858aeff14af38898fef715b46980ac1
|
d494b3041069d377d6a7a9c296a14334f2fa5acc
|
refs/heads/master
| 2022-12-25T06:39:52.222097
| 2019-12-12T08:49:07
| 2019-12-12T08:49:07
| 227,546,525
| 10
| 7
| null | 2022-12-19T02:53:01
| 2019-12-12T07:29:39
|
Python
|
UTF-8
|
Python
| false
| false
| 30
|
py
|
./Imap4Monitor/Imap4Monitor.py
|
[
"659338505@qq.com"
] |
659338505@qq.com
|
8e5be80571e060b33f9d240b87114ee0b07fe0b2
|
65cc6a8877896ef69dd03d7b5eee5bed56e5371f
|
/CSDN/Django第二季/Django第二季课程演示源码/Dj020701/Dj020701/settings.py
|
865ff0effc452c887c248158725964b8f17983c0
|
[] |
no_license
|
wuhongyi/DjangoNote
|
34bdb9e82fc379e19b1df0bd7c90e504fa70a40d
|
81ad949ff895feda8131d8bdf5fa1439f962ae37
|
refs/heads/master
| 2020-05-02T17:54:12.270297
| 2019-05-22T14:37:32
| 2019-05-22T14:37:32
| 178,112,720
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,383
|
py
|
"""
Django settings for Dj020701 project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'e6_@!2_dc#7v4@s!i(2fywu3ytnf*e9x_p@9n4+)vu$pp*vjf('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'student',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Dj020701.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Dj020701.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
HOST = "192.168.182.10"
USER = 'root'
PASSWORD = '1234.Com'
DB = 'TestDB'
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'student','static'),
]
|
[
"wuhongyi@pku.edu.cn"
] |
wuhongyi@pku.edu.cn
|
64270f6fe251897837c9c2643eec77077f76864b
|
a0b71771c5315b63e16182cf138032539774c7dc
|
/source/approximate/f2_new2_2.py
|
c05a866305ab489d67d879ca8dae5a3d2b3f2786
|
[
"MIT"
] |
permissive
|
kunalghosh/T-61.5060-Algorithmic-Methods-of-Data-Mining
|
8af35952f4490ab86080419867ace35d978e5370
|
718b1ca4a3f83f1b244bb7ddeb5cc430b2967516
|
refs/heads/master
| 2021-01-10T07:57:03.536438
| 2017-03-18T15:25:44
| 2017-03-18T15:25:44
| 52,148,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,873
|
py
|
from __future__ import print_function
import sys
import numpy as np
import cPickle as pkl
import pprint
RAND_NUMS = 100000000
def getEstimatedF2(counts, n):
# Mean of all values.
return np.mean(n*(2*np.asarray(counts) - 1))
# # def get_next_rand(rands):
# retVal = -1
# try:
# retVal = rands.pop()
# # retVal = rands[-1]
# # rands = rands[:-1]
# except IndexError as e:
# rands.extend(np.random.rand(RAND_NUMS))
# retVal = rands.pop()
# # rands = np.random.rand(RAND_NUMS)
# # retVal = rands[-1]
# # rands = rands[:-1]
# return retVal
def get_next_rand(rands):
retVal = -1
if len(rands) == 0:
rands = np.random.rand(RAND_NUMS)
retVal = rands[-1]
rands = rands[:-1]
return retVal, rands
# def get_rand_idxes(rand_idxes,low,high):
# retVal = -1
# try:
# retVal = rand_idxes[(low,high)].pop()
# except IndexError as e:
# rand_idxes[(low,high)].extend(np.random.randint(low,high,RAND_NUMS))
# except KeyError as e:
# rand_idxes[(low,high)] = list(np.random.randint(low,high,RAND_NUMS))
# finally:
# retVal = rand_idxes[(low,high)].pop()
# return retVal
# def get_rand_idxes(rand_idxes,low,high):
# retVal = -1
# try:
# retVal = rand_idxes.pop()
# # retVal = rand_idxes[-1]
# # rand_idxes = rand_idxes[:-1]
# except IndexError as e:
# rand_idxes.extend(np.random.randint(low,high,RAND_NUMS))
# rand_idxes.pop()
# # rand_idxes = np.random.randint(low,high,RAND_NUMS)
# # retVal = rand_idxes[-1]
# # rand_idxes = rand_idxes[:-1]
# return retVal
# def get_rand_idxes(rand_idxes,low,high):
# retVal = -1
# if len(rand_idxes) == 0:
# rand_idxes = np.random.randint(low,high,RAND_NUMS)
# retVal = rand_idxes[-1]
# rand_idxes = rand_idxes[:-1]
# return retVal, rand_idxes
if __name__ == "__main__":
rands = []
# rand_idxes = {}
rand_idxes = []
outputFile = "f2_values_new_2Jan_new.txt"
dataPkl = 'data.pkl'
S = 4100000
# S = 200
# S = 10
if len(sys.argv) == 2:
try:
S = int(sys.argv[1])
except ValueError as e:
outputFile = sys.argv[1]
if len(sys.argv) == 3:
S = int(sys.argv[1])
outputFile = sys.argv[2]
X_listIdx = {} # Stores {line : [Idx in X_xcount where line appears]}
X_xcount = [] # Stores [line,count] (lines can be duplicates)
n = -1
counter = 0
batch_updater = {}
for line in sys.stdin:
# print(len(rands), len(rand_idxes))
n += 1
# if n % RAND_NUMS == 0:
# print("{} Rows done.".format(n))
if n < S: # pick the first S elements of the stream
X_xcount.append([line, 1]) # line and its count
if line in X_listIdx:
X_listIdx[line].append(n) # store the index of the element
else:
X_listIdx[line] = [n]
else:
prob = (S*1.0)/n # probability of picking the n'th element
# randVal,rands = get_next_rand(rands)
# if len(rand_idxes) == 0:
# rand_idxes = np.random.randint(low,high,RAND_NUMS)
try:
randVal = rands[-1]
rands = rands[:-1]
except IndexError as e:
rands = np.random.rand(RAND_NUMS)
randVal = rands[-1]
rands = rands[:-1]
            if randVal < prob: # Very high probability of picking a line whose index is close to S
#---
# Remove an element
#---
# pick an index from X_xcount to remove ( equal probability of each element )
# idx_to_del,rand_idxes = get_rand_idxes(rand_idxes,0,S)
# if len(rand_idxes) == 0:
# rand_idxes = np.random.randint(low,high,RAND_NUMS)
try:
idx_to_del = rand_idxes[-1]
rand_idxes = rand_idxes[:-1]
except IndexError as e:
rand_idxes = np.random.randint(0,S,RAND_NUMS)
idx_to_del = rand_idxes[-1]
rand_idxes = rand_idxes[:-1]
# while(True):
# # if X_listIdx[line_to_del] is empty delete it and continue
# try:
# line_to_del = X_listIdx.keys()[get_rand_idxes(rand_idxes,0,len(X_listIdx),S)]
# # list_idx = get_rand_idxes(rand_idxes,0,len(X_listIdx[line_to_del]))
# # try:
# # X_listIdx[line_to_del][list_idx], X_listIdx[line_to_del][-1] = X_listIdx[line_to_del][-1], X_listIdx[line_to_del][list_idx]
# # except Exception as e:
# # print(X_listIdx[line_to_del], list_idx)
# # sys.exit(1)
# idx_to_del = X_listIdx[line_to_del].pop()
# except IndexError as e:
# X_listIdx.pop(line_to_del,None)
# continue
# else:
# break
X_xcount[idx_to_del] = [line, 1]
#if line in X_listIdx:
# X_listIdx[line].append(idx_to_del)
#else:
# X_listIdx[line] = [idx_to_del]
else:
# The line was not picked.
# Increment X_xcount[i][1] where X_xcount[i][0] == line
# if line in X_listIdx:
# idxes = X_listIdx[line]
# for idx in idxes:
# X_xcount[idx][1] += 1
                counter += 1
                if line in batch_updater:
                    batch_updater[line] += 1
                else:
                    batch_updater[line] = 1
                if counter == 500:
                    # Flush the batched counts every 500 lines; iterate with `entry`
                    # so the stream variable `line` from the outer loop is not shadowed.
                    for idx, entry in enumerate(X_xcount):
                        line_str = entry[0]
                        if line_str in batch_updater:
                            X_xcount[idx][1] += batch_updater[line_str]
                    batch_updater = {}
                    counter = 0
    # Apply any batched counts that were not yet flushed before computing the estimates.
    for idx, entry in enumerate(X_xcount):
        if entry[0] in batch_updater:
            X_xcount[idx][1] += batch_updater[entry[0]]
    strs,counts = zip(*X_xcount)
counts = np.asarray(counts)
vals = (n+1) * (2*counts-1)
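    # (Added note) This appears to be the AMS second-moment sketch: for a sampled position
    # whose line occurs r times from that position onward (r is the stored count, which
    # starts at 1), m*(2*r - 1) with m = n + 1 is an unbiased estimate of F2 = sum_i f_i^2;
    # the per-sample values are written out for later aggregation.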
np.savetxt(outputFile, vals, fmt="%d")
# vals = []
# for entry in X_xcount:
# val.append((n+1)*(2*entry[1]-1))
# with open(outputFile,"w") as oFile:
# # print(val)
# # print(val, file=oFile)
# oFile.write("{}\n".format(val))
# print("Estimated F2 {}".format(int(getEstimatedF2(zip(*X_xcount)[1], n+1))))
print("{}".format(int(getEstimatedF2(zip(*X_xcount)[1], n+1))))
|
[
"kunalghosh@users.noreply.github.com"
] |
kunalghosh@users.noreply.github.com
|
b2f28a449c54d8272a7ce33e7e59814f7a0f2175
|
8e0c257beaefd47b8906901c8359c7748cc91bdf
|
/core/winApi/__init__.py
|
8a5431f416fc13bc8e5497f2c7a1483549aa4f95
|
[] |
no_license
|
Reqin/tft_rebuild
|
fc2d917e7bb4ff097d7473da8d5110eddca9ebc4
|
2745d3e1ab84b5931ab018e49f7eb245304d8109
|
refs/heads/master
| 2022-11-27T23:28:51.421917
| 2020-07-26T10:53:25
| 2020-07-26T10:53:25
| 279,995,549
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 35
|
py
|
from .winApi import get_cursor_pos
|
[
"reqinfeng2008@gmail.com"
] |
reqinfeng2008@gmail.com
|
728a75114877d6e0d4059039ed2b4a8421eca3e8
|
87f11fae96f19abc0fb0a445fc60270fca853306
|
/tools/numbering.py
|
d77c83f2d334816a64ad1c2d4b5720a553065b6c
|
[
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
nmathewson/walking-onions-wip
|
4e0ce8d597657f850d69364d8d1b58439595a8cc
|
cf9b2f98887943f460489ae01ce66e967552b448
|
refs/heads/master
| 2021-03-02T14:22:02.636942
| 2020-06-10T20:12:18
| 2020-06-10T20:12:18
| 245,874,531
| 0
| 3
|
NOASSERTION
| 2020-05-30T16:21:22
| 2020-03-08T19:25:52
|
Rust
|
UTF-8
|
Python
| false
| false
| 1,932
|
py
|
#!/usr/bin/python3
import os
import re
class Replacing:
def __init__(self, fname):
self.fname = fname
self.newname = fname + ".tmp"
self.inp = open(fname,'r')
self.outp = open(self.newname, 'w')
def finish(self):
self.inp.close()
self.outp.close()
os.rename(self.newname, self.fname)
def abort(self):
self.inp.close()
self.outp.close()
os.unlink(self.newname)
def write(self, s):
return self.outp.write(s)
def __iter__(self):
return iter(self.inp)
class Idx:
def __init__(self):
self.p = [0]*6
def next(self, level):
level -= 1
self.p[level] += 1
self.p[level+1:] = [0]*len(self.p[level+1:])
return ".".join(str(s) for s in self.p[:level+1])
HEADER_RE = re.compile(r'^(#+) .*')
COMMENT_STR = r'<!-- Section '
def revise(fnames):
idx = Idx()
for fname in fnames:
io = Replacing(fname)
last_was_blank = False
try:
for line in io:
if line.startswith(COMMENT_STR):
continue
if line.strip() == '':
if last_was_blank:
continue
last_was_blank = True
else:
last_was_blank = False
m = HEADER_RE.match(line)
                if m is not None:
if line.startswith('# Appendices'):
idx.next(1)
pos = "A"
idx.p[0] = "A"
else:
pos = idx.next(len(m.group(1)))
print("<!-- Section {0} --> <a id='S{0}'></a>\n".format(pos), file=io)
io.write(line)
except:
io.abort()
raise
io.finish()
if __name__ == '__main__':
import sys
revise(sys.argv[1:])
|
[
"nickm@torproject.org"
] |
nickm@torproject.org
|
41a42f6c70aff3b894781f36032df60d7d6b5c7d
|
7309c198b5c70dab1cde69fa010d914e97fb2ea4
|
/NN.py
|
c2fe2cfcac87a5ee9d8e986afd915ea1fa1aafd5
|
[] |
no_license
|
Waryur/Shade-of-RED-or-BLUE-Color-Deep-Learning
|
637e75df02fa79db9774391f40f7c65f13e671c4
|
8d14167cea1ae3313c51fc65604c0b9a86c8e5c2
|
refs/heads/master
| 2020-06-19T23:41:41.161849
| 2019-07-15T03:17:28
| 2019-07-15T03:17:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,405
|
py
|
import numpy as np
import os
from skimage import io
from matplotlib import pyplot as plt
import pickle as pkl
'''
Input will be [12, 1]
Hidden Layers will be 3
with 16 neurons in 1st layer
with 10 neurons in 2nd layer
with 4 neurons in 3rd layer
with 2 neurons in output
'''
DataFolder = os.listdir("C:\\Users\\castrojl\\Desktop\\color database")
read = "C:\\Users\\castrojl\\Desktop\\color database\\"
def tanh_p(x):
return 1 - np.tanh(x)**2
def ReLU(x):
return x * (x > 0)
def dReLU(x):
return 1. * (x > 0)
w1 = np.load("weights1.npy")
b1 = np.load("bias1.npy")
w2 = np.load("weights2.npy")
b2 = np.load("bias2.npy")
w3 = np.load("weights3.npy")
b3 = np.load("bias3.npy")
w4 = np.load("weights4.npy")
b4 = np.load("bias4.npy")
w5 = np.load("weights5.npy")
b5 = np.load("bias5.npy")
#InputData = np.array([#Shades of Red
# [222, 89, 89],
# [222, 13, 13],
# [204, 33, 33],
# [240, 103, 103],
# [186, 71, 71],
# [163, 3, 3],
# [176, 49, 49],
# [196, 0, 0],
# [158, 24, 24],
# [186, 11, 11],
# [255, 0, 0],
# [255, 79, 79],
# [250, 105, 127],
# [255, 148, 164],
# [199, 139, 148],
# [242, 167, 178],
# [217, 176, 177],
# [250, 200, 201],
# #Shades of Blue
# [79, 100, 255],
# [0, 30, 255],
# [84, 104, 255],
# [32, 50, 186],
# [0, 18, 158],
# [5, 22, 150],
# [72, 91, 232],
# [30, 47, 176],
# [0, 21, 179],
# [5, 21, 135],
# [34, 57, 227],
# [68, 82, 189],
# [182, 150, 255],
# [148, 105, 250],
# [167, 167, 242],
# [128, 128, 189],
# [200, 200, 247],
# [174, 174, 214]
# #Other colors
# #[230, 242, 97],
# #[153, 242, 97],
# #[97, 242, 131],
# #[0, 242, 117],
# #[247, 57, 212],
# #[8, 230, 255],
# #[8, 255, 237],
# #[27, 158, 42],
# #[149, 157, 27],
# #[158, 27, 145],
# #[237, 180, 231],
# #[180, 237, 231],
# #[180, 237, 193],
# #[212, 237, 180],
# #[232, 237, 180]
# ])
#TargetData = np.array([[0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [0, 1],#, 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0],# 0],
# [1, 0]# 0],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# #[0, 0, 1],
# ])
#w1 = np.random.randn(6, 3)
#b1 = np.random.randn(6, 1)
#w2 = np.random.randn(6, 6)
#b2 = np.random.randn(6, 1)
#w3 = np.random.randn(10, 6)
#b3 = np.random.randn(10, 1)
#w4 = np.random.randn(6, 10)
#b4 = np.random.randn(6, 1)
#w5 = np.random.randn(2, 6)
#b5 = np.random.randn(2, 1)
#iterations = 5000
#lr = 0.1
#costlist = []
#for i in range(iterations):
# random = np.random.choice(len(InputData))
# InputData1 = InputData[random].reshape(3, 1)
# z1 = np.dot(w1, InputData1) + b1
# a1 = np.tanh(z1)
# z2 = np.dot(w2, a1) + b2
# a2 = np.tanh(z2)
# z3 = np.dot(w3, a2) + b3
# a3 = np.tanh(z3)
# z4 = np.dot(w4, a3) + b4
# a4 = np.tanh(z4)
# z5 = np.dot(w5, a4) + b5
# a5 = ReLU(z5)
# cost = np.sum(np.square(a5 - TargetData[random].reshape(2, 1)))
# if i % 100 == 0:
# c = 0
# for x in range(len(DataFolder)):
# InputData2 = InputData[x].reshape(3, 1)
# z1 = np.dot(w1, InputData2) + b1
# a1 = np.tanh(z1)
# z2 = np.dot(w2, a1) + b2
# a2 = np.tanh(z2)
# z3 = np.dot(w3, a2) + b3
# a3 = np.tanh(z3)
# z4 = np.dot(w4, a3) + b4
# a4 = np.tanh(z4)
# z5 = np.dot(w5, a4) + b5
# a5 = ReLU(z5)
# c += np.sum(np.square(a5 - TargetData[x].reshape(2, 1)))
# costlist.append(c)
# #print(cost)
# #backprop
# dcda5 = 2 * (a5 - TargetData[random].reshape(2, 1))
# da5dz5 = tanh_p(z5)
# dz5dw5 = a4
# dz5da4 = w5
# da4dz4 = tanh_p(z4)
# dz4dw4 = a3
# dz4a3 = w4
# da3dz3 = tanh_p(z3)
# dz3dw3 = a2
# dz3da2 = w3
# da2dz2 = tanh_p(z2)
# dz2dw2 = a1
# dz2da1 = w2
# da1dz1 = dReLU(z1)
# dz1dw1 = InputData1
# dw5 = dcda5 * da5dz5
# db5 = np.sum(dw5, axis=1, keepdims=True)
# w5 = w5 - lr * np.dot(dw5, dz5dw5.T)
# b5 = b5 - lr * db5
# dw4 = np.dot(dz5da4.T, dw5) * da4dz4
# db4 = np.sum(dw4, axis=1, keepdims=True)
# w4 = w4 - lr * np.dot(dw4, dz4dw4.T)
# b4 = b4 - lr * db4
# dw3 = np.dot(dz4a3.T, dw4) * da3dz3
# db3 = np.sum(dw3, axis=1, keepdims=True)
# w3 = w3 - lr * np.dot(dw3, dz3dw3.T)
# b3 = b3 - lr * db3
# dw2 = np.dot(dz3da2.T, dw3) * da2dz2
# db2 = np.sum(dw2, axis=1, keepdims=True)
# w2 = w2 - lr * np.dot(dw2, dz2dw2.T)
# b2 = b2 - lr * db2
# dw1 = np.dot(dz2da1.T, dw2) * da1dz1
# db1 = np.sum(dw1, axis=1, keepdims=True)
# w1 = w1 - lr * np.dot(dw1, dz1dw1.T)
# b1 = b1 - lr * db1
#print("W1: \n{}\n".format(w1))
#print("B1: \n{}\n".format(b1))
#print("W2: \n{}\n".format(w2))
#print("B2: \n{}\n".format(b2))
#print("W3: \n{}\n".format(w3))
#print("B3: \n{}\n".format(b3))
#print("W4: \n{}\n".format(w4))
#print("B4: \n{}\n".format(b4))
#print("W4: \n{}\n".format(w5))
#print("B4: \n{}\n".format(b5))
#cost = 0
#for x in range(len(InputData)):
# InputData1 = InputData[x].reshape(3, 1)
# z1 = np.dot(w1, InputData1) + b1
# a1 = np.tanh(z1)
# z2 = np.dot(w2, a1) + b2
# a2 = np.tanh(z2)
# z3 = np.dot(w3, a2) + b3
# a3 = np.tanh(z3)
# z4 = np.dot(w4, a3) + b4
# a4 = np.tanh(z4)
# z5 = np.dot(w5, a4) + b5
# a5 = ReLU(z5)
# c = np.sum(np.square(a5 - TargetData[x].reshape(2, 1)))
# print(InputData[x])
# print("Prediction: \n{}\n".format(np.round(a5)))
# print("Target: \n{}\n".format(TargetData[x].reshape(2, 1)))
# print("Cost: {}\n".format(c))
# if np.round(a4[0]) == 1:
# print("It's BLUE\n")
# else:
# print("It's RED\n")
# cost += c
#print("Total Cost: {}\n".format(cost))
#if cost < 0.09:
# np.save("weights1", w1)
# np.save("weights2", w2)
# np.save("weights3", w3)
# np.save("weights4", w4)
# np.save("weights5", w5)
# np.save("bias1", b1)
# np.save("bias2", b2)
# np.save("bias3", b3)
# np.save("bias4", b4)
# np.save("bias5", b5)
#plt.plot(costlist)
#plt.show()
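# (Added note) Inference loop below: the loaded weights w1..w5 / b1..b5 are applied as
# four tanh layers followed by a ReLU output layer; a rounded value of 1 at output
# index 0 is reported as BLUE, anything else as RED.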
while True:
print("Please enter the RGB value for Shade of RED or BLUE")
r = input("For R: ")
g = input("For G: ")
b = input("For B: ")
x = np.array([[float(r)],
[float(g)],
[float(b)]])
z1 = np.dot(w1, x) + b1
a1 = np.tanh(z1)
z2 = np.dot(w2, a1) + b2
a2 = np.tanh(z2)
z3 = np.dot(w3, a2) + b3
a3 = np.tanh(z3)
z4 = np.dot(w4, a3) + b4
a4 = np.tanh(z4)
z5 = np.dot(w5, a4) + b5
a5 = ReLU(z5)
if np.round(a5[0]) == 1:
print("It's BLUE\n")
else:
print("It's RED\n")
|
[
"noreply@github.com"
] |
Waryur.noreply@github.com
|
ee85d6966bff4e8284c95b45b43aa2eb8b930b69
|
b1a0fd07fd8c28a5fa6c8ed1982e6f74bc6e6e53
|
/12_basic rnn03.py
|
0356edcc8fa09d2fc8a587040f133012be482d86
|
[] |
no_license
|
BingzheJiang/pytorch-
|
805ecccae81abbdf29e6b556c4edd5df6baab66c
|
8a555b80d22d1165fa4e4e0da95b0bc0a9c388d5
|
refs/heads/main
| 2023-07-04T11:55:23.449032
| 2021-08-03T03:31:38
| 2021-08-03T03:31:38
| 384,365,588
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,298
|
py
|
'''
Train an RNN so that "hello" -> "ohlol".
The input is "hello"; with the dictionary e -> 0, h -> 1, l -> 2, o -> 3 it maps to 10223.
The one-hot encoding gives the correspondence below (input char/index/one-hot vs. target char/index):
h 1 0100        o 3
e 0 1000        h 1
l 2 0010        l 2
l 2 0010        o 3
o 3 0001        l 2
The input contains the four distinct characters "helo", so input_size = 4,
hidden_size = 4 and batch_size = 1.
Getting the tensor dimensions of the RNN model right is essential:
rnn = torch.nn.RNN(input_size=input_size, hidden_size=hidden_size, num_layers=num_layers)
outputs, hidden_outs = rnn(inputs, hiddens):
    inputs of shape (seqSize, batch, input_size)
    hiddens of shape (numLayers, batch, hidden_size)
    outputs of shape (seqSize, batch, hidden_size)
    hidden_outs of shape (numLayers, batch, hidden_size)
cell = torch.nn.RNNCell(input_size=input_size, hidden_size=hidden_size)
output, hidden_out = cell(input, hidden):
    input of shape (batch, input_size)
    hidden of shape (batch, hidden_size)
    output of shape (batch, hidden_size)
    hidden_out of shape (batch, hidden_size)
Here seqSize is the sequence length, batch the batch size, input_size the number of input
features, numLayers the number of stacked RNN layers and hidden_size the hidden dimension.
'''
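# (Added shape sanity check, not part of the original script) With input_size = hidden_size = 4,
# num_layers = 1, batch_size = 1 and seq_len = 5 the shapes above work out as:
#   out, h_n = torch.nn.RNN(4, 4, num_layers=1)(torch.zeros(5, 1, 4), torch.zeros(1, 1, 4))
#   out.shape == torch.Size([5, 1, 4])   # (seqSize, batch, hidden_size)
#   h_n.shape == torch.Size([1, 1, 4])   # (numLayers, batch, hidden_size)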
import torch
idx2char = ['e', 'h', 'l', 'o'] # used to turn predicted indices back into characters
x_data = [1, 0, 2, 2, 3] # input sequence ("hello")
y_data = [3, 1, 2, 3, 2] # target labels ("ohlol")
one_hot_lookup = [ [1, 0, 0, 0], # one-hot lookup table, for easy conversion
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1] ]
x_one_hot = [one_hot_lookup[x] for x in x_data] # pick rows of one_hot_lookup in the order "1 0 2 2 3"
'''The result is x_one_hot = [ [0, 1, 0, 0],
                               [1, 0, 0, 0],
                               [0, 0, 1, 0],
                               [0, 0, 1, 0],
                               [0, 0, 0, 1] ]
which is exactly the one-hot version of the input vector, i.e. the characters of 'hello'.
'''
def RNN_module():
input_size = 4
hidden_size = 4
num_layers = 1
batch_size = 1
seq_len = 5
inputs = torch.Tensor(x_one_hot).view(seq_len, batch_size, input_size)
labels = torch.LongTensor(y_data)
print(labels.shape)
class RNNModel(torch.nn.Module):
def __init__(self,input_size, hidden_size, batch_size, num_layers=1):
super(RNNModel, self).__init__()
self.num_layers = num_layers
self.input_size = input_size
self.batch_size = batch_size
self.hidden_size = hidden_size
self.rnn = torch.nn.RNN(input_size=self.input_size, hidden_size=self.hidden_size, num_layers=num_layers)
        def forward(self, input):
            hidden = torch.zeros(self.num_layers, self.batch_size, self.hidden_size) # initial hidden state (h0)
            out, _ = self.rnn(input, hidden) # out = hidden state at every step, _ = final hidden state
            # out has shape torch.Size([5, 1, 4]), i.e. (seqSize, batch, hidden_size)
            return out.view(-1, self.hidden_size) # reshaped to (5, 4)
net = RNNModel(input_size, hidden_size,batch_size, num_layers)
    #--- loss and optimizer ---
    criterion = torch.nn.CrossEntropyLoss() # cross-entropy loss
    optimizer = torch.optim.Adam(net.parameters(), lr = 0.01)
    #--- training loop ---
    for epoch in range(100): # train for 100 epochs
        optimizer.zero_grad()
        outputs = net(inputs)
        # print(outputs.shape)
        # print(labels.shape)
        loss = criterion(outputs, labels) # outputs (5, 4), labels (5,)
        loss.backward()
        optimizer.step()
        _, idx = outputs.max(dim=1) # index of the highest predicted score along dim 1
        idx = idx.data.numpy()
        print('Predicted: ', ''.join([idx2char[x] for x in idx]), end='')
        print(', Epoch [%d/100] loss = %.3f' % (epoch + 1, loss.item()))
if __name__=="__main__":
RNN_module()
|
[
"1219255570@qq.com"
] |
1219255570@qq.com
|
8fe41995148ecdbd6b30e5d1641322e6133f6478
|
7556e256fca5d92481fb3cfdc7f7f14c4f7e0a82
|
/istagram/users/urls.py
|
0c59844ba9afed220d3181c532d32f79c4d5ff9b
|
[] |
no_license
|
faisol-chehumar/istagram
|
3585654d544b1a5419f51986710fc4ec8751a443
|
a52866b50d4c285dedeed288451e0f834ad892b0
|
refs/heads/master
| 2023-04-28T22:35:35.334503
| 2019-11-07T10:25:56
| 2019-11-07T10:25:56
| 220,209,847
| 0
| 0
| null | 2023-04-21T20:40:39
| 2019-11-07T10:25:13
|
Python
|
UTF-8
|
Python
| false
| false
| 164
|
py
|
from django.urls import path
from .views import ProfileDetailView
urlpatterns = [
path('<str:username>/', ProfileDetailView.as_view(), name="profile_detail")
]
|
[
"faisol@tellscore.com"
] |
faisol@tellscore.com
|
c406ee91ac9285040485ea8f2059d302c28b6c50
|
f9a526e5f034a866595e39c6564f5269c9cfe388
|
/String_Array/2.py
|
e92ca9cb946deb970eddff7d1b0738776d2b76b6
|
[] |
no_license
|
ketanpandey01/DS_Algo
|
c660b9c5193aa5d6e077ab7b9bdb08c363a6ce24
|
31d2f22b85b87ff8438a5977684bb8ef428d1964
|
refs/heads/master
| 2022-12-14T15:21:56.477120
| 2020-09-14T06:42:29
| 2020-09-14T06:42:29
| 293,096,251
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 652
|
py
|
# https://www.geeksforgeeks.org/given-a-string-print-all-possible-palindromic-partition/
def palindromiPartitions(inputStr):
    # Recursively split the string so that every piece is a palindrome and
    # collect all such partitions.
    if not inputStr:
        return [[]]
    partitionList = []
    for end in range(1, len(inputStr)+1):
        prefix = inputStr[:end]
        if checkPalindrome(prefix):
            for rest in palindromiPartitions(inputStr[end:]):
                partitionList.append([prefix] + rest)
    return partitionList

def checkPalindrome(str1):
    if(str1 == str1[::-1]):
        return True
    else:
        return False

inputStr = input()
for partition in palindromiPartitions(inputStr):
    print(' '.join(partition))
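# (Added example) For the input "nitin" the loop above prints the three partitions:
#   n i t i n
#   n iti n
#   nitin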
|
[
"ketanpandey01@gmail.com"
] |
ketanpandey01@gmail.com
|
3ae336c77b34de633de2f368683dd45a023a6a36
|
b4a0b784536c52d04c79735d0468f6d91dc7754c
|
/tools/set_LESCell.py
|
1afc1f9e8c9d50e070ffe241efe9fed5c3ce2d6a
|
[] |
no_license
|
ChanJeunlam/mpasowork
|
de773c1789567955b6d9dbc7fbeae08331c0421a
|
e50be1d8fb1f0b5ff051dcd68fc89067cdd20b6c
|
refs/heads/master
| 2023-08-28T18:16:19.041903
| 2021-10-25T13:51:15
| 2021-10-25T13:51:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,873
|
py
|
#!/usr/bin/env python3
"""
This script sets the lesCell array in a mesh, which is the mask for cells that run LES.
Qing Li, 20200205
"""
import sys
import netCDF4
import numpy as np
import argparse
from scipy import spatial
# process the input arguments
parser = argparse.ArgumentParser(description="""
Read in an MPAS grid file and add/modify the lesCell array for LES mask.
""")
parser.add_argument('-f', '--file', action='store', dest='fname_in',
metavar='GRIDFILE', required=True, help='MPAS grid file name')
parser.add_argument('-t', '--type', action='store', dest='type',
                    metavar='INPUTTYPE', required=True, help='Input type, \'all\', \'cellid\' or \'location\'.')
parser.add_argument('-i', '--cellid', action='store', dest='cellid',
metavar='CELLID', nargs='+',
help='List of cell IDs. Only used when type is \'cellid\'.')
parser.add_argument('-x', '--xlocation', action='store', dest='xloc',
metavar='XLOCATION', nargs='+',
help='List of x locations in fraction (0, 1). Only used when type is \'location\'.')
parser.add_argument('-y', '--ylocation', action='store', dest='yloc',
metavar='YLOCATION', nargs='+',
help='List of y locations in fraction (0, 1). Only used when type is \'location\'.')
parser.add_argument('-g', '--graphpart', action='store', dest='fgraph',
help='Graph partitioning for the MPAS grid.')
parser.add_argument('--version', action='version', version='%(prog)s: 1.0')
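# Example invocations (added illustration; the mesh and graph file names are hypothetical):
#   ./set_LESCell.py -f mesh.nc -t cellid -i 12 345
#   ./set_LESCell.py -f mesh.nc -t location -x 0.25 0.75 -y 0.5 0.5 -g graph.info.part.16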
# parsing arguments and save to args
args=parser.parse_args()
# read input file
fin = netCDF4.Dataset(args.fname_in, 'r+')
# Get info from input file
xCell = fin.variables['xCell'][:]
yCell = fin.variables['yCell'][:]
nCells = len(fin.dimensions['nCells'])
indexToCellID = fin.variables['indexToCellID'][:]
# read or create lesCell
if 'lesCell' in fin.variables:
lesCell = fin.variables['lesCell']
else:
lesCell = fin.createVariable('lesCell', fin.variables['indexToCellID'].dtype, ('nCells',))
lesCell_local = np.zeros( (nCells,) )
# find the cells
print('\nFinding LES Cell...')
print('----------------')
if args.type == 'all':
idx = [i for i in np.arange(nCells)]
cidx = np.array(idx)
idLESCells = indexToCellID[cidx]
print('Using all cells')
print(indexToCellID)
elif args.type == 'cellid':
assert args.cellid is not None, 'Cell ID is required when input type is \'cellid\'.'
idx = []
for cellID in args.cellid:
idx.append(int(cellID)-1)
print('Cell ID: {:s}'.format(cellID))
# for test
# print(indexToCellID[int(cellID)-1])
cidx = np.array(idx)
idLESCells = indexToCellID[cidx]
elif args.type == 'location':
assert args.xloc is not None, 'List of x locations is required when input type is \'location\'.'
assert args.yloc is not None, 'List of y locations is required when input type is \'location\'.'
assert len(args.yloc) == len(args.xloc), 'List of y locations should have the same number of elements as the list of x locations.'
xFracList = np.array([float(x) for x in args.xloc])
yFracList = np.array([float(y) for y in args.yloc])
assert xFracList.max() < 1.0 and xFracList.min() > 0.0, 'x locations should be in (0, 1)'
assert yFracList.max() < 1.0 and yFracList.min() > 0.0, 'y locations should be in (0, 1)'
xCellMax = xCell.max()
xCellMin = xCell.min()
yCellMax = yCell.max()
yCellMin = yCell.min()
npoints = len(xFracList)
xCellList = np.zeros(npoints)
yCellList = np.zeros(npoints)
for i in np.arange(npoints):
xCellList[i] = xCellMin + xFracList[i] * (xCellMax - xCellMin)
yCellList[i] = yCellMin + yFracList[i] * (yCellMax - yCellMin)
# select nearest neighbor
pts = np.array(list(zip(xCellList, yCellList)))
tree = spatial.KDTree(list(zip(xCell, yCell)))
p = tree.query(pts)
cidx = p[1]
# list of indices
idLESCells = indexToCellID[cidx]
xLESCells = xCell[cidx]
yLESCells = yCell[cidx]
# print
for i in np.arange(len(idLESCells)):
print('Cell {:d} {:6d}: {:10.2f} ({:4.2f}), {:10.2f} ({:4.2f})'.format(i+1, idLESCells[i], \
xLESCells[i], xFracList[i], yLESCells[i], yFracList[i]))
# for test
# print('{:6.4f} {:6.4f}'.format((xCell[cidx[i]]-xCellMin)/(xCellMax-xCellMin), (yCell[cidx[i]]-yCellMin)/(yCellMax-yCellMin)))
else:
    raise ValueError('Input type should be \'all\', \'cellid\' or \'location\'.')
print('')
# check graph partitioning for the les cells
if args.fgraph is not None:
print('\nChecking graph partitioning...')
print('----------------')
gpdata = np.loadtxt(args.fgraph)
for i in np.arange(cidx.size):
print('Cell ID: {:d}, Partitioning: {:4d}'.format(idLESCells[i], int(gpdata[idLESCells[i]-1])))
print('')
# save lesCell
lesCell_local[cidx] = 1
lesCell[:] = lesCell_local
fin.close()
|
[
"lqingpku@gmail.com"
] |
lqingpku@gmail.com
|
5d81946181be0b9f5b740e80f1935a0eb3359a53
|
e068f440b60a9ace1f42f278b0498dfc66e17d1a
|
/site/apps/shift_explorer.py
|
62a43287bba6d8385a23a3dcdc6a25df83e3880d
|
[] |
no_license
|
fundeIT/cmif
|
1b211c6bf44d177c3a6959ed98f3129010cb1acc
|
34a08fea60d67f3bccb3b8892372c38e76fe6fa7
|
refs/heads/master
| 2022-12-18T11:56:58.177074
| 2021-11-15T20:09:24
| 2021-11-15T20:09:24
| 213,757,972
| 0
| 0
| null | 2022-12-14T00:01:14
| 2019-10-08T21:26:07
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 12,341
|
py
|
import io
import base64
import numpy as np
import pandas as pd
import sqlite3
import urllib
import dash_table
from dash_table.Format import Format
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.graph_objs as go
import plotly.express as px
import geopandas
import matplotlib.pyplot as plt
from app import app
import queries as qry
DBNAME = 'data/master.db'
def get_data(year, month=None):
conn = sqlite3.connect(DBNAME)
option = ''
if month:
option = f"month = {month} AND "
stmt = f"""
SELECT
office, unit, line, source,
SUBSTR(object, 0, 3) AS head,
SUBSTR(object, 0, 4) AS subhead,
object,
SUM(modified - approved) AS shifted
FROM accrued
WHERE
year={year} AND
{option}
SUBSTR(office, 3, 2) = '00'
GROUP BY office, unit, line, source, head, subhead, object
ORDER BY office, unit, line, source, head, subhead, object
"""
data = pd.read_sql(stmt, conn)
conn.close()
return data
MONTHS = {
1 : 'ENE', 2 : 'FEB', 3 : 'MAR',
4 : 'ABR', 5 : 'MAY', 6 : 'JUN',
7 : 'JUL', 8 : 'AGO', 9 : 'SEP',
10 : 'OCT', 11 : 'NOV', 12 : 'DIC'
}
# Getting valid periods
def get_periods():
conn = sqlite3.connect(DBNAME)
stmt_years = """
SELECT DISTINCT(year)
FROM accrued
ORDER BY year
"""
stmt_months = """
SELECT DISTINCT(month)
FROM accrued
WHERE year={}
ORDER BY month
"""
y = conn.cursor()
m = conn.cursor()
periods = {
year[0] : [
month[0] for month in m.execute(stmt_months.format(year[0]))
]
for year in y.execute(stmt_years)
}
conn.close()
return periods
def get_structure(year, office):
stmt = """
SELECT est, est_name FROM
(
SELECT year, office, line AS est, line_name AS est_name
FROM line
UNION
SELECT year, office, unit AS est, unit_name AS est_name
FROM unit
)
WHERE year={} AND office='{}'
ORDER BY year, office, est, est_name
""".format(year, office)
conn = sqlite3.connect(DBNAME)
data = pd.read_sql(stmt, conn)
conn.close()
return data
periods = get_periods()
year = list(periods.keys())[-1]
data = get_data(year)
offices = qry.get_offices(year)
structure = get_structure(year, '0100')
def make_year_control():
years = list(periods.keys())
control = html.Div([
html.Label('Año'),
dcc.Dropdown(
id = 'year_control',
options = [
{
'label': key,
'value': key,
} for key in years
],
value=years[-1],
clearable = False
)
])
return control
def make_month_control():
control = html.Div([
html.Label('Mes'),
dcc.Dropdown(
id='month_control',
options = [
{
'label': MONTHS[month],
'value': month
} for month in periods[year]
]
)
])
return control
def make_office_control():
offices = qry.get_offices(year).to_dict('records')
control = html.Div([
html.Label('Oficinas'),
dcc.Dropdown(
id = 'office_control',
options = [
{
'label': '{} - {}'.format(rec['office'], rec['office_name']),
'value': rec['office']
} for i, rec in enumerate(offices)
],
disabled = False,
)
])
return control
def make_structure_control():
control = html.Div([
html.Label('Unidades/Líneas'),
dcc.Dropdown(
id = 'structure_control',
options = [
{
'label': '{} - {}'.format(rec['est'], rec['est_name']),
'value': rec['est']
} for i, rec in enumerate(structure.to_dict('records'))
],
disabled = True,
)
])
return control
def make_detail_control():
control = html.Div([
html.Label('Nivel de detalle'),
dcc.Dropdown(
id = 'detail_control',
options = [
{'label': 'Rubro', 'value': 2},
{'label': 'Cuenta', 'value': 3},
{'label': 'Objeto específico', 'value': 5},
],
)
])
return control
def prepare_figure(df):
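    # (Added note) Build the Sankey link lists: rows with shifted >= 0 are drawn as green
    # flows into their target node, rows with shifted < 0 have source and target swapped
    # and are drawn in red; node labels are the '-'-joined values of the grouping keys.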
source = []
target = []
value = []
label = []
color = []
keys = ['global'] + list(df.columns[:-1])
df['global'] = df['shifted'].apply(lambda val: 'in' if val >= 0 else 'out')
if 'office' in df.columns:
df['office'] = df['office'].apply(
lambda s: qry.get_office_name(s, offices)
)
for i in range(0, len(keys) - 1):
subset = df.groupby(keys[0:i + 2])['shifted'].sum().reset_index()
subset = subset[subset['shifted'] != 0]
for row in subset.iterrows():
record = row[1].to_dict()
source_label = '-'.join([record[key] for key in list(record.keys())[1:-2]])
target_label = '-'.join([record[key] for key in list(record.keys())[1:-1]])
if record['shifted'] < 0:
source_label, target_label = target_label, source_label
color.append('rgba(255, 0, 0, 0.4)')
else:
color.append('rgba(0, 255, 0, 0.4)')
"""
if source_label == '':
source_label = 'A'
if target_label == '':
target_label = 'B'
"""
if not source_label in label:
label.append(source_label)
source_index = label.index(source_label)
if not target_label in label:
label.append(target_label)
target_index = label.index(target_label)
source.append(source_index)
target.append(target_index)
value.append(abs(record['shifted']))
fig = {
'data': [
go.Sankey(
link = dict(
source = source,
target = target,
value = value,
color = color
),
node = dict(
label = label,
color = 'blue'
)
),
],
'layout': go.Layout(
font_size = 6,
)
}
return fig
def make_figure():
df = data.groupby('office')['shifted'].sum().reset_index()
return html.Div([
dcc.Graph(
id = 'shifted_figure',
figure = prepare_figure(df),
config = {
'displaylogo': False,
},
)
])
def make_table(df=None):
if not df:
df = data.groupby('office')['shifted'].sum().reset_index()
df = df.round(2)
columns = [{'name': col, 'id': col} for col in df.columns]
return html.Div([
dash_table.DataTable(
id = 'shifted_table',
columns = columns,
data = df.to_dict('records'),
sort_action = 'native',
style_data = {
'whiteSpace': 'normal',
'height': 'auto'
},
style_as_list_view = True,
style_header = {
'backgroundColor': 'white',
'fontWeight': 'bold'
},
page_size = 50,
fill_width = False,
)
])
txt_header = '''
# Modificaciones presupuestarias
'''
txt_intro = '''
Esta herramienta permite revisar los cambios hechos en el presupuesto de un determinado período.
'''
content = dbc.Container([
dbc.Row([
dbc.Col(dcc.Markdown(txt_header))
]),
dbc.Row([
dbc.Col([
dcc.Markdown(txt_intro),
make_year_control(),
make_month_control(),
make_office_control(),
make_structure_control(),
make_detail_control(),
], md=4),
dbc.Col(
dbc.Tabs([
dbc.Tab([
make_figure(),
html.A(
'Descargar CSV',
# href='static/budget_by_object.csv',
download='budget.csv',
id='download_csv_shifts',
className='btn btn-primary'
),
], label='Gráficas', tab_id='yearly'),
dbc.Tab([
make_table(),
], label='Tabla', tab_id='map'),
], id='tabs'),
),
]),
])
layout = html.Div([content,])
@app.callback(
[
Output(component_id='month_control', component_property='options'),
Output(component_id='office_control', component_property='options'),
],
[
Input(component_id='year_control', component_property='value'),
]
)
def update_months(tmp_year):
year = tmp_year
    month_options = [
{
'label': MONTHS[month],
'value': month
} for month in periods[year]
]
offices = qry.get_offices(year).to_dict('records')
offices_options = [
{
'label': '{} - {}'.format(rec['office'], rec['office_name']),
'value': rec['office']
} for i, rec in enumerate(offices)
]
return month_options, offices_options
@app.callback(
[
Output(component_id='structure_control', component_property='disabled'),
Output(component_id='structure_control', component_property='options'),
],
[
Input(component_id='office_control', component_property='value'),
]
)
def update_structure(office):
disabled = True
if office:
disabled = False
if not disabled:
structure = get_structure(year, office).to_dict('records')
else:
structure = get_structure(year, '0100').to_dict('records')
options = [
{
'label': '{} - {}'.format(rec['est'], rec['est_name']),
'value': rec['est']
} for i, rec in enumerate(structure)
]
return disabled, options
@app.callback(
[
Output(component_id='download_csv_shifts', component_property='href'),
Output(component_id='shifted_figure', component_property='figure'),
Output(component_id='shifted_table', component_property='data'),
Output(component_id='shifted_table', component_property='columns'),
],
[
Input(component_id='year_control', component_property='value'),
Input(component_id='month_control', component_property='value'),
Input(component_id='office_control', component_property='value'),
Input(component_id='structure_control', component_property='value'),
Input(component_id='detail_control', component_property='value'),
]
)
def update_download(year, month, office, structure, detail):
data = get_data(year, month)
group = []
if not office and not detail:
group.append('office')
if office:
data = data[data['office'] == office]
if structure:
if len(structure) == 2:
group.append('unit')
data = data[data['unit'] == structure]
else:
group.append('line')
data = data[data['line'] == structure]
else:
if office:
group.append('unit')
if detail:
data['object'] = data['object'].apply(lambda s: s[:detail])
group.append('object')
data = data.groupby(group)['shifted'].sum().reset_index()
fig = prepare_figure(data)
csv_string = data.to_csv(index=False)
csv_string = 'data:text/csv;charset=utf-8,' + urllib.parse.quote(csv_string)
data = data[data.shifted != 0].round(2)
columns = [{'name': col, 'id': col} for col in data.columns]
return csv_string, fig, data.to_dict('records'), columns
|
[
"jailop@protonmail.com"
] |
jailop@protonmail.com
|
62aabdad0a6857e754276e8d7ebae0643d4e73a2
|
0549411cae611072373e879ec6cd817cfdcf71be
|
/gaudinspect/controller/queue.py
|
6c338f7eb022c10b0f37d2fd509002d4cb1d27c6
|
[] |
no_license
|
insilichem/gaudinspect
|
47cf37a7476f9805c1161882b3fd86676a33ee36
|
cb05e30905d29beac00e72d127b0c348899e17a3
|
refs/heads/master
| 2021-01-19T21:13:29.092642
| 2017-04-19T10:09:25
| 2017-04-19T10:09:25
| 88,628,175
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,093
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import os
from collections import OrderedDict
from PyQt4 import QtGui
from .base import GAUDInspectBaseChildController
class GAUDInspectQueueController(GAUDInspectBaseChildController):
def __init__(self, **kwargs):
super(GAUDInspectQueueController, self).__init__(**kwargs)
self.dialog = self.parent().menu.queue_dialog
self.table = self.dialog.table
self.jobs = OrderedDict()
self.currentjob = None
self.signals()
def signals(self):
self.dialog.add_job.clicked.connect(self.add_job)
self.dialog.del_job.clicked.connect(self.del_job)
self.dialog.del_all.clicked.connect(self.del_all)
self.dialog.start_queue.clicked.connect(self.start_queue)
self.dialog.stop_queue.clicked.connect(self.stop_queue)
# Slots
def add_job(self):
paths, f = QtGui.QFileDialog.getOpenFileNames(
self.dialog, 'Choose a GAUDI Input file',
os.getcwd(), "*.gaudi-input")
for path in paths:
i = self.dialog.table.rowCount()
job = GAUDInspectJobHelper(
path, index=i, runner=self.parent().progress)
path_item = QtGui.QTableWidgetItem(job.path)
status_item = QtGui.QTableWidgetItem(job.status)
self.jobs[path] = job
self.table.insertRow(i)
self.table.setItem(i, 0, path_item)
self.table.setItem(i, 1, status_item)
def del_job(self, i=None):
if i is None:
i = self.table.currentRow()
path = self.table.item(i, 0).text()
job = self.jobs[path]
if job.status == job.WORKING:
self.view.status('Cannot remove job in process')
else:
del self.jobs[path]
self.table.removeRow(i)
def del_all(self):
for i in reversed(range(self.table.rowCount())):
self.del_job(0)
def set_status(self, i, status):
self.table.item(i, 1).setText(status)
# Queue handlers
def start_queue(self):
self.queue_started()
self.process_queue()
def process_queue(self, exit_code=None):
pending = []
for p, j in self.jobs.items():
if j.status == j.PENDING:
pending.append(j)
elif j.status == j.WORKING:
                if exit_code:  # an error occurred
j.status = j.FAILED
else: # everything went OK
j.status = j.FINISHED
self.set_status(j.index, j.status)
try:
self.currentjob = job = pending[0]
except IndexError: # Queue is empty!
self.queue_stopped()
else:
job.run()
job.status = job.WORKING
self.set_status(job.index, job.status)
job.process.finished.connect(self.process_queue)
def queue_started(self):
self.enable_buttons(False)
def queue_stopped(self):
self.enable_buttons(True)
def stop_queue(self):
job = self.currentjob
job.process.finished.disconnect(self.process_queue)
job.process.kill()
job.status = job.ABORTED
self.set_status(job.index, job.status)
self.queue_stopped()
# Helpers
def enable_buttons(self, value):
buttons = [self.dialog.add_job, self.dialog.del_job, self.dialog.del_all,
self.dialog.start_queue]
for btn in buttons:
btn.setEnabled(value)
class GAUDInspectJobHelper(object):
PENDING = 'Pending'
WORKING = 'Running'
FINISHED = 'Finished'
FAILED = 'Failed'
ABORTED = 'Aborted'
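    # (Added note) Job lifecycle: PENDING -> WORKING when the job is picked from the queue,
    # then FINISHED (clean exit) or FAILED (non-zero exit code), or ABORTED if the queue is
    # stopped while the job is running.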
def __init__(self, path, status=None, runner=None, index=None):
super(GAUDInspectJobHelper, self).__init__()
self.path = path
self.status = self.PENDING if status is None else status
self.runner = runner
self.index = index
def run(self):
self.process = self.runner.run(self.path)
|
[
"jaime.rogue@gmail.com"
] |
jaime.rogue@gmail.com
|
ce28446d9d2c9a8c3d1d9d2e9ee9419513550c58
|
d3c02bb6356d642fcea23a1897f93f64483c0c15
|
/School/School/settings.py
|
b0ebb0f676829c9a99ff719f3bf8ae627c3b5fff
|
[] |
no_license
|
pioneeringdev/Django-Student-Teacher-App
|
46db8e8bcae56c6731cad868b4bbf5eb8abc0ca7
|
52dc139e7f91ebe00e8b3ae43c336451df3ac521
|
refs/heads/master
| 2020-04-21T04:23:01.045178
| 2019-02-11T16:32:15
| 2019-02-11T16:32:15
| 169,311,553
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,878
|
py
|
"""
Django settings for School project.
Generated by 'django-admin startproject' using Django 1.11.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOGIN_URL='/login'
LOGIN_REDIRECT_URL = '/'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4fdn7qh42kst+$4jbp%30h0l2=atoz(q110pwd#uxpyd)ravpz'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
AUTH_USER_MODEL = "School.User"
TEACHER_MODEL = "School.Teacher"
STUDENT_MODEL = "School.Student"
# Application definition
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 10,
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
)
}
INSTALLED_APPS = [
'admin_interface',
'colorfield',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'School',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'School.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'School.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': os.getenv('SQL_ENGINE', 'django.db.backends.sqlite3'),
'NAME': os.getenv('SQL_DATABASE', os.path.join(BASE_DIR, 'db.sqlite3')),
'USER': os.getenv('SQL_USER', 'user'),
'PASSWORD': os.getenv('SQL_PASSWORD', 'password'),
'HOST': os.getenv('SQL_HOST', 'localhost'),
'PORT': os.getenv('SQL_PORT', '5432'),
}
}
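# (Added note) With no SQL_* environment variables set, the configuration above falls back
# to a local SQLite file; exporting SQL_ENGINE (e.g. django.db.backends.postgresql),
# SQL_DATABASE, SQL_USER, SQL_PASSWORD, SQL_HOST and SQL_PORT switches it to an external
# database without editing this file.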
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
[
"codedecc116@gmail.com"
] |
codedecc116@gmail.com
|
d7408f761936133d6913c9ae207071b8f062a66a
|
90740d1bae141af44d479d090a6743da813ceebd
|
/genderclassmodel.py
|
3af67d5373bbf13edb4de2e4f9e3077720611c07
|
[] |
no_license
|
shrawansher/Titanic-Project
|
28119c37e0e360fcb4fba12b007f913d18cc479d
|
509fcbcd51976aa8e8eef25577bff7e4d0bb92bf
|
refs/heads/master
| 2021-01-11T12:05:30.956562
| 2017-01-23T02:41:33
| 2017-01-23T02:41:33
| 79,544,227
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,584
|
py
|
import csv as csv
import numpy as np
csv_file_object = csv.reader(open('train.csv', 'rb')) # Load in the csv file
header = csv_file_object.next()    # Skip the first line as it is a header
data=[] # Create a variable to hold the data
for row in csv_file_object: # Skip through each row in the csv file
data.append(row) # adding each row to the data variable
data = np.array(data) # Then convert from a list to an array
# In order to analyse the price column I need to bin up that data.
# Here are my binning parameters. The problem we face is that some of the fares are very large,
# so we can either have a lot of bins with nothing in them or we can just lose some
# information by considering that anything over 39 simply falls into the last bin.
# So we add a ceiling
fare_ceiling = 40
# then modify the data in the Fare column to = 39, if it is greater than or equal to the ceiling
data[ data[0::,9].astype(np.float) >= fare_ceiling, 9 ] = fare_ceiling - 1.0
fare_bracket_size = 10
number_of_price_brackets = fare_ceiling / fare_bracket_size
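# (Added worked example) fare_ceiling=40 and fare_bracket_size=10 give 4 brackets, so a fare
# maps to bins as [0,10) -> 0, [10,20) -> 1, [20,30) -> 2 and [30,40) -> 3; fares of 40 or
# more were capped to 39 above and therefore also land in bin 3.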
number_of_classes = 3 # I know there were 1st, 2nd and 3rd classes on board.
number_of_classes = len(np.unique(data[0::,2])) # But it's better practice to calculate this from the Pclass directly:
# just take the length of an array of UNIQUE values in column index 2
# This reference matrix will show the proportion of survivors as a sorted table of
# gender, class and ticket fare.
# First initialize it with all zeros
survival_table = np.zeros([2,number_of_classes,number_of_price_brackets],float)
# I can now find the stats of all the women and men on board
for i in xrange(number_of_classes):
for j in xrange(number_of_price_brackets):
women_only_stats = data[ (data[0::,4] == "female") \
& (data[0::,2].astype(np.float) == i+1) \
& (data[0:,9].astype(np.float) >= j*fare_bracket_size) \
& (data[0:,9].astype(np.float) < (j+1)*fare_bracket_size), 1]
men_only_stats = data[ (data[0::,4] != "female") \
& (data[0::,2].astype(np.float) == i+1) \
& (data[0:,9].astype(np.float) >= j*fare_bracket_size) \
& (data[0:,9].astype(np.float) < (j+1)*fare_bracket_size), 1]
#if i == 0 and j == 3:
survival_table[0,i,j] = np.mean(women_only_stats.astype(np.float)) # Female stats
survival_table[1,i,j] = np.mean(men_only_stats.astype(np.float)) # Male stats
# Since in python if it tries to find the mean of an array with nothing in it
# (such that the denominator is 0), then it returns nan, we can convert these to 0
# by just saying where does the array not equal the array, and set these to 0.
survival_table[ survival_table != survival_table ] = 0.
# Now I have my proportion of survivors; simply round them such that if <0.5
# I predict they don't survive, and if >= 0.5 they do
survival_table[ survival_table < 0.5 ] = 0
survival_table[ survival_table >= 0.5 ] = 1
# Now that I have my indicator I can read in the test file and write out
# if a woman then survived (1), if a man then did not survive (0)
# First read in test
test_file = open('test.csv', 'rb')
test_file_object = csv.reader(test_file)
header = test_file_object.next()
# Also open the a new file so I can write to it.
predictions_file = open("genderclassmodel.csv", "wb")
predictions_file_object = csv.writer(predictions_file)
predictions_file_object.writerow(["PassengerId", "Survived"])
# First thing to do is bin up the price file
for row in test_file_object:
for j in xrange(number_of_price_brackets):
# If there is no fare then place the price of the ticket according to class
try:
row[8] = float(row[8]) # No fare recorded will come up as a string so
# try to make it a float
except: # If fails then just bin the fare according to the class
bin_fare = 3 - float(row[1])
break # Break from the loop and move to the next row
if row[8] > fare_ceiling: # Otherwise now test to see if it is higher
# than the fare ceiling we set earlier
bin_fare = number_of_price_brackets - 1
break # And then break to the next row
if row[8] >= j*fare_bracket_size\
and row[8] < (j+1)*fare_bracket_size: # If passed these tests then loop through
# each bin until you find the right one
# append it to the bin_fare
# and move to the next loop
bin_fare = j
break
# Now I have the binned fare, passenger class, and whether female or male, we can
# just cross ref their details with our survival table
if row[3] == 'female':
predictions_file_object.writerow([row[0], "%d" % int(survival_table[ 0, float(row[1]) - 1, bin_fare ])])
else:
predictions_file_object.writerow([row[0], "%d" % int(survival_table[ 1, float(row[1]) - 1, bin_fare])])
# Close out the files
test_file.close()
predictions_file.close()
|
[
"shrawansher@gmail.com"
] |
shrawansher@gmail.com
|
233294896ceef236dcbba663d8ddc879b191cd60
|
b364967f39d243c2419cd62d6f5dd426c624dcfa
|
/members/migrations/0007_auto__add_resetpasswordlink.py
|
c3099205414ad077cb0ddd391aed99b996100bfc
|
[] |
no_license
|
KSET/kset-org
|
839c98f696bdf2fc5a2fcfdf837a71156884198b
|
40f48d8060a427572a257233cdcc1607894e955b
|
refs/heads/master
| 2022-11-26T00:52:24.741122
| 2021-05-07T19:47:47
| 2021-05-07T19:47:47
| 3,121,818
| 5
| 14
| null | 2022-11-22T00:28:19
| 2012-01-06T23:14:57
|
Python
|
UTF-8
|
Python
| false
| false
| 5,681
|
py
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ResetPasswordLink'
db.create_table(u'members_resetpasswordlink', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('member', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['members.Member'], unique=True)),
('unique_link', self.gf('django.db.models.fields.CharField')(default='6fc3fd8d34dc4a1bb094ff7023634e0f', unique=True, max_length=255)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'members', ['ResetPasswordLink'])
def backwards(self, orm):
# Deleting model 'ResetPasswordLink'
db.delete_table(u'members_resetpasswordlink')
models = {
u'members.address': {
'Meta': {'object_name': 'Address'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'country': ('django.db.models.fields.CharField', [], {'default': "'Hrvatska'", 'max_length': '32', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['members.Member']"}),
'town': ('django.db.models.fields.CharField', [], {'default': "'Zagreb'", 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'default': "'10000'", 'max_length': '16', 'null': 'True', 'blank': 'True'})
},
u'members.contact': {
'Meta': {'object_name': 'Contact'},
'contact': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'contact_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contacts'", 'to': u"orm['members.Member']"})
},
u'members.group': {
'Meta': {'object_name': 'Group'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['members.Group']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
u'members.member': {
'Meta': {'object_name': 'Member'},
'birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'card_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'comment': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['members.Group']", 'through': u"orm['members.MemberGroupLink']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
},
u'members.membergrouplink': {
'Meta': {'object_name': 'MemberGroupLink'},
'date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['members.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['members.Member']"})
},
u'members.resetpasswordlink': {
'Meta': {'object_name': 'ResetPasswordLink'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['members.Member']", 'unique': 'True'}),
'unique_link': ('django.db.models.fields.CharField', [], {'default': "'6fc3fd8d34dc4a1bb094ff7023634e0f'", 'unique': 'True', 'max_length': '255'})
}
}
complete_apps = ['members']
|
[
"deni@kset.org"
] |
deni@kset.org
|
6454a8fee39ebcb772b1c9079f154b5b4df9102b
|
73f80ceb12b4d972233457708d513674e227c394
|
/jmpy/encrypt_py.py
|
ae538b5b3e6b5df8e8ba72536f195e8ce804db44
|
[
"MIT"
] |
permissive
|
Dev-Bobbie/jmpy
|
5694d0911f227159beb087ae0dda2f4a7d8f1cde
|
e0ed6e962dee8db35bde867b5a2230e53c31ab75
|
refs/heads/master
| 2022-11-08T13:42:30.126948
| 2020-06-19T09:55:18
| 2020-06-19T09:55:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,061
|
py
|
# -*- coding: utf-8 -*-
"""
Created on 2018-07-18 18:24
---------
@summary: Compile (encrypt) python code into pyd/so extension modules
---------
@author: Boris
"""
import os
import re
import shutil
import tempfile
from distutils.command.build_py import build_py
from distutils.core import setup
from typing import Union, List
from Cython.Build import cythonize
from jmpy.log import logger
def get_package_dir(*args, **kwargs):
return ""
# Override get_package_dir; otherwise the generated .so files end up under the wrong paths
build_py.get_package_dir = get_package_dir
class TemporaryDirectory(object):
def __enter__(self):
self.name = tempfile.mkdtemp()
return self.name
def __exit__(self, exc_type, exc_value, traceback):
shutil.rmtree(self.name)
def search(content, regexs):
if isinstance(regexs, str):
return re.search(regexs, content)
for regex in regexs:
if re.search(regex, content):
return True
def walk_file(file_path):
if os.path.isdir(file_path):
for current_path, sub_folders, files_name in os.walk(file_path):
for file in files_name:
file_path = os.path.join(current_path, file)
yield file_path
else:
yield file_path
def copy_files(src_path, dst_path):
if os.path.isdir(src_path):
if os.path.exists(dst_path):
shutil.rmtree(dst_path)
        def _ignore(src, names: list):
            # Ignore everything when the walk reaches the destination itself, and always
            # skip build/IDE directories (renamed to avoid shadowing the builtin `callable`).
            if search(src, dst_path):
                return names
            return ["dist", ".git", "venv", ".idea", "__pycache__"]
        shutil.copytree(src_path, dst_path, ignore=_ignore)
else:
if not os.path.exists(dst_path):
os.makedirs(dst_path)
shutil.copyfile(src_path, os.path.join(dst_path, os.path.basename(src_path)))
def get_py_files(files, ignore_files: Union[List, str, None] = None):
"""
@summary:
---------
    @param files: list of file paths
    @param ignore_files: files to ignore; regular expressions are supported
---------
@result:
"""
for file in files:
if file.endswith(".py"):
            if ignore_files and search(file, regexs=ignore_files):  # this file matches an ignore pattern
pass
else:
yield file
def filter_cannot_encrypted_py(files, except_main_file):
"""
    Filter out files that cannot be encrypted, e.g. log.py, __main__.py and files containing if __name__ == "__main__":
Args:
files:
Returns:
"""
_files = []
for file in files:
if search(file, regexs="__.*?.py"):
continue
if except_main_file:
with open(file, "r", encoding="utf-8") as f:
content = f.read()
if search(content, regexs="__main__"):
continue
_files.append(file)
return _files
def encrypt_py(py_files: list):
encrypted_py = []
with TemporaryDirectory() as td:
total_count = len(py_files)
for i, py_file in enumerate(py_files):
try:
dir_name = os.path.dirname(py_file)
file_name = os.path.basename(py_file)
os.chdir(dir_name)
logger.debug("正在加密 {}/{}, {}".format(i + 1, total_count, file_name))
setup(
ext_modules=cythonize([file_name], quiet=True, language_level=3),
script_args=["build_ext", "-t", td, "--inplace"],
)
encrypted_py.append(py_file)
logger.debug("加密成功 {}".format(file_name))
except Exception as e:
logger.exception("加密失败 {} , error {}".format(py_file, e))
os.remove(py_file.replace(".py", ".c"))
return encrypted_py
def delete_files(files_path):
"""
    @summary: delete files
---------
    @param files_path: paths of the .py files; the matching .c files are removed as well
---------
@result:
"""
try:
        # remove both the python files and the generated c files
        for file in files_path:
            os.remove(file)  # the .py file
            os.remove(file.replace(".py", ".c"))  # the intermediate .c file
except Exception as e:
pass
def rename_excrypted_file(output_file_path):
files = walk_file(output_file_path)
for file in files:
if file.endswith(".pyd") or file.endswith(".so"):
new_filename = re.sub("(.*)\..*\.(.*)", r"\1.\2", file)
os.rename(file, new_filename)
def start_encrypt(
input_file_path,
output_file_path: str = None,
ignore_files: Union[List, str, None] = None,
except_main_file: int = 1,
):
assert input_file_path, "input_file_path cannot be null"
assert (
input_file_path != output_file_path
), "output_file_path must be diffent with input_file_path"
if output_file_path and os.path.isfile(output_file_path):
raise ValueError("output_file_path need a dir path")
input_file_path = os.path.abspath(input_file_path)
    if not output_file_path:  # no output path given
if os.path.isdir(
input_file_path
        ):  # if the input path is a directory, the output goes to input_file_path/dist/project_name
output_file_path = os.path.join(
input_file_path, "dist", os.path.basename(input_file_path)
)
else:
output_file_path = os.path.join(os.path.dirname(input_file_path), "dist")
else:
output_file_path = os.path.abspath(output_file_path)
    # copy the original files to the target location
copy_files(input_file_path, output_file_path)
files = walk_file(output_file_path)
py_files = get_py_files(files, ignore_files)
    # filter out files that should not / cannot be encrypted
need_encrypted_py = filter_cannot_encrypted_py(py_files, except_main_file)
encrypted_py = encrypt_py(need_encrypted_py)
delete_files(encrypted_py)
rename_excrypted_file(output_file_path)
logger.debug(
"加密完成 total_count={}, success_count={}, 生成到 {}".format(
len(need_encrypted_py), len(encrypted_py), output_file_path
)
)
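# Example usage (added illustration; the paths are hypothetical):
#   start_encrypt("my_project", output_file_path="my_project_dist",
#                 ignore_files=["setup.py"], except_main_file=1)
# copies my_project into my_project_dist and replaces the eligible .py files there with
# compiled .so/.pyd extension modules.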
|
[
"564773807@qq.com"
] |
564773807@qq.com
|
f1751807e875e09aa0f44efc9653bd6617246fcc
|
559ffb0a2a99b71a1645e5bb26d03bea6c5452c1
|
/.c9/metadata/workspace/connected_academy/urls.py
|
227833abad5d5373d1bfcf9b005442f11bd6f588
|
[] |
no_license
|
MrOverfl0w/ConnectedAcademy
|
98516a7495c54dd5640f33414b4072dc2c66ddd4
|
60eb2905482a5067dfce815879d2e4b696910661
|
refs/heads/master
| 2020-07-28T17:17:19.049660
| 2019-09-19T06:18:17
| 2019-09-19T06:18:17
| 209,477,052
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,383
|
py
|
{"filter":false,"title":"urls.py","tooltip":"/connected_academy/urls.py","undoManager":{"mark":100,"position":100,"stack":[[{"start":{"row":29,"column":75},"end":{"row":29,"column":76},"action":"remove","lines":["i"],"id":593}],[{"start":{"row":29,"column":74},"end":{"row":29,"column":75},"action":"remove","lines":["f"],"id":594}],[{"start":{"row":29,"column":73},"end":{"row":29,"column":74},"action":"remove","lines":["o"],"id":595}],[{"start":{"row":29,"column":72},"end":{"row":29,"column":73},"action":"remove","lines":["r"],"id":596}],[{"start":{"row":29,"column":71},"end":{"row":29,"column":72},"action":"remove","lines":["p"],"id":597}],[{"start":{"row":29,"column":71},"end":{"row":29,"column":73},"action":"insert","lines":["\"\""],"id":598}],[{"start":{"row":29,"column":71},"end":{"row":29,"column":73},"action":"remove","lines":["\"\""],"id":599}],[{"start":{"row":29,"column":71},"end":{"row":29,"column":73},"action":"insert","lines":["''"],"id":600}],[{"start":{"row":29,"column":72},"end":{"row":29,"column":73},"action":"insert","lines":["p"],"id":601}],[{"start":{"row":29,"column":73},"end":{"row":29,"column":74},"action":"insert","lines":["r"],"id":602}],[{"start":{"row":29,"column":74},"end":{"row":29,"column":75},"action":"insert","lines":["o"],"id":603}],[{"start":{"row":29,"column":75},"end":{"row":29,"column":76},"action":"insert","lines":["f"],"id":604}],[{"start":{"row":29,"column":76},"end":{"row":29,"column":77},"action":"insert","lines":["i"],"id":605}],[{"start":{"row":29,"column":77},"end":{"row":29,"column":78},"action":"insert","lines":["l"],"id":606}],[{"start":{"row":29,"column":78},"end":{"row":29,"column":79},"action":"insert","lines":["e"],"id":607}],[{"start":{"row":26,"column":75},"end":{"row":26,"column":76},"action":"remove","lines":["\""],"id":608}],[{"start":{"row":26,"column":75},"end":{"row":26,"column":76},"action":"insert","lines":["'"],"id":609}],[{"start":{"row":26,"column":81},"end":{"row":26,"column":82},"action":"remove","lines":["\""],"id":610}],[{"start":{"row":26,"column":81},"end":{"row":26,"column":82},"action":"insert","lines":["0"],"id":611}],[{"start":{"row":26,"column":81},"end":{"row":26,"column":82},"action":"remove","lines":["0"],"id":612}],[{"start":{"row":26,"column":81},"end":{"row":26,"column":82},"action":"insert","lines":["'"],"id":613}],[{"start":{"row":29,"column":57},"end":{"row":29,"column":58},"action":"insert","lines":["^"],"id":614}],[{"start":{"row":29,"column":57},"end":{"row":29,"column":58},"action":"remove","lines":["^"],"id":615}],[{"start":{"row":29,"column":57},"end":{"row":29,"column":58},"action":"insert","lines":["r"],"id":616}],[{"start":{"row":29,"column":58},"end":{"row":29,"column":59},"action":"insert","lines":["'"],"id":617}],[{"start":{"row":29,"column":58},"end":{"row":29,"column":59},"action":"remove","lines":["'"],"id":618}],[{"start":{"row":29,"column":57},"end":{"row":29,"column":58},"action":"remove","lines":["r"],"id":619}],[{"start":{"row":29,"column":56},"end":{"row":29,"column":57},"action":"insert","lines":["r"],"id":620}],[{"start":{"row":29,"column":58},"end":{"row":29,"column":59},"action":"insert","lines":["^"],"id":621}],[{"start":{"row":29,"column":63},"end":{"row":29,"column":64},"action":"remove","lines":["k"],"id":622}],[{"start":{"row":29,"column":62},"end":{"row":29,"column":63},"action":"remove","lines":["c"],"id":623}],[{"start":{"row":29,"column":61},"end":{"row":29,"column":62},"action":"remove","lines":["e"],"id":624}],[{"start":{"row":29,"column":60},"end":{"row":29,"column":61},"
action":"remove","lines":["h"],"id":625}],[{"start":{"row":29,"column":59},"end":{"row":29,"column":60},"action":"remove","lines":["c"],"id":626}],[{"start":{"row":29,"column":58},"end":{"row":29,"column":59},"action":"remove","lines":["^"],"id":627}],[{"start":{"row":29,"column":57},"end":{"row":29,"column":59},"action":"remove","lines":["''"],"id":628}],[{"start":{"row":29,"column":56},"end":{"row":29,"column":57},"action":"remove","lines":["r"],"id":629}],[{"start":{"row":29,"column":56},"end":{"row":29,"column":58},"action":"insert","lines":["''"],"id":630}],[{"start":{"row":29,"column":57},"end":{"row":29,"column":106},"action":"insert","lines":["https://connected-academy-mr-overfl0w.c9users.io/"],"id":631}],[{"start":{"row":29,"column":106},"end":{"row":29,"column":107},"action":"insert","lines":["c"],"id":632}],[{"start":{"row":29,"column":107},"end":{"row":29,"column":108},"action":"insert","lines":["h"],"id":633}],[{"start":{"row":29,"column":108},"end":{"row":29,"column":109},"action":"insert","lines":["e"],"id":634}],[{"start":{"row":29,"column":109},"end":{"row":29,"column":110},"action":"insert","lines":["c"],"id":635}],[{"start":{"row":29,"column":110},"end":{"row":29,"column":111},"action":"insert","lines":["k"],"id":636}],[{"start":{"row":28,"column":46},"end":{"row":29,"column":0},"action":"insert","lines":["",""],"id":655},{"start":{"row":29,"column":0},"end":{"row":29,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":29,"column":4},"end":{"row":29,"column":94},"action":"insert","lines":["url(r'^logout/$', auth_views.logout, {'template_name': 'logged_out.html'}, name='logout'),"],"id":656}],[{"start":{"row":29,"column":60},"end":{"row":29,"column":70},"action":"remove","lines":["logged_out"],"id":657},{"start":{"row":29,"column":60},"end":{"row":29,"column":61},"action":"insert","lines":["h"]}],[{"start":{"row":29,"column":61},"end":{"row":29,"column":62},"action":"insert","lines":["o"],"id":658}],[{"start":{"row":29,"column":62},"end":{"row":29,"column":63},"action":"insert","lines":["m"],"id":659}],[{"start":{"row":29,"column":63},"end":{"row":29,"column":64},"action":"insert","lines":["e"],"id":660}],[{"start":{"row":29,"column":60},"end":{"row":29,"column":64},"action":"remove","lines":["home"],"id":661},{"start":{"row":29,"column":60},"end":{"row":29,"column":61},"action":"insert","lines":["M"]}],[{"start":{"row":29,"column":61},"end":{"row":29,"column":62},"action":"insert","lines":["a"],"id":662}],[{"start":{"row":29,"column":62},"end":{"row":29,"column":63},"action":"insert","lines":["i"],"id":663}],[{"start":{"row":29,"column":63},"end":{"row":29,"column":64},"action":"insert","lines":["n"],"id":664}],[{"start":{"row":29,"column":64},"end":{"row":29,"column":65},"action":"insert","lines":["_"],"id":665}],[{"start":{"row":29,"column":60},"end":{"row":29,"column":65},"action":"remove","lines":["Main_"],"id":666},{"start":{"row":29,"column":60},"end":{"row":29,"column":71},"action":"insert","lines":["Main_Screen"]}],[{"start":{"row":29,"column":17},"end":{"row":29,"column":18},"action":"remove","lines":["/"],"id":667}],[{"start":{"row":29,"column":17},"end":{"row":29,"column":18},"action":"remove","lines":["$"],"id":668}],[{"start":{"row":30,"column":0},"end":{"row":31,"column":0},"action":"insert","lines":[" url(r'^logout', auth_views.logout, {'template_name': 'Main_Screen.html'}, 
name='logout'),",""],"id":671}],[{"start":{"row":29,"column":4},"end":{"row":29,"column":5},"action":"insert","lines":["#"],"id":672}],[{"start":{"row":30,"column":39},"end":{"row":30,"column":76},"action":"remove","lines":["{'template_name': 'Main_Screen.html'}"],"id":673},{"start":{"row":30,"column":39},"end":{"row":30,"column":121},"action":"insert","lines":["RedirectView.as_view(url='https://connected-academy-mr-overfl0w.c9users.io/check')"]}],[{"start":{"row":30,"column":118},"end":{"row":30,"column":119},"action":"remove","lines":["k"],"id":674}],[{"start":{"row":30,"column":117},"end":{"row":30,"column":118},"action":"remove","lines":["c"],"id":675}],[{"start":{"row":30,"column":116},"end":{"row":30,"column":117},"action":"remove","lines":["e"],"id":676}],[{"start":{"row":30,"column":115},"end":{"row":30,"column":116},"action":"remove","lines":["h"],"id":677}],[{"start":{"row":30,"column":114},"end":{"row":30,"column":115},"action":"remove","lines":["c"],"id":678}],[{"start":{"row":30,"column":114},"end":{"row":30,"column":115},"action":"insert","lines":["h"],"id":679}],[{"start":{"row":29,"column":94},"end":{"row":30,"column":134},"action":"remove","lines":[""," url(r'^logout', auth_views.logout, RedirectView.as_view(url='https://connected-academy-mr-overfl0w.c9users.io/h'), name='logout'),"],"id":685}],[{"start":{"row":29,"column":4},"end":{"row":29,"column":5},"action":"remove","lines":["#"],"id":686}],[{"start":{"row":29,"column":58},"end":{"row":29,"column":69},"action":"remove","lines":["Main_Screen"],"id":687},{"start":{"row":29,"column":58},"end":{"row":29,"column":59},"action":"insert","lines":["l"]}],[{"start":{"row":29,"column":59},"end":{"row":29,"column":60},"action":"insert","lines":["o"],"id":688}],[{"start":{"row":29,"column":60},"end":{"row":29,"column":61},"action":"insert","lines":["g"],"id":689}],[{"start":{"row":29,"column":61},"end":{"row":29,"column":62},"action":"insert","lines":["o"],"id":690}],[{"start":{"row":29,"column":62},"end":{"row":29,"column":63},"action":"insert","lines":["u"],"id":691}],[{"start":{"row":29,"column":63},"end":{"row":29,"column":64},"action":"insert","lines":["t"],"id":692}],[{"start":{"row":29,"column":58},"end":{"row":29,"column":59},"action":"remove","lines":["l"],"id":693}],[{"start":{"row":29,"column":58},"end":{"row":29,"column":59},"action":"insert","lines":["L"],"id":694}],[{"start":{"row":29,"column":61},"end":{"row":29,"column":62},"action":"remove","lines":["o"],"id":695}],[{"start":{"row":29,"column":61},"end":{"row":29,"column":62},"action":"insert","lines":["O"],"id":696}],[{"start":{"row":27,"column":0},"end":{"row":28,"column":0},"action":"insert","lines":[" url(r'^login', auth_views.login, {'template_name': 'LogIn.html'}, 
name='LogIn'),",""],"id":697}],[{"start":{"row":26,"column":76},"end":{"row":26,"column":77},"action":"insert","lines":["A"],"id":698}],[{"start":{"row":26,"column":77},"end":{"row":26,"column":78},"action":"insert","lines":["u"],"id":699}],[{"start":{"row":26,"column":78},"end":{"row":26,"column":79},"action":"insert","lines":["t"],"id":700}],[{"start":{"row":26,"column":79},"end":{"row":26,"column":80},"action":"insert","lines":["g"],"id":701}],[{"start":{"row":26,"column":79},"end":{"row":26,"column":80},"action":"remove","lines":["g"],"id":702}],[{"start":{"row":26,"column":79},"end":{"row":26,"column":80},"action":"insert","lines":["h"],"id":703}],[{"start":{"row":26,"column":11},"end":{"row":26,"column":12},"action":"insert","lines":["A"],"id":704}],[{"start":{"row":26,"column":12},"end":{"row":26,"column":13},"action":"insert","lines":["u"],"id":705}],[{"start":{"row":26,"column":13},"end":{"row":26,"column":14},"action":"insert","lines":["t"],"id":706}],[{"start":{"row":26,"column":14},"end":{"row":26,"column":15},"action":"insert","lines":["h"],"id":707}],[{"start":{"row":27,"column":19},"end":{"row":27,"column":68},"action":"remove","lines":["auth_views.login, {'template_name': 'LogIn.html'}"],"id":708},{"start":{"row":27,"column":19},"end":{"row":27,"column":20},"action":"insert","lines":["v"]}],[{"start":{"row":27,"column":20},"end":{"row":27,"column":21},"action":"insert","lines":["i"],"id":709}],[{"start":{"row":27,"column":21},"end":{"row":27,"column":22},"action":"insert","lines":["e"],"id":710}],[{"start":{"row":27,"column":22},"end":{"row":27,"column":23},"action":"insert","lines":["w"],"id":711}],[{"start":{"row":27,"column":23},"end":{"row":27,"column":24},"action":"insert","lines":["s"],"id":712}],[{"start":{"row":27,"column":24},"end":{"row":27,"column":25},"action":"insert","lines":["."],"id":713}],[{"start":{"row":27,"column":25},"end":{"row":27,"column":26},"action":"insert","lines":["L"],"id":714}],[{"start":{"row":27,"column":26},"end":{"row":27,"column":27},"action":"insert","lines":["o"],"id":715}],[{"start":{"row":27,"column":27},"end":{"row":27,"column":28},"action":"insert","lines":["g"],"id":716}],[{"start":{"row":27,"column":28},"end":{"row":27,"column":29},"action":"insert","lines":["I"],"id":717}],[{"start":{"row":27,"column":29},"end":{"row":27,"column":30},"action":"insert","lines":["n"],"id":718}]]},"ace":{"folds":[],"scrolltop":245.5,"scrollleft":0,"selection":{"start":{"row":27,"column":46},"end":{"row":27,"column":46},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1546033700905,"hash":"20abb42420e07aaaae5659baa3bdf8362bbf6efa"}
|
[
"alberto_pumarejo@hotmail.com"
] |
alberto_pumarejo@hotmail.com
|
ec6b542404a0a7cbfc24f45e2e9850da0d8e7a46
|
baab63bb8601c7d5465829a7f5ca258878093ebd
|
/simulate12.py
|
77acde8f390d7e6344b5315bc6dcfb71ca170515
|
[] |
no_license
|
marx-saul/random_walk
|
f7290b50a0ff7cdac5fe19bf321f946e2acaa609
|
4809c992820a49a5cfa95ac21543fee6978500f0
|
refs/heads/master
| 2020-05-26T11:19:24.839618
| 2019-07-09T13:33:29
| 2019-07-09T13:33:29
| 188,213,571
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,204
|
py
|
import matplotlib.pyplot as plt
import numpy as np
import random
import copy
from statistics import mean, stdev
# Sentinel: the master (the house) has an unlimited amount of money.
INF = -1
# States returned by Game.step().
Going = 0
PlayerLose = 1
MasterLose = 2
# Upper bounds on steps and simulations (currently unused).
MAX_TIME = 2 * 10**3
MAX_SIM = 2 * 10**3
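# Game models a gambler's-ruin random walk: at each step the player wins or
# loses one unit of money against the master, winning with probability p.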
class Game:
    # player/master: starting money; p: probability that the player wins a step.
    def __init__(self, player=20, master=INF, p=0.50):
        self.player = player; self.master = master; self.p = p
    # Play one step and return Going | PlayerLose | MasterLose.
    def step(self):
        diff = 1 if random.random() < self.p else -1
        # The master has infinite money: only the player can go bankrupt.
        if self.master == INF:
            self.player += diff
            if self.player == 0: return PlayerLose
            else: return Going
        # The master can also go bankrupt.
        else:
            self.player += diff; self.master -= diff
            if self.player == 0: return PlayerLose
            elif self.master == 0: return MasterLose
            else: return Going
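# Run trial_num independent copies of `game` for at most 80 steps each and
# return a histogram (indices 0..100) of the player's final amount of money.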
def simulate(game, trial_num=500):
    results = [0] * 101
    for n in range(0, trial_num):
        game_ = copy.deepcopy(game)
        for i in range(0, 80):
            result = game_.step()
            if result != Going: break
        results[game_.player] += 1
    return results
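# Repeat simulate() 100 times, then plot the per-bin mean of the histograms as
# a bar chart with one-standard-deviation error bars across the repetitions.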
def simulate12(game, trial_num=500):
    t_n = 100  # number of repetitions of the whole experiment
    results = [[0 for i in range(t_n)] for j in range(101)]
    for n in range(0, t_n):
        result = simulate(game, trial_num)
        for i in range(0, 101):
            results[i][n] = result[i]
    # Mean and standard deviation of each histogram bin over the t_n repetitions.
    mean_result = [0] * 101
    stdev_result = [0] * 101
    for i in range(0, 101):
        mean_result[i] = mean(results[i])
        stdev_result[i] = stdev(results[i])
    print(mean_result)
    print(stdev_result)
    plt.bar(np.array(range(0, 101)), np.array(mean_result), yerr=np.array(stdev_result))
    plt.show()
# Usage idea (not implemented): <script>.py simulation_type player_money master_money probability
# The command-line arguments are not parsed; the values below are hard-coded.
if __name__ == "__main__":
    game = Game(player=20, master=INF, p=0.50)
    simulate12(game)
|
[
"noreply@github.com"
] |
marx-saul.noreply@github.com
|
1128f685ef01448cdffdeab32ea8895e4d59cbfa
|
344474523310d9cab431366bdfc170e3267c4490
|
/Lesson3/task5.py
|
904211b05443202b8f2f0d5aa63156a579d9d500
|
[] |
no_license
|
SarahU/WWCPythonDataAnalysisWorkshop
|
4c8d9fb0f41c308c5d59ba52edfdcd8b88a5bf96
|
075abfe5645e400eec6bc3362b1e434f4ec50152
|
refs/heads/master
| 2022-12-07T12:27:29.423978
| 2020-08-23T12:06:43
| 2020-08-23T12:06:43
| 287,929,970
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 837
|
py
|
# Get the names of the DataFrame columns and the count of missing values in the DataFrame.
# Replace (fill) the missing values in each column with the column's average value.
# Calculate basic statistics for 2 columns.
# For one column, create a histogram.
# Create a linear-regression plot for 2 columns.
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
frame = pd.read_csv('https://raw.githubusercontent.com/Grossmend/CSV/master/titanic/data.csv')
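# Titanic passenger data; the numeric columns used below include 'Age' and 'Fare'.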
# print(frame)
print('Columns')
column_names = frame.columns
print(column_names)
# Count of missing values per column.
print(frame.isnull().sum())
# Fill missing values with the per-column mean (numeric columns only).
mean_frame = frame.mean(numeric_only=True)
frame = frame.fillna(value=mean_frame)
print(frame)
# basic stats (describe() covers numeric columns; numeric_only needs pandas >= 1.5)
print(frame.describe())
print(frame.corr(numeric_only=True))
# histogram
plt.hist(frame['Age'])
plt.show()
#linear regression
plt.figure(figsize=(10,15))
ax = sns.regplot(x='Age', y='Fare', data=frame, color='purple')
plt.show()
|
[
"sarahnusher@gmail.com"
] |
sarahnusher@gmail.com
|
995baba4a5a1e71d5129f15122c9f11044026302
|
782357625779cad8ee8264d50066caefbaad3024
|
/rearrange_files.py
|
cc1b1f1f0fcf4ec0203e7abf93760137f7920f6e
|
[] |
no_license
|
abtinkd/utilities
|
8e2438f9461789a914ea51f5f6bf37349aa1dcee
|
3ca9cde542480dde712ae4fcc0eea8c6cb73ad7a
|
refs/heads/master
| 2023-02-04T03:32:12.896999
| 2019-09-15T06:26:29
| 2019-09-15T06:26:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 726
|
py
|
from __future__ import print_function
from core import traverse
import os
from subprocess import call
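# arrange() walks root_path with the project-local traverse.access() helper
# (assumed here to yield (file_name, file_path) pairs) and moves each file into
# a subdirectory named after the id prefix before the first '_' in its name.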
def arrange(root_path):
    for file_name, file_path in traverse.access(root_path):
        st_id = file_name.split('_', 1)[0]
        if not os.path.exists(file_path + '/' + st_id):
            os.mkdir(file_path + '/' + st_id)
        cur_file = file_path + '/' + file_name
        new_file = file_path + '/' + st_id + '/' + file_name
        call(['mv', cur_file, new_file])
if __name__ == '__main__':
    print("CAUTION! This module changes the structure of the given directory!")
    print("Give me the absolute path of the WORKING directory: ")
    root_path = input()
    arrange(root_path)
    print('Done!')
|
[
"abtinkh+github@gmail.com"
] |
abtinkh+github@gmail.com
|