column                 dtype           range / classes
---------------------  --------------  ----------------
blob_id                stringlengths   40 .. 40
directory_id           stringlengths   40 .. 40
path                   stringlengths   2 .. 616
content_id             stringlengths   40 .. 40
detected_licenses      listlengths     0 .. 69
license_type           stringclasses   2 values
repo_name              stringlengths   5 .. 118
snapshot_id            stringlengths   40 .. 40
revision_id            stringlengths   40 .. 40
branch_name            stringlengths   4 .. 63
visit_date             timestamp[us]
revision_date          timestamp[us]
committer_date         timestamp[us]
github_id              int64           2.91k .. 686M
star_events_count      int64           0 .. 209k
fork_events_count      int64           0 .. 110k
gha_license_id         stringclasses   23 values
gha_event_created_at   timestamp[us]
gha_created_at         timestamp[us]
gha_language           stringclasses   213 values
src_encoding           stringclasses   30 values
language               stringclasses   1 value
is_vendor              bool            2 classes
is_generated           bool            2 classes
length_bytes           int64           2 .. 10.3M
extension              stringclasses   246 values
content                stringlengths   2 .. 10.3M
authors                listlengths     1 .. 1
author_id              stringlengths   0 .. 212
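A minimal sketch of how records with this schema can be consumed, assuming the dump is exposed through the Hugging Face datasets library; the dataset path "org/python-source-dump" and the filtering choices are purely illustrative, not the real name of this corpus.

# Illustrative only: the dataset path is hypothetical; the field names come from the schema above.
from datasets import load_dataset

rows = load_dataset("org/python-source-dump", split="train", streaming=True)

for row in rows:
    # Each record pairs repository metadata with the raw file text.
    if row["language"] == "Python" and not row["is_vendor"] and not row["is_generated"]:
        print(row["repo_name"], row["path"], row["length_bytes"], row["license_type"])
        source_text = row["content"]   # full file contents as one string
        authors = row["authors"]       # single-element list of author identifiers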
00fd2f7b85c5f249a96b5b79c00dc2cd07ceb413
9a3d3684df45ff87d24ee7fe0f8b0444640f830a
/scipy/interpolate/ndgriddata.py
280d9187cdeb859056afcffab219cc46ab08cd67
[]
no_license
dagss/private-scipy-refactor
fcb48627fdd7aa5820dffb48c1ae89d1d9abcbb8
dcc4efacf8702a56c2b8e8ffc546fd87499a3eeb
refs/heads/master
2020-12-25T12:41:03.863412
2010-10-14T15:39:40
2010-10-14T15:39:40
1,047,327
0
0
null
null
null
null
UTF-8
Python
false
false
5,755
py
""" Convenience interface to N-D interpolation .. versionadded:: 0.9 """ import numpy as np from interpnd import LinearNDInterpolator, NDInterpolatorBase, \ CloughTocher2DInterpolator, _ndim_coords_from_arrays from scipy.spatial import cKDTree __all__ = ['griddata', 'NearestNDInterpolator', 'LinearNDInterpolator', 'CloughTocher2DInterpolator'] #------------------------------------------------------------------------------ # Nearest-neighbour interpolation #------------------------------------------------------------------------------ class NearestNDInterpolator(NDInterpolatorBase): """ NearestNDInterpolator(points, values) Nearest-neighbour interpolation in N dimensions. .. versionadded:: 0.9 Parameters ---------- points : ndarray of floats, shape (npoints, ndims) Data point coordinates. values : ndarray of float or complex, shape (npoints, ...) Data values. Notes ----- Uses ``scipy.spatial.cKDTree`` """ def __init__(self, x, y): x = _ndim_coords_from_arrays(x) self._check_init_shape(x, y) self.tree = cKDTree(x) self.points = x self.values = y def __call__(self, xi): """ Evaluate interpolator at given points. Parameters ---------- xi : ndarray of float, shape (..., ndim) Points where to interpolate data at. """ xi = self._check_call_shape(xi) dist, i = self.tree.query(xi) return self.values[i] #------------------------------------------------------------------------------ # Convenience interface function #------------------------------------------------------------------------------ def griddata(points, values, xi, method='linear', fill_value=np.nan): """ Interpolate unstructured N-dimensional data. .. versionadded:: 0.9 Parameters ---------- points : ndarray of floats, shape (npoints, ndims) Data point coordinates. Can either be a ndarray of size (npoints, ndim), or a tuple of `ndim` arrays. values : ndarray of float or complex, shape (npoints, ...) Data values. xi : ndarray of float, shape (..., ndim) Points where to interpolate data at. method : {'linear', 'nearest', 'cubic'} Method of interpolation. One of - ``nearest``: return the value at the data point closest to the point of interpolation. See `NearestNDInterpolator` for more details. - ``linear``: tesselate the input point set to n-dimensional simplices, and interpolate linearly on each simplex. See `LinearNDInterpolator` for more details. - ``cubic`` (1-D): return the value detemined from a cubic spline. - ``cubic`` (2-D): return the value determined from a piecewise cubic, continuously differentiable (C1), and approximately curvature-minimizing polynomial surface. See `CloughTocher2DInterpolator` for more details. fill_value : float, optional Value used to fill in for requested points outside of the convex hull of the input points. If not provided, then the default is ``nan``. This option has no effect for the 'nearest' method. 
Examples -------- Suppose we want to interpolate the 2-D function >>> def func(x, y): >>> return x*(1-x)*np.cos(4*np.pi*x) * np.sin(4*np.pi*y**2)**2 on a grid in [0, 1]x[0, 1] >>> grid_x, grid_y = np.mgrid[0:1:100j, 0:1:200j] but we only know its values at 1000 data points: >>> points = np.random.rand(1000, 2) >>> values = func(points[:,0], points[:,1]) This can be done with `griddata` -- below we try out all of the interpolation methods: >>> from scipy.interpolate import griddata >>> grid_z0 = griddata(points, values, (grid_x, grid_y), method='nearest') >>> grid_z1 = griddata(points, values, (grid_x, grid_y), method='linear') >>> grid_z2 = griddata(points, values, (grid_x, grid_y), method='cubic') One can see that the exact result is reproduced by all of the methods to some degree, but for this smooth function the piecewise cubic interpolant gives the best results: >>> import matplotlib.pyplot as plt >>> plt.subplot(221) >>> plt.imshow(func(grid_x, grid_y).T, extent=(0,1,0,1), origin='lower') >>> plt.plot(points[:,0], points[:,1], 'k.', ms=1) >>> plt.title('Original') >>> plt.subplot(222) >>> plt.imshow(grid_z0.T, extent=(0,1,0,1), origin='lower') >>> plt.title('Nearest') >>> plt.subplot(223) >>> plt.imshow(grid_z1.T, extent=(0,1,0,1), origin='lower') >>> plt.title('Linear') >>> plt.subplot(224) >>> plt.imshow(grid_z2.T, extent=(0,1,0,1), origin='lower') >>> plt.title('Cubic') >>> plt.gcf().set_size_inches(6, 6) >>> plt.show() """ points = _ndim_coords_from_arrays(points) xi = _ndim_coords_from_arrays(xi) ndim = points.shape[-1] if ndim == 1 and method in ('nearest', 'linear', 'cubic'): ip = interp1d(points, values, kind=method, axis=0, bounds_error=False, fill_value=fill_value) return ip(xi) elif method == 'nearest': ip = NearestNDInterpolator(points, values) return ip(xi) elif method == 'linear': ip = LinearNDInterpolator(points, values, fill_value=fill_value) return ip(xi) elif method == 'cubic' and ndim == 2: ip = CloughTocher2DInterpolator(points, values, fill_value=fill_value) return ip(xi) else: raise ValueError("Unknown interpolation method %r for " "%d dimensional data" % (method, ndim))
[ "ptvirtan@d6536bca-fef9-0310-8506-e4c0a848fbcf" ]
ptvirtan@d6536bca-fef9-0310-8506-e4c0a848fbcf
45258e0051e16203d967f3eaf9ac4c325ba93335
0b4128cecc47be4f6e797633d1993d590d09174e
/Week 6/Assignment 4.6.py
96597c91712480408d3c77b38a701a81c0deff88
[]
no_license
ShubhamBhavsar101/Coursera--Programming-for-Everybody-Getting-Started-with-Python-
0acd4521e5f351498de0d5c855f4d251a79cf3f7
4f299467907771187d90fac6af65e89932fc482b
refs/heads/master
2022-12-27T14:49:02.048259
2020-09-21T15:13:03
2020-09-21T15:13:03
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,092
py
# 4.6 Write a program to prompt the user for hours and rate per hour using input to compute gross pay.
# Pay should be the normal rate for hours up to 40 and time-and-a-half for the hourly rate for all hours worked above 40 hours.
# Put the logic to do the computation of pay in a function called computepay() and use the function to do the computation.
# The function should return a value. Use 45 hours and a rate of 10.50 per hour to test the program (the pay should be 498.75).
# You should use input to read a string and float() to convert the string to a number.
# Do not worry about error checking the user input unless you want to - you can assume the user types numbers properly.
# Do not name your variable sum or use the sum() function.

def computepay(h, r):
    if (h <= 40):
        return h*r
    else:
        return (40*r + (h-40)*1.5*r)

hrs = input("Enter Hours:")
hrs = float(hrs)
rate = input("Enter Rate per hours")
rate = float(rate)
p = computepay(hrs, rate)
print("Pay", p)

# Enter 45 in hours dialog box and 10.50 in rate as asked in question.
[ "noreply@github.com" ]
ShubhamBhavsar101.noreply@github.com
c66a825736493a97608e9eb55aa216b772ab328f
67950fbe22d0aa42b1846075e2615e8a5345aa3f
/server/settings.py
3bbb6566b3487192978af9e598943b059ab17a99
[]
no_license
adam-balassa/traffic-signs-model
6a16eb9ad1cef72682b4de2ed29d968d1db99ced
beb86b5f09e7dfb47b3a716a9dac22cf73953cdf
refs/heads/master
2023-01-29T22:37:20.635807
2020-12-10T16:33:00
2020-12-10T16:33:00
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,855
py
""" Django settings for server project. Generated by 'django-admin startproject' using Django 3.1.1. For more information on this file, see https://docs.djangoproject.com/en/3.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.1/ref/settings/ """ from pathlib import Path # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '3lf3&-6-0!ysd4f)s*u#*9r&rll*xs)jym9fa(ln*^cdtqwfzu' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'server.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'server.wsgi.application' # Password validation # https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/3.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.1/howto/static-files/ STATIC_URL = '/static/'
[ "balassaadi@gmail.com" ]
balassaadi@gmail.com
c4683ca53729e536dd66f6af04b8cf056e1e8931
cf71fb2fe62838bc5858f52c8da1e1798a76e149
/geometry/gui/widgets.py
6d9eabd9b6dd3ff00d237908bed541d970ab5720
[]
no_license
demidov91/university-oop-geometry
0d505042ffa535371a30aa69fe7926f3cdfb5d0e
5d8975492b3dfa0732c41d724bbbaf92fc2cc6d9
refs/heads/master
2020-04-29T16:33:09.063404
2019-03-25T14:37:53
2019-03-25T14:37:53
176,264,962
0
0
null
null
null
null
UTF-8
Python
false
false
1,236
py
import tkinter as tk
from decimal import Decimal

from geometry.core import Point

import logging

logger = logging.getLogger(__name__)


class PointWidget(tk.Frame):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.x = DecimalWidget(self, width=10)
        self.y = DecimalWidget(self, width=10)
        self.x.pack(side=tk.LEFT)
        self.y.pack(side=tk.LEFT)

    def get_value(self):
        x = self.x.get_value()
        y = self.y.get_value()
        if x is not None and y is not None:
            return Point(x, y)
        return None

    def set_value(self, value: Point):
        self.x.set_value(value.x)
        self.y.set_value(value.y)


class DecimalWidget(tk.Entry):
    def __init__(self, *args, **kwargs):
        self.text = tk.StringVar()
        super().__init__(*args, textvariable=self.text, **kwargs)

    def get_value(self):
        raw_value = self.text.get()
        if not raw_value:
            return None
        try:
            return Decimal(raw_value)
        except (ValueError, TypeError, ArithmeticError) as e:
            logger.warning(e)
            return None

    def set_value(self, value: Decimal):
        self.text.set(str(value))
[ "demidov91@mail.ru" ]
demidov91@mail.ru
6fe5fa658c2e381056eab5db42ea004abdd77e1d
d27ec32dda69e07a6ca85a3c9f76bff2dc164925
/Ex 2.py
6cddc6e1bff9d4666c5c12657bf2fc6ffeb300fa
[]
no_license
NicolasKujo/Lista-3-TPA
f881a689caae7c664573c18e231ddca1e91b05e4
9e39cc41842013831976b136e0900bf4c52c5278
refs/heads/main
2023-07-12T05:45:01.573958
2021-08-24T23:26:58
2021-08-24T23:26:58
399,627,799
2
0
null
null
null
null
UTF-8
Python
false
false
542
py
print('ola, sou um programa que vai te ajudar com a tabuada, basta inserir um numero de 1 a 10 que eu mostrarei a tabuada dele')
print()
a = int(input('insira um numero de 1 a 10: '))
b = 8
d = 1

while a > 10 or a < 1:
    print()
    print('so vale numeros de 1 a 10: ')
    print()
    a = int(input('insira um numero de 1 a 10: '))

c = a
print()
print(d, ' . ', a, ' = ', a)

while b > 0:
    d = d + 1
    b = b - 1
    print(d, ' . ', a, ' = ', c + a)
    c = c + a

print(10, ' . ', a, ' = ', c + a)
print()
print('fim do programa')
[ "marcela.souza.nr@gmail.com" ]
marcela.souza.nr@gmail.com
33f4ef7b5dbd34f4124d07d0de610e533ec31f91
5e6bb81b207f3306bca3a2412dcc86525ff09b51
/Django_test01/bookstore2/admin.py
0355d7977e527ccf07eb26e8c78a61ef2d03c17f
[]
no_license
ssk1987/FullStackExerciseLibrary
f16ad4a0ab2ce6864d00905738db0832a8e916a1
e050bffce3d8497b47980aab30ea99409f61856b
refs/heads/master
2021-06-03T02:40:06.840380
2021-03-27T11:37:22
2021-03-27T11:37:22
254,331,226
0
0
null
null
null
null
UTF-8
Python
false
false
155
py
from django.contrib import admin
from . import models

# Register your models here.
admin.site.register(models.Publisher)
admin.site.register(models.Book2)
[ "10293665@qq.com" ]
10293665@qq.com
3aab0a4cc763eab0afb829f945e472bc6030ed7d
dad01fede3f4802e98d5e6ef1d7a3d2c1c448257
/Algorithm/Greedy(탐욕법)/큰 수 만들기.py
76c1e8bc8fc0e9fb25049b6e882183253d7b71b9
[]
no_license
alyssa1996/CodingExercise
55ae8b1833552f1106a08005d651289d2dd5fd60
d31c3822a39ae8a301d48e1926fa787f556cff87
refs/heads/master
2021-10-17T04:11:22.155442
2021-10-08T13:45:08
2021-10-08T13:45:08
218,627,544
0
0
null
null
null
null
UTF-8
Python
false
false
1,733
py
'''
I spent hours wracking my brain over this and eventually looked at other people's
solutions; the key was a stack. The problem is labeled as a greedy algorithm, but the
data structure that actually has to be used is a stack, and through this problem I got
a vague sense of how data structures are used to solve algorithm problems and why they
matter.

Compare the last number in the stack (stack[-1]) with the number whose turn has come in
the for loop (i), and keep deleting the last value from the stack until stack[-1] is
greater than or equal to i. Every time a value is removed, decrement k as well so the
count of digits that still need to be removed stays up to date. Then i is always
appended to the end of the stack; even if the next number is larger it will be removed
by the while loop anyway, so appending unconditionally causes no problem.

The final point seems to be the if statement. As the for loop keeps running, once the
condition stack[-1] < i no longer holds (that is, the value last pushed onto the stack
is larger than the remaining i values), values just keep getting appended; if k is not
0 (that is, there are still digits left to remove), we simply cut that many off the end
of the stack. The clause starting with if k != 0 is the part that does this.
'''


def solution(number, k):
    answer = ""
    stack = [number[0]]
    for i in number[1:]:
        while len(stack) > 0 and stack[-1] < i and k > 0:
            k -= 1
            stack.pop()
        stack.append(i)
    if k != 0:
        stack = stack[:-k]
    return answer.join(stack)
[ "jisu.park@blum.com" ]
jisu.park@blum.com
aea1c99516c53221118823234f915f92a4392fff
4988a9a2ee1c82f411fae51d5ff5613ecd62774f
/helloworld/ch03/confRead.py
ede7291cd099550736f000a431b20ea6b4e4b0d6
[]
no_license
shenchuang006/python
39e7853fe7e25026e4f491bd45a33c8b60a3b66b
946aab131a749b5cbebf261fd1c87546813d44f7
refs/heads/master
2022-12-23T10:29:50.833789
2019-11-27T06:54:21
2019-11-27T06:54:21
224,332,249
0
0
null
2022-12-16T06:23:37
2019-11-27T03:01:54
Python
UTF-8
Python
false
false
868
py
# /usr/bin/python
import ConfigParser
import string, os, sys
import importlib

cf = ConfigParser.ConfigParser()
cf.read("test.conf")

# return all section
secs = cf.sections()
print('sections:', secs)

opts = cf.options("db")
print('options:', opts)

kvs = cf.items("db")
print('db:', kvs)

# read by type
db_host = cf.get("db", "db_host")
db_port = cf.getint("db", "db_port")
db_user = cf.get("db", "db_user")
db_pass = cf.get("db", "db_pass")

# read int
threads = cf.getint("concurrent", "thread")
processors = cf.getint("concurrent", "processor")

print("db_host:", db_host)
print("db_port:", db_port)
print("db_user:", db_user)
print("db_pass:", db_pass)
print("thread:", threads)
print("processor:", processors)

# modify one value and write to file
cf.set("db", "db_pass", "xgmtest")
cf.write(open("test.conf", "w"))
[ "shenchuang@yunjiglobal.com" ]
shenchuang@yunjiglobal.com
3c06b630ecc6a7b987a0d44486d3347d1b66bdc8
a9e3f3ad54ade49c19973707d2beb49f64490efd
/Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/lib/tests/test_xblock_utils.py
1bd926c648d0c4209e9fe4b68a6e1af8ae596d70
[ "AGPL-3.0-only", "AGPL-3.0-or-later", "MIT" ]
permissive
luque/better-ways-of-thinking-about-software
8c3dda94e119f0f96edbfe5ba60ca6ec3f5f625d
5809eaca7079a15ee56b0b7fcfea425337046c97
refs/heads/master
2021-11-24T15:10:09.785252
2021-11-22T12:14:34
2021-11-22T12:14:34
163,850,454
3
1
MIT
2021-11-22T12:12:31
2019-01-02T14:21:30
JavaScript
UTF-8
Python
false
false
9,291
py
""" Tests for xblock_utils.py """ import uuid from unittest.mock import patch import ddt from django.conf import settings from django.test.client import RequestFactory from opaque_keys.edx.asides import AsideUsageKeyV1, AsideUsageKeyV2 from web_fragments.fragment import Fragment from xblock.core import XBlockAside from openedx.core.lib.url_utils import quote_slashes from openedx.core.lib.xblock_builtin import get_css_dependencies, get_js_dependencies from openedx.core.lib.xblock_utils import ( get_aside_from_xblock, is_xblock_aside, replace_course_urls, replace_jump_to_id_urls, replace_static_urls, request_token, sanitize_html_id, wrap_fragment, wrap_xblock ) from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from xmodule.modulestore.tests.test_asides import AsideTestType @ddt.ddt class TestXblockUtils(SharedModuleStoreTestCase): """ Tests for xblock utility functions. """ @classmethod def setUpClass(cls): super().setUpClass() cls.course_mongo = CourseFactory.create( default_store=ModuleStoreEnum.Type.mongo, org='TestX', number='TS01', run='2015' ) cls.course_split = CourseFactory.create( default_store=ModuleStoreEnum.Type.split, org='TestX', number='TS02', run='2015' ) def create_fragment(self, content=None): """ Create a fragment. """ fragment = Fragment(content) fragment.add_css('body {background-color:red;}') fragment.add_javascript('alert("Hi!");') return fragment def test_wrap_fragment(self): """ Verify that wrap_fragment adds new content. """ new_content = '<p>New Content<p>' fragment = self.create_fragment() wrapped_fragment = wrap_fragment(fragment, new_content) assert '<p>New Content<p>' == wrapped_fragment.content assert 'body {background-color:red;}' == wrapped_fragment.resources[0].data assert 'alert("Hi!");' == wrapped_fragment.resources[1].data def test_request_token(self): """ Verify that a proper token is returned. """ request_with_token = RequestFactory().get('/') request_with_token._xblock_token = '123' # pylint: disable=protected-access token = request_token(request_with_token) assert token == '123' request_without_token = RequestFactory().get('/') token = request_token(request_without_token) # Test to see if the token is an uuid1 hex value test_uuid = uuid.UUID(token, version=1) assert token == test_uuid.hex @ddt.data( ('course_mongo', 'data-usage-id="i4x:;_;_TestX;_TS01;_course;_2015"'), ('course_split', 'data-usage-id="block-v1:TestX+TS02+2015+type@course+block@course"') ) @ddt.unpack def test_wrap_xblock(self, course_id, data_usage_id): """ Verify that new content is added and the resources are the same. """ fragment = self.create_fragment("<h1>Test!</h1>") fragment.initialize_js('BlockMain') # wrap_block() sets some attributes only if there is JS. 
course = getattr(self, course_id) test_wrap_output = wrap_xblock( runtime_class='TestRuntime', block=course, view='baseview', frag=fragment, context={"wrap_xblock_data": {"custom-attribute": "custom-value"}}, usage_id_serializer=lambda usage_id: quote_slashes(str(usage_id)), request_token=uuid.uuid1().hex ) assert isinstance(test_wrap_output, Fragment) assert 'xblock-baseview' in test_wrap_output.content assert 'data-runtime-class="TestRuntime"' in test_wrap_output.content assert data_usage_id in test_wrap_output.content assert '<h1>Test!</h1>' in test_wrap_output.content assert 'data-custom-attribute="custom-value"' in test_wrap_output.content assert test_wrap_output.resources[0].data == 'body {background-color:red;}' assert test_wrap_output.resources[1].data == 'alert("Hi!");' @ddt.data('course_mongo', 'course_split') def test_replace_jump_to_id_urls(self, course_id): """ Verify that the jump-to URL has been replaced. """ course = getattr(self, course_id) test_replace = replace_jump_to_id_urls( course_id=course.id, jump_to_id_base_url='/base_url/', block=course, view='baseview', frag=Fragment('<a href="/jump_to_id/id">'), context=None ) assert isinstance(test_replace, Fragment) assert test_replace.content == '<a href="/base_url/id">' @ddt.data( ('course_mongo', '<a href="/courses/TestX/TS01/2015/id">'), ('course_split', '<a href="/courses/course-v1:TestX+TS02+2015/id">') ) @ddt.unpack def test_replace_course_urls(self, course_id, anchor_tag): """ Verify that the course URL has been replaced. """ course = getattr(self, course_id) test_replace = replace_course_urls( course_id=course.id, block=course, view='baseview', frag=Fragment('<a href="/course/id">'), context=None ) assert isinstance(test_replace, Fragment) assert test_replace.content == anchor_tag @ddt.data( ('course_mongo', '<a href="/c4x/TestX/TS01/asset/id">'), ('course_split', '<a href="/asset-v1:TestX+TS02+2015+type@asset+block/id">') ) @ddt.unpack def test_replace_static_urls(self, course_id, anchor_tag): """ Verify that the static URL has been replaced. """ course = getattr(self, course_id) test_replace = replace_static_urls( data_dir=None, course_id=course.id, block=course, view='baseview', frag=Fragment('<a href="/static/id">'), context=None ) assert isinstance(test_replace, Fragment) assert test_replace.content == anchor_tag def test_sanitize_html_id(self): """ Verify that colons and dashes are replaced. """ dirty_string = 'I:have-un:allowed_characters' clean_string = sanitize_html_id(dirty_string) assert clean_string == 'I_have_un_allowed_characters' @ddt.data( (True, ["combined.css"]), (False, ["a.css", "b.css", "c.css"]), ) @ddt.unpack def test_get_css_dependencies(self, pipeline_enabled, expected_css_dependencies): """ Verify that `get_css_dependencies` returns correct list of files. """ pipeline = settings.PIPELINE.copy() pipeline['PIPELINE_ENABLED'] = pipeline_enabled pipeline['STYLESHEETS'] = { 'style-group': { 'source_filenames': ["a.css", "b.css", "c.css"], 'output_filename': "combined.css" } } with self.settings(PIPELINE=pipeline): css_dependencies = get_css_dependencies("style-group") assert css_dependencies == expected_css_dependencies @ddt.data( (True, ["combined.js"]), (False, ["a.js", "b.js", "c.js"]), ) @ddt.unpack def test_get_js_dependencies(self, pipeline_enabled, expected_js_dependencies): """ Verify that `get_js_dependencies` returns correct list of files. 
""" pipeline = settings.PIPELINE.copy() pipeline['PIPELINE_ENABLED'] = pipeline_enabled pipeline['JAVASCRIPT'] = { 'js-group': { 'source_filenames': ["a.js", "b.js", "c.js"], 'output_filename': "combined.js" } } with self.settings(PIPELINE=pipeline): js_dependencies = get_js_dependencies("js-group") assert js_dependencies == expected_js_dependencies class TestXBlockAside(SharedModuleStoreTestCase): """Test the xblock aside function.""" @classmethod def setUpClass(cls): super().setUpClass() cls.course = CourseFactory.create() cls.block = ItemFactory.create(category='aside', parent=cls.course) cls.aside_v2 = AsideUsageKeyV2(cls.block.scope_ids.usage_id, "aside") cls.aside_v1 = AsideUsageKeyV1(cls.block.scope_ids.usage_id, "aside") def test_is_xblock_aside(self): """test if xblock is aside""" assert is_xblock_aside(self.aside_v2) is True assert is_xblock_aside(self.aside_v1) is True def test_is_not_xblock_aside(self): """test if xblock is not aside""" assert is_xblock_aside(self.block.scope_ids.usage_id) is False @patch('xmodule.modulestore.xml.ImportSystem.applicable_aside_types', lambda self, block: ['test_aside']) @XBlockAside.register_temp_plugin(AsideTestType, 'test_aside') def test_get_aside(self): """test get aside success""" assert get_aside_from_xblock(self.block, "test_aside") is not None
[ "rafael.luque@osoco.es" ]
rafael.luque@osoco.es
4bc0bc66720acadb4e82c6913dc1c2cc79d5bbad
b9cb7886ba2a42dc2c50b55bfb5c730ccd6e083a
/data/checkerboard.py
c3e776a7d3f632660d01b0554e6e4754233b7e1c
[]
no_license
will-jac/sskm
0ca72d2745c2a2d9ac0289c6e571d3391d4cac68
9024e96927d906d0f09eb60f56cb4a66d52084d6
refs/heads/main
2023-04-07T18:02:58.284592
2021-04-12T05:09:54
2021-04-12T05:09:54
354,688,559
0
0
null
null
null
null
UTF-8
Python
false
false
1,487
py
import numpy as np

import util


def generate_data(shape=(1000000, 2), board_range=2, noise=10, seed=None, shuffle=True):
    board_range = float(board_range)

    rng = np.random.default_rng(seed)

    X = rng.uniform(-board_range, board_range, shape)
    y = np.zeros(shape[0])

    noise_array = rng.normal(0, noise, shape)

    def gen_target(z):
        x = z[0] + z[2]
        y = z[1] + z[3]
        # divide into 4x4 clusters
        # this is the absolute worst way to do this
        if x / 2 > 0.5:
            if y / 2 > 0.5:
                return 1
            if y > 0:
                return 0
            elif y / 2 < -0.5:
                return 0
            return 1
        elif x > 0:
            if y / 2 > 0.5:
                return 0
            elif y > 0:
                return 1
            elif y / 2 < -0.5:
                return 1
            return 0
        elif x / 2 > -0.5:
            if y / 2 > 0.5:
                return 1
            elif y > 0:
                return 0
            elif y / 2 < -0.5:
                return 0
            return 1
        else:
            if y / 2 > 0.5:
                return 0
            elif y > 0:
                return 1
            elif y / 2 < -0.5:
                return 1
            return 0

    y = np.apply_along_axis(gen_target, 1, np.c_[X, noise_array])

    if shuffle:
        return util.shuffle(X, y)
    return X, y


if __name__ == '__main__':
    generate_data((10000, 2), noise=0)
[ "j.william@ou.edu" ]
j.william@ou.edu
a9003dd2139565c9b6a95fbf77d8a39cff6ba778
faf267a86fba5658a5b632dfcafd1fd924cef883
/images/migrations/0001_initial.py
96fd0aad1077b082bddee4ba6df65f0058882918
[]
no_license
toxicOxygen/bookmark_app-
ee07c9b85420b1c793489abf298b8f56149cebb4
41be63e15b4488e5a79815528a41986bc5b2ad6f
refs/heads/master
2022-12-13T22:13:08.070915
2020-09-30T21:34:15
2020-09-30T21:34:15
251,190,284
0
0
null
2022-12-08T03:55:43
2020-03-30T03:20:46
JavaScript
UTF-8
Python
false
false
1,090
py
# Generated by Django 3.0.4 on 2020-03-06 15:13

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('slug', models.SlugField(blank=True, max_length=200)),
                ('description', models.TextField(blank=True)),
                ('image', models.ImageField(upload_to='images/%Y/%m/%d/')),
                ('url', models.URLField()),
                ('created', models.DateField(auto_now_add=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images_creates', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
[ "kwakukusi30@outlook.com" ]
kwakukusi30@outlook.com
597040ed63c80dc77fce45caef2782583890d690
a4c9d1abfb283294d2d5402c1cebf8bdcc31f065
/Tests/Alliaria.py
6e5470fe7f22140269392a9a8f68616954b17027
[]
no_license
garcinc/Projet-L3
623bf8635a16d8c096efe6804bc1547f907a00ee
6ec942f99ddeef17aa7e3f85c889ed045a4e4b9a
refs/heads/main
2023-06-05T02:46:44.688327
2021-06-30T13:26:06
2021-06-30T13:26:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
487
py
import os
import pandas as pd
from functions import *

User = "Aurélien"
path_to_train, path_to_folder = user_paths(User)

############# Transform class_names into class_names_2 #############

# Dataframe with id_species, species_name
data = pd.read_csv(os.path.join(path_to_folder, "class_names_2.csv"))

for i in range(len(data)):
    if data.genus[i] == "Alliaria":
        print(data.id_species[i], data.species_name[i], data.Images[i])

# also a duplicate for the metasequoia
[ "aurelien-mail@orange.fr" ]
aurelien-mail@orange.fr
f42980689431b7929ec800fdedd6bd583e210daf
553aba94bca1f4d9a250a61e87d7f85e2e26a240
/filter_packets.py
a0b8265b363f2ddca232678977b02548a3823636
[]
no_license
nickman3422/Network-Packet-Analyzer
b32266163f6fae070d012e6a3eb3bf4a20bcbeab
b920b664d5bfe5128c3eb9f5f2ee8a7661e4bf20
refs/heads/main
2023-01-08T14:07:20.552846
2020-11-17T18:03:53
2020-11-17T18:03:53
313,701,915
0
0
null
null
null
null
UTF-8
Python
false
false
825
py
import os

def filter():
    print 'called filter function in filter_packets.py'
    files = ('Node1', 'Node2', 'Node3', 'Node4')
    for f in files:
        if os.path.exists('data/' + f + '_filtered.txt'):
            os.remove('data/' + f + '_filtered.txt')
        with open('data/' + f + '.txt', 'r') as fp:
            with open('data/' + f + '_filtered.txt', 'w') as op:
                line = fp.readline()
                while line:
                    line = line.strip()
                    if 'Echo' in line and ('request' in line or 'reply' in line):
                        op.write('No. Time Source Destination Protocol Length Info\n')
                        op.write(line + '\n\n')
                        fp.readline()
                        temp_line = fp.readline().strip()
                        while temp_line != '':
                            op.write(temp_line + '\n')
                            temp_line = fp.readline().strip()
                        op.write('\n')
                    line = fp.readline()
[ "noreply@github.com" ]
nickman3422.noreply@github.com
601a3f8c97fd36a12e645631b212de82dac87682
ce6d74994bce49411f00f5053f56fb3b7c30bd50
/leetcode/prac10.py
387c9c657b00bec6eccc053770626f0dd75c3bb7
[]
no_license
zhengjiani/pyAlgorithm
9397906f3c85221e64f0415abfbb64d03eb1c51e
dbd04a17cf61bac37531e3337ba197c4af19489e
refs/heads/master
2021-07-11T19:07:26.480403
2020-07-16T00:25:24
2020-07-16T00:25:24
179,308,500
0
0
null
null
null
null
UTF-8
Python
false
false
2,039
py
# -*- coding: utf-8 -*-
# @Time    : 2019/4/18 16:57
# @Author  : zhengjiani
# @Software: PyCharm
# @Blog    : https://zhengjiani.github.io/
"""
Problem statement:
Implement a function that determines whether a string represents a numeric value
(including integers and decimals).
For example, the strings "+100", "5e2", "-123", "3.1416" and "-1E-16" all represent
numbers, while "12e", "1a3.14", "1.2.3", "+-5" and "12e+4.3" do not.
Three kinds of solutions are commonly seen: a regex solution, the "Sword Offer"
(Jianzhi) solution, and a finite state machine solution.
"""


def is_number(s):
    """
    "Sword Offer" style solution
    :param s:
    :return:
    """
    n = len(s)
    if n < 0:
        return False
    # sign flag
    ab = False
    # 'e' flag
    he = False
    # decimal point flag
    po = False
    i = 0
    while i < n:
        if s[i] == '+' or s[i] == '-':
            # without an 'e', at most one sign may appear in the string,
            # or the whole string is just a single sign
            if (ab or (i > 0 and (s[i-1] != 'e') and (s[i-1] != 'E')) or n == 1):
                return False
            ab = True
        elif s[i] == '.':
            # at least one side of the decimal point must be a digit
            if (i == 0 and (i+1 == n or (s[i+1].isdigit() == False))):
                return False
            if (he or po or (s[i-1].isdigit() == False and s[i+1].isdigit() == False)):
                return False
            po = True
        elif s[i] == 'e' or s[i] == 'E':
            if (i-1 < 0 or i+1 >= n or he):
                return False
            # the character to the left of 'e' may be '.' or a digit
            if (s[i-1] == '.' or s[i-1].isdigit()):
                if s[i+1].isdigit():
                    he = True
                elif ((s[i+1] == '+' or s[i+1] == '-') and i+2 < n and s[i+2].isdigit()):
                    he = True
            if not he:
                return False
            ab = False
            po = True
        else:
            if s[i].isdigit() == False:
                return False
        i += 1
    return True


if __name__ == '__main__':
    s1 = "1a3.14"
    s2 = "5e2"
    print(is_number(s1))
    print(is_number(s2))
[ "936089353@qq.com" ]
936089353@qq.com
0a668af03d88ae76632d2eed1fd4d0c2cb51036a
d066f7fe739fb78f74ec2de8ccbfefdd4270f60f
/tests/modules/deploy/apt/test_deploy.py
8efbe00aaa2d9008d24a9786855865ae5bc31f49
[ "MIT" ]
permissive
AppImageCrafters/appimage-builder
666e75363a74f615cdb3673b3ca9d51a6d292a49
f38699ef3644fa5409a5a262b7b6d99d6fb85db9
refs/heads/main
2023-08-17T06:34:54.029664
2023-06-03T17:51:04
2023-06-03T17:51:04
218,847,680
270
54
MIT
2023-09-06T17:04:18
2019-10-31T19:44:17
Python
UTF-8
Python
false
false
1,713
py
#  Copyright  2021 Alexis Lopez Zubieta
#
#  Permission is hereby granted, free of charge, to any person obtaining a
#  copy of this software and associated documentation files (the "Software"),
#  to deal in the Software without restriction, including without limitation the
#  rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
#  sell copies of the Software, and to permit persons to whom the Software is
#  furnished to do so, subject to the following conditions:
#
#  The above copyright notice and this permission notice shall be included in
#  all copies or substantial portions of the Software.
import logging
import shutil
from pathlib import Path
from unittest import TestCase, skipIf

from appimagebuilder.modules.deploy.apt import Deploy
from appimagebuilder.modules.deploy.apt.venv import Venv


@skipIf(not shutil.which("apt-get"), reason="requires apt-get")
class TestDeploy(TestCase):
    venv_path = None
    appdir_path = None

    @classmethod
    def setUpClass(cls):
        cls.venv_path = "/tmp/apt-venv"
        cls.appdir_path = Path("/tmp/AppDir")
        cls.apt_venv = Venv(
            cls.venv_path,
            ["deb [arch=amd64] http://deb.debian.org/debian/ bullseye main"],
            ["https://ftp-master.debian.org/keys/archive-key-10.asc"],
            ["amd64"],
        )

    @classmethod
    def tearDownClass(cls):
        pass
        # shutil.rmtree(cls.venv_path)
        # shutil.rmtree(cls.appdir_path)

    def test_deploy(self):
        logging.basicConfig(level=0)
        apt_deploy = Deploy(self.apt_venv)
        apt_deploy.deploy(["perl", "util-linux"], self.appdir_path)

        self.assertTrue(next(self.appdir_path.glob("usr")))
[ "contact@azubieta.net" ]
contact@azubieta.net
d7b428e5f34305e7b270c4b1473e0e3b9549bd8e
a66c7158d201f98f15a3859b7e88155c4463c61c
/Choice.py
8f70697a09da145edffc19bff52fc00087680ac3
[]
no_license
pengwei11/Automation3.0
9faa957874e6897bf2dc66e94a6ed78299aeb004
862e515d6a1868032f0b267641938c52bf62cd86
refs/heads/master
2022-12-11T11:53:58.662565
2020-07-31T03:28:34
2020-07-31T03:28:34
283,929,855
1
0
null
2022-11-22T05:01:48
2020-07-31T03:09:39
Python
UTF-8
Python
false
false
2,563
py
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtWidgets import QDialog
from PyQt5.QtGui import QIcon

from Utils.ConfigRead import *
from QtGui.gui import Automation
from QtGui.gui import Interface


class Choice(QDialog):
    def __init__(self, parent=None):
        super(Choice, self).__init__(parent)
        self.CreatUi()

    def CreatUi(self):
        # set the window name
        self.setWindowTitle('自动化测试脚本')
        self.setWindowIcon(QIcon(RESOURSE_PATH + 'lable.png'))
        self.setWindowModality(QtCore.Qt.ApplicationModal)
        self.setFixedSize(350, 220)
        self.setAcceptDrops(True)
        self.setWindowTitle('脚本选择')
        self.listView = QtWidgets.QListView(self)
        self.toolButton = QtWidgets.QToolButton(self)
        self.toolButton.setText('Web测试')
        self.toolButton.setObjectName('web')
        self.toolButton_2 = QtWidgets.QToolButton(self)
        self.toolButton_2.setText('接口测试')
        self.toolButton_2.setObjectName('interface')
        self.Position()
        self.ButtonBind()

    def ButtonBind(self):
        self.toolButton.clicked.connect(self.web_ui)
        self.toolButton_2.clicked.connect(self.interface_ui)

    def web_ui(self):
        """Initialize the Automation window"""
        # close the main window
        automation = Automation.Automation()
        styleSheet = automation.readQssFile(QSS_PATH + 'Automator.qss')
        automation.setStyleSheet(styleSheet)
        # block the parent window until the child window is closed
        automation.setWindowModality(QtCore.Qt.ApplicationModal)
        automation.show()

    def interface_ui(self):
        widget = Interface.Interface()
        styleSheet = widget.readQssFile(QSS_PATH + 'Interface.qss')
        widget.setStyleSheet(styleSheet)
        # block the parent window until the child window is closed
        widget.setWindowModality(QtCore.Qt.ApplicationModal)
        widget.show()

    def Position(self):
        self.listView.setGeometry(QtCore.QRect(0, 0, 350, 220))
        self.toolButton.setGeometry(QtCore.QRect(30, 80, 120, 40))
        self.toolButton_2.setGeometry(QtCore.QRect(200, 80, 120, 40))

    def readQssFile(self, filePath):
        with open(filePath, 'r', encoding='utf-8') as fileObj:
            styleSheet = fileObj.read()
            return styleSheet


if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    widget = Choice()
    styleSheet = widget.readQssFile(QSS_PATH + 'Choice.qss')
    widget.setStyleSheet(styleSheet)
    widget.show()
    sys.exit(app.exec_())
[ "1249294960@qq.com" ]
1249294960@qq.com
cf771d30b18d12a593a2ae992d3a9b1a077f4d60
871dddb5c8059d96b767a323b0f87d3fbb62e786
/vint/ast/plugin/scope_plugin/builtin_dictionary.py
0ab9bdd21b0047f5b8dcb6864ae438e520d60f19
[ "MIT" ]
permissive
msabramo/vint
6ef12ed61d54d0d2b2a9d1da1ce90c0e2c734ab2
f13569f2a62ff13ff8ad913e7d6fb2c57953af20
refs/heads/master
2023-08-24T01:20:14.699485
2014-12-31T18:28:59
2014-12-31T18:28:59
null
0
0
null
null
null
null
UTF-8
Python
false
false
7,285
py
BuiltinVariables = { 'beval_col': True, 'beval_bufnr': True, 'beval_lnum': True, 'beval_text': True, 'beval_winnr': True, 'char': True, 'charconvert_from': True, 'charconvert_to': True, 'cmdarg': True, 'cmdbang': True, 'count': True, 'count1': True, 'ctype': True, 'dying': True, 'errmsg': True, 'exceptio': True, 'fcs_reason': True, 'fcs_choice': True, 'fname_in': True, 'fname_out': True, 'fname_new': True, 'fname_diff': True, 'folddashes': True, 'foldlevel': True, 'foldend': True, 'foldstart': True, 'hlsearch': True, 'insertmode': True, 'key': True, 'lang': True, 'lc_time': True, 'lnum': True, 'mouse_win': True, 'mouse_lnum': True, 'mouse_col': True, 'oldfiles': True, 'operator': True, 'prevcount': True, 'profiling': True, 'progname': True, 'progpath': True, 'register': True, 'scrollstart': True, 'servername': True, 'searchforward': True, 'shell_error': True, 'statusmsg': True, 'swapname': True, 'swapchoice': True, 'swapcommand': True, 'termresponse': True, 'this_session': True, 'throwpoint': True, 'val': True, 'version': True, 'warningmsg': True, 'windowid': True, } BuiltinFunctions = { 'abs': True, 'acos': True, 'add': True, 'and': True, 'append': True, 'append': True, 'argc': True, 'argidx': True, 'arglistid': True, 'argv': True, 'argv': True, 'asin': True, 'atan': True, 'atan2': True, 'browse': True, 'browsedir': True, 'bufexists': True, 'buflisted': True, 'bufloaded': True, 'bufname': True, 'bufnr': True, 'bufwinnr': True, 'byte2line': True, 'byteidx': True, 'byteidxcomp': True, 'call': True, 'ceil': True, 'changenr': True, 'char2nr': True, 'cindent': True, 'clearmatches': True, 'col': True, 'complete': True, 'complete': True, 'complete_add': True, 'complete_check': True, 'confirm': True, 'copy': True, 'cos': True, 'cosh': True, 'count': True, 'cscope_connection': True, 'cursor': True, 'cursor': True, 'deepcopy': True, 'delete': True, 'did_filetype': True, 'diff_filler': True, 'diff_hlID': True, 'empty': True, 'escape': True, 'eval': True, 'eventhandler': True, 'executable': True, 'exepath': True, 'exists': True, 'extend': True, 'exp': True, 'expand': True, 'feedkeys': True, 'filereadable': True, 'filewritable': True, 'filter': True, 'finddir': True, 'findfile': True, 'float2nr': True, 'floor': True, 'fmod': True, 'fnameescape': True, 'fnamemodify': True, 'foldclosed': True, 'foldclosedend': True, 'foldlevel': True, 'foldtext': True, 'foldtextresult': True, 'foreground': True, 'function': True, 'garbagecollect': True, 'get': True, 'get': True, 'getbufline': True, 'getbufvar': True, 'getcmdline': True, 'getcmdpos': True, 'getcmdtype': True, 'getcmdwintype': True, 'getcurpos': True, 'getcwd': True, 'getfontname': True, 'getfperm': True, 'getfsize': True, 'getftime': True, 'getftype': True, 'getline': True, 'getline': True, 'getloclist': True, 'getmatches': True, 'getpid': True, 'getpos': True, 'getqflist': True, 'getreg': True, 'getregtype': True, 'gettabvar': True, 'gettabwinvar': True, 'getwinposx': True, 'getwinposy': True, 'getwinvar': True, 'glob': True, 'globpath': True, 'has': True, 'has_key': True, 'haslocaldir': True, 'hasmapto': True, 'histadd': True, 'histdel': True, 'histget': True, 'histnr': True, 'hlexists': True, 'hlID': True, 'hostname': True, 'iconv': True, 'indent': True, 'index': True, 'input': True, 'inputdialog': True, 'inputlist': True, 'inputrestore': True, 'inputsave': True, 'inputsecret': True, 'insert': True, 'invert': True, 'isdirectory': True, 'islocked': True, 'items': True, 'join': True, 'keys': True, 'len': True, 'libcall': True, 'libcallnr': True, 'line': 
True, 'line2byte': True, 'lispindent': True, 'localtime': True, 'log': True, 'log10': True, 'luaeval': True, 'map': True, 'matchadd': True, 'matchaddpos': True, 'maparg': True, 'mapcheck': True, 'match': True, 'matcharg': True, 'matchdelete': True, 'matchend': True, 'matchlist': True, 'matchstr': True, 'max': True, 'min': True, 'mkdir': True, 'mode': True, 'mzeval': True, 'nextnonblank': True, 'nr2char': True, 'or': True, 'pathshorten': True, 'pow': True, 'prevnonblank': True, 'printf': True, 'pumvisible': True, 'pyeval': True, 'py3eval': True, 'range': True, 'readfile': True, 'reltime': True, 'reltimestr': True, 'remote_expr': True, 'remote_foreground': True, 'remote_peek': True, 'remote_read': True, 'remote_send': True, 'remove': True, 'remove': True, 'rename': True, 'repeat': True, 'resolve': True, 'reverse': True, 'round': True, 'screenattr': True, 'screenchar': True, 'screencol': True, 'screenrow': True, 'search': True, 'searchdecl': True, 'searchpair': True, 'searchpairpos': True, 'searchpos': True, 'server2client': True, 'serverlist': True, 'setbufvar': True, 'setcmdpos': True, 'setline': True, 'setloclist': True, 'setmatches': True, 'setpos': True, 'setqflist': True, 'setreg': True, 'settabvar': True, 'settabwinvar': True, 'setwinvar': True, 'sha256': True, 'shellescape': True, 'shiftwidth': True, 'simplify': True, 'sin': True, 'sinh': True, 'sort': True, 'soundfold': True, 'spellbadword': True, 'spellsuggest': True, 'split': True, 'sqrt': True, 'str2float': True, 'str2nr': True, 'strchars': True, 'strdisplaywidth': True, 'strftime': True, 'stridx': True, 'string': True, 'strlen': True, 'strpart': True, 'strridx': True, 'strtrans': True, 'strwidth': True, 'submatch': True, 'substitute': True, 'synID': True, 'synIDattr': True, 'synIDtrans': True, 'synconcealed': True, 'synstack': True, 'system': True, 'systemlist': True, 'tabpagebuflist': True, 'tabpagenr': True, 'tabpagewinnr': True, 'taglist': True, 'tagfiles': True, 'tempname': True, 'tan': True, 'tanh': True, 'tolower': True, 'toupper': True, 'tr': True, 'trunc': True, 'type': True, 'undofile': True, 'undotree': True, 'uniq': True, 'values': True, 'virtcol': True, 'visualmode': True, 'wildmenumode': True, 'winbufnr': True, 'wincol': True, 'winheight': True, 'winline': True, 'winnr': True, 'winrestcmd': True, 'winrestview': True, 'winsaveview': True, 'winwidth': True, 'writefile': True, 'xor': True, }
[ "yuki.kokubun@mixi.co.jp" ]
yuki.kokubun@mixi.co.jp
a172a2d0fc5b16f8e96e73a969bb995ac3af93e1
79820abb4257f099d6a8441eb05beb748798d5e1
/log_core.py
5591d6d0eab2ab3b31e9946d35e0ab510c0fb59c
[]
no_license
leighjpeter/python
f1683e166f50f74347f96af7dc527fde007accd2
b99148472f046ed746289a4b3a20ff2c959c417b
refs/heads/master
2022-03-17T05:42:49.929625
2019-05-29T03:07:49
2019-05-29T03:07:49
108,529,455
0
0
null
null
null
null
UTF-8
Python
false
false
197
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import logging

logger = logging.getLogger('main.core')


def run():
    logger.info('Core Info')
    logger.debug('Core Debug')
    logger.error('Core Error')
[ "leighj@163.com" ]
leighj@163.com
9ebc50c3950917c8b758138d6f68fd151cad4891
4371491b93bc2e9feb7d679bd30c3f40f1ec2052
/top_10_tags_mapper.py
7424acb723861a77f71f26bbf168088d7ba42c80
[]
no_license
cmohite/Intro_To_Hadoop_Udacity_Course_1
57022df16f516f0cf8191cd69e2c1a218f951362
a14a98e5450fcbdc8baea6e8a4fa7543cc2d7146
refs/heads/master
2016-09-13T14:27:15.228536
2016-05-24T12:18:30
2016-05-24T12:18:30
58,108,463
0
0
null
null
null
null
UTF-8
Python
false
false
436
py
#!/usr/bin/python

import sys
from datetime import datetime

for line in sys.stdin:
    data = line.strip().split("\t")
    if len(data) == 19:
        tag_names = data[2]
        author_id = data[3]
        node_type = data[5]
        if author_id == "author_id":
            continue
        #print "{0}\t{1}\t{2}".format(node_type, author_id, tag_names)
        if node_type == 'question':
            tags = tag_names.split(" ")
            for tag in tags:
                print "{0}\t{1}".format(tag, 1)
[ "cmohite@gmail.com" ]
cmohite@gmail.com
6d1276874a86938484b529f9876b2f83d6d872a0
fc85938507fe52534ef3043f24adb8edab96c46e
/tests/test_breastfeeding.py
907a9f83e40531ececb22280f2d720b3710dc206
[ "MIT" ]
permissive
gkovaig/apex_iud_nlp
00e165fcc81bb6325b460cb2ea322e2622856f02
f59afbd5d19d6bae21264f6de7ee24382ccb694e
refs/heads/master
2023-02-14T15:24:08.211611
2021-01-05T19:20:30
2021-01-05T19:20:30
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,095
py
import pytest from apex.algo.breastfeeding import LACTATION_VISIT, BF_UNKNOWN, BF_EXACT, BF_NO_EXACT, EXPRESSED_MILK_EXACT, BF_YES, \ PUMPING_ACTIVE, NIPPLE_SHIELD, BF_FEEDING, WHOLE_MILK, matches_nutrition_not_bf, BF_NOT @pytest.mark.parametrize('text', [ 'lactation consultation', ]) def test_lactationvisit_regex_match(text): assert LACTATION_VISIT.matches(text) @pytest.mark.parametrize('text', [ 'breastfeeding has been going well', 'pt breastfeeding well', 'breastfeeding exclusively', 'exclusively breastfeeding', 'currently breastfeeding', 'she is currently breast feeding', 'she is breast feeding', ]) def test_yes_regex_match(text): assert BF_YES.matches(text) @pytest.mark.parametrize('text', [ 'weaning from nipple shield' ]) def test_nippleshield_regex_match(text): assert NIPPLE_SHIELD.matches(text) @pytest.mark.parametrize('text', [ 'breast pumping', 'is using a breast pump', ]) def test_pumpactive_regex_match(text): assert PUMPING_ACTIVE.matches(text) @pytest.mark.parametrize('text', [ 'Breastfeeding: {YES NO:17553}', ]) def test_bfunknown_regex_match(text): assert BF_UNKNOWN.matches(text) @pytest.mark.parametrize('text', [ 'expressed breast milk: all', 'expressed breast milk: 12oz', 'expressed breast milk: 1/2-1 ounce total', '10-20ml of expressed breastmilk', ]) def test_expressedexact_regex_match(text): assert EXPRESSED_MILK_EXACT.matches(text) @pytest.mark.parametrize('text', [ 'nutrition: both breast and ebm', 'he takes both breast and formula', 'she is taking breast only', ]) def test_bffeeding_regex_match(text): assert BF_FEEDING.matches(text) @pytest.mark.parametrize('text', [ 'Nutrition: solids and formula and whole milk', ]) def test_wholemilk_regex_match(text): assert WHOLE_MILK.matches(text) @pytest.mark.parametrize('text', [ 'Water: public water supply Nutrition: whole milk And still breast feeding', 'Nutrition: solids and breast and whole milk' ]) def test_not_wholemilk_regex_match(text): assert not WHOLE_MILK.matches(text) @pytest.mark.parametrize('text', [ 'Breastfeeding frequency: every 2-2.5 hours one 3-4 hour interval at night', 'pumping every 2-3 hours', 'pumping every hour', 'Breastfeeding frequency:8-10 x 45 minutes', 'nursing frequency: 7-8 times/24 hours', 'Breastfeeding frequency:8x day', 'Intake at breast: 2.5 oz', 'breast feeding every 3-4 hours for 10 minutes per side', 'Problems with breastfeeding: yes', # problems imply breastfeeding ongoing ]) def test_bfexact_regex_match(text): assert BF_EXACT.matches(text) assert not BF_NO_EXACT.matches(text) @pytest.mark.parametrize('text', [ 'She is not breast feeding', 'pt is not breastfeeding', ]) def test_bfnot_match(text): assert BF_NOT.matches(text) # NOT @pytest.mark.parametrize('text', [ 'prelactation consultation', 'The lactation consultant usually advises waiting 4-5 days between offering new foods', ]) def test_not_lactationvisit_regex_match(text): assert not LACTATION_VISIT.matches(text) @pytest.mark.parametrize('text', [ 'advised exclusively breastfeeding', 'Some exclusively breast feeding babies stool infrequently', 'ideally after breast-feeding exclusively', ]) def test_not_yes_regex_match(text): assert not BF_YES.matches(text) @pytest.mark.parametrize('text', [ 'not initiating breast pumping', 'Breast pumping and galactogogue therapy may help', ]) def test_not_pumpactive_regex_match(text): assert not PUMPING_ACTIVE.matches(text) @pytest.mark.parametrize('text', [ 'breastfeeding: No Tobacco Use: quit', 'breastfeeding: None Tobacco Use: quit', 'breastfeeding: N Tobacco Use: quit', 'breastfeeding: denies 
Tobacco Use: quit', ]) def test_bfno_regex_match(text): assert BF_NO_EXACT.matches(text) @pytest.mark.parametrize('text', [ 'Problems with breastfeeding: no', 'breastfeeding: NA Tobacco Use: quit', 'breastfeeding: N/A Tobacco Use: quit', ]) def test_not_bfno_regex_match(text): assert not BF_NO_EXACT.matches(text) @pytest.mark.parametrize('text', [ 'discussed: nutrition: both breast and ebm', 'teaching/guidance:\nwhat?\nnutrition: both breast and ebm', ]) def test_not_bffeeding_regex_match(text): assert not BF_FEEDING.matches(text) @pytest.mark.parametrize('text', [ 'nutrition: formula - similac, sometimes gets pedialite', 'Nutrition: Bottle Frequency: 2 ounces 4 times per day.', "Nutrition: solids and cow's milk", 'Nutrition : solids and cow \'s milk Frequency : 4 - 5 x day Solids : dairy , cereals , fruits , ', ]) def test_matches_nutrition_not_bf(text): assert matches_nutrition_not_bf(text) @pytest.mark.parametrize('text', [ 'nutrition: weaning from bottle', 'Information provided: nutrition: breastfeeding', 'Nutrition: Br Frequency: 2 ounces 4 times per day.', ]) def test_not_matches_nutrition_not_bf(text): assert not matches_nutrition_not_bf(text)
[ "dcronkite@gmail.com" ]
dcronkite@gmail.com
461eaee8cd2b9f1017a7b1239a1c50f5402a998c
c6aa7ebf6e5368196ea03c2ed82cad3d115f75ab
/src/seedless_util.py
808a7ed8eaecf9c22d52e8b674cde2ebb8ec8548
[ "MIT" ]
permissive
metamarcdw/PyBitmessage-I2P
9c6df8cc1c8233f75fd9f6a70df5aa4873ea379a
a6e4f956dd7f8329af4f4b225539bff591b577ff
refs/heads/master
2021-01-17T07:03:30.010150
2016-12-17T10:23:26
2016-12-17T10:23:26
35,043,932
16
2
null
2016-05-14T15:04:23
2015-05-04T15:47:01
Python
UTF-8
Python
false
false
3,006
py
#!/usr/bin/env python

import seedless
from random import choice
from urllib2 import HTTPError
from socket import timeout


def result_to_list(result, index):
    list = []
    for entry in result[1]:
        item = entry.split(" ")[index]
        list.append(item)
    return list


service = ""

def scrapePeers(dest):
    # LOCATE
    tryPlugin = True
    while True:
        # Try plugin first, then keep trying random servers
        cmd = "locate"
        query = "seedless %s" % service
        try:
            if tryPlugin:
                tryPlugin = False
                res = seedless.call_seedless_plugin(cmd, query)
            else:
                randomServer = choice(seedless.knownSeedlessServers)
                res = seedless.call_seedless_server(randomServer, cmd, query)
        except ( HTTPError, timeout ) as e:
            if e.code != 404:
                print "Seedless:locate", e
            continue
        currentServerList = result_to_list(res, 0)
        if len(currentServerList) > 0:
            print "servers: ", len(currentServerList)
            break

    maxServers = len(currentServerList)
    if maxServers > 3:
        maxServers = 3
    elif maxServers <= 0:
        raise Exception("No %s compatible seedless servers were found." % service)

    # ANNOUNCE
    for server in currentServerList:
        while True:
            try:
                ann_res = seedless.call_seedless_server( \
                    server, "announce", "%s %s" % (service, dest) )
            except ( HTTPError, timeout ) as e:
                print "Seedless:announce", e
                continue
            if ann_res:
                break

    # SCRAPE
    peerDestinations = set()
    serversScraped = 0
    while True:
        currentServer = choice(currentServerList)
        try:
            res = seedless.call_seedless_server( \
                currentServer, "locate", "%s " % service)
        except ( HTTPError, timeout ) as e:
            print "Seedless:scrape", e
            continue
        peers = result_to_list(res, 2)
        if len(peers) > 0:
            currentServerList.remove(currentServer)
            peerDestinations.update(peers)
            serversScraped += 1
            if serversScraped >= maxServers:
                break

    return list( peerDestinations )


if __name__ == "__main__":
    service = "pybitmsg-i2p"
    mydest = "fmzq6Y8MLj2IPwmym2d1xuM5oSRT2-Db0Zv4yAPS3deGZNDPTZu4ZguWXQrqcZ6~lptHql4~h4y6ttjzg3NA2cGF44x5JvGTkOwmcopLLx5WsD-LzXwDqU1ncO6K7nmRQovgwZCWgqOrjs9TugN9ci3c2QzUIqN4TgUVMMJHm4yMScAsR4tFBlJpSXe9RnWyXskgf3IvcDWNmCuiNryXNMlb~hyy1lnC29rNIgjYh1nHL9RQ0RUyPuUiyid~GkBCNeSfwzCYi5W8pMErEYmKSwLBcR1MSaRcXD~Tkr3K4KNSPQNCxaCnsuOZoDVMw64NECqAkVVHMnZKU1R0exHiDEds4gJelP-5qSdlYXR6azVde4rrC559Mh9XM1Dlw6kAS1Gv3B7ZyGt6HPhn2uN8rwLvpk60N03J0vi8oubBqkmAhHI2w~FL4apkORdjquz~m~r5bW3CPxH7P1LadnrsNE-m5gO6Sts~9UwVRG-Wmz-L2Za~cDXTP~HXG61IE9IKAAAA"
    print scrapePeers(mydest)
[ "metamarcdw@gmail.com" ]
metamarcdw@gmail.com
dc354b7de3bbe6ea71022ae2f679b77bbdd981c9
ccc2dceeffa8078883f2971ea145779fc1dcf34e
/COOK OFF/March Cook-Off 2018/p1.py
8ad5b34a1176595804987e13941a80a38bd2dbad
[]
no_license
dineshsonachalam/competitive-programming-2
85bbf8679660b2964ad56a711a6b60e0793148f8
2e85f58427eddd098dbb9e56362232cec8d1968e
refs/heads/master
2020-04-05T17:23:30.347444
2018-11-04T01:48:04
2018-11-04T01:48:04
157,058,120
1
0
null
2018-11-11T07:42:53
2018-11-11T07:42:53
null
UTF-8
Python
false
false
2,320
py
# Akash Kandpal
# My Domain => http://harrypotter.tech/

# from fractions import gcd
import math
# from itertools import permutations
# import statistics

def readInts(): return list(map(int, raw_input().strip().split()))
def readInt(): return int(raw_input())
def readIntsindex0(): return list(map(lambda x: int(x) - 1, input().split()))
def readStrs(): return raw_input().split()
def readStr(): return raw_input()
def numlistTostr(list1): return ''.join(list1)
def strlistTostr(list1): return ''.join(str(e) for e in list1)
def strTolist(str): return str.split()
def strlistTointlist(str): return map(int, str)
def slicenum(number,x): return int(str(number)[:x])
def precise(num): return "{0:.10f}".format(num)
def rsorted(a): return sorted(a,reverse=True)
def binar(x): return '{0:031b}'.format(x)
def findpermute(word):
    perms = [''.join(p) for p in permutations(word)]
    return set(perms)
def findsubsets(S,m): return set(itertools.combinations(S, m))
def sort1(yy,index): return yy.sort(key = lambda x:x[index])
def reversepair(yy): return yy[::-1]

MOD = 10 ** 9 + 7
# arr = []

for __ in range(readInt()):
    n,m = readInts()
    arr =[]
    for i in range(n):
        arr.append(readInts())
    # print arr
    if(arr[0][0]==-1):
        arr[0][0]=1
    for i in range(n-1):
        if(arr[i+1][0]==-1):
            arr[i+1][0]=arr[i][0]
    for i in range(m-1):
        if(arr[0][i+1]==-1):
            arr[0][i+1]=arr[0][i]
    for i in range(1,n):
        for j in range(1,m):
            if(arr[i][j]==-1):
                arr[i][j]=max(arr[i-1][j],arr[i][j-1])
    flag = 0
    for i in range(n-1):
        for j in range(m-1):
            if(arr[i][j+1]<arr[i][j]):
                flag=1
                break
            if(arr[i+1][j]<arr[i][j]):
                flag=1
                break
    for i in range(n-1):
        if(arr[i+1][m-1]<arr[i][m-1]):
            flag=1
            break
    if(flag!=1):
        for i in range(n):
            for j in range(m):
                print(arr[i][j]),
            print
    else:
        print "-1"

'''
Input: 2 4 4 1 2 2 3 1 -1 7 -1 6 -1 -1 -1 -1 -1 -1 -1 2 3 1 4 -1 1 -1 3 1 3 4 -1 -1 -1 -1 -1 2 3 -1 -1 -1 -1 2
Output: 1 2 2 3 1 7 7 100 6 10 20 101 7 11 21 20000 -1
'''
[ "9654263057akashkandpal@gmail.com" ]
9654263057akashkandpal@gmail.com
e636f1750dec7d0f6586e42ce223ade8ddc3963f
f45195ad67119ac8be5cf9898554a8eccc5b6522
/Bookstore-using-tkinter-sqlite-python/frontend.py
6b536bd71446cbb6d4d6f652a26e358628a7da43
[]
no_license
gurupratap-matharu/my-first-repo
7a06184063ee1ba7bb66a082fe5d0fe24fb37b2c
9ccd946a98b0a00f24813fc732bf274d9f9b32f9
refs/heads/master
2022-12-06T03:36:43.555920
2020-08-27T01:03:27
2020-08-27T01:03:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,685
py
""" A program that stores this book information: Titlee, Author Year, ISBN User can: View all records Search an entry Update entry Delete entry Close entry """ from tkinter import * from backend import Database database = Database("books.db") class Command(): """Base class for actions of commands issued on the frontend""" def __init__(self): pass def get_selected_row(self, event): try: index = list1.curselection()[0] self.selected_tuple = list1.get(index) e1.delete(0, END) e1.insert(END, self.selected_tuple[1]) e2.delete(0, END) e2.insert(END, self.selected_tuple[2]) e3.delete(0, END) e3.insert(END, self.selected_tuple[3]) e4.delete(0, END) e4.insert(END, self.selected_tuple[4]) except IndexError: pass def view(self): """Populates the list box with the view function from backend""" list1.delete(0, END) for row in database.view(): list1.insert(END, row) def add(self): """Adds entry to the backend database with the string populated in the entry boxes.""" database.insert(title_text.get(), author_text.get(), year_text.get(), isbn_text.get()) list1.delete(0, END) list1.insert(END, (title_text.get(), author_text.get(), year_text.get(), isbn_text.get())) def delete(self): """Deletes the selected item from the list widget from the backend database""" database.delete(self.selected_tuple[0]) def update(self): """Fetches new data from the entry widgets and updates them in the database""" database.update(self.selected_tuple[0], title_text.get(), author_text.get(), year_text.get(), isbn_text.get()) def search(self): """Populates the list widget with the search matches between the entry widgets and the backend database.""" row = database.search(title_text.get(), author_text.get(), year_text.get(), isbn_text.get()) list1.delete(0, END) list1.insert(END, row) command = Command() window = Tk() window.wm_title("Book Store") l1 = Label(window, text="Title") l1.grid(row=0, column=0) l2 = Label(window, text="Author") l2.grid(row=0, column=2) l3 = Label(window, text="Year") l3.grid(row=1, column=0) l4 = Label(window, text="ISBN") l4.grid(row=1, column=2) title_text = StringVar() e1 = Entry(window, textvariable=title_text) e1.grid(row=0, column=1) author_text = StringVar() e2 = Entry(window, textvariable=author_text) e2.grid(row=0, column=3) year_text = StringVar() e3 = Entry(window, textvariable=year_text) e3.grid(row=1, column=1) isbn_text = StringVar() e4 = Entry(window, textvariable=isbn_text) e4.grid(row=1, column=3) list1 = Listbox(window, height=6, width=40) list1.grid(row=2, column=0, rowspan=6, columnspan=2) sb1 = Scrollbar(window) sb1.grid(row=2, column=2) list1.configure(yscrollcommand=sb1.set) sb1.configure(command=list1.yview) list1.bind('<<ListboxSelect>>', command.get_selected_row) b1 = Button(window, text="View all", width=12, command=command.view) b1.grid(row=2, column=3) b2 = Button(window, text="Search Entry", width=12, command=command.search) b2.grid(row=3, column=3) b3 = Button(window, text="Add Entry", width=12, command=command.add) b3.grid(row=4, column=3) b4 = Button(window, text="Update Selected", width=12, command=command.update) b4.grid(row=5, column=3) b5 = Button(window, text="Delete Selected", width=12, command=command.delete) b5.grid(row=6, column=3) b6 = Button(window, text="Close", width=12, command=window.destroy) b6.grid(row=7, column=3) window.mainloop()
[ "gurupratap.matharu@gmail.com" ]
gurupratap.matharu@gmail.com
33ae20be8ff4d09fa0651c942b5a4d06b9da7e38
e994de2d306241babf30e0d334cd958e52d28eb5
/assignment1/test_swiss_reader.py
86d7d020a76fcccff3799f7183c7a4c2d0817b33
[]
no_license
linhvoyo/python_training
9dc03868cadfbf284f3278cae2bf68e3a9f70c90
cd56013cd18ff283858d11af2edb58e08e5b0212
refs/heads/master
2021-05-09T00:24:05.172714
2018-02-12T18:26:57
2018-02-12T18:26:57
119,743,179
0
0
null
null
null
null
UTF-8
Python
false
false
2,857
py
""" How many records are in the file? How many records have a sequence of length 260? What are the first 20 residues of 143X_MAIZE? What is the identifier for the record with the shortest sequence? Is there more than one record with that length? What is the identifier for the record with the longest sequence? Is there more than one record with that length? How many contain the subsequence "ARRA"? How many contain the substring "KCIP-1" in the description? """ class SwissRecord(object): def __init__(self, title, sequence_id, sequence, description): self.title = title self.sequence_id = sequence_id self.sequence = "".join(sequence) self.description = "".join(description) input_file = open("swissprot.dat", "r") def shortest_seq(short_seq, shortest, length): if shortest == None or length <= shortest: if shortest == None or length < shortest: short_seq = [] shortest = length short_seq.append(read.title) else: short_seq.append(read.title) return(short_seq, shortest) def longest_seq(long_seq, longest, length): if longest == None or length >= longest: if longest == None or length > longest: long_seq = [] longest = length long_seq.append(read.title) else: long_seq.append(read.title) return(long_seq, longest) short_seq = [] shortest = None long_seq =[] longest = None seq = [] de = [] x_maize = "" count = 0; r_260 = 0; seq_id = "" count_arra = 0 count_kcip = 0 for i in input_file: if i[0:2] == "ID": title = i[:len(i)-1] elif i[0:2] == "//": read = SwissRecord(title, seq_id, seq, de) if len(read.sequence) == 260: r_260 += 1 if "143X_MAIZE" in read.title: x_maize = read.sequence if "ARRA" in read.sequence: count_arra += 1 if "KCIP-1" in read.description: count_kcip += 1 short_seq, shortest = shortest_seq(short_seq, shortest,len(read.sequence)) long_seq, longest = longest_seq(long_seq, longest, len(read.sequence)) seq = [] de = [] count += 1 # break; elif i[0:2] == "DE": de.append(i[:len(i)-1]) elif i[0:2] == "SQ": seq_id = i[:len(i)-1] elif i[0:2] == " ": seq.append(i[5:len(i)-1].replace(" ","")) print "There are %d records in the file" %(count) print "There are %d records with sequence length of 260" %(r_260) print "The first 20 residues of 143X_MAIZE are: %s" %(x_maize[:20]) print 'There are %d records contain subsequence "ARRA"' %(count_arra) print 'There are %d records contain the substring "KCIP-1" in the description' %(count_kcip) print "The shortest sequence ID is: %s" %(short_seq[0]) print "The longest sequence ID is: %s" %(long_seq[0])
[ "linh@linhs-work-MacBook.local" ]
linh@linhs-work-MacBook.local
9ce4f8302380dd5ab6aab99c3fcd416e8bb35178
d497c263eb5afa241ba47f2ffc71f457088fdddd
/migrations/versions/2d760b1c2fa0_add_blacklist_table.py
ade62a0767dd55dcf161b1274e9d5f4a04615cde
[]
no_license
Leovincent98/SanLigtas-API
5049767e7fdf4a7840069679192bea4fc80df758
5229922a57a9bebe7fccb566f123c7b81d0b7891
refs/heads/master
2020-05-07T12:55:01.451388
2019-04-02T01:35:59
2019-04-02T01:35:59
null
0
0
null
null
null
null
UTF-8
Python
false
false
891
py
"""add blacklist table Revision ID: 2d760b1c2fa0 Revises: 9d10c4f3992c Create Date: 2019-03-12 23:19:23.325320 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '2d760b1c2fa0' down_revision = '9d10c4f3992c' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('blacklist_tokens', sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), sa.Column('token', sa.String(length=500), nullable=False), sa.Column('blacklisted_on', sa.DateTime(), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('token') ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table('blacklist_tokens') # ### end Alembic commands ###
[ "skytrizero@gmail.com" ]
skytrizero@gmail.com
e76de7cca7dea3e33b3edc9baebfd8216e7abe6f
942d0888f47aaf0302dc694e5b90f38a44458b5f
/tsDjango/timesheetdj/urls.py
3d6ed9efe1a03e05f37ace9f7032a90965101436
[]
no_license
santiagotiprigan/tsdjango
61f211c15a94d0b89e0ddc0b3fe291c7a6ec53d0
9c6de1b4bf61a8590d667a3d4aca32fb527ce314
refs/heads/master
2022-11-16T12:22:13.571707
2020-07-16T19:28:59
2020-07-16T19:28:59
280,244,618
0
0
null
null
null
null
UTF-8
Python
false
false
128
py
from django.urls import path
from . import views

urlpatterns = [
    path('', views.activitate_list, name='activitate_list'),
]
[ "santiagotiprigan@yahoo.co.uk" ]
santiagotiprigan@yahoo.co.uk
a1f10db505b4ce00f7dcb3c581e0dff01382c21d
f9499f90b82f91a23d936647f61d7306714e48a3
/blog/views.py
e080f8f878355a95d41d8138e4129fbe1203941b
[]
no_license
Rootjon/djangopractice
eb57dcb1e6f2f156d6ac5f38e2bcd59aa646eca4
1d69bed20e9ce2e076534d042504a84f3a3afbf3
refs/heads/main
2023-05-30T15:45:30.878552
2021-06-08T19:12:00
2021-06-08T19:12:00
364,292,442
2
0
null
null
null
null
UTF-8
Python
false
false
2,157
py
from django.core import paginator
from django.db.models import query
from django.db.models.query import QuerySet
from django.shortcuts import render
from .forms import CommentForm
from django.http import HttpResponseRedirect
from .models import Post
from django.db.models import Q
from django.core.paginator import Paginator
from django.contrib import messages
from django.shortcuts import redirect

# Create your views here.


def blog_list(request):
    posts = Post.objects.all()
    paginator = Paginator(posts, 1)
    page_number = request.GET.get('page')
    page_obj = paginator.get_page(page_number)
    context = {
        'posts': posts,
        'page_obj': page_obj
    }
    return render(request, 'blog/index.html', context)


def blog_details(request, slug):
    posts = Post.objects.get(slug=slug)
    similar_post = posts.tags.similar_objects()[:1]
    comments = posts.comment.all()
    if request.method == 'POST':
        comment_from = CommentForm(request.POST)
        if comment_from.is_valid():
            new_comment = comment_from.save(commit=False)
            new_comment.posts = posts
            new_comment.save()
            # redirect to a new URL:
            messages.success(request, 'Your comment submitted.')
            return HttpResponseRedirect(request.path_info)
    # if a GET (or any other method) we'll create a blank form
    else:
        comment_from = CommentForm()
    context = {
        'posts': posts,
        'similar_post': similar_post,
        'comments': comments,
    }
    return render(request, 'blog/details.html', context)


def search_blog(request):
    querySet = Post.objects.all()
    query = request.GET.get('q')
    paginator = Paginator(querySet, 1)
    page_number = request.GET.get('page')
    page_obj = paginator.get_page(page_number)
    if query:
        querySet = querySet.filter(
            Q(title__icontains=query) |
            Q(short_description__icontains=query) |
            Q(description__icontains=query)
        ).distinct()
    context = {
        'queryset': querySet,
        'query': query
    }
    return render(request, 'blog/search.html', context)
[ "ahmadjonaid9@gmail.com" ]
ahmadjonaid9@gmail.com
d3cdf3ab75fa24eb73dcdb0ef22a68a5603fb95e
defbdb519125a97138706a70fee3f86dd00a6fc7
/apps/historical_prices_comparison.py
03bdeed85230783b7a99c94ded6d83649acf000d
[]
no_license
junwei2110/Stock-Predictor-Website
f8451b86d89ad8bf33988a3a2b467278ebcfe489
a5be9e64b4b93c94fe359e201b187317efb4df47
refs/heads/main
2023-07-06T21:39:18.787245
2021-08-13T08:58:00
2021-08-13T08:58:00
381,242,458
0
0
null
null
null
null
UTF-8
Python
false
false
5,419
py
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import dash_bootstrap_components as dbc
import json
import pandas as pd

from app import app

################################ Begin the app layout ################################

styles = {
    'pre': {
        'border': 'thin lightgrey solid',
        'overflowX': 'scroll'
    }
}

comparison_prices_layout = dbc.Container([

    # This is the graph for the prices
    dbc.Row([
        dbc.Col(
            dcc.Graph(id="prices-comparison-fig",
                      figure={}
                      ), width=12),
    ]),

    dbc.Row([
        dbc.Col(
            html.Pre(id='selected-data', style=styles['pre']),
        )
    ]),

], fluid=True)

################################ End of app layout ################################

#################### Connect the Plotly graphs with Dash Components ####################

# Callback to get all the necessary data for the graphs based on the specified date range
@app.callback(
    Output(component_id='prices-comparison-fig', component_property='figure'),
    Input(component_id="technical-analysis-tabs", component_property="value"),
    Input(component_id="historical-prices-comparison-data", component_property="data"),
    State(component_id="multiple-tickers-input-bar", component_property="value")
)
def figure_callback(tab, data_comparison, stock_list):

    # When you enter a ticker, the callback is triggered. This time, if the data store comes up empty, stop the callback again
    if len(stock_list) == 0:
        raise PreventUpdate

    # When you enter a ticker, the callback is triggered. This time, if the data store comes up empty, stop the callback again
    if len(pd.read_json(data_comparison)) == 0:
        raise PreventUpdate

    # When you are not in the right tab, do not update
    if tab != "tab-3":
        raise PreventUpdate

    # Create a copy since you don't want to mess up the original
    prices_dff = pd.read_json(data_comparison)
    prices_dff = prices_dff.sort_values(by=["date"])

    fig_prices = px.scatter(prices_dff, x="date", y="close", color="symbol", height=600)
    fig_prices.update_layout(
        title_text="Stock Daily Closing Price",
        yaxis=dict(
            title='USD',
            titlefont_size=16,
            tickfont_size=14,
        ),
        xaxis=dict(
            rangeselector=dict(
                buttons=list([
                    dict(count=1, label="1m", step="month", stepmode="backward"),
                    dict(count=6, label="6m", step="month", stepmode="backward"),
                    dict(count=1, label="YTD", step="year", stepmode="todate"),
                    dict(count=1, label="1y", step="year", stepmode="backward"),
                    dict(step="all")
                ])
            ),
            rangeslider=dict(
                visible=True
            ),
            type="date"),
        clickmode='event+select'
    )

    return fig_prices


@app.callback(
    Output('selected-data', 'children'),
    Input('prices-comparison-fig', 'selectedData'),
    State(component_id="historical-prices-comparison-data", component_property="data")
)
def display_click_data(selectedData, data_comparison):

    if selectedData is None:
        raise PreventUpdate

    fig_info = selectedData

    # Create a copy since you don't want to mess up the original
    prices_dff = pd.read_json(data_comparison)

    if len(fig_info["points"]) < 2:
        return "Select one more data point to begin analysis"
    else:
        fig_df = pd.DataFrame(fig_info["points"], index=list(range(len(fig_info["points"]))))
        prices_filtered = prices_dff[prices_dff['date'].isin(fig_df["x"])]

        percentagechangedict = {}
        for stock in prices_filtered["symbol"].unique():
            prices_stock_filtered = prices_filtered.loc[prices_filtered["symbol"] == stock]
            prices_stock_filtered = prices_stock_filtered.sort_values(by=["date"]).reset_index(drop=True)
            percentagechangearray = []
            for i in range(len(prices_stock_filtered["date"]) - 1):
                percentagechange = ((prices_stock_filtered["close"][i+1] - prices_stock_filtered["close"][i]) / prices_stock_filtered["close"][i]) * 100
                percentagechangearray.append(percentagechange)
            percentagechangedict[stock] = percentagechangearray

        return json.dumps(percentagechangedict)

################################ Create the server ################################
[ "noreply@github.com" ]
junwei2110.noreply@github.com
7f71c3af60ff399691ae55eec2ba7d149fab3c92
b011f0ee2ba2867009002ef75baaf38fd2e76988
/app.py
3cf7b2bb3423928ac967cf8c2a3c494876dc7d6d
[]
no_license
praj98/stores
39511f82919bfdc8d8dccf0adc0341fc0ad33258
88db7c7c8d1ccf6c34c23b45750bf4d7c52179ea
refs/heads/master
2022-11-19T21:08:38.292652
2020-07-26T08:27:26
2020-07-26T08:27:26
282,437,351
0
0
null
null
null
null
UTF-8
Python
false
false
811
py
import os

from flask import Flask
from flask_restful import Api
from flask_jwt import JWT

from security import authenticate, identity
from resources.user import UserRegister
from resources.item import Item, ItemList
from resources.store import Store, StoreList

app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'sqlite:///data.db')
app.secret_key = 'piyush'
api = Api(app)

jwt = JWT(app, authenticate, identity)

api.add_resource(Item, '/item/<string:name>')
api.add_resource(Store, '/store/<string:name>')
api.add_resource(ItemList, '/items')
api.add_resource(StoreList, '/stores')
api.add_resource(UserRegister, '/register')

if __name__ == '__main__':
    from db import db
    db.init_app(app)
    app.run(port=5000, debug=True)
[ "praj98730@gmail.com" ]
praj98730@gmail.com
86fb14e94de8d3da034ce2c0d09d245930c16388
e6a404fcf62c9b84a46acb06617dfeef2b98a454
/apps/organization/adminx.py
e094c703621b3671e399372afb02680b521a30a6
[]
no_license
flyerooo/djangomuke4
c3335a9a59d5b59633b18762e53b28b164d35f8f
ee1f613dd449d71dc61ca21a293691b0a91146b3
refs/heads/master
2021-01-25T08:12:38.868937
2019-03-09T06:11:50
2019-03-09T06:11:50
93,725,836
0
0
null
null
null
null
UTF-8
Python
false
false
1,020
py
# -*- coding:utf-8 -*-
__author__ = 'Jeff'
__date__ = '2017/6/9 7:34'

import xadmin

from .models import CityDict, CourseOrg, Teacher


class CityDictAdmin(object):
    list_display = ['name', 'desc', 'add_time']
    search_fields = ['name']
    list_filter = ['name', 'add_time']


class CourseOrgAdmin(object):
    list_display = ['name', 'desc', 'click_nums', 'fav_nums', 'image', 'address', 'city']
    search_fields = ['name', 'click_nums', 'fav_nums', 'address']
    list_filter = ['name', 'city__name']


class TeacherAdmin(object):
    list_display = ['org', 'name', 'work_years', 'work_company', 'work_position', 'points', 'fav_nums', 'click_nums', 'add_time']
    search_fields = ['org', 'name', 'work_years', 'work_company', 'work_position', 'points', 'fav_nums', 'click_nums']
    list_filter = ['org__name', 'name', 'work_years', 'work_company', 'work_position', 'points', 'fav_nums', 'click_nums', 'add_time']


xadmin.site.register(CityDict, CityDictAdmin)
xadmin.site.register(CourseOrg, CourseOrgAdmin)
xadmin.site.register(Teacher, TeacherAdmin)
[ "wxl.pure@gmail.com" ]
wxl.pure@gmail.com
45ae26fef5d6851a9535a15605db7aeeb2b147d4
868d1bd002a66bce3f86054b00a69c49f285126f
/books/01.DeepLearningScratch/chapter02/03.ArrayNAND/NAND.py
8acd7a67999200afe6a3474142a9baac393a00ea
[]
no_license
doukheeWon-gmail/DeepLearningStudy
cf81ac5867373c8028519133a1cca80024f8f0ff
d346d0572c45e2f2229bd14e5aadeb077074ffa9
refs/heads/master
2023-03-16T19:05:49.594092
2021-03-08T09:03:46
2021-03-08T09:03:46
null
0
0
null
null
null
null
UTF-8
Python
false
false
210
py
#coding: utf-8
import numpy as np


def NAND(x1, x2):
    x = np.array([x1, x2])
    w = np.array([-0.5, -0.5])
    b = 0.7
    tmp = np.sum(x*w) + b
    if tmp <= 0:
        return 0
    else:
        return 1
[ "fain9301@yahoo.com" ]
fain9301@yahoo.com
6b651a0f10ff1c3e3a42e19f9bdb66378032cac2
d9286ee4b057d82fbc62680bd617e1fbdb0a1bee
/opp.py
2afc6bfec35796e94dac2cc2748398666cb99970
[]
no_license
N-biswas000/python-projects
8ea9d531cce88e7c564513825180d384df371a8b
6549d0435634fbad363bdab2b5601a36f2350c6f
refs/heads/master
2022-11-22T08:16:56.992416
2020-07-26T11:35:24
2020-07-26T11:35:24
282,635,962
0
0
null
null
null
null
UTF-8
Python
false
false
395
py
# ------class Demo--------
class Person:  # <--------define the class
    def __init__(self, first_name, last_name, age):  # <--------define constructor / INIT method
        self.first_name = first_name                 # <-----------define attributes / instance variables
        self.last_name = last_name
        self.age = age


# ------Create Object-----
p1 = Person('Niladri', 'Biswas', 22)
p2 = Person('Akash', 'Biswas', 21)
[ "57007026+N-biswas000@users.noreply.github.com" ]
57007026+N-biswas000@users.noreply.github.com
2ac22ac91a55e7ed607fe0b001a82138c6c48259
728e57a80995d7be98d46295b780d0b433c9e62a
/src/data_manager/chromeos/chromeos_data_manager_test.gyp
f3f68fa55d884dc5648b0450effc6b201f86ff40
[ "Apache-2.0", "MIT", "BSD-3-Clause", "GPL-1.0-or-later" ]
permissive
SNQ-2001/Mozc-for-iOS
7936bfd9ff024faacfd2d96af3ec15a2000378a1
45b0856ed8a22d5fa6b4471548389cbde4abcf10
refs/heads/master
2023-03-17T22:19:15.843107
2014-10-04T05:48:29
2014-10-04T05:48:42
574,371,060
0
0
Apache-2.0
2022-12-05T06:48:07
2022-12-05T06:48:06
null
UTF-8
Python
false
false
3,276
gyp
# Copyright 2010-2014, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

{
  'variables': {
    'relative_dir': 'data_manager/chromeos',
    'relative_mozc_dir': '',
    'gen_out_dir': '<(SHARED_INTERMEDIATE_DIR)/<(relative_dir)',
    'gen_out_mozc_dir': '<(SHARED_INTERMEDIATE_DIR)/<(relative_mozc_dir)',
  },
  'targets': [
    {
      'target_name': 'chromeos_data_manager_test',
      'type': 'executable',
      'sources': [
        'chromeos_data_manager_test.cc',
      ],
      'dependencies': [
        '../../testing/testing.gyp:gtest_main',
        '../data_manager_test.gyp:data_manager_test_base',
        'chromeos_data_manager.gyp:chromeos_data_manager',
        'chromeos_data_manager.gyp:gen_chromeos_segmenter_inl_header#host',
      ],
      'variables': {
        'test_size': 'small',
      },
      'copies': [
        {
          'destination': '<(mozc_data_dir)/data/dictionary_chromeos/',
          'files': [
            '<(gen_out_dir)/connection_single_column.txt',
            '../../data/dictionary_chromeos/dictionary00.txt',
            '../../data/dictionary_chromeos/dictionary01.txt',
            '../../data/dictionary_chromeos/dictionary02.txt',
            '../../data/dictionary_chromeos/dictionary03.txt',
            '../../data/dictionary_chromeos/dictionary04.txt',
            '../../data/dictionary_chromeos/dictionary05.txt',
            '../../data/dictionary_chromeos/dictionary06.txt',
            '../../data/dictionary_chromeos/dictionary07.txt',
            '../../data/dictionary_chromeos/dictionary08.txt',
            '../../data/dictionary_chromeos/dictionary09.txt',
            '../../data/dictionary_chromeos/suggestion_filter.txt'
          ],
        },
      ],
    },
  ],
}
[ "kishikawakatsumi@mac.com" ]
kishikawakatsumi@mac.com
1c4f0a726de47474de20f4802d34d0cf92310207
7dd9a7c5316dc4ae1b9524f214928b9c96c03dc3
/BlocksToCpp/archive/blockread.py
75fbb29779685f76ee6577151e6402374373dda9
[]
no_license
NVSL/TAZI
035ae70b7fc9d424a47ae79434ff888880c87a6c
ac7887b1491a1c8af7373a42cd7fb81284acc396
refs/heads/master
2021-01-18T09:39:20.984516
2017-04-13T19:14:44
2017-04-13T19:14:44
44,073,629
3
0
null
null
null
null
UTF-8
Python
false
false
515
py
import ast

# Read the source file so that both the AST and the raw lines come from the same code.
with open('block.py') as source_file:
    code = source_file.read()

tree = ast.parse(code)
lines = [None] + code.splitlines()  # None at [0] so we can index lines from 1

test_namespace = {}

for node in tree.body:
    wrapper = ast.Module(body=[node])
    try:
        co = compile(wrapper, "<ast>", 'exec')
        exec(co, test_namespace)
    except AssertionError as e:
        print("Assertion failed on line", node.lineno, ":")
        print(lines[node.lineno])
        # If the error has a message, show it.
        if e.args:
            print(e)
        print()
[ "pnquach@ucsd.edu" ]
pnquach@ucsd.edu
d65262375ebe4019c8b19ba47acfafc247be6636
2c018d24dcc4e25c139feb70975e8715c121a845
/napari/utils/__init__.py
234d7ba3f8dc337865b3bb8a1a3a45963ce7448a
[ "BSD-3-Clause" ]
permissive
justinelarsen/napari
a7bb3eb03ff958f4cb0c96209af9c9bbd12dfc06
f9d3c9dadbc79ba47d9e500e2f848dac19f4051e
refs/heads/master
2020-09-09T20:28:55.504142
2020-04-16T15:36:45
2020-04-16T15:36:45
221,559,928
0
0
BSD-3-Clause
2019-11-13T22:02:07
2019-11-13T22:02:06
null
UTF-8
Python
false
false
42
py
from .info import sys_info, citation_text
[ "sofroniewn@gmail.com" ]
sofroniewn@gmail.com
3e0e00016d668e162bb554940ef36041d2e0aa1f
85beadc8b8c79715cb06f0f3ac4ee07091cd6f96
/chap4/4-23.py
924a196208c8c8b2169cdd88eb684b63dbd666c4
[]
no_license
CHENTHIRTEEN/PTA-PYTHON
7547dbf611471442a6cf8b8258abf73317910b21
f3d665cb894741aacc54fa1623e386016bb47a6f
refs/heads/master
2023-01-28T17:00:59.615166
2020-12-14T02:24:07
2020-12-14T02:24:07
321,207,127
4
0
null
null
null
null
UTF-8
Python
false
false
441
py
m, n = map(int, input().split())
nums = []
flag = True
for i in range(m):
    num = list(map(int, input().split()))
    nums.append(num)
for i in range(1, m - 1):
    for j in range(1, n - 1):
        if nums[i][j] > nums[i - 1][j] and nums[i][j] > nums[i + 1][j] and nums[i][j] > nums[i][j - 1] and nums[i][j] > nums[i][j + 1]:
            print(f'{nums[i][j]} {i + 1} {j + 1}')
            flag = False
if flag:
    print(f'None {m} {n}')
[ "1030339437@qq.com" ]
1030339437@qq.com
af3b9c935f5c7c8d15341e50ce6ea6600933211e
06a7dc7cc93d019e4a9cbcf672b23a0bbacf8e8b
/2016_AUSZ/2017/sept_2017/Freesurfer/regression_patients.py
bd91b200cca427d3f4c0e40ad72e1b67ea479128
[]
no_license
neurospin/scripts
6c06cd218a5f32de9c3c2b7d1d8bda3f3d107458
f14a2c9cf2cd7f5fbea767b017c3faf36d170bdb
refs/heads/master
2021-07-11T22:55:46.567791
2021-07-02T13:08:02
2021-07-02T13:08:02
10,549,286
2
2
null
null
null
null
UTF-8
Python
false
false
2,030
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 20 16:44:25 2017

@author: ad247405
"""

import os
import json
import numpy as np
from sklearn.cross_validation import StratifiedKFold
import nibabel as nib
from sklearn import linear_model
from sklearn.metrics import precision_recall_fscore_support
from scipy.stats import binom_test
from collections import OrderedDict
from sklearn import preprocessing
from sklearn.metrics import roc_auc_score
from sklearn import datasets
from sklearn import linear_model
import matplotlib.pyplot as plt
import sklearn
from scipy import stats
import array_utils
import nilearn
from nilearn import plotting
from nilearn import image
import array_utils

WD = '/neurospin/brainomics/2016_AUSZ/september_2017/results/Freesurfer/linear_regression_patients_only'
INPUT_DATA_X = '/neurospin/brainomics/2016_AUSZ/september_2017/results/Freesurfer/data/X_patients_only.npy'
INPUT_DATA_y = '/neurospin/brainomics/2016_AUSZ/september_2017/results/Freesurfer/data/y_patients_only.npy'
INPUT_MASK_PATH = '/neurospin/brainomics/2016_AUSZ/september_2017/results/Freesurfer/data/mask.npy'

##################################################################################
penalty_start = 3
##################################################################################

n_folds = 5

X = np.load(INPUT_DATA_X)
y = np.load(INPUT_DATA_y)

lr = linear_model.LinearRegression()

# cross_val_predict returns an array of the same size as `y` where each entry
# is a prediction obtained by cross validation:
pred = sklearn.cross_validation.cross_val_predict(lr, X, y, cv=n_folds)

slope, intercept, r_value, p_value, std_err = stats.linregress(y, pred)

plt.plot(y, pred, 'o', label='original data')
plt.plot(y, intercept + slope*y, 'r', label='fitted line')
plt.xlabel("True")
plt.ylabel("Predicted")
plt.legend()
plt.show()

# Obtain coef map
lr = linear_model.LinearRegression()
lr.fit(X, y)
beta = lr.coef_
beta = beta[penalty_start:]
np.save(os.path.join(WD, "weight_map.npy"), beta)
[ "ad247405@is222241.intra.cea.fr" ]
ad247405@is222241.intra.cea.fr
d35f1befcd4418d036bcbd94991b06ee61b0f6d4
43e563e4514f362b628e3a38af734e36d65cfdbf
/conference_rooms_reservations/views.py
28be76383e2201811df0ab592f95fa30b8179f7d
[ "MIT" ]
permissive
MarcinSzyc/conf_contact_combo
434074f0bce9060eb4f43952f6211f8286a41b6d
b39f7b4798b1b04bd14bb6876dc107b31232bfe3
refs/heads/master
2022-05-15T13:25:43.317362
2019-05-25T08:56:23
2019-05-25T08:56:23
169,214,960
0
0
null
2022-04-22T21:08:52
2019-02-05T09:16:10
CSS
UTF-8
Python
false
false
6,730
py
from django.shortcuts import render, redirect
from .forms import NewRoomForm, NewReservationForm
from django.contrib import messages
from django.views.generic import View
from .models import Room, Reservation
from datetime import datetime
from warsztat.mixins import MessageReturnMixin
from django.core.paginator import Paginator


# Conference reservation main page view
class Layout(View):
    def get(self, request):
        return redirect('conference_rooms_reservations:all_rooms')


# Class view to show Add Room form and to accept data
class AddRoom(MessageReturnMixin, View):
    login_url = '/conf_rooms_reservations/address/'
    form_class = NewRoomForm
    template = 'conference_rooms_reservations/add_room_view.html'

    def post(self, request):
        full_form = self.form_class(request.POST)
        if full_form.is_valid():
            full_form.save()
            messages.success(request, 'Room created successfully')
        else:
            messages.error(request, 'Room already exist!')
        return redirect('conference_rooms_reservations:all_rooms')

    def get(self, request):
        new_room_form = self.form_class
        return render(request, self.template, locals())


# Class view to show All Rooms
class AllRooms(View):
    template = 'conference_rooms_reservations/all_rooms_view.html'

    def get(self, request):
        rooms = Room.objects.select_related().order_by('id')
        paginator = Paginator(rooms, 20)
        page = request.GET.get('page')
        all_rooms = paginator.get_page(page)
        date_now = datetime.now().date()
        reserved_today = []
        for item in rooms:
            for reservation in item.reservation_set.all():
                if reservation.date == date_now:
                    reserved_today.append(item.name)
        return render(request, self.template, locals())


# Class view to process Delete Room request
class DeleteRoom(MessageReturnMixin, View):
    login_url = '/conf_rooms_reservations/address/'

    def get(self, request, **kwargs):
        instance = Room.objects.get(pk=self.kwargs['id'])
        instance.delete()
        messages.error(request, 'Room deleted successfully')
        return redirect('conference_rooms_reservations:all_rooms')


# Class view to show and process Modify Room request
class ModifyRoom(MessageReturnMixin, View):
    login_url = '/conf_rooms_reservations/address/'
    template = 'conference_rooms_reservations/modify_room.html'
    form = NewRoomForm

    def get(self, request, id):
        instance = Room.objects.get(pk=id)
        filled_form = self.form(instance=instance)
        return render(request, self.template, locals())

    def post(self, request, id):
        instance = Room.objects.get(pk=id)
        full_form = self.form(request.POST, instance=instance)
        if full_form.is_valid():
            full_form.save()
            messages.success(request, 'Room modified successfully')
        return redirect('conference_rooms_reservations:all_rooms')


# Class view to show detailed info about Room
class InfoView(View):
    template = 'conference_rooms_reservations/info_view.html'
    form = NewRoomForm

    def get(self, request, id):
        instance = Room.objects.get(pk=id)
        reservation = Reservation.objects.all().filter(room_id=id)
        return render(request, self.template, locals())


# Class view to show Room Reservation form
class ReservationView(View):
    form_class = NewReservationForm
    template = 'conference_rooms_reservations/reservations.html'

    def get(self, request, id):
        initial = Room.objects.get(pk=id)
        new_reservation_form = self.form_class(initial={'room': initial, 'date': datetime.today().date()})
        return render(request, self.template, locals())


# Class view to process new Room Reservation
class AddReservation(View):
    form_class = NewReservationForm

    def post(self, request):
        full_form = self.form_class(request.POST)
        if full_form.is_valid():
            room_id = full_form.cleaned_data['room'].id
            all_reservations = [item.date for item in Reservation.objects.all().filter(room_id=room_id)]
            if full_form.cleaned_data["date"] in all_reservations:
                messages.error(request, 'Room already booked. Try different date or change room!')
                return redirect('conference_rooms_reservations:reserve_room_view', id=room_id)
            else:
                full_form.save()
                messages.success(request, 'Room reserved successfully')
                return redirect('conference_rooms_reservations:all_rooms')
        else:
            messages.error(request, f'Invalid data or date is in the past!')
            return redirect('conference_rooms_reservations:reserve_room_view', id=request.POST['room'])


# Class view to show and process Room Search
class RoomSearch(View):
    form_class_reservations = NewReservationForm
    form_class_room = NewRoomForm
    template = 'conference_rooms_reservations/room_search.html'
    global output

    def get(self, request):
        empty_reservations = self.form_class_reservations(initial={'date': datetime.today().date()})
        empty_room = self.form_class_room
        return render(request, self.template, locals())

    def post(self, request):
        control = True
        empty_reservations = self.form_class_reservations(initial={'date': datetime.today().date()})
        empty_room = self.form_class_room
        room_name = request.POST.get('name')
        room_capacity = request.POST.get('capacity', default=0)
        date_day = int(request.POST.get('date_day'))
        date_month = int(request.POST.get('date_month'))
        date_year = int(request.POST.get('date_year'))
        room_date = datetime(year=date_year, day=date_day, month=date_month).date()
        room_projector = request.POST.get('projector')
        output = Room.objects.select_related()
        if room_name.upper() in [item.name.upper() for item in output]:
            output = output.filter(name=room_name.capitalize())
        elif room_name == '':
            output = output
        else:
            messages.error(request, "Room with this name does not exist!!")
        if room_capacity is not None:
            output = output.filter(capacity__gte=room_capacity)
        if room_projector == 'on':
            output = output.filter(projector=True)
        else:
            output = output.filter(projector=False)
        for item in output:
            for reservation in item.reservation_set.all():
                if reservation.date == room_date:
                    output = output.exclude(name=item.name)
        return render(request, self.template, locals())
[ "szyc.marcin@gmail.com" ]
szyc.marcin@gmail.com
453b2bd72e519ea782b7f73c49b28f1c979a7b5e
182d1872479504c8d5e10740863fe15d45c89ce4
/common/__init__.py
f2eba4182ff60ae52f95a7a7008d860536bae2a5
[]
no_license
offtools/BLive
096a0b4c02042347e40d4f39039bbef0551e1e6c
275a469cb64224b5edfdf0d73f3adabb639db312
refs/heads/master
2020-04-06T06:39:46.068274
2015-02-24T19:43:05
2015-02-24T19:43:05
4,153,464
5
2
null
null
null
null
UTF-8
Python
false
false
1,437
py
# ##### BEGIN GPL LICENSE BLOCK #####
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software Foundation,
#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# Script copyright (C) 2012 Thomas Achtner (offtools)

# Settings holds all common data for the addon.
# They are added to bpy.data.windowmanager[...], because it is
# a unique structure in the BlendData.

# import modules
if "bpy" in locals():
    print("imp.reload")
    import imp
    imp.reload(props)
    imp.reload(ops)
    imp.reload(ui)
else:
    from . import props
    from . import ops
    from . import ui
    pass


def register():
    print("common.register")
    props.register()
    ops.register()
    ui.register()


def unregister():
    print("common.unregister")
    ui.unregister()
    ops.unregister()
    props.unregister()
[ "info@offtools.de" ]
info@offtools.de
6fca64a778402ddeb17a1f69f5242ee7ebae89da
b5a4d29942efcbec4a645d5f513873d4d67e6458
/test1.py
cec57044f1d37547a6c92779b3ef7ea59f153168
[ "MIT", "LicenseRef-scancode-proprietary-license" ]
permissive
starcraftvs/DeepHomography
238c52d7b7d78dee6a91759c67bed9d83d65556c
7a3a063402d35e52db34f3f18a687fda33817b15
refs/heads/master
2023-05-31T21:37:27.815461
2021-07-08T02:21:53
2021-07-08T02:21:53
383,973,899
0
0
MIT
2021-07-08T02:19:25
2021-07-08T02:02:11
null
UTF-8
Python
false
false
8,489
py
# coding: utf-8
import argparse
import torch
from torch.utils.data import DataLoader
import torch.nn as nn
import imageio
from torch_homography_model import build_model
from dataset import *
from utils import transformer as trans
import os
import numpy as np


def geometricDistance(correspondence, h):
    """
    Correspondence err
    :param correspondence: Coordinate
    :param h: Homography
    :return: L2 distance
    """
    p1 = np.transpose(np.matrix([correspondence[0][0], correspondence[0][1], 1]))
    estimatep2 = np.dot(h, p1)
    estimatep2 = (1/estimatep2.item(2))*estimatep2

    p2 = np.transpose(np.matrix([correspondence[1][0], correspondence[1][1], 1]))

    error = p2 - estimatep2
    return np.linalg.norm(error)


def create_gif(image_list, gif_name, duration=0.35):
    frames = []
    for image_name in image_list:
        frames.append(image_name)
    imageio.mimsave(gif_name, frames, 'GIF', duration=0.5)
    return


def test(args):
    RE = ['0000011', '0000016', '00000147', '00000155', '00000158', '00000107', '00000239', '0000030']
    LT = ['0000038', '0000044', '0000046', '0000047', '00000238', '00000177', '00000188', '00000181']
    LL = ['0000085', '00000100', '0000091', '0000092', '00000216', '00000226']
    SF = ['00000244', '00000251', '0000026', '0000034', '00000115']
    LF = ['00000104', '0000031', '0000035', '00000129', '00000141', '00000200']

    MSE_RE = []
    MSE_LT = []
    MSE_LL = []
    MSE_SF = []
    MSE_LF = []

    exp_name = os.path.abspath(os.path.join(os.path.dirname("__file__"), os.path.pardir))
    work_dir = os.path.join(exp_name, 'Data')
    pair_list = list(open(os.path.join(work_dir, 'Test1.txt')))
    # npy_path = os.path.join(work_dir, 'Coordinate/')

    result_name = "exp_result_Oneline-FastDLT"
    result_files = os.path.join(exp_name, result_name)
    if not os.path.exists(result_files):
        os.makedirs(result_files)

    result_txt = "result_ours_exp.txt"
    res_txt = os.path.join(result_files, result_txt)
    f = open(res_txt, "w")

    net = build_model(args.model_name, pretrained=args.pretrained)
    if args.finetune == True:
        model_path = os.path.join(exp_name, 'models/freeze-mask-first-fintune.pth')
        print(model_path)
        state_dict = torch.load(model_path, map_location='cpu')
        # create new OrderedDict that does not contain `module.`
        from collections import OrderedDict
        new_state_dict = OrderedDict()
        for k, v in state_dict.state_dict().items():
            namekey = k[7:]  # remove `module.`
            new_state_dict[namekey] = v
        # load params
        net = build_model(args.model_name)
        model_dict = net.state_dict()
        new_state_dict = {k: v for k, v in new_state_dict.items() if k in model_dict.keys()}
        model_dict.update(new_state_dict)
        net.load_state_dict(model_dict)

    net = torch.nn.DataParallel(net)
    if torch.cuda.is_available():
        net = net.cuda()

    M_tensor = torch.tensor([[args.img_w / 2.0, 0., args.img_w / 2.0],
                             [0., args.img_h / 2.0, args.img_h / 2.0],
                             [0., 0., 1.]])

    if torch.cuda.is_available():
        M_tensor = M_tensor.cuda()

    M_tile = M_tensor.unsqueeze(0).expand(1, M_tensor.shape[-2], M_tensor.shape[-1])

    # Inverse of M
    M_tensor_inv = torch.inverse(M_tensor)
    M_tile_inv = M_tensor_inv.unsqueeze(0).expand(1, M_tensor_inv.shape[-2], M_tensor_inv.shape[-1])

    test_data = TestDataset(data_path=exp_name, patch_w=args.patch_size_w, patch_h=args.patch_size_h, rho=16, WIDTH=args.img_w, HEIGHT=args.img_h)
    test_loader = DataLoader(dataset=test_data, batch_size=1, num_workers=0, shuffle=False, drop_last=True)

    print("start testing")
    net.eval()
    for i, batch_value in enumerate(test_loader):

        img_pair = pair_list[i]
        pari_id = img_pair.split(' ')
        npy_name = pari_id[0].split('/')[1] + '_' + pari_id[1].split('/')[1][:-1] + '.npy'
        # npy_id = npy_path + npy_name
        video_name = img_pair.split('/')[0]

        org_imges = batch_value[0].float()
        input_tesnors = batch_value[1].float()
        patch_indices = batch_value[2].float()
        h4p = batch_value[3].float()
        print_img_1 = batch_value[4]
        print_img_2 = batch_value[5]

        print_img_1_d = print_img_1.cpu().detach().numpy()[0, ...]
        print_img_2_d = print_img_2.cpu().detach().numpy()[0, ...]
        print_img_1_d = np.transpose(print_img_1_d, [1, 2, 0])
        print_img_2_d = np.transpose(print_img_2_d, [1, 2, 0])

        if torch.cuda.is_available():
            input_tesnors = input_tesnors.cuda()
            patch_indices = patch_indices.cuda()
            h4p = h4p.cuda()
            print_img_1 = print_img_1.cuda()

        batch_out = net(org_imges, input_tesnors, h4p, patch_indices)
        H_mat = batch_out['H_mat']
        output_size = (args.img_h, args.img_w)

        H_point = H_mat.squeeze(0)
        H_point = H_point.cpu().detach().numpy()
        H_point = np.linalg.inv(H_point)
        H_point = (1.0 / H_point.item(8)) * H_point
        # print(H_point)

        # point_dic = np.load(npy_id, allow_pickle=True)
        # data = point_dic.item()
        # err_img = 0.0
        # for j in range(6):
        #     points_LR = data['matche_pts'][j]
        #     points_RL = [points_LR[1], points_LR[0]]
        #     err_LR = geometricDistance(points_LR, H_point)  # because of the order of the Coordinate of img_A and img_B is inconsistent
        #     err_RL = geometricDistance(points_RL, H_point)  # the data annotator has no fixed left or right when labelling
        #     err = min(err_LR, err_RL)
        #     err_img += err
        # err_avg = err_img / 6
        # name = "0"*(8-len(str(i)))+str(i)
        # line = name + ":" + str(err_avg)+"\n"
        # f.write(line)
        # print("{}:{}".format(i, err_avg))

        # if video_name in RE:
        #     MSE_RE.append(err_avg)
        # elif video_name in LT:
        #     MSE_LT.append(err_avg)
        # elif video_name in LL:
        #     MSE_LL.append(err_avg)
        # elif video_name in SF:
        #     MSE_SF.append(err_avg)
        # elif video_name in LF:
        #     MSE_LF.append(err_avg)

        H_mat = torch.matmul(torch.matmul(M_tile_inv, H_mat), M_tile)

        pred_full, _ = trans(print_img_1, H_mat, output_size)  # pred_full = warped imgA
        pred_full = pred_full.cpu().detach().numpy()[0, ...]
        pred_full = pred_full.astype(np.uint8)

        pred_full = cv2.cvtColor(pred_full, cv2.COLOR_BGR2RGB)
        print_img_1_d = cv2.cvtColor(print_img_1_d, cv2.COLOR_BGR2RGB)
        print_img_2_d = cv2.cvtColor(print_img_2_d, cv2.COLOR_BGR2RGB)

        input_list = [print_img_1_d, print_img_2_d]
        output_list = [pred_full, print_img_2_d]

        name = pari_id[0].split('/')[0]
        print(os.path.join(result_files, name+"_input_["+result_name+"].gif"))
        create_gif(input_list, os.path.join(result_files, name+"_input_["+result_name+"].gif"))
        create_gif(output_list, os.path.join(result_files, name + "_output_[" + result_name + "].gif"))

    MSE_RE_avg = np.mean(MSE_RE)
    MSE_LT_avg = np.mean(MSE_LT)
    MSE_LL_avg = np.mean(MSE_LL)
    MSE_SF_avg = np.mean(MSE_SF)
    MSE_LF_avg = np.mean(MSE_LF)

    res = {'RE': MSE_RE_avg, 'LT': MSE_LT_avg, 'LL': MSE_LL_avg, 'SF': MSE_SF_avg, 'LF': MSE_LF_avg}
    print(res)
    f.write(str(res))

    return res


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--gpus', type=int, default=4, help='Number of splits')
    parser.add_argument('--cpus', type=int, default=10, help='Number of cpus')
    parser.add_argument('--img_w', type=int, default=640)
    parser.add_argument('--img_h', type=int, default=360)
    parser.add_argument('--patch_size_h', type=int, default=315)
    parser.add_argument('--patch_size_w', type=int, default=560)
    parser.add_argument('--batch_size', type=int, default=1)
    parser.add_argument('--lr', type=float, default=1e-9, help='learning rate')
    parser.add_argument('--model_name', type=str, default='resnet34')
    parser.add_argument('--pretrained', type=bool, default=False, help='Use pretrained weights?')
    parser.add_argument('--finetune', type=bool, default=True, help='Use pretrained weights?')

    print('<==================== Loading data ===================>\n')

    args = parser.parse_args()
    print(args)

    test(args)
[ "noreply@github.com" ]
starcraftvs.noreply@github.com
602962b9e0b1bcfc8ecc98d8c98ab4638a415180
003cc915904585701d3cefa4c603c2e7ab86155f
/libraryapp/views/libraries/form.py
0226ac5b5ea4dd9f08718cea5fa2c4ead5b0a61d
[]
no_license
dhobson21/Library-Project
97f243d5ad00554f115d50dde614c5e4bb8931cb
676939ff680d225b2dd667a03b50102ddcd4354c
refs/heads/master
2023-04-26T23:42:12.111779
2019-09-23T15:05:58
2019-09-23T15:05:58
209,570,690
0
0
null
2023-04-21T20:37:55
2019-09-19T14:09:28
Python
UTF-8
Python
false
false
314
py
import sqlite3
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from ..connection import Connection


@login_required
def library_form(request):
    if request.method == "GET":
        template = 'libraries/form.html'
        return render(request, template, {})
[ "dhobson21@gmail.com" ]
dhobson21@gmail.com
3254ceb99ef503583373c9a64c646a2bfc0e7fe1
c95696ab64eaad2515259fb45f1dd2bca31f33db
/BoW/BoW_train.py
4a963f58047e7cecfc3ccec11156a09d2dbede8c
[]
no_license
Condor-G/CVwork-FashionMnist
5af3f161d99d8074e7f78a14e0197f5bbbf243d0
f253f5dfa227a14055f0557a1baed72660774da9
refs/heads/master
2022-09-10T02:31:28.258425
2020-05-25T02:10:09
2020-05-25T02:10:09
266,652,928
1
0
null
null
null
null
UTF-8
Python
false
false
2,359
py
import warnings
warnings.filterwarnings("ignore")

import os
import cv2
import pickle
import numpy as np
import matplotlib.pyplot as plt
from imutils import paths
from sklearn.cluster import KMeans
from scipy.cluster.vq import vq
from sklearn.svm import LinearSVC
from sklearn.model_selection import train_test_split
from tensorflow.examples.tutorials.mnist import input_data
import time

print(time.asctime(time.localtime(time.time())))
start_time = time.time()

mnist = input_data.read_data_sets('../data')
x_train = mnist.train.images
y_train = mnist.train.labels

sifts_img = []  # holds the SIFT features of every image
limit = 10000   # maximum number of training samples
# limit = y_train.size
count = 0       # number of bag-of-words features
num = 0         # number of valid samples
label = []

for i in range(limit):
    img = x_train[i].reshape(28, 28)
    img = np.uint8(np.double(img) * 255)
    sift = cv2.xfeatures2d.SIFT_create()
    kp, des = sift.detectAndCompute(img, None)
    if des is None:
        continue
    sifts_img.append(des)
    label.append(y_train[i])
    count = count + des.shape[0]
    num = num + 1

label = np.array(label)
data = sifts_img[0]
for des in sifts_img[1:]:
    data = np.vstack((data, des))

print("train file:", num)
count = int(count / 40)
count = max(4, count)

# Cluster the SIFT features
k_means = KMeans(n_clusters=int(count), n_init=4)
k_means.fit(data)

# Build the bag-of-words representation of all samples
image_features = np.zeros([int(num), int(count)], 'float32')
for i in range(int(num)):
    ws, d = vq(sifts_img[i], k_means.cluster_centers_)  # find the visual word each SIFT feature belongs to
    for w in ws:
        image_features[i][w] += 1  # add 1 at the position of the corresponding visual word

x_tra, x_val, y_tra, y_val = train_test_split(image_features, label, test_size=0.2)

# Build a linear SVM object and train it
clf = LinearSVC(C=1, loss="hinge").fit(x_tra, y_tra)

# Prediction accuracy on the held-out split
print(clf.score(x_val, y_val))

end_time = time.time()
print("Execution Time: ", int(end_time - start_time), 's')

# save the training model as pickle
with open('bow_kmeans.pickle', 'wb') as fw:
    pickle.dump(k_means, fw)
with open('bow_clf.pickle', 'wb') as fw:
    pickle.dump(clf, fw)
with open('bow_count.pickle', 'wb') as fw:
    pickle.dump(count, fw)
print('Trainning successfully and save the model')
[ "935532831@qq.com" ]
935532831@qq.com
b4b948cb7e7c4cfe8969c2fd72c9ffa6a8477e37
65139c1612261f4b26f3aaf0df4e4b3485c1e0a3
/pep_dpr_boxpol.py
9a6b7fac0ad7b441145885e2e825722923bfa8b3
[]
no_license
cycle13/gpm
449be99a97bfa888068905e1474aae0905031a5d
79c28c4d16fa8bea6b23f05c52cba22f8ec4fdd9
refs/heads/master
2022-01-26T00:06:11.519143
2019-08-20T05:28:02
2019-08-20T05:28:02
null
0
0
null
null
null
null
UTF-8
Python
false
false
8,097
py
'''This program is intended to validate the BoXPol radar data against the GPM data
with respect to reflectivity. Several overpasses are analysed here.'''

#!/usr/bin/env python

import h5py
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import wradlib
import glob
import math
import pandas as pd
from scipy import stats

# ftp://ftp.meteo.uni-bonn.de/pub/pablosaa/gpmdata/

import matplotlib.cm as cm
my_cmap = cm.get_cmap('jet', 40)
my_cmap.set_under('lightgrey')
my_cmap.set_over('darkred')

from pcc import get_miub_cmap as my_cmap
from pcc import plot_radar
from pcc import boxpol_pos
from pcc import plot_borders

import wradlib as wrl
from osgeo import osr

Pos = boxpol_pos()
blon0, blat0 = Pos['lon_ppi'], Pos['lat_ppi']
bbx, bby = Pos['gkx_ppi'], Pos['gky_ppi']

# Path as string
# ---------------

# Height of DPR
TH = 18  # threshold to filter out zeros for precipitation

ipoli = [wradlib.ipol.Idw, wradlib.ipol.Linear, wradlib.ipol.Nearest, wradlib.ipol.OrdinaryKriging]

offset = 2

#ZP = '20141007023744' ; pfadnr=0# 0.47
#ZP = '20140826220500'; pfadnr=1 # 0.82
#ZP = '20141008094000'; pfadnr=1 # 0.82 #!!!!!!!!!!!!!!NICE
#ZP = '20141008094500'; pfadnr=1 # 0.679 #!!!!!!!!!!!!!!NICE
#ZP = '20150128171500'; pfadnr=0 #0.28
#ZP = '20150128172208'; pfadnr=0#0.321
#ZP = '20160209103500'; pfadnr=1 # 0.23
#ZP = '20151216024501'; pfadnr=0#0.589
#ZP = '20151216023500' ; pfadnr=0# 0.651
ZP = '20160209103000'; pfadnr=1  ###PFAD=1

year = ZP[0:4]
m = ZP[4:6]
d = ZP[6:8]
ht = ZP[8:10]
mt = ZP[10:12]
st = ZP[12:14]

pfad_radar = glob.glob('/automount/ags/velibor/gpmdata/dpr/2A.GPM.DPR.V6-20160118.' + year + m + d + '*.HDF5')
print pfad_radar
pfad_radar = pfad_radar[pfadnr]
#pfad_radar_Ku = pfad_radar[0]

deg_scan = ["/ppi_1p5deg/", "/ppi_2p4deg/", "/ppi_3p4deg/",
            "/n_ppi_010deg/", "/n_ppi_045deg/",
            "/n_ppi_082deg/", "/n_ppi_110deg/", "/n_ppi_140deg/",
            "/n_ppi_180deg/", "/n_ppi_280deg/", "/n_vertical_scan/"][0]

try:
    ppi_datapath = glob.glob('/automount/radar-archiv/scans/' + year + "/" + year + "-" + m + "/" + year + "-" + m + "-" + d + deg_scan + year + "-" + m + "-" + d + "--" + ht + ":" + mt + ":" + st + ",*.mvol")
    print ppi_datapath
    ppi_datapath = ppi_datapath[0]
except:
    ppi_datapath = glob.glob('/automount/radar/scans/' + year + "/" + year + "-" + m + "/" + year + "-" + m + "-" + d + deg_scan + year + "-" + m + "-" + d + "--" + ht + ":" + mt + ":" + st + ",*.mvol")
    print ppi_datapath
    ppi_datapath = ppi_datapath[0]

# Read BoXPol PPI data
# ---------------------------
ppi = h5py.File(ppi_datapath, 'r')
data, attrs = wradlib.io.read_gamic_hdf5(ppi_datapath)

ZH0 = data['SCAN0']['ZH']['data']
PHIDP = data['SCAN0']['PHIDP']['data']

r = attrs['SCAN0']['r']
az = attrs['SCAN0']['az']
lon_ppi = attrs['VOL']['Longitude']
lat_ppi = attrs['VOL']['Latitude']
alt_ppi = attrs['VOL']['Height']

rho = data['SCAN0']['RHOHV']['data']

R = ZH0
R[151:165] = np.nan

"""
print ("________ATTCORR______")
pia_harrison = wrl.atten.correctAttenuationHB(
    R,
    coefficients = dict(a=4.57e-5, b=0.731, l=1.0),
    mode="warn",
    thrs=59.)
pia_harrison[pia_harrison > 4.8] = 4.8
print ("________ATTCORR2______")
R = R + pia_harrison
"""

print ("________CLUTTER______")
rho_th = 0.85
R[rho <= rho_th] = np.nan  ################ WHY DOES THIS NOT WORK?
print ("________ofset______")
print ("________beambl.______")
#R = (R0 + R1)/2.

# Read DPR
# ------------
gpmku = h5py.File(pfad_radar, 'r')
gpmku_HS = gpmku['NS']['SLV']
dpr_lat = np.array(gpmku['NS']['Latitude'])   #(7934, 24)
dpr_lon = np.array(gpmku['NS']['Longitude'])  #(7934, 24)
dpr_pp = np.array(gpmku_HS['zFactorCorrectedNearSurface'])
dpr_pp[dpr_pp < 0] = np.nan

# Coordinate projection
# ------------------
proj_stereo = wrl.georef.create_osr("dwd-radolan")
proj_wgs = osr.SpatialReference()
proj_wgs.ImportFromEPSG(4326)

#from pcc import boxpol_pos
#bonn_pos = boxpol_pos()

dpr_lon, dpr_lat = wradlib.georef.reproject(dpr_lon, dpr_lat, projection_target=proj_stereo, projection_source=proj_wgs)
blon, blat = wradlib.georef.reproject(blon0, blat0, projection_target=proj_stereo, projection_source=proj_wgs)

print(np.nanmin(blon), np.nanmax(blon))
print(np.nanmin(blat), np.nanmax(blat))

# Crop DPR
# -----------------
lon0, lat0, radius = blon, blat, 100
rr = np.sqrt((dpr_lat - lat0)**2 + (dpr_lon - lon0)**2)
position = rr < radius

pp = dpr_pp.copy()
pp[np.where(rr > radius)] = np.nan

from wradlib.trafo import idecibel
from wradlib.trafo import decibel

R = idecibel(R)

radar_location = (lon_ppi, lat_ppi, alt_ppi)
elevation = 1.5
azimuths = az
ranges = r
polargrid = np.meshgrid(ranges, azimuths)
lon, lat, alt = wradlib.georef.polar2lonlatalt_n(polargrid[0], polargrid[1], elevation, radar_location)

lon, lat = wradlib.georef.reproject(lon, lat, projection_target=proj_stereo, projection_source=proj_wgs)

grid_xy = np.vstack((dpr_lon.ravel(), dpr_lat.ravel())).transpose()

xy = np.concatenate([lon.ravel()[:, None], lat.ravel()[:, None]], axis=1)

gridded = wradlib.comp.togrid(xy, grid_xy, ranges[-1], np.array([lon.mean(), lat.mean()]), R.ravel(), ipoli[0], nnearest=80, p=2)
gridded = np.ma.masked_invalid(gridded).reshape(dpr_lon.shape)

gridded[np.where(rr > radius)] = np.nan

R = decibel(R)
gridded = decibel(gridded)

fig = plt.figure(figsize=(14, 12))
fig.suptitle('BoXPol vs DPR ' + ZP + ' Rho_th: ' + str(rho_th))

###################
ax1 = fig.add_subplot(221, aspect='auto')
plt.pcolormesh(dpr_lon, dpr_lat, np.ma.masked_invalid(pp), vmin=0, vmax=40, cmap=my_cmap())
plt.colorbar()
plot_borders(ax1)
plot_radar(blon0, blat0, ax1, reproject=True, cband=False, col='black')
plt.plot(dpr_lon[:, 0], dpr_lat[:, 0], color='black', lw=1)
plt.plot(dpr_lon[:, -1], dpr_lat[:, -1], color='black', lw=1)
plt.plot(dpr_lon[:, dpr_lon.shape[1]/2], dpr_lat[:, dpr_lon.shape[1]/2], color='black', lw=1, ls='--')
plt.xlim(-350, -100)
plt.ylim(-4350, -4100)
plt.title('GPM - DPR')
plt.tick_params(
    axis='both',
    which='both',
    bottom='off',
    top='off',
    labelbottom='off',
    right='off',
    left='off',
    labelleft='off')
plt.grid()

ax2 = fig.add_subplot(222, aspect='auto')
plt.pcolormesh(dpr_lon, dpr_lat, np.ma.masked_invalid(gridded), vmin=0, vmax=40, cmap=my_cmap())
plt.colorbar()
plot_borders(ax2)
plot_radar(blon0, blat0, ax2, reproject=True, cband=False, col='black')
plt.plot(dpr_lon[:, 0], dpr_lat[:, 0], color='black', lw=1)
plt.plot(dpr_lon[:, -1], dpr_lat[:, -1], color='black', lw=1)
plt.plot(dpr_lon[:, dpr_lon.shape[1]/2], dpr_lat[:, dpr_lon.shape[1]/2], color='black', lw=1, ls='--')
plt.xlim(-350, -100)
plt.ylim(-4350, -4100)
plt.title('BoXPol - onDPR')
plt.tick_params(
    axis='both',
    which='both',
    bottom='off',
    top='off',
    labelbottom='off',
    right='off',
    left='off',
    labelleft='off')
plt.grid()

ax3 = fig.add_subplot(223, aspect='auto')
plt.pcolormesh(lon, lat, R, vmin=0, vmax=40, cmap=my_cmap())
plt.colorbar()
plot_borders(ax3)
plot_radar(blon0, blat0, ax3, reproject=True, cband=False, col='black')
plt.plot(dpr_lon[:, 0], dpr_lat[:, 0], color='black', lw=1)
plt.plot(dpr_lon[:, -1], dpr_lat[:, -1], color='black', lw=1)
plt.plot(dpr_lon[:, dpr_lon.shape[1]/2], dpr_lat[:, dpr_lon.shape[1]/2], color='black', lw=1, ls='--')
plt.xlim(-350, -100)
plt.ylim(-4350, -4100)
plt.title('BoXPol - DPR')
plt.tick_params(
    axis='both',
    which='both',
    bottom='off',
    top='off',
    labelbottom='off',
    right='off',
    left='off',
    labelleft='off')
plt.grid()

ax4 = fig.add_subplot(224, aspect='auto')

A, B = gridded.copy(), pp.copy()

tt = np.nanmax([np.nanmin(A), np.nanmin(B)])
print ('The Threshold is: ', str(tt))
A[A <= tt] = np.nan
B[B <= tt] = np.nan

maske = ~np.isnan(A) & ~np.isnan(B)
slope, intercept, r_value, p_value, std_err = stats.linregress(A[maske], B[maske])
line = slope * A + intercept

plt.scatter(A[maske], B[maske], color='black')
plt.hist2d(A[maske], B[maske], bins=10, cmap='PuBu')
plt.title('r:' + str(round(r_value, 3)) + ' + ' + str(round(std_err, 3)))
plt.colorbar()
plt.grid()

#plt.savefig('/automount/ags/velibor/plot/boxpol/boxpol_vs_DPR/boxdpr_'+ZP)
plt.show()

# Plot
# ----
[ "velibor.pejcic@gmx.de" ]
velibor.pejcic@gmx.de
79f39bbd63fb0447ed5f2aa649e85a0e310a66e9
567d2c777100179d8e823156c421c371df805588
/macros/leastsq_fit.py
8779620afa69f60d9f84878ea5758771126cea0f
[]
no_license
ManolisKar/Efield_multipoles
e18708dda73abc222411c802c08e2149f21bdd6d
37a5fc219234f7747716b6d2228b3df46b660b69
refs/heads/master
2022-05-26T19:48:07.344149
2022-05-19T16:43:48
2022-05-19T16:43:48
183,103,794
0
1
null
null
null
null
UTF-8
Python
false
false
849
py
import numpy as np


def residual(pars, coordinates, V, err_V):
    pol0 = pars[0]
    pol1 = pars[1]
    pol2 = pars[2]
    pol3 = pars[3]
    x = coordinates[:, 0]
    y = coordinates[:, 1]
    z = coordinates[:, 2]

    model = pol0 + pol1*x + pol2*y + pol3*z
    print 'model ::\n', model
    norm_residual = (V - model)/err_V
    print 'normalized residual ::\n', norm_residual
    return norm_residual
    # return (data-model) / eps_data


from scipy.optimize import leastsq

data = np.loadtxt('test.dat', comments='!')
print 'data:\n', data
#x=data[:,0]
#y=data[:,1]
#z=data[:,2]
coordinates = data[:, 0:3]
print 'coordinates:\n', coordinates
V = data[:, 3]
err_V = 0.5

pars = [0.1, 1.2, 1.3, 0.9]
print 'pars = ', pars
residual(pars, coordinates, V, err_V)

result = leastsq(residual, pars, args=(coordinates, V, err_V))
print 'Result :: ', result
[ "ekargian@fnal.gov" ]
ekargian@fnal.gov
478cb198b6020277b8687cfe9bc9bf469146f2e1
15af94958b27798ad1a8a7270594b3b80be98b3a
/Nextinfo/wsgi.py
205518f1d9b6f44a8339d0f4fe2049baad27e926
[ "MIT" ]
permissive
joe-diego/DjangoTutorialpythonClub
1dfeb12c3a7cdb2668bc82f78347cad9cb9aec21
cd90d14ec7b5776c35f079a30bae3a6a65c40be1
refs/heads/master
2021-01-10T09:54:09.325889
2016-01-18T17:48:34
2016-01-18T17:48:34
49,892,870
0
0
null
null
null
null
UTF-8
Python
false
false
393
py
""" WSGI config for Nextinfo project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Nextinfo.settings") application = get_wsgi_application()
[ "joediego@gmail.com" ]
joediego@gmail.com
898a48042b680099cbbdb0d349744b1439306132
0c642472361fee79d00c82aa53fc9a4b6911f4ee
/Lesson_4.py
c0b157e81bff18cfbb5ba61c64f6672e4f816825
[]
no_license
wh-debug/LearnNetwork
9fe8cd9fb3ffa9b68d0fd60d9953c4e5ce4a351a
9bb26e6b13d36255a83dc9ab3d54092148d0bc42
refs/heads/master
2023-08-01T21:50:48.649438
2021-09-24T12:17:24
2021-09-24T12:17:24
409,949,044
0
0
null
null
null
null
UTF-8
Python
false
false
565
py
magicans = ['alice', 'david', 'carolina']
for magican in magicans:
    print(magican.title())
print('\n')

for magican in magicans:
    print(magican.upper())
print('\n')

for magican in magicans:
    print(f"{magican.title()}, that was a great trick!")

# Create a list of numbers
numbers = list(range(1, 6))
print(numbers)

squares = [value ** 3 for value in range(1, 11)]
print(squares)

# Multiples that are divisible by 3
tempnumber = list(range(3, 30, 3))
print(tempnumber)

# Using slices
players = ['charles', 'martina', 'michael', 'florence', 'eli']
print(players[0:8])
[ "1813763848@qq.com" ]
1813763848@qq.com
1696148a136b8298f1574ee342018b92df3f8680
02c226ae22cb42901cbd5a0c796f6ef592fdc0a7
/characters.py
a89274bcb75118d3c5a42c672fae5b78e5c02880
[]
no_license
Drummerboy444/genetic-phrase-finder
6aa652ad53945d713b11234377d9f4c67af2672b
9fd277756dd0542ab624d16bca9b9e48146d99ce
refs/heads/master
2020-06-04T19:58:18.725974
2019-06-22T10:14:57
2019-06-22T10:14:57
192,170,944
0
0
null
null
null
null
UTF-8
Python
false
false
60
py
import string

CHARACTERS = f'!?,. {string.ascii_letters}'
[ "am13571@my.bristol.ac.uk" ]
am13571@my.bristol.ac.uk
707a0a337221757acf70a22d09f82aab628b468b
e1864d3c68e06edbbe3b7f395b8c0ceeed72ae62
/[338]比特位计数.py
d8a562e0a6db78fbc0fcb9ec2ef646ad6bdd5392
[]
no_license
TianhengZhao/LeetCode
239c3b886c0ebe172f57b40c78eacddb27075577
301f9725777b8462bc64800b01f580fb7d9c711f
refs/heads/master
2023-03-05T15:30:49.126149
2021-02-21T14:00:06
2021-02-21T14:00:06
277,789,481
0
0
null
null
null
null
UTF-8
Python
false
false
1,475
py
# Given a non-negative integer num, for every number i in the range 0 ≤ i ≤ num,
# count the number of 1 bits in its binary representation and return them as an array.
#
# Example 1:
#
#  Input: 2
#  Output: [0,1,1]
#
# Example 2:
#
#  Input: 5
#  Output: [0,1,1,2,1,2]
#
# Follow up:
#
#  It is very easy to give a solution with time complexity O(n*sizeof(integer)).
#  But can you do it in linear time O(n) with a single pass?
#  The space complexity of the algorithm should be O(n).
#  Can you further improve the solution? Do it without using any built-in function
#  (such as __builtin_popcount in C++) in C++ or any other language.
#
# Related Topics: Bit Manipulation, Dynamic Programming


# leetcode submit region begin(Prohibit modification and deletion)
from typing import List


class Solution:
    def countBits1(self, num: int) -> List[int]:
        """
        O(n*sizeof(integer))
        """
        res = [0] * (num + 1)
        for i in range(num + 1):
            tmp = i
            while tmp:
                res[i] += tmp & 1
                tmp >>= 1
        return res

    def countBitsAns(self, num: int) -> List[int]:
        """
        O(n) dynamic programming
        (1001011101) = 605
        (100101110 ) = 302
        Apart from bit 0, the remaining bits of i are the same as those of i >> 1.
        """
        dp = [0] * (num + 1)
        for i in range(1, num + 1):
            dp[i] = dp[i >> 1] + (i & 1)
        return dp
# leetcode submit region end(Prohibit modification and deletion)
[ "1146824110@qq.com" ]
1146824110@qq.com
e71ae60c1f2199d91a915ed2e518b5b058c3cfd4
10f094a1f456c04c3bb86144fa3c80631e8d4a72
/01_extract_blobs_from_ai2d.py
5f23321c41795c9160bc9b4c73b0f64e4d1faeda
[]
no_license
InsaneX4/distant-viewing-diagrams
6108cbe688f03034b5bffb8f25a8c58f2c2144e3
2ed56f85710df379749d5a7313c04645d70e207e
refs/heads/main
2023-08-13T07:35:32.929556
2021-10-07T09:46:27
2021-10-07T09:46:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,021
py
# -*- coding: utf-8 -*-

# Import libraries
import numpy as np
import cv2
import json

from pathlib import Path
from hdf5writer import DataWriter
from skimage import feature
from tqdm import tqdm

"""
Usage:

1. Download the AI2D corpus from https://ai2-public-datasets.s3.amazonaws.com/diagrams/ai2d-all.zip
2. Extract the AI2D corpus into the directory "ai2d"
3. Run the script using the command below:

   python 01_extract_blobs_from_ai2d.py

4. The blobs will be placed in the directory "png_blobs/"
"""

# Set up paths to AI2D annotation and images
ai2d_json_dir = Path("ai2d/annotations/")
ai2d_img_dir = Path("ai2d/images/")
ai2d_rst_dir = Path("ai2d/ai2d-rst")

# Calculate the number of JSON files
ai2d_json = list(ai2d_json_dir.glob("*.json"))

# Create target directory
Path("png_blobs").mkdir()

# Set up progress bar
with tqdm(total=len(ai2d_json)) as pbar:

    # Loop over the AI2D JSON files
    for (b, i) in enumerate(range(0, len(ai2d_json), 20)):

        # Fetch files from list
        json_files = ai2d_json[i: i + 20]

        # Loop over AI2D annotation
        for ann_file in json_files:

            # Get path to image; cast to string for OpenCV
            img_file = str(ai2d_img_dir / ann_file.stem)

            # Load and convert image
            img = cv2.imread(img_file)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

            # Open file containing JSON annotation
            with open(ann_file, 'r') as json_file:

                # Load JSON
                ann = json.load(json_file)

            # Get blobs
            blobs = ann['blobs']

            # Loop over blobs
            for blob in blobs.keys():

                # Get polygon and cast to NumPy array
                polygon = np.array([blobs[blob]['polygon']])

                # Create empty mask
                mask = np.zeros((img.shape[0], img.shape[1]), dtype=np.uint8)

                # Fill the masked area with white pixels (255)
                cv2.fillPoly(mask, polygon, (255))

                # Add masks to the images with different colour spaces
                masked_img = cv2.bitwise_and(img, img, mask=mask)

                # Add mask as alpha channel
                masked_img = cv2.merge([masked_img, mask])

                # Convert to BGRA
                masked_img = cv2.cvtColor(masked_img, cv2.COLOR_RGB2BGRA)

                # Get filename
                filename = f"{ann_file.stem}_{blob}.png"

                # Get bounding box
                bbox = cv2.boundingRect(polygon)

                # Crop the image
                cropped_bbox = masked_img[bbox[1]: bbox[1] + bbox[3], bbox[0]: bbox[0] + bbox[2]]

                # Write cropped image to disk
                cv2.imwrite(f"png_blobs/{filename}", cropped_bbox)

        # Update progress bar
        pbar.update(20)
[ "tuomo.hiippala@iki.fi" ]
tuomo.hiippala@iki.fi
2755646ee1fac70ca8011fcd5c88dc98f830ad79
0bd10e37ca6ca5a1c5664473cf9ce08753e7a2d0
/generate_datasets.py
940c6b2c9cdfe89e68275e828f00bc6faecd3652
[]
no_license
andrwkoval/ad-fontes-algo
764ad7dbabd5a72474637406a51ca116619fad64
a9d6497378339c042fdd3cb2a90ff7f776615b58
refs/heads/master
2020-04-10T11:05:26.298200
2018-12-10T21:42:28
2018-12-10T21:42:28
160,983,398
0
0
null
null
null
null
UTF-8
Python
false
false
704
py
from itertools import chain, combinations
from random import randint
import pickle


def all_subsets(ss):
    return tuple(chain(*map(lambda x: combinations(ss, x), range(1, len(ss)))))


def generate_datasets(min_agents, max_agents):
    filename_spec = "_dataset"
    for i in range(min_agents, max_agents + 1):
        for j in range(3):
            with open(str(i) + filename_spec + str(j)) as agents:
                all_coals = tuple(j for j in range(1, i + 1))
                subsets = all_subsets(all_coals)
                dataset = {c: randint(10, 40) * len(c) for c in all_coals}
                dataset[subsets] = randint(10, 40) * len(subsets)
                pickle.dump(dataset, agents)
[ "drone.kov@gmail.com" ]
drone.kov@gmail.com
9aa2319524dc87ebf4879ccb9acd0b6c71fe00aa
83bc96df34fc2311a33a68e8e79af802d84370b9
/vmraid/patches/v12_0/setup_comments_from_communications.py
8655459600d580c372b2c64f79f3c19a0d6c30de
[ "MIT" ]
permissive
sowrisurya/vmraid
001072130ac6be5a3ef5a84523d8949d891e6954
f833e00978019dad87af80b41279c0146c063ed5
refs/heads/main
2023-05-05T13:52:45.386039
2021-05-31T10:23:56
2021-05-31T10:23:56
372,466,378
0
0
null
null
null
null
UTF-8
Python
false
false
1,242
py
from __future__ import unicode_literals
import vmraid


def execute():
    vmraid.reload_doctype("Comment")

    if vmraid.db.count('Communication', filters = dict(communication_type = 'Comment')) > 20000:
        vmraid.db.auto_commit_on_many_writes = True

    for comment in vmraid.get_all('Communication', fields = ['*'], filters = dict(communication_type = 'Comment')):
        new_comment = vmraid.new_doc('Comment')
        new_comment.comment_type = comment.comment_type
        new_comment.comment_email = comment.sender
        new_comment.comment_by = comment.sender_full_name
        new_comment.subject = comment.subject
        new_comment.content = comment.content or comment.subject
        new_comment.reference_doctype = comment.reference_doctype
        new_comment.reference_name = comment.reference_name
        new_comment.link_doctype = comment.link_doctype
        new_comment.link_name = comment.link_name
        new_comment.creation = comment.creation
        new_comment.modified = comment.modified
        new_comment.owner = comment.owner
        new_comment.modified_by = comment.modified_by
        new_comment.db_insert()

    if vmraid.db.auto_commit_on_many_writes:
        vmraid.db.auto_commit_on_many_writes = False

    # clean up
    vmraid.db.sql("delete from `tabCommunication` where communication_type = 'Comment'")
[ "sowrisurya@outlook.com" ]
sowrisurya@outlook.com
453de1c849d08b8ac7faeae40fcd6024045ca060
fd5ce70d4a7255cc02c362293f7b867b3aa3654e
/MyAPI/forms.py
5b3a23c8b236afc67228d6f2d636573308dc35d0
[]
no_license
Ahmad-Noor/Loan_ML_Deploy_Model_
f8d3e4b15e7039d053520f531ca350000eec1d03
db123d195d6fe70568e4ddbb56428771e12b3a99
refs/heads/master
2022-04-08T23:37:32.781967
2020-02-10T01:02:20
2020-02-10T01:02:20
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,310
py
from django import forms


class ApprovalForm(forms.Form):
    Firstname=forms.CharField(max_length=150,widget=forms.TextInput(attrs={'placeholder':'Enter Firstname'}))
    Lastname=forms.CharField(max_length=150,widget=forms.TextInput(attrs={'placeholder':'Enter Lastname'}))
    Dependents=forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'Enter Number of Dependents'}))
    ApplicantIncome=forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'Enter Monthly Gross Income'}))
    CoapplicantIncome=forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'Enter Co-Applicant Gross income'}))
    LoanAmount=forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'Request Loan Amount'}))
    Loan_Amount_Term=forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'Request Loan Terms'}))
    Credit_History=forms.ChoiceField(choices=[('0',0),('1',1),('2',2),('3',3)])
    Gender=forms.ChoiceField(choices=[('Male','Male'),('Female','Female')])
    Married=forms.ChoiceField(choices=[('Yes','Yes'),('No','No')])
    Education=forms.ChoiceField(choices=[('Graduate','Graduated'),('Not_Graduate','Not_Graduated')])
    Self_Employed=forms.ChoiceField(choices=[('Yes','Yes'),('No','No')])
    Property_Area=forms.ChoiceField(choices=[('Rural','Rural'),('Semiurban','Semiurban'),('Urban','Urban')])
[ "shahadanshaari@gmail.com" ]
shahadanshaari@gmail.com
8f6fc5bed1caec22b62db2bbddbbd80e628db2db
4b2a975bbc9a5db32ca27549d4d079c06a124ca4
/DNNShowCase/test/createmodel.py
2b659c910618675d696ab93210da9cdbf3b4972e
[]
no_license
riga/CMSSW-DNNShowCase
52eb4bbdc21cba23d5832919024e5af294036221
3606a3fc86d4c2588ca4f9c59fa3459ff271179c
refs/heads/master
2021-01-20T18:20:13.365512
2016-12-13T00:10:22
2016-12-13T00:10:22
59,830,731
1
1
null
null
null
null
UTF-8
Python
false
false
953
py
# -*- coding: utf-8 -*-

"""
Example of how a tfdeploy model is created from a tensorflow computation tree.
"""

import os
import sys

# update the sys path to import tfdeploy
showCaseBase = os.path.normpath(os.path.join(os.path.abspath(__file__), "../.."))
sys.path.insert(0, os.path.join(showCaseBase, "python"))

import tfdeploy as td
import tensorflow as tf

# create the tensorflow tree
sess = tf.Session()
x = tf.placeholder("float", shape=[None, 3], name="input")
W = tf.Variable(tf.truncated_normal([3, 10], stddev=0.05))
b = tf.Variable(tf.zeros([10]))
y = tf.reduce_mean(tf.nn.relu(tf.matmul(x, W) + b), name="output")
sess.run(tf.initialize_all_variables())

# normally, this would be the right spot to create a cost function that uses labels
# and start the training but we skip this here for simplicity

# create the tfdeploy model
model = td.Model()
model.add(y, sess)
model.save(os.path.join(showCaseBase, "data", "showcasemodel.pkl"))
[ "marcelrieger@me.com" ]
marcelrieger@me.com
a9332848060d6a9fcacd4d37a0bd7462db9e7acf
d568ff0a99862b7202a2847c58487cd444102a87
/asset.py
4d2782f4c43905f423232cda4095a29dfdc7f6ba
[]
no_license
jhexperiment/grow
d60f88640104418ac1f4feb24c4eef32f52cad09
9bce90a8d32893ada0b39001075b30c7f0d354fd
refs/heads/master
2020-03-27T06:49:37.082879
2018-08-22T04:02:31
2018-08-22T04:02:31
null
0
0
null
null
null
null
UTF-8
Python
false
false
445
py
class Asset:
    def __init__(self, amount, symbol='TLOS'):
        self.symbol = symbol
        self.amount = amount

    def __str__(self):
        return ('%s %s') % ('%.4f'%(self.amount), self.symbol)

    def __add__(self, other):
        return Asset(self.amount + other.amount)

    def __sub__(self, other):
        return Asset(self.amount - other.amount)

    def __iadd__(self, other):
        return Asset(self.amount + other.amount)
[ "peter@hornofthemoon.com" ]
peter@hornofthemoon.com
e7495dce76e2f15c5e707a6f00ded28859b5cbd4
69952d78ff308d48d5f242f589cccb917aecc94a
/tasks.py
dfaa294a66ec5f90c3694c15e59422b45f40bc19
[]
no_license
juyalpriyank/celery_task
b919cb3f166ea58d7ff791dc8b6891c3406e4462
bf729c14a66bb2d32680cf44be3c1fd35ad34932
refs/heads/master
2020-04-10T16:06:58.875017
2019-11-01T10:01:07
2019-11-01T10:01:07
161,133,635
0
0
null
null
null
null
UTF-8
Python
false
false
4,076
py
import logging import json import asyncio import rethinkdb as r from celery import Celery import datetime from celery.result import AsyncResult import time import pytz with open('config.json', 'r') as f: config = json.load(f) celery_config = config['CELERY'] app = Celery(celery_config['TASK_NAME'], backend=celery_config['BACKEND'], broker= celery_config['BROKER']) r.set_loop_type('asyncio') logging.basicConfig(filename='celery_task.log', level=logging.INFO, format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p') db_config = config['DATABASE'] tz = pytz.timezone('Asia/Kolkata') @app.task def revoke_certi_task(c_id, revoke_date): """It is a celery task which takes two arguments c_id --> Certificate id of the certificate to be revoked revoke_date --> Date(epoch) on which the certificate should be revoked """ loop1 = asyncio.get_event_loop() task1 = loop1.create_task(connection()) conn = loop1.run_until_complete(task1) loop2 = asyncio.get_event_loop() task2 = loop2.create_task(revoke_flag(c_id, conn, revoke_date)) loop2.run_until_complete(task2) async def connection(): """The function establishes a rethinkdb connection to server Asynchronously.""" return await r.connect(db='main_db') # return await r.connect(host=db_config['ip'], port=db_config['port'], user=db_config['user'], password=db_config['password'], db=db_config['dbname']) async def revoke_flag(c_id, conn, revoke_date): """The function takes 2 arguments and updates the revoked_flag to 1 c_id --> Certificate id of the certificate to be revoked conn --> connection to rethinkdb asyncio pool""" epoch_revoke_date = await (await r.table('share_assets').filter({'id' : c_id}).pluck('revoked_on').run(conn)).next() iso_revoke_date = datetime.datetime.fromtimestamp(epoch_revoke_date['revoked_on'], tz) if epoch_revoke_date['revoked_on'] == revoke_date: print ('API Call') #API Call return await r.table('share_assets').filter({'id' : c_id}).update({"revoked_flag" : 1}).run(conn) # return await r.table(db_config['revoke_table']).filter({'c_id' : c_id}).update({"revoked_flag" : "1"}).run(conn) else: print ('API call else') task_res = revoke_certi_task.apply_async((c_id, epoch_revoke_date['revoked_on']),eta=iso_revoke_date) await task_status_logging(task_res.id, iso_revoke_date) async def change_feed_filter(): """It is a rethinkdb Changefeed function which invokes an event whenever an entry with revoke_date key is inserted in the table """ conn = await connection() feed = await r.table('share_assets').has_fields('revoked_on').changes().run(conn) # feed = await r.table(db_config['revoke_table']).has_fields('revoked_on').changes().run(conn) while (await feed.fetch_next()): change = await feed.next() c_id = change['new_val']['id'] revoke_date = datetime.datetime.fromtimestamp(change['new_val']['revoked_on'], tz) revoked_flag_new = change['new_val']['revoked_flag'] try: revoked_flag_old = change['old_val']['revoked_flag'] if (revoked_flag_new == 0 and revoked_flag_old == 1): task_res = revoke_certi_task.apply_async((c_id,change['new_val']['revoked_on']),eta=revoke_date) await task_status_logging(task_res.id, revoke_date) except KeyError: task_res = revoke_certi_task.apply_async((c_id,change['new_val']['revoked_on']),eta=revoke_date) await task_status_logging(task_res.id, revoke_date) async def task_status_logging(task_id, revoke_date): res = AsyncResult(task_id, app = app) logging.info('X----------X------------TASK----------------X-----------X') logging.info('Task has been registered with task id ' + str(task_id) + ' and will be excuted at ' + 
str(revoke_date)) return def main(): loop = asyncio.get_event_loop() task = loop.create_task(change_feed_filter()) loop.run_until_complete(task) if __name__ == '__main__': main()
[ "juyalpriyank@gmail.com" ]
juyalpriyank@gmail.com
b669194564c48315f3dbe003e2da20e68a7d9659
ea378480ba678eb123ef826e3ca0c3eb8f4e538f
/py ref/agg:PIL/01-aggTkinterLines.py
9d88b4b166b270d15d9e50e4cd4e23b80fad7d2d
[]
no_license
msarch/py
67235643666b1ed762d418263f7eed3966d3f522
dcd25e633a87cdb3710e90224e5387d3516c1cd3
refs/heads/master
2021-01-01T05:21:58.175043
2017-05-25T08:15:26
2017-05-25T08:15:26
87,453,820
1
0
null
null
null
null
UTF-8
Python
false
false
372
py
import aggdraw
import random
import Tkinter

root = Tkinter.Tk()

import Image
img = Image.new("RGB", (1000,2000), "#FFFFFF")

import aggdraw as draw
canvas = draw.Draw(img)
pen = draw.Pen("black", 0.5)
canvas.line((5,20,200,100), pen)
canvas.line((0,500,500,0), draw.Pen("blue", 0.7))
canvas.flush()

img.save("love.png", "PNG")
img.save("love.gif", "GIF")
[ "msarch@free.fr" ]
msarch@free.fr
9b92eb72f89103d08ab3cbd996189b790efd62a0
756be73ae5b44c716c3c1f2c6ae8e53903973386
/problems/arrays/python/problem13.py
a736f19c030971f4fd0518cd85c8ed69f23d9828
[]
no_license
sumitd-archives/coding_problems
4e8290b22740355be1e36238844d43ade33cc58b
4b3dc1a24750f710cc25a111f4b061ae57601c85
refs/heads/master
2020-07-01T03:45:14.968828
2016-11-18T06:27:35
2016-11-18T06:27:35
74,099,022
0
0
null
null
null
null
UTF-8
Python
false
false
677
py
def leftswap(start, end, string):
    temp = string[end];
    for i in range(end - 1, start - 1, -1):
        string[i + 1] = string[i];
    string[start] = temp;


def rightswap(start, end, string):
    temp = string[start];
    for i in range(start, end):
        string[i] = string[i + 1];
    string[end] = temp;


def lexico_anagrams(pos, string):
    if pos == len(string) - 1:
        print ''.join(string);
    for i in range(pos , len(string)):
        leftswap(pos, i, string);
        lexico_anagrams(pos + 1, string);
        rightswap(pos, i, string);


def main():
    string = list("abcd");
    lexico_anagrams(0, string);

if __name__ == "__main__" :
    main()
[ "sumit.083@gmail.com" ]
sumit.083@gmail.com
bb0199410c4738c988f1f6d0d10a2a88cb983db3
ec40422dea1feb7ed8c9556319d85fe78e90dc0a
/wtm_attack.py
f6cb4228c8b33edf2a4c91a390e04daa3601f1f2
[]
no_license
reproteq/WTM
c7929a5b51be5fe236c1d725f179e6495f34d701
cc82a73b105caaf0919406fca8e72c7b5792a0ce
refs/heads/master
2020-05-27T21:17:58.508532
2017-03-02T10:12:09
2017-03-02T10:12:09
83,655,344
0
0
null
null
null
null
UTF-8
Python
false
false
20,811
py
#!/usr/bin/python ############# # COLORS # ############# W = '\033[0m' # white (normal) R = '\033[31m' # red G = '\033[32m' # green O = '\033[33m' # orange B = '\033[34m' # blue P = '\033[35m' # purple C = '\033[36m' # cyan GR = '\033[37m' # gray ############### import wtm_settings, os, sys, time , csv # Executing, communicating with, killing processes from sys import stdout # Flushing from subprocess import Popen, call, PIPE from signal import SIGINT, SIGTERM # /dev/null, send output from programs so they don't print to screen. DN = open(os.devnull, 'w') ERRLOG = open(os.devnull, 'w') OUTLOG = open(os.devnull, 'w') class RunConfiguration: def __init__(self): self.WIRELESS_IFACE='' self.IFACE_TO_TAKE_DOWN ='' self.TX_POWER ='' self.WIRELESS_IFACE = '' self.PRINTED_SCANNING = False self.TX_POWER = 0 self.temp = '/tmp/wtm' self.WPS_DISABLE = False # Flag to skip WPS scan and attacks self.PIXIE = False self.WPS_FINDINGS = [] # List of (successful) results of WPS attacks self.WPS_TIMEOUT = 660 # Time to wait (in seconds) for successful PIN attempt self.WPS_RATIO_THRESHOLD = 0.01 # Lowest percentage of tries/attempts allowed (where tries > 0) self.WPS_MAX_RETRIES = 0 # Number of times to re-try the same pin before giving up completely. self.WPA_FINDINGS = [] # List of strings containing info on successful WPA attacks self.CRACKED_TARGETS = [] # List of targets we have already cracked self.CRACKED_TARGETS = self.load_cracked() def load_cracked(self): """ Loads info about cracked access points into list, returns list. """ result = [] if not os.path.exists('cracked.csv'): return result with open('cracked.csv', 'rb') as csvfile: targetreader = csv.reader(csvfile, delimiter=',', quotechar='"') for row in targetreader: t = Target(row[0], 0, 0, 0, row[1], row[2]) t.key = row[3] t.wps = row[4] result.append(t) return result def save_cracked(self, target): """ Saves cracked access point key and info to a file. """ self.CRACKED_TARGETS.append(target) with open('cracked.csv', 'wb') as csvfile: targetwriter = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) for target in self.CRACKED_TARGETS: targetwriter.writerow([target.bssid, target.encryption, target.ssid, target.key, target.wps]) def exit_gracefully(self, code=0): self.RUN_ENGINE.disable_monitor_mode() print GR + " [+]" + W + " quitting" # wifite will now exit" print '' print (GR + ' ['+ G +'B'+ GR +']' + B +" Back "+ G +"OK!") print (GR + ' ['+ G +'E'+ GR +']' + B +" Exit "+ G +"OK!") #return #exit(code) import wtm_main choice = raw_input(" >> ") wtm_main.exec_menu(choice) #return #exit(code) class Target: """ Holds data for a Target (aka Access Point aka Router) """ def __init__(self, bssid, power, data, channel, encryption, ssid): self.bssid = bssid self.power = power self.data = data self.channel = channel self.encryption = encryption self.ssid = ssid self.wps = False # Default to non-WPS-enabled router. 
self.key = '' class RunEngine: def __init__(self, run_config): self.RUN_CONFIG = run_config self.RUN_CONFIG.RUN_ENGINE = self self.bssid = wtm_settings.myglobalistbssid[0] self.essid = wtm_settings.myglobalistessid[0] self.chan = wtm_settings.myglobalistchan[0] self.encrypt = wtm_settings.myglobalistencrypt[0] self.wps = wtm_settings.myglobalistwps[0] self.iface = wtm_settings.myglobalistiface[0] self.clients = wtm_settings.myglobalistclients self.rutacap = wtm_settings.myglobalistrutacap[0] self.filename = wtm_settings.myglobalistfilename[0] self.cap = self.rutacap + self.filename def enable_monitor_mode(self, iface): #mac_anonymize(iface) print GR + ' [+]' + W + ' enabling monitor mode on %s...' % (G + iface + W), stdout.flush() call(['airmon-ng', 'check', 'kill'], stdout=DN, stderr=DN) call(['airmon-ng', 'start', iface], stdout=DN, stderr=DN) print 'done' self.RUN_CONFIG.WIRELESS_IFACE = '' # remove this reference as we've started its monitoring counterpart self.RUN_CONFIG.IFACE_TO_TAKE_DOWN = self.get_iface() if self.RUN_CONFIG.TX_POWER > 0: print GR + ' [+]' + W + ' setting Tx power to %s%s%s...' % (G, self.RUN_CONFIG.TX_POWER, W), call(['iw', 'reg', 'set', 'BO'], stdout=OUTLOG, stderr=ERRLOG) call(['iwconfig', iface, 'txpower', self.RUN_CONFIG.TX_POWER], stdout=OUTLOG, stderr=ERRLOG) print 'done' return self.RUN_CONFIG.IFACE_TO_TAKE_DOWN def disable_monitor_mode(self): if self.RUN_CONFIG.IFACE_TO_TAKE_DOWN == '': return print GR + ' [+]' + W + ' disabling monitor mode on %s...' % (G + self.RUN_CONFIG.IFACE_TO_TAKE_DOWN + W), stdout.flush() call(['airmon-ng', 'stop', self.RUN_CONFIG.IFACE_TO_TAKE_DOWN], stdout=DN, stderr=DN) print 'done' def rtl8187_fix(self): # Check if current interface is using the RTL8187 chipset proc_airmon = Popen(['airmon-ng'], stdout=PIPE, stderr=DN) proc_airmon.wait() using_rtl8187 = False for line in proc_airmon.communicate()[0].split(): line = line.upper() if line.strip() == '' or line.startswith('INTERFACE'): continue if line.find(iface.upper()) and line.find('RTL8187') != -1: using_rtl8187 = True if not using_rtl8187: # Display error message and exit print R + ' [!]' + O + ' unable to generate airodump-ng CSV file' + W print R + ' [!]' + O + ' you may want to disconnect/reconnect your wifi device' + W self.RUN_CONFIG.exit_gracefully(1) print O + " [!]" + W + " attempting " + O + "RTL8187 'Unknown Error 132'" + W + " fix..." original_iface = iface # Take device out of monitor mode airmon = Popen(['airmon-ng', 'stop', iface], stdout=PIPE, stderr=DN) airmon.wait() for line in airmon.communicate()[0].split('\n'): if line.strip() == '' or \ line.startswith("Interface") or \ line.find('(removed)') != -1: continue original_iface = line.split()[0] # line[:line.find('\t')] # Remove drive modules, block/unblock ifaces, probe new modules. print_and_exec(['ifconfig', original_iface, 'down']) print_and_exec(['rmmod', 'rtl8187']) print_and_exec(['rfkill', 'block', 'all']) print_and_exec(['rfkill', 'unblock', 'all']) print_and_exec(['modprobe', 'rtl8187']) print_and_exec(['ifconfig', original_iface, 'up']) print_and_exec(['airmon-ng', 'start', original_iface]) print '\r \r', print O + ' [!] ' + W + 'restarting scan...\n' return True def get_iface(self): if not self.RUN_CONFIG.PRINTED_SCANNING: print GR + ' [+]' + W + ' Attack Wps ...' 
self.RUN_CONFIG.PRINTED_SCANNING = True proc = Popen(['iwconfig'], stdout=PIPE, stderr=DN) iface = '' monitors = [] adapters = [] for line in proc.communicate()[0].split('\n'): if len(line) == 0: continue if ord(line[0]) != 32: # Doesn't start with space iface = line[:line.find(' ')] # is the interface if line.find('Mode:Monitor') != -1: if iface not in monitors: #print GR + ' [+] found monitor inferface: ' + iface monitors.append(iface) else: if iface not in adapters: #print GR + ' [+] found wireless inferface: ' + iface adapters.append(iface) if self.RUN_CONFIG.WIRELESS_IFACE != '': if monitors.count(self.RUN_CONFIG.WIRELESS_IFACE): return self.RUN_CONFIG.WIRELESS_IFACE else: if self.RUN_CONFIG.WIRELESS_IFACE in adapters: # valid adapter, enable monitor mode print R + ' [!]' + O + ' could not find wireless interface %s in monitor mode' % ( R + '"' + R + self.RUN_CONFIG.WIRELESS_IFACE + '"' + O) return self.enable_monitor_mode(self.RUN_CONFIG.WIRELESS_IFACE) else: # couldnt find the requested adapter print R + ' [!]' + O + ' could not find wireless interface %s' % ( '"' + R + self.RUN_CONFIG.WIRELESS_IFACE + O + '"' + W) self.RUN_CONFIG.exit_gracefully(0) if len(monitors) == 1: return monitors[0] # Default to only device in monitor mode elif len(monitors) > 1: print GR + " [+]" + W + " interfaces in " + G + "monitor mode:" + W for i, monitor in enumerate(monitors): print " %s. %s" % (G + str(i + 1) + W, G + monitor + W) ri = raw_input("%s [+]%s select %snumber%s of interface to use for capturing (%s1-%d%s): %s" % \ (GR, W, G, W, G, len(monitors), W, G)) while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors): ri = raw_input("%s [+]%s select number of interface to use for capturing (%s1-%d%s): %s" % \ (GR, W, G, len(monitors), W, G)) i = int(ri) return monitors[i - 1] proc = Popen(['airmon-ng'], stdout=PIPE, stderr=DN) for line in proc.communicate()[0].split('\n'): if len(line) == 0 or line.startswith('Interface') or line.startswith('PHY'): continue if line.startswith('phy'): line = line.split('\t', 1)[1] monitors.append(line) if len(monitors) == 0: print R + ' [!]' + O + " no wireless interfaces were found." + W print R + ' [!]' + O + " you need to plug in a wifi device or install drivers.\n" + W self.RUN_CONFIG.exit_gracefully(0) elif self.RUN_CONFIG.WIRELESS_IFACE != '' and monitors.count(self.RUN_CONFIG.WIRELESS_IFACE) > 0: monitor = monitors[0][:monitors[0].find('\t')] return self.enable_monitor_mode(monitor) elif len(monitors) == 1: monitor = monitors[0][:monitors[0].find('\t')] if monitor.startswith('phy'): monitor = monitors[0].split()[1] return self.enable_monitor_mode(monitor) print GR + " [+]" + W + " available wireless devices:" for i, monitor in enumerate(monitors): print " %s%d%s. 
%s" % (G, i + 1, W, monitor) ri = raw_input( GR + " [+]" + W + " select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W)) while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors): ri = raw_input(" [+] select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W)) i = int(ri) monitor = monitors[i - 1][:monitors[i - 1].find('\t')] return self.enable_monitor_mode(monitor) def attack_wps(self, pin): def attack_interrupted_prompt(): self.RUN_CONFIG.exit_gracefully(1) def sec_to_hms(sec): if sec <= -1: return '[endless]' h = sec / 3600 sec %= 3600 m = sec / 60 sec %= 60 return '[%d:%02d:%02d]' % (h, m, sec) def send_interrupt(process): try: os.kill(process.pid, SIGINT) # os.kill(process.pid, SIGTERM) except OSError: pass # process cannot be killed except TypeError: pass # pid is incorrect type except UnboundLocalError: pass # 'process' is not defined except AttributeError: pass # Trying to kill "None" print GR + ' [0:00:00]' + W + ' initializing %sWPS PIN attack%s on %s' % \ (G, W, G + self.essid + W + ' (' + G + self.bssid + W + ')' + W) cmd = ['reaver', '-i', self.iface, '-b', self.bssid, '-o', self.RUN_CONFIG.temp + 'out.out', # Dump output to file to be monitored '-a', # auto-detect best options, auto-resumes sessions, doesn't require input! '-c', self.chan, # '--ignore-locks', '-vv', '-p', pin] # verbose output proc = Popen(cmd, stdout=DN, stderr=DN) cracked = False # Flag for when password/pin is found percent = 'x.xx%' # Percentage complete aps = 'x' # Seconds per attempt time_started = time.time() last_success = time_started # Time of last successful attempt last_pin = '' # Keep track of last pin tried (to detect retries) retries = 0 # Number of times we have attempted this PIN tries_total = 0 # Number of times we have attempted all pins tries = 0 # Number of successful attempts pin = '' key = '' try: while not cracked: time.sleep(1) if proc.poll() != None: # Process stopped: Cracked? Failed? 
inf = open(self.RUN_CONFIG.temp + 'out.out', 'r') lines = inf.read().split('\n') inf.close() for line in lines: # When it's cracked: if line.find("WPS PIN: '") != -1: pin = line[line.find("WPS PIN: '") + 10:-1] if line.find("WPA PSK: '") != -1: key = line[line.find("WPA PSK: '") + 10:-1] cracked = True break if not os.path.exists(self.RUN_CONFIG.temp + 'out.out'): continue inf = open(self.RUN_CONFIG.temp + 'out.out', 'r') lines = inf.read().split('\n') inf.close() for line in lines: if line.strip() == '': continue # Status if line.find(' complete @ ') != -1 and len(line) > 8: percent = line.split(' ')[1] i = line.find(' (') j = line.find(' seconds/', i) if i != -1 and j != -1: aps = line[i + 2:j] # PIN attempt elif line.find(' Trying pin ') != -1: pin = line.strip().split(' ')[-1] if pin == last_pin: retries += 1 elif tries_total == 0: last_pin = pin tries_total -= 1 else: last_success = time.time() tries += 1 last_pin = pin retries = 0 tries_total += 1 # Warning elif line.endswith('10 failed connections in a row'): pass # Check for PIN/PSK elif line.find("WPS PIN: '") != -1: pin = line[line.find("WPS PIN: '") + 10:-1] elif line.find("WPA PSK: '") != -1: key = line[line.find("WPA PSK: '") + 10:-1] cracked = True if cracked: break print ' %s WPS attack, %s success/ttl,' % \ (GR + sec_to_hms(time.time() - time_started) + W, \ G + str(tries) + W + '/' + O + str(tries_total) + W), if percent == 'x.xx%' and aps == 'x': print '\r', else: print '%s complete (%s sec/att) \r' % (G + percent + W, G + aps + W), if self.RUN_CONFIG.WPS_TIMEOUT > 0 and (time.time() - last_success) > self.RUN_CONFIG.WPS_TIMEOUT: print R + '\n [!]' + O + ' unable to complete successful try in %d seconds' % ( self.RUN_CONFIG.WPS_TIMEOUT) print R + ' [+]' + W + ' skipping %s' % (O + self.essid + W) break if self.RUN_CONFIG.WPS_MAX_RETRIES > 0 and retries > self.RUN_CONFIG.WPS_MAX_RETRIES: print R + '\n [!]' + O + ' unable to complete successful try in %d retries' % ( self.RUN_CONFIG.WPS_MAX_RETRIES) print R + ' [+]' + O + ' the access point may have WPS-locking enabled, or is too far away' + W print R + ' [+]' + W + ' skipping %s' % (O + self.essid + W) break if self.RUN_CONFIG.WPS_RATIO_THRESHOLD > 0.0 and tries > 0 and ( float(tries) / tries_total) < self.RUN_CONFIG.WPS_RATIO_THRESHOLD: print R + '\n [!]' + O + ' successful/total attempts ratio was too low (< %.2f)' % ( self.RUN_CONFIG.WPS_RATIO_THRESHOLD) print R + ' [+]' + W + ' skipping %s' % (G + self.essid + W) break stdout.flush() # Clear out output file if bigger than 1mb inf = open(self.RUN_CONFIG.temp + 'out.out', 'w') inf.close() # End of big "while not cracked" loop if cracked: if pin != '': print GR + '\n\n [+]' + G + ' PIN found: %s' % (C + pin + W) if key != '': print GR + ' [+] %sWPA key found:%s %s' % (G, W, C + key + W) self.RUN_CONFIG.WPA_FINDINGS.append(W + "found %s's WPA key: \"%s\", WPS PIN: %s" % ( G + self.essid + W, C + key + W, C + pin + W)) self.RUN_CONFIG.WPA_FINDINGS.append('') t = Target(self.bssid, 0, 0, 0, 'WPA', self.essid) t.key = key t.wps = pin self.RUN_CONFIG.save_cracked(t) except KeyboardInterrupt: print R + '\n (^C)' + O + ' WPS brute-force attack interrupted' + W if attack_interrupted_prompt(): send_interrupt(proc) print '' self.RUN_CONFIG.exit_gracefully(0) send_interrupt(proc) return cracked def StartWPS(self): print (GR + 'WPS ATTACK'+ G +' ON ' +GR +' ... 
Victim Bssid ' + G + self.bssid + GR) time_started = time.time() lowbssid = self.bssid[:8] for line in open("patts.csv"): if lowbssid in line: pines = line.split(',')[4].lower().split(' ') if self.filename == '': for pin in pines: print (GR +'\n') print (G + '[+]' + cmd + GR + '\n') os.system(cmd) timelost = round(time.time() - time_started) print (G + '[+]' + str(timelost) + GR + ' seg \n' + GR) self.RUN_CONFIG.exit_gracefully(1) def StartWPA(self): print (GR + 'WPA ATTACK'+ G +' ON ' +GR +' ... Victim Bssid ' + G + self.bssid + GR) time_started = time.time() lowbssid = self.bssid[:8] for line in open("patts.csv"): if lowbssid in line: if self.filename != '': attack_crunch = line.split(',')[7].replace('\n','') cmd = "crunch "+str(attack_crunch)+" | pyrit -r "+str(self.cap)+" -e "+str(self.essid)+" -b "+str(self.bssid)+" --all-handshakes --aes -i - attack_passthrough" print (G + '[+]' + cmd + GR + '\n') os.system(cmd) timelost = round(time.time() - time_started) print (G + '[+]' + str(timelost) + GR + ' seg \n' + GR)
[ "root@localhost.localdomain" ]
root@localhost.localdomain
5fe0303762fa8663ebcec9d3b05324340cbb5f0b
e1517a4f37fe141e1fed134e05954be841a0d3b5
/bin/piclusterdeamon
8f8c268d9ca23675bf967f2f1039d787a136fa70
[ "Apache-2.0" ]
permissive
Lukas0025/piClusterManager
4ff6240ffc6c2d3c434e63a9e8f8bc76cf98ad88
c2ecd2169d2b1e6189e325ead3056afdcf33884f
refs/heads/master
2020-11-24T14:58:11.713184
2020-01-11T10:37:14
2020-01-11T10:37:14
228,204,366
0
0
null
null
null
null
UTF-8
Python
false
false
1,090
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2019 Lukáš Plevač <lukasplevac@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#

from piClusterManager import config
from piClusterManager.protocol import agent

try:
    filehandler = open("/no-hello","rb")
    hide = True
    filehandler.close()
except IOError:
    hide = False

#start agent
if not(hide):
    agent(config)
[ "lukasplevac@gmail.com" ]
lukasplevac@gmail.com
88392cba273f55e85a5eb4525c261811706d73fe
ac52625e698bcc66973fc40399c1a278f535406d
/project1/framework.py
3d7b1e7cd7cf4ba6f6e2226bc3aac2354a5c48d5
[]
no_license
myselfliu/mobile_web
b4599febeccc255e44e28029aa262c3f9c556e88
658e15fd881b17c8715a58a906ac5aad91928acf
refs/heads/master
2020-05-07T18:50:16.017954
2019-04-12T05:00:08
2019-04-12T05:00:08
180,785,951
0
0
null
2019-04-12T05:00:11
2019-04-11T12:19:53
Python
UTF-8
Python
false
false
4,021
py
import pymysql import json import logging route_list = [] def route(path): # 装饰器 def decorator(fn): # 当执行装饰器装饰指定函数的时候,把路径和函数添加到路由表 route_list.append((path, fn)) def inner(): # 执行指定函数 return fn() return inner # 返回装饰器 return decorator # 获取首页数据 @route("/index.html") def index(): # index=out(index) # 响应状态 status = "200 ok" # 响应头 heads = [("Server", "HjjW/9.0")] # 1.打开模板文件,读取数据 with open("template/index.html", "r")as file: file_data = file.read() # 处理后的数据,从数据库查询 coon = pymysql.connect( host="127.0.0.1", port=3306, user="root", password="mysql", database="stock_db", charset="utf8") cs = coon.cursor() sql = "select * from info;" cs.execute(sql) result = cs.fetchall() print(result) response_body = "" for row in result: response_body += '''<tr> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td><input type="button" value="添加" id="toAdd" name="toAdd" systemidvaule="000007"></td> </tr>''' % row # 2.替换模板文件中的模板遍历 result = file_data.replace("{%content%}", response_body) # 返回请求状态信息,请求头请求体 return status, heads, result # 个人中心数据接口开发 @route("/center_data.html") def center_data(): # 响应状态 status = "200 ok" # 响应头 heads = [("Server", "HjjW/9.0"), ("Center-Type", "text/html;charset=utf-8")] coon = pymysql.connect( host="127.0.0.1", port=3306, user="root", password="mysql", database="stock_db", charset="utf8") # 处理后的数据,从数据库查询 cs = coon.cursor() sql = '''select i.code,i.short,i.chg,i.turnover,i.price, i.highs,f.note_info from info as i inner join focus as f on i.id=f.info_id; ''' cs.execute(sql) result = cs.fetchall() cs.close() coon.close() center_data_list = list() # 遍历每一行数据转成字典 for row in result: # 创建空的字典 center_dict = dict() center_dict["code"] = row[0] center_dict["short"] = row[1] center_dict["chg"] = row[2] center_dict["turnover"] = row[3] center_dict["price"] = row[4] center_dict["highs"] = row[5] center_dict["note_info"] = row[6] # 添加每个字典信息 center_data_list.append(center_dict) # 把列表字典转成json字符串,并在控制台显示 json_str = json.dumps(center_data_list, ensure_ascii=False) print(json_str) return status, heads, json_str def not_found(): # 响应状态 status = "404 ont found" # 响应头 heads = [("Server", "HjjW/9.0")] # 处理的数据 response_body = "404 NOT FOUND!!!" return status, heads, response_body # 处理动态资源请求 def handle_request(env): # 获取动态资源路径 request_path = env["recv_path"] print("接收到动态资源请求:", request_path) # 遍历路由列表,选择执行的函数,不管页面有多少,用户选择了哪个路径就会遍历到相应的路径 for path, fn in route_list: if request_path == path: result = fn() return result else: logging.error("没有设置相应的路由找不到页面资源", request_path) # 没有找到 result = not_found() return result
[ "hufafa@163.com" ]
hufafa@163.com
7c3bc8f3ce18c045889bdf345fb28f13c847e7cc
02fe4832da9fee6c71263eaa5465964323698901
/piconzerodistancefromwallVL53v5.py
b50b1fa3cb95cee174087e37bf9f8a6235caa4ec
[]
no_license
MarkEdwards63/PiWars2017
8abcd30d2fbdb6ee953d351e79fe62e0b5ba608e
9a7946a9a0759bc1f12919388e646fe924cb83c9
refs/heads/master
2021-01-18T19:08:34.565141
2017-04-01T06:35:55
2017-04-01T06:35:55
86,890,175
0
0
null
null
null
null
UTF-8
Python
false
false
6,531
py
#====================================================================== # # Python Module to handle an HC-SR04 Ultrasonic Module # Aimed at use on Picon Zero # #====================================================================== import RPi.GPIO as GPIO, time import piconzero as pz import VL53L0X # GPIO for ToF Sensor 1 shutdown pin sensor1_shutdown = 23 # GPIO for ToF Sensor 2 shutdown pin sensor2_shutdown = 24 sensoronright = 0 # set to 1 if on right 0 if on left piwidth = 100 # width of pi robot in cm runwidth = 540 # width of speed test run in cm frontDistanceTurn = 200 mediumturndistance = 200 # distance from wall for medium turn fastturndistance = 100 # distance from wall for fast turn turnleft = 180 turnright = 150 hardturnleft = 250 hardturnright = 80 sensordeltaforturn = 5 # turn if difference between sensors in greater (mm) mediumspeed = 40 # slow speed for medium turn 15 fastspeed = 50 # slow speed for fast turn 20 speed = 100 # normal speed speedLeft = speed speedRight = speed mediumspeedLeft = 25 mediumspeedRight = 25 fastspeedLeft = 40 fastspeedRight = 40 rightwheel = 1 # wheel number for setMotor function leftwheel = 0 #===================================================================== # General Functions # def init(): GPIO.setwarnings(False) # GPIO.setmode(GPIO.BOARD) # GPIO.setwarnings(False) # Setup GPIO for shutdown pins on each VL53L0X GPIO.setmode(GPIO.BCM) GPIO.setup(sensor1_shutdown, GPIO.OUT) GPIO.setup(sensor2_shutdown, GPIO.OUT) # Set all shutdown pins low to turn off each VL53L0X GPIO.output(sensor1_shutdown, GPIO.LOW) GPIO.output(sensor2_shutdown, GPIO.LOW) # Keep all low for 500 ms or so to make sure they reset time.sleep(0.50) pz.init() def cleanup(): GPIO.cleanup() init() # initialise the boards time.sleep(2) # wait for pi to settle down # VL53L0X_GOOD_ACCURACY_MODE = 0 # Good Accuracy mode # VL53L0X_BETTER_ACCURACY_MODE = 1 # Better Accuracy mode # VL53L0X_BEST_ACCURACY_MODE = 2 # Best Accuracy mode # VL53L0X_LONG_RANGE_MODE = 3 # Longe Range mode # VL53L0X_HIGH_SPEED_MODE = 4 # High Speed mode # Create one object per VL53L0X passing the address to give to # each. 
tof1 = VL53L0X.VL53L0X(address=0x2B) tof2 = VL53L0X.VL53L0X(address=0x2D) # Set shutdown pin high for the first VL53L0X then # call to start ranging GPIO.output(sensor1_shutdown, GPIO.HIGH) time.sleep(0.50) tof1.start_ranging(VL53L0X.VL53L0X_LONG_RANGE_MODE) # Set shutdown pin high for the second VL53L0X then # call to start ranging GPIO.output(sensor2_shutdown, GPIO.HIGH) time.sleep(0.50) tof2.start_ranging(VL53L0X.VL53L0X_BETTER_ACCURACY_MODE) timing = tof2.get_timing() if (timing < 20000): timing = 20000 print ("Timing %d ms" % (timing/1000)) interval = timing/1000000.00 #interval = 0.005 distanceFront = tof1.get_distance() if (distanceFront > 0): print ("sensor %d - %d cm" % (tof1.my_object_number, distanceFront)) else: print ("%d - Error" % tof1.my_object_number) distanceSide = tof2.get_distance() if (distanceSide > 0): print ("sensor %d - %d cm" % (tof2.my_object_number, distanceSide)) else: print ("%d - Error" % tof2.my_object_number) time.sleep(interval) lastDistanceFront = distanceFront # used to test if heading left or right lastDistanceSide = distanceSide pz.setMotor(leftwheel, speedLeft) pz.setMotor(rightwheel, speedRight) # go go go count = 0 try: while True: # get new sensor value distanceFront = tof1.get_distance() print("Front", distanceFront) while distanceFront <= frontDistanceTurn: # if distanceclose to front walls top distanceFront = tof1.get_distance() print("Stop!!!") pz.stop() count += 1 distanceSide = tof2.get_distance() # get new distance from VL53 if distanceSide >= 550: # if distance reading over 550 mm then discard as greater than distance between walls print("Faulty distance reading", distanceSide) distanceSide = lastDistanceSide print("ToF 2", count, distanceSide, lastDistanceSide) # pz.setMotor(leftwheel, speedLeft) # set speed back to default. will be overwritten if need for turn # pz.setMotor(rightwheel, speedRight) # check if need to turn - slow down wheel on side to turn if (distanceSide >= (lastDistanceSide + sensordeltaforturn)): # heading left so turn right pz.setMotor(rightwheel, speedRight) pz.setMotor(leftwheel, speedLeft - fastspeedLeft) print("Turning Left 2", distanceSide, lastDistanceSide) elif (distanceSide <= (lastDistanceSide - sensordeltaforturn)): # heading right so turn left pz.setMotor(rightwheel, speedRight - fastspeedRight) pz.setMotor(leftwheel, speedLeft) print("Turning Right 2", distanceSide, lastDistanceSide) elif distanceSide >= hardturnleft: # if too near wall then fast turn pz.setMotor(rightwheel, speedRight) pz.setMotor(leftwheel, speedLeft - fastspeedLeft) print("Hard Left", distanceSide, lastDistanceSide) elif distanceSide <= hardturnright: # if too near wall then fast turn pz.setMotor(rightwheel, speedRight - fastspeedRight) pz.setMotor(leftwheel, speedLeft) print("Hard Right", distanceSide, lastDistanceSide) elif distanceSide >= turnleft: # if close to wall then turn pz.setMotor(rightwheel, speedRight) pz.setMotor(leftwheel, speedLeft - mediumspeedLeft) print("Bare Left", distanceSide, lastDistanceSide) elif distanceSide <= turnright: # if close to wall then turn pz.setMotor(leftwheel, speedLeft) pz.setMotor(rightwheel, speedRight - mediumspeedRight) print("Bare Right", distanceSide, lastDistanceSide) else: pz.setMotor(leftwheel, speedLeft) # set speed back to default. 
w pz.setMotor(rightwheel, speedRight) time.sleep(interval) lastDistanceSide = distanceSide # set distance for checking movement to/from wall except KeyboardInterrupt: print ("KeyBoard Interript") finally: pz.cleanup() # cleanup piconzero and stop ToF tof1.stop_ranging() GPIO.output(sensor1_shutdown, GPIO.LOW) tof2.stop_ranging() GPIO.output(sensor2_shutdown, GPIO.LOW)
[ "noreply@github.com" ]
MarkEdwards63.noreply@github.com
b94078a34bd48cdb8c3bbdaeda07fc9953dbb317
5c53bd12358278a471b6b4404ad15ebbb048fa52
/py_bacy/tasks/io.py
d2ed99232875ea284d1d9366438d2d9829ba96f3
[ "MIT" ]
permissive
tobifinn/py_bacy
526992cefff604e5516e9da99f02f215eb312e05
f550876fe0303eb1711866268871f6fd478ef1c1
refs/heads/main
2023-04-08T07:34:44.769622
2021-01-25T12:36:00
2021-01-25T12:36:00
317,234,427
1
1
MIT
2022-10-15T13:16:20
2020-11-30T13:36:05
Python
UTF-8
Python
false
false
5,870
py
#!/bin/env python # -*- coding: utf-8 -*- # # Created on 15.01.21 # # Created for py_bacy # # @author: Tobias Sebastian Finn, tobias.sebastian.finn@uni-hamburg.de # # Copyright (C) {2021} {Tobias Sebastian Finn} # System modules from typing import List, Union from shutil import copyfile # External modules import prefect from prefect import task import dask from distributed import Client, get_client import netCDF4 as nc4 import xarray as xr import numpy as np from tqdm.autonotebook import tqdm # Internal modules def load_single_member( file_paths: List[str] ) -> xr.Dataset: """ Load data from given file paths in NetCDF-4 format. Parameters ---------- file_paths : List[str] Returns ------- loaded_ds : xr.Dataset """ loaded_ds = xr.open_mfdataset( file_paths, parallel=True, combine='nested', concat_dim='time', decode_cf=True, decode_times=True, data_vars='minimal', coords='minimal', compat='override' ) return loaded_ds @task def load_ens_data( file_paths: Union[List[str], List[List[str]]], client: Client ) -> xr.Dataset: """ Load ensemble data with xarray and dask from given file paths. The dataset will be concatenated along a new unnumbered ensemble dimension. Parameters ---------- file_paths : List[str] or List[List[str]] The items of this list will be passed to `xarray.open_mfdataset`. All ensemble members have to result to the same ensemble structure. client : None or distributed.Client, optional This client is used to load the data for each ensemble member in paralllel. Returns ------- ds_ens : xr.Dataset The loaded dataset with a concatenated ensemble dimension, which is unnumbered. """ logger = prefect.context.get('logger') logger.debug('Source file paths: {0}'.format(file_paths)) ds_ens_list = [] pbar_paths = tqdm(file_paths) for mem_paths in pbar_paths: ds_mem = load_single_member(file_paths=mem_paths) ds_ens_list.append(ds_mem) logger.info('Starting to concat ensemble') ds_ens = xr.concat(ds_ens_list, dim='ensemble') return ds_ens def write_single_ens_mem( source_path: str, target_path: str, analysis_dataset: xr.Dataset, assim_vars: List[str] ) -> str: """ Write a single ensemble member where the source and target path are specified. Variables that are specified within the `assim_vars` list are overwritten with the corresponding excerpts of the analysis dataset. Parameters ---------- source_path : str This netCDF4-file will be copied to the target path. target_path : str The netCDF4-file will be created in this target path and manipulated with given analysis dataset. analysis_dataset : xr.Dataset This dataset specifies the analysis data that should be written to the target path. assim_vars : List[str] These list of assimilation variables specifies which variables were changed during the assimilation process. Returns ------- target_path : str The target path with the written data. """ copyfile(source_path, target_path) with nc4.Dataset(target_path, mode='r+') as loaded_ds: for var_name in assim_vars: loaded_ds[var_name][:] = analysis_dataset[var_name] return target_path @task def write_ens_data( dataset_to_write: xr.Dataset, source_paths: List[str], target_paths: List[str], assim_vars: List[str], client: Union[None, Client] = None ) -> str: """ Write a given dataset with ensemble members to given target paths. The source path is used as base netCDF4-file and will be copied to the target paths. Parameters ---------- dataset_to_write : xr.Dataset This dataset will be written to the target paths. 
The number of ensemble members have to be the same as the length of the source paths and target paths. source_paths : List[str] Each item of this source path list is used as base file and will be copied to the target paths. The length of this list has to be the same as the ensemble dimension within `dataset_to_write`. target_paths : List[str] For each ensemble member, the corresponding dataset will be written to these target paths. The base-file of these target paths are specified within the source paths argument. The length of this list has to be the same as the ensemble dimension within `dataset_to_write`. assim_vars : List[str] This list specifies the variable name that were changed during the assimilation. client : None or distributed.Client, optional This client is used to write the analysis for each ensemble member in paralllel. Returns ------- target_paths : str The target paths with the written analyses. """ logger = prefect.context.get('logger') if client is None: logger.warning('No client was given, I try to infer the client') client = get_client(timeout=10) ens_delayed_list = [] for member_num, source_path in enumerate(source_paths): dataset_scattered = client.scatter( dataset_to_write.isel(ensemble=member_num) ) tmp_delayed = dask.delayed(write_single_ens_mem)( source_path, target_paths[member_num], dataset_scattered, assim_vars ) ens_delayed_list.append(tmp_delayed) ens_delayed_list = client.compute(ens_delayed_list) _ = client.gather(ens_delayed_list) logger.debug( 'Finished writing of ensemble data to {0}'.format(target_paths) ) return target_paths
[ "noreply@github.com" ]
tobifinn.noreply@github.com
f75f09173cf40072dfb7123448b9ec2d6cd5e80c
0736ea6d2ed7d26167e17cdbedc50825c051ce78
/GenericConfigurationTool/WorkstationUI.py
042539b1463680a22460b1c09b24aa40a3fc2946
[ "Apache-2.0" ]
permissive
wbeebe/pyqt
2c25cf7b71513cd960b7bce9aa16df5e73c45f1d
bbaee3b965b54f93b9091d232e752762be1d0cb5
refs/heads/master
2021-06-27T15:54:33.157223
2021-03-08T15:34:29
2021-03-08T15:34:29
204,618,834
1
0
null
null
null
null
UTF-8
Python
false
false
3,641
py
# # Copyright (c) 2021 William H. Beebe, Jr. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import sys import psutil from PyQt6.QtCore import Qt from PyQt6.QtWidgets import ( QMainWindow, QFileDialog, QPushButton, QLineEdit, QGridLayout, QHBoxLayout, QLabel, QWidget) class Workstation(QWidget): def __init__(self, parent, top): super(QWidget, self).__init__(parent) self.top = top hlayout = QHBoxLayout() self.layout = QGridLayout() hlayout.addLayout(self.layout) hlayout.setAlignment(hlayout, Qt.Alignment.AlignTop) self.setLayout(hlayout) self.row = 0 self.__addLabel__("Federate Name") self.federateName = QLineEdit('REMOTE_WORKSTATION') self.__addInput__(self.federateName) self.__addLabel__("Message Directory Cache") self.messageDirectoryCache = QLineEdit(self) self.__addInputAndSelect__(self.messageDirectoryCache, self.top) self.__addLabel__("Map Data Cache") self.mapDataCache = QLineEdit(self) self.__addInputAndSelect__(self.mapDataCache, self.top) self.__addLabel__("Raster Map Cache") self.rasterMapCache = QLineEdit(self) self.__addInputAndSelect__(self.rasterMapCache, self.top) self.__addLabel__("Remote Control Location") self.remoteControlLocation = QLineEdit(self) self.__addInputAndSelect__(self.remoteControlLocation, self.top) def __addLabel__(self, label): lbl = QLabel(label) self.layout.addWidget(lbl, self.row, 0, 1, -1) self.row += 1 def __addInput__(self, input): self.layout.addWidget(input, self.row, 0, 1, 4) self.row += 1 def __addSelect__(self, input): self.layout.addWidget(BrowseButton(self, input), self.row - 1, 4, 1, 1) def __addInputAndSelect__(self, input, top): hbox = QHBoxLayout() hbox.setContentsMargins(0, 0, 0, 0) hbox.addWidget(input) browseButton = BrowseButton(self, input, top) browseButton.adjustSize() hbox.addWidget(browseButton) widget = QWidget(self) widget.setLayout(hbox) self.layout.addWidget(widget, self.row, 0, 1, -1) self.row += 1 def tabName(self): return 'Workstation' class BrowseButton(QPushButton, QLineEdit, QMainWindow): def __init__(self, parent, input, top): super(QPushButton, self).__init__(parent) self.input = input self.top = top self.setText('...') self.clicked.connect(self.on_click) def on_click(self): print('BrowseButton clicked {}'.format(self.input.text())) # self.input.setText('{} - Bar'.format(self.input.text())) folder = QFileDialog.getExistingDirectory(self, "Select Folder", "", QFileDialog.Options.ShowDirsOnly | QFileDialog.Options.DontUseNativeDialog) if folder: self.input.setText(folder) self.input.setStyleSheet("background-color:#ffff80") self.top.setEdited() self.input.setFocus()
[ "wbeebe@gmail.com" ]
wbeebe@gmail.com
3fb0d96724c1854632f74679770e8364c96312f6
e19cc86817cefa4ebe4354612b2ec7f0e22f7712
/main.py
35f769b12efb060fbac1c95c5e60f08491b3886a
[]
no_license
ayaz479/blogapp
89a1f592e7f9739b0527599d8fa4f900019eb33e
63819c091a66f9ac8cfbc3d777830e301833b3ab
refs/heads/main
2023-04-12T13:01:39.338583
2021-04-28T19:10:55
2021-04-28T19:10:55
362,579,573
0
0
null
null
null
null
UTF-8
Python
false
false
8,115
py
from flask import Flask, render_template, flash, redirect, url_for, session, logging, request from flask_mysqldb import MySQL from wtforms import Form, StringField, TextAreaField, PasswordField, validators from passlib.hash import sha256_crypt from functools import wraps # kullanci adi dicureter def login_required(f): @wraps(f) def decorated_function(*args, **kwargs): if "logged_in" in session: return f(*args, **kwargs) else: flash("Please login to view this page", "danger") return redirect(url_for("login")) return decorated_function class RegisterForm(Form): name = StringField("Name and Surname", validators=[validators.Length(min=5, max=25)]) username = StringField("Username", validators=[validators.Length(min=5, max=25)]) email = StringField("E-mail", validators=[validators.Email()]) password = PasswordField("password:", validators=[validators.DataRequired(message="please enter a password"), validators.EqualTo(fieldname="confirm", message="password does not match")]) confirm = PasswordField("password verification") class Loginform(Form): username = StringField("Username") password = PasswordField("Password") app = Flask(__name__) app.secret_key = "horbax" app.config["MYSQL_HOST"] = "localhost" app.config["MYSQL_USER"] = "root" app.config["MYSQL_PASSWORD"] = "" app.config["MYSQL_DB"] = "horbax company" app.config["MYSQL_CURSORCLASS"] = "DictCursor" mysql = MySQL(app) @app.route("/") def homepage(): return render_template("Homepage.html") @app.route("/dashboard") @login_required def control_panel(): cursor = mysql.connection.cursor() sorgu = "Select * From article where author = %s" result = cursor.execute(sorgu, (session["username"],)) if result > 0: article = cursor.fetchall() return render_template("dashboard.html", article=article) else: return render_template("dashboard.html") @app.route("/about") def about(): return render_template("about.html") @app.route("/services") def services(): cursor = mysql.connection.cursor() sorgu = "Select * From article" result = cursor.execute(sorgu) if result > 0: articles = cursor.fetchall() return render_template("service.html", articles=articles) else: return render_template("service.html") @app.route("/Contact") def contact(): return render_template("contact.html") @app.route("/join", methods=["GET", "POST"]) def joinus(): form = RegisterForm(request.form) if request.method == "POST" and form.validate(): name = form.name.data username = form.username.data email = form.email.data password = sha256_crypt.encrypt(form.password.data) cursor = mysql.connection.cursor() sorgu = "Insert into userss(name,username,password,email) VALUES(%s,%s,%s,%s)" cursor.execute(sorgu, (name, username, password, email)) mysql.connection.commit() cursor.close() flash("You have successfully registered", "success") return redirect(url_for("login")) else: return render_template("join.html", form=form) @app.route("/Login", methods=["GET", "POST"]) def login(): form = Loginform(request.form) if request.method == "POST": username = form.username.data password_enterd = form.password.data cursor = mysql.connection.cursor() sorgu = "Select * From userss where username = %s" result = cursor.execute(sorgu, (username,)) if result > 0: data = cursor.fetchone() real_pass = data["password"] if sha256_crypt.verify(password_enterd, real_pass): flash("You have successfully logged in", "success") session["logged_in"] = True session["username"] = username return redirect(url_for("homepage")) else: flash("password is wrong", "danger") return redirect(url_for("login")) else: flash("There is no 
such user", "danger") return redirect(url_for("login")) return render_template("login.html", form=form) @app.route("/Logout") def logout(): session.clear() return redirect(url_for("homepage")) @app.route("/addarticle", methods=["GET", "POST"]) def addarticle(): form = Articleform(request.form) if request.method == "POST": title = form.title.data content = form.content.data cursor = mysql.connection.cursor() sorgu = "Insert into article(title,author,content) VALUES(%s,%s,%s) " cursor.execute(sorgu, (title, session["username"], content)) mysql.connection.commit() cursor.close() flash("article successfully added") return redirect(url_for("control_panel")) return render_template("Addarticle.html", form=form) class Articleform(Form): title = StringField("Article title", validators=[validators.Length(min=5, max=10)]) content = TextAreaField("Article content", validators=[validators.Length(min=10, max=30)]) @app.route("/article/<string:id>") def article(id): cursor = mysql.connection.cursor() sorgu = "Select * From article where id = %s" result = cursor.execute(sorgu, (id,)) if result > 0: article = cursor.fetchone() return render_template("article.html", article=article) else: return render_template("article.html") # article delete @app.route("/delete/<string:id>") @login_required def delete(id): cursor = mysql.connection.cursor() sorgu = "Select * from article where author = %s and id =%s" result = cursor.execute(sorgu, (session["username"], id)) if result > 0: sorgu1 = "Delete from article where id = %s" cursor.execute(sorgu1, (id,)) mysql.connection.commit() return redirect(url_for("control_panel")) else: flash("There is no such article or we do not have the authority to process such an article.", "danger") return redirect(url_for("homepage")) # Article update @app.route("/edit/<string:id>",methods=["GET","POST"]) @login_required def update(id): if request.method == "GET": cursor = mysql.connection.cursor() sorgu = "Select * from article where id = %s and author = %s" result = cursor.execute(sorgu,(id,session["username"])) if result == 0 : flash("There is no such article or we do not have the authority to process such an article.", "danger") return redirect(url_for("homepage")) else: article = cursor.fetchone() form = Articleform() form.title.data = article["title"] form.content.data = article["content"] return render_template("update.html",form=form) else: form = Articleform(request.form) newTitle = form.title.data newContent = form.content.data sorgu1 = "Update article Set title = %s,content = %s where id = %s" cursor = mysql.connection.cursor() cursor.execute(sorgu1,(newTitle,newContent,id)) mysql.connection.commit() flash("the article has been successfully updated","success") return redirect(url_for("control_panel")) #search url @app.route("/search",methods = ["GET","POST"]) def search(): if request.method == "GET": return redirect(url_for("homepage")) else: keyword = request.form.get("keyword") cursor = mysql.connection.cursor() sorgu = "Select * from article where title Like '%" + keyword + "%'" result = cursor.execute(sorgu) if result == 0 : flash("There is no such article","warning") return redirect(url_for("services")) else: articles = cursor.fetchall() return render_template("service.html",articles=articles) if __name__ == "__main__": app.run(debug=True)
[ "eiad.is1996@gmail.com" ]
eiad.is1996@gmail.com
aa083f2b87d7e143d77e77fe7d1f62b70edfa115
22b00137790f18cab99eea787e798e46d195e0f8
/djmod/blog/views.py
ddbf636b86359e495577c6400936856d71153734
[]
no_license
lalit-vasoya/djangostartproject
0db70c94b3fd71fcd155b7b18dbf2a22d474c267
8f5509a98fbdd7aef0d8c866cbb497f690804403
refs/heads/master
2020-12-18T19:37:51.419657
2020-02-17T04:56:34
2020-02-17T04:56:34
235,500,451
0
1
null
null
null
null
UTF-8
Python
false
false
948
py
from django.shortcuts import render
from django.views.generic.base import TemplateView, TemplateResponseMixin, ContextMixin
from django.views.generic import View
from django.http import HttpResponse

# """Create your views here."""


class AboutusTemplateView(TemplateView):
    template_name = "aboutus.html"
    extra_context = {"titile": "This is about us page"}

    # def get_context_data(self, *args, **kargs):
    #     context = super(AboutusTemplateView, self).get_context_data(*args, **kargs)
    #     # context["title"] = "About Us Page"
    #     return context

    # def get(self, request, *args, **kargs):
    #     return HttpResponse("asajsdbkl")


class ContactusView(TemplateResponseMixin, View, ContextMixin):
    def get(self, request, *args, **kargs):
        context = self.get_context_data(**kargs)
        context["title"] = "Some new"
        return self.render_to_response(context)
        # return HttpResponse("<h1>Contact us Page</h1>")
[ "lalitvasoya.286@gmail.com" ]
lalitvasoya.286@gmail.com
32d3db86f8e3f1438d2eaf5ad7687d07fa0cad4d
42f9a216b4c11b4f6f2edf6f7fd8a190e8d5096a
/qsbk/qsbk/settings.py
2314b881a98a9e086916a9f5416e098d7886019b
[]
no_license
KnightCicada/PythonArchive
91d6b19cf144f4291a5067f7e9d94a3e1fd3bb36
c172a403be4bdc09595bdfb6972b5124b7e13f4b
refs/heads/master
2023-07-09T09:41:34.088940
2021-02-07T05:53:11
2021-02-07T05:53:11
312,238,171
0
0
null
null
null
null
UTF-8
Python
false
false
3,115
py
# -*- coding: utf-8 -*- # Scrapy settings for qsbk project # # For simplicity, this file contains only settings considered important or # commonly used. You can find more settings consulting the documentation: # # https://docs.scrapy.org/en/latest/topics/settings.html # https://docs.scrapy.org/en/latest/topics/downloader-middleware.html # https://docs.scrapy.org/en/latest/topics/spider-middleware.html BOT_NAME = 'qsbk' SPIDER_MODULES = ['qsbk.spiders'] NEWSPIDER_MODULE = 'qsbk.spiders' # Crawl responsibly by identifying yourself (and your website) on the user-agent #USER_AGENT = 'qsbk (+http://www.yourdomain.com)' # Obey robots.txt rules ROBOTSTXT_OBEY = False # Configure maximum concurrent requests performed by Scrapy (default: 16) #CONCURRENT_REQUESTS = 32 # Configure a delay for requests for the same website (default: 0) # See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay # See also autothrottle settings and docs DOWNLOAD_DELAY = 1 # The download delay setting will honor only one of: #CONCURRENT_REQUESTS_PER_DOMAIN = 16 #CONCURRENT_REQUESTS_PER_IP = 16 # Disable cookies (enabled by default) #COOKIES_ENABLED = False # Disable Telnet Console (enabled by default) #TELNETCONSOLE_ENABLED = False # Override the default request headers: DEFAULT_REQUEST_HEADERS = { 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en', 'User-Agent':'Mozilla 4.0 (compatible; MSIE 5.5; Windows NT' } # Enable or disable spider middlewares # See https://docs.scrapy.org/en/latest/topics/spider-middleware.html #SPIDER_MIDDLEWARES = { # 'qsbk.middlewares.QsbkSpiderMiddleware': 543, #} # Enable or disable downloader middlewares # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html #DOWNLOADER_MIDDLEWARES = { # 'qsbk.middlewares.QsbkDownloaderMiddleware': 543, #} # Enable or disable extensions # See https://docs.scrapy.org/en/latest/topics/extensions.html #EXTENSIONS = { # 'scrapy.extensions.telnet.TelnetConsole': None, #} # Configure item pipelines # See https://docs.scrapy.org/en/latest/topics/item-pipeline.html ITEM_PIPELINES = { 'qsbk.pipelines.QsbkPipeline': 300, } # Enable and configure the AutoThrottle extension (disabled by default) # See https://docs.scrapy.org/en/latest/topics/autothrottle.html #AUTOTHROTTLE_ENABLED = True # The initial download delay #AUTOTHROTTLE_START_DELAY = 5 # The maximum download delay to be set in case of high latencies #AUTOTHROTTLE_MAX_DELAY = 60 # The average number of requests Scrapy should be sending in parallel to # each remote server #AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0 # Enable showing throttling stats for every response received: #AUTOTHROTTLE_DEBUG = False # Enable and configure HTTP caching (disabled by default) # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings #HTTPCACHE_ENABLED = True #HTTPCACHE_EXPIRATION_SECS = 0 #HTTPCACHE_DIR = 'httpcache' #HTTPCACHE_IGNORE_HTTP_CODES = [] #HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
[ "872323740@qq.com" ]
872323740@qq.com
af2fb1ee0fc0b73e982605b6ccae6570b518d107
ad614d4aecbdf3a043766e8ec192f722322e0eb7
/Programmers/Level2/BfsDfs/TargetNumber.py
4d70e8ac58321600a5b1f09cfabfead4b8447f04
[]
no_license
Lee-JaeHyuk/AlgorithmStudy
9d2e61537db64e96777643f037e3f518ff06ad18
403db098ddc0980476c7a1de5bd21f3f61d2c0e2
refs/heads/master
2023-07-13T00:18:33.695984
2021-08-07T13:38:03
2021-08-07T13:38:03
338,356,096
0
0
null
null
null
null
UTF-8
Python
false
false
352
py
def solution(numbers, target):
    answer = [0]
    for i in numbers:
        tem = []
        for j in answer:
            tem.append(j + i)
            tem.append(j - i)
        answer = tem
    cnt = 0
    for i in answer:
        if i == target:
            cnt += 1
    return cnt


numbers = [1, 1, 1, 1, 1]
target = 3
print(solution(numbers, target))
[ "leeseap33@naver.com" ]
leeseap33@naver.com
6488ec0d68991d84511c03b432cbbcd8bf6382a9
9c440ef6cace4095d42cd656e815c2a2193db63f
/eval_set_activation4.py
ee1e7227132715a533e4025ff34fc9f8916f67bc
[]
no_license
maorros/tangram_nih_mindset_3_0_evaluations
41081edd5ce8c14bd4531de78cfa8a66398fa5da
e2fa32310d292bee9f2935f8ff8a67f7d9618bac
refs/heads/master
2020-07-03T16:34:23.388139
2019-08-30T15:41:46
2019-08-30T15:41:46
201,970,239
0
0
null
null
null
null
UTF-8
Python
false
false
8,633
py
# Solve the last puzzle, given you learned nothing/0/1/2/3/4/5/6

from game_facilitator.SelectionGeneratorCuriosity import *
from tangrams import *
# import tensorflow as tf
import numpy as np
import math
import pickle
import random
import matplotlib.pyplot as plt
import time


def json_to_NN(json_str):
    sol = Solver()
    task = Task()
    task.create_from_json(json_str)
    sol.set_initial_task(task)

    # dictionary: key = name of node, value = number of node
    dic = {}
    for n in range(len(sol.networks[0].nodes)):
        # print sol.networks[0].nodes[n].name[0] + ' ' + sol.networks[0].nodes[n].name[1] + ' ' + sol.networks[0].nodes[n].name[2]
        dic[sol.networks[0].nodes[n].name[0] + ' ' + sol.networks[0].nodes[n].name[1] + ' ' + sol.networks[0].nodes[n].name[2]] = n

    # generate a random tangram with N pieces
    # task.random_task(sol.networks[0], number_pieces=number_pieces)
    training_task = task
    training_input = (np.minimum(task.x, 1)).flatten()  # only 0/1 (not 1,2,5)

    # solve the orignial task using the solution
    activation = np.zeros_like(sol.networks[0].a)
    for piece in task.solution:
        node_num = dic[piece.name[0] + ' ' + piece.name[1] + ' ' + piece.name[2]]
        activation[node_num] = 1
    training_output = activation

    return training_task, training_input, training_output


start_time = time.time()

world = 'w1'  # the world to be computed
CONDITION = 'curious'  # 'curious'

sgc = SelectionGeneratorCuriosity()
sgc.load_dif_levels(directory='.', world=world)

json_all_pieces = '{"pieces": [["large triangle2", "180", "1 1"], ["large triangle1", "0", "1 1"], ["parrallelogram", "0", "2 0"], ["medium triangle", "0", "3 1"], ["small triangle2", "0", "0 1"], ["small triangle1", "90", "1 0"], ["square", "0", "0 0"]], "size": "5 5"}'
task_all_pieces = Task()
task_all_pieces.create_from_json(json_all_pieces)

worlds = ['w1', 'w2', 'w3', 'w4', 'w5', 'w6', 'w7', 'w8']
selection_sequence = []
solver_cache = {}
sol = Solver()

n_runs = 10
seq_lens_act_T = []
lens_arr_act_T = np.zeros((n_runs, 7))
seq_lens_act_F = []
lens_arr_act_F = np.zeros((n_runs, 7))

# for k in range(10):
#     for puzzle in range(7):
#         # solve tangram --> solution (don't actually solve)
#         print('k:', k, 'puzzle:', puzzle)
#         task = Task()
#         task.create_from_json(sgc.paths[1][puzzle])
#         sol.set_initial_task(task)
#         game = 1
#         # if game > 0:
#         #     sol.set_activation(out_list[selected])
#         sol.run_task(task, duration=300, stop=True)
#         seq = sol.get_seq_of_moves_v2(task_all_pieces)
#         print(seq)
#         print ('game: ', game)
#         # solver_cache[options[selected][0]] = seq
#         solver_cache[str(game + 1)] = seq
#         seq_lens.append(len(seq))
#         lens_arr[k, puzzle] = len(seq)

with open('curious_y_output_e8_1e5.pkl', 'rb') as f:  # Python 3: open(..., 'rb')
    [selection_sequence, training_set_input, training_set_output, global_out_list, global_H_list] = pickle.load(f)

# test the solution length for each puzzle k with and without the seed after learning puzzle k-1.
act = False
# for act in [False, True]:
for k in range(n_runs):
    for world in range(1):
        sgc.load_dif_levels(directory='.', world=worlds[world])
        json_all_pieces = '{"pieces": [["large triangle2", "180", "1 1"], ["large triangle1", "0", "1 1"], ["parrallelogram", "0", "2 0"], ["medium triangle", "0", "3 1"], ["small triangle2", "0", "0 1"], ["small triangle1", "90", "1 0"], ["square", "0", "0 0"]], "size": "5 5"}'
        task_all_pieces = Task()
        task_all_pieces.create_from_json(json_all_pieces)
        for puzzle in range(1):
            # solve tangram --> solution (don't actually solve)
            print('k:', k, 'puzzle:', puzzle)
            task = Task()
            task.create_from_json(sgc.paths[1][6])
            training_task, training_input, training_output = json_to_NN(sgc.paths[1][6])
            sol.set_initial_task(task)
            game = 1
            # if game > 0:
            # print('training_output', training_output)
            if act:
                # sol.set_activation(training_output)
                sol.set_activation(global_out_list[7*(puzzle)+6])
            sol.run_task(task, duration=300, stop=True)
            seq = sol.get_seq_of_moves_v2(task_all_pieces)
            # print(seq)
            # print ('game: ', game)
            # solver_cache[options[selected][0]] = seq
            solver_cache[str(game+1)] = seq
            if act:
                seq_lens_act_T.append(len(seq))
                lens_arr_act_T[k, puzzle] = len(seq)
            else:
                seq_lens_act_F.append(len(seq))
                lens_arr_act_F[k, puzzle] = len(seq)

act = True
# for act in [False, True]:
for k in range(n_runs):
    for world in range(1):
        sgc.load_dif_levels(directory='.', world=worlds[world])
        json_all_pieces = '{"pieces": [["large triangle2", "180", "1 1"], ["large triangle1", "0", "1 1"], ["parrallelogram", "0", "2 0"], ["medium triangle", "0", "3 1"], ["small triangle2", "0", "0 1"], ["small triangle1", "90", "1 0"], ["square", "0", "0 0"]], "size": "5 5"}'
        task_all_pieces = Task()
        task_all_pieces.create_from_json(json_all_pieces)
        for puzzle in range(0, 7):
            # solve tangram --> solution (don't actually solve)
            print('k:', k, 'puzzle:', puzzle)
            task = Task()
            task.create_from_json(sgc.paths[1][6])
            training_task, training_input, training_output = json_to_NN(sgc.paths[1][6])
            sol.set_initial_task(task)
            game = 1
            # if game > 0:
            # print('training_output', training_output)
            if act:
                # sol.set_activation(training_output)
                sol.set_activation(global_out_list[7*(puzzle)+6])
            sol.run_task(task, duration=300, stop=True)
            seq = sol.get_seq_of_moves_v2(task_all_pieces)
            # print(seq)
            # print ('game: ', game)
            # solver_cache[options[selected][0]] = seq
            solver_cache[str(game+1)] = seq
            if act:
                seq_lens_act_T.append(len(seq))
                lens_arr_act_T[k, puzzle] = len(seq)
            else:
                seq_lens_act_F.append(len(seq))
                lens_arr_act_F[k, puzzle] = len(seq)

print(seq_lens_act_T)
print(lens_arr_act_T)
print(seq_lens_act_F)
print(lens_arr_act_F)

# 'eval_set_activation.pkl' - graph with correct activation (just move the pieces) vs. without activation
# 'eval_set_activation2_training.pkl' - solving puzzle k after learning puzzle k-1.
save = True
if save is True:
    with open('eval_set_activation4_10_runs_training.pkl', 'wb') as f:
        pickle.dump([seq_lens_act_T, lens_arr_act_T, seq_lens_act_F, lens_arr_act_F], f, pickle.HIGHEST_PROTOCOL)

print("--- %s seconds = %s minutes ---" % ((time.time() - start_time), (time.time() - start_time) / 60.0))

####################################################################3
import pickle

with open('eval_set_activation4_10_runs_training.pkl', 'rb') as f:  # Python 3: open(..., 'rb')
    [seq_lens_act_T, lens_arr_act_T, seq_lens_act_F, lens_arr_act_F] = pickle.load(f)

# arr = lens_arr_act_T[:,:]
# the first column of F is with no training, and all T is with trainings
arr = np.concatenate((lens_arr_act_F[:, 0:1], lens_arr_act_T), axis=1)
[rows, columns] = arr.shape
arr.mean(axis=0)
yerr = arr.std(axis=0, ddof=1) / np.sqrt(rows)

plt.figure()
plt.bar([0, 1, 2, 3, 4, 5, 6, 7], arr.mean(axis=0), align='center', width=0.3, color='0.6', edgecolor='black', capsize=7, ecolor='black', linewidth=2, label='with activation')
plt.errorbar([0, 1, 2, 3, 4, 5, 6, 7], arr.mean(axis=0), color='orange', capsize=7, yerr=yerr, ecolor='black', elinewidth=2, linewidth=0)
plt.xticks(np.arange(8), ('None', '1', '2', '3', '4', '5', '6', '7'))

#
# arr = lens_arr_act_F[:,:]
# [rows,columns] = arr.shape
# arr.mean(axis=0)
# yerr = arr.std(axis=0, ddof=1)/np.sqrt(rows)
#
# plt.bar([1+0.3,2+0.3,3+0.3,4+0.3,5+0.3,6+0.3,7+0.3],arr.mean(axis=0), align='center' ,width=0.3, color = 'blue', edgecolor = 'black', capsize=7, ecolor = 'black', linewidth = 2, label='no activation')
# plt.errorbar([1+0.3,2+0.3,3+0.3,4+0.3,5+0.3,6+0.3,7+0.3],arr.mean(axis=0) , color = 'orange', capsize=7, yerr=yerr, ecolor = 'black', elinewidth = 2, linewidth = 0)
# plt.legend()

plt.xlabel('Learned Puzzle')
plt.ylabel('Moves to Solution')
# plt.title('Solving last puzzle in curriculum')
# plt.title('Solution Length on Curriculum Series')
# sgc.paths[1][puzzle]
plt.show()
[ "maorros@gmail.com" ]
maorros@gmail.com
486b34fc9cd4c2fc72b6544ecaafc0167574542a
8af6c536d8f1ed2740a5420340d923d978429c9f
/NoSQL_requests.py
c241cc87ad0112de110776dae280eb951ccc184e
[]
no_license
matthieudeReynal/NoSQL-a4
b17e10a1f50316a1cfdef35dd1d56c6f5a82c1bb
10afd309e51ff7b3ee4ccab69109f489a656fd8c
refs/heads/main
2023-02-01T21:07:51.322221
2020-12-19T10:46:50
2020-12-19T10:46:50
322,823,961
0
0
null
null
null
null
UTF-8
Python
false
false
2,506
py
# -*- coding: utf-8 -*- """ Created on Tue Mar 17 10:21:54 2020 @author: mattd """ import pymongo from bson.raw_bson import RawBSONDocument import time def getLowerPrice(): # print lower price of bitcoin registered in database myclient = pymongo.MongoClient("mongodb://localhost:27017/",document_class=RawBSONDocument).samples mydb = myclient["CoinBase"] mycol = mydb["Candles"] myquery = mydb.mycol.find().sort([("low",pymongo.DESCENDING)]) print(myquery.limit(1)[0]["low"]) #getLowerPrice() def getOpenPriceOver6000(): #print ids list of candles with open price over 6000$ myclient = pymongo.MongoClient("mongodb://localhost:27017/",document_class=RawBSONDocument).samples mydb = myclient["CoinBase"] mycol = mydb["Candles"] myquery = mydb.mycol.find({"open" : {"$gt" : "6000"}}) for i in range (50): print(myquery[i]["_id"]) #getOpenPriceOver6000() def getCandlesOnLastHour(): # print ids list of candles on last hour myclient = pymongo.MongoClient("mongodb://localhost:27017/",document_class=RawBSONDocument).samples mydb = myclient["CoinBase"] mycol = mydb["Candles"] now = int(time.time()) oneHour = 60*60 myquery = mydb.mycol.find({"time" : {"$gt" : ('"'+str(now - oneHour)+'"')}}) for i in range (20): print(myquery[i]["_id"]) print(i) NB_MINUTES_POLLING = 2 # we do the polling on 2 minutes but it can be changed def menu(): print("1 : get lower bitcoin price registered in database") print("2: get ids of candles with open price over 6000$") print("3: get candles on last 15 minutes") choix = int(input("your choice : ")) if(choix == 1): i = 0 while i < NB_MINUTES_POLLING: if (int(time.time())%60 == 0): getLowerPrice() i += 1 if(choix == 2): i = 0 while i < NB_MINUTES_POLLING: if (int(time.time())%60 == 0): getOpenPriceOver6000() i += 1 if(choix == 3): i = 0 while i < NB_MINUTES_POLLING: if (int(time.time())%60 == 0): getCandlesOnLastHour() i += 1 menu()
[ "noreply@github.com" ]
matthieudeReynal.noreply@github.com
3773c11f5b4cd6c95d13dd9f3af801c105abbc49
eedc0b4647cc66bffa06a81c60cee15ceebbfe78
/base.py
249e23d17217031f25090f4657c92ebdc4ff265e
[]
no_license
ISA233/CodingTest
0eec362e9f888a945f36968b4bec5d89f956d527
6739ce1463db09903ec05663eeeca1ace02c4ade
refs/heads/main
2023-06-04T06:23:03.348943
2021-06-18T15:51:35
2021-06-18T15:51:35
378,198,182
0
0
null
null
null
null
UTF-8
Python
false
false
1,002
py
from torch import nn class ResBlock(nn.Module): def __init__(self, channels): super(ResBlock, self).__init__() self.conv0 = nn.Conv2d(channels, channels, kernel_size=3, padding=1) self.conv1 = nn.Conv2d(channels, channels, kernel_size=3, padding=1) self.bn0 = nn.BatchNorm2d(channels) self.bn1 = nn.BatchNorm2d(channels) # self.bn0 = nn.GroupNorm(8, channels) # self.bn1 = nn.GroupNorm(8, channels) self.act = nn.LeakyReLU() def forward(self, x): c = x x = self.bn0(x) x = self.act(x) x = self.conv0(x) x = self.bn1(x) x = self.act(x) x = self.conv1(x) x = x + c return x class ConvBlock(nn.Module): def __init__(self, in_channels, out_channels, kernel_size, padding): super(ConvBlock, self).__init__() self.conv0 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=padding) self.bn = nn.BatchNorm2d(out_channels) self.act = nn.LeakyReLU() def forward(self, x): x = self.conv0(x) x = self.bn(x) x = self.act(x) return x
[ "ys-zhang18@mails.tsinghua.edu.cn" ]
ys-zhang18@mails.tsinghua.edu.cn
2819e14510bfbf5b458c01e088afae2508b301bd
9bcc73f037edb3a1a19c21f1b2fe1f3c543fb718
/famille/utils/mail.py
fa93b5b2145082bfb4442a6f8104cc1a0135081e
[ "Apache-2.0" ]
permissive
huguesmayolle/famille
c8bee2aeaae9e8107a9c3d76573cf6c280cdecab
c7b3399e88a6922cadc0c7c9f2ff7447e7c95377
refs/heads/master
2021-01-09T06:28:21.078260
2014-06-24T21:32:00
2014-06-24T21:32:00
14,694,562
0
0
null
null
null
null
UTF-8
Python
false
false
2,364
py
# -*- coding=utf-8 -*- import base64 from functools import partial import logging import json import smtplib from django.conf import settings from django.core import mail from django.core.exceptions import ObjectDoesNotExist from django.template.loader import render_to_string class Mailer(object): @classmethod def send_mail_from_template(cls, template_name, context, **kwargs): """ Send an email given a template and a context. The other keyword arguments will be passed through send_mail method. :param template_name: the name of the template to render :param context: the context to render the template """ kwargs["body"] = render_to_string(template_name, context) msg = mail.EmailMessage(**kwargs) msg.content_subtype = "html" try: return msg.send() except smtplib.SMTPException as e: return cls.on_failure(e) @classmethod def on_failure(cls, exc): """ A callback when failure occurs. """ logging.error("An error occured while sending an email: %s", exc) send_mail_from_template = Mailer.send_mail_from_template send_mail_from_template_with_contact = partial(send_mail_from_template, from_email=settings.CONTACT_EMAIL) send_mail_from_template_with_noreply = partial(send_mail_from_template, from_email=settings.NOREPLY_EMAIL) def email_moderation(message): """ Only allow email for premium users. :param message: the message to be sent """ from famille.models import get_user_related try: sender = get_user_related(message.sender) except (ObjectDoesNotExist, AttributeError): return (False, u"Votre compte ne vous permet pas d'envoyer d'email.") if not sender.is_premium: return (False, u"Vous devez avoir un compte premium pour accéder à cette fonctionnalité.") return True def decode_recipient_list(data): """ Decode a list of recipients. """ data = data.split("---") return [json.loads(base64.urlsafe_b64decode(str(r))) for r in data] def encode_recipient(recipient): """ Encode a recipient using b64. """ data = { "type": recipient.__class__.__name__, "pk": recipient.pk } return base64.urlsafe_b64encode(json.dumps(data))
[ "mvergerdelbove@work4labs.com" ]
mvergerdelbove@work4labs.com
1634818226a0fc30047e34f01594960beed070a1
12f006a0e5d75ef2349d4ae519c1c9cac5309761
/Solution_280.py
11cd7505bdde8e3b44cd345e61a86d2600eb646c
[]
no_license
TimothySjiang/leetcodepy
c613db16282eade713e01b7d641c0f5b341ec84b
ef64e46b8833a684b8b0355ce576b767a0e03596
refs/heads/master
2020-07-01T14:48:35.953841
2020-01-12T06:19:44
2020-01-12T06:19:44
201,199,810
1
0
null
null
null
null
UTF-8
Python
false
false
452
py
from typing import List


class Solution:
    def wiggleSort(self, nums: List[int]) -> None:
        """
        Do not return anything, modify nums in-place instead.
        """
        if not nums:
            return
        for i in range(1, len(nums)):
            if i % 2:
                should_swap = nums[i] < nums[i - 1]
            else:
                should_swap = nums[i] > nums[i - 1]
            if should_swap:
                nums[i], nums[i - 1] = nums[i - 1], nums[i]
[ "shjiang@ucdavis.edu" ]
shjiang@ucdavis.edu
fbef87490e98fd84855f2e7a1d8f299aa9bcb831
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
/cases/synthetic/exp-big-1067.py
238bc82745159e6f8c60993d672d44248420347b
[]
no_license
Virtlink/ccbench-chocopy
c3f7f6af6349aff6503196f727ef89f210a1eac8
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
refs/heads/main
2023-04-07T15:07:12.464038
2022-02-03T15:42:39
2022-02-03T15:42:39
451,969,776
0
0
null
null
null
null
UTF-8
Python
false
false
3,178
py
# Compute x**y def exp(x: int, y: int) -> int: a: int = 0 a2: int = 0 a3: int = 0 a4: int = 0 a5: int = 0 def f(i: int) -> int: nonlocal a nonlocal a2 nonlocal a3 nonlocal a4 nonlocal a5 def geta() -> int: return a if i <= 0: return geta() else: a = a * x a2 = a * x a3 = a * x a4 = a * x a5 = a * x return f(i-1) a = 1 a2 = 1 a3 = 1 a4 = 1 a5 = 1 return f(y) def exp2(x: int, y: int, x2: int, y2: int) -> int: a: int = 0 a2: int = 0 a3: int = 0 a4: int = 0 a5: int = 0 def f(i: int) -> int: nonlocal a nonlocal a2 nonlocal a3 nonlocal a4 nonlocal a5 def geta() -> int: return a if i <= 0: return geta() else: a = a * x a2 = a * x a3 = a * x a4 = a * x a5 = a * x return f(i-1) a = 1 a2 = 1 a3 = 1 a4 = 1 a5 = 1 return f(y) def exp3(x: int, y: int, x2: int, y2: int, x3: int, y3: int) -> int: a: int = 0 a2: int = 0 a3: int = 0 a4: int = 0 a5: int = 0 def f(i: int) -> int: nonlocal a nonlocal a2 nonlocal a3 nonlocal a4 nonlocal a5 def geta() -> int: return a if i <= 0: return geta() else: a = a * x a2 = a * x a3 = a * x a4 = a * x a5 = a * x return f(i-1) a = 1 a2 = 1 a3 = 1 a4 = 1 a5 = 1 return f(y) def exp4(x: int, y: int, x2: int, y2: int, x3: int, y3: int, x4: int, y4: int) -> int: a: int = 0 a2: int = 0 a3: int = 0 a4: int = 0 a5: int = 0 def f(i: int) -> int: nonlocal a nonlocal a2 nonlocal a3 nonlocal a4 nonlocal a5 def geta() -> int: return a if i <= 0: return geta() else: a = a * x a2 = a * x a3 = a * x a4 = a * x a5 = a * x return f(i-1) a = 1 a2 = 1 a3 = 1 a4 = 1 a5 = 1 return f(y) def exp5(x: int, y: int, x2: int, $ID: int, x3: int, y3: int, x4: int, y4: int, x5: int, y5: int) -> int: a: int = 0 a2: int = 0 a3: int = 0 a4: int = 0 a5: int = 0 def f(i: int) -> int: nonlocal a nonlocal a2 nonlocal a3 nonlocal a4 nonlocal a5 def geta() -> int: return a if i <= 0: return geta() else: a = a * x a2 = a * x a3 = a * x a4 = a * x a5 = a * x return f(i-1) a = 1 a2 = 1 a3 = 1 a4 = 1 a5 = 1 return f(y) # Input parameter n:int = 42 n2:int = 42 n3:int = 42 n4:int = 42 n5:int = 42 # Run [0, n] i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 i5:int = 0 # Crunch while i <= n: print(exp(2, i % 31)) i = i + 1
[ "647530+Virtlink@users.noreply.github.com" ]
647530+Virtlink@users.noreply.github.com
99c4f27b85cb176a47e8428516109c869629a4c6
e0f455b5ea82a1dcf5e5f58e44eba441836617a2
/desk/flask/bin/easy_install-3.4
2465315ecb8dfc3b370c2ed1530732ebb4dd6441
[ "MIT" ]
permissive
PlexHaxx/plex-desk
9d3580ada89112e909fe723ee623bfc115b961fc
551abb1a655f7ba43dbe26da834ee91dc156333d
refs/heads/master
2021-01-18T04:46:55.411509
2015-09-06T13:24:53
2015-09-06T13:24:53
null
0
0
null
null
null
null
UTF-8
Python
false
false
257
4
#!/home/jj/Code/plex-desk/flask/bin/python
# -*- coding: utf-8 -*-
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
[ "josh.jacobs19@gmail.com" ]
josh.jacobs19@gmail.com
aa711919ff1e2e28fbd96758ef77e76aa0b5e23a
801f367bd19b8f2ab08669fd0a85aad7ace961ac
/cleaned_version/main.py
3ac5d8be014a8e25b1dac6beacd132991a977681
[ "MIT" ]
permissive
Wendong-Huo/thesis-bodies
d91b694a6b1b6a911476573ed1ed27eb27fb000d
dceb8a36efd2cefc611f6749a52b56b9d3572f7a
refs/heads/main
2023-04-17T18:32:38.541537
2021-03-12T19:53:23
2021-03-12T19:53:23
623,471,326
1
0
null
2023-04-04T12:45:48
2023-04-04T12:45:47
null
UTF-8
Python
false
false
401
py
# Author: Sida Liu, 2020
# Starting experiments
#
# for all available arguments, refer to arguments.py
from arguments import get_args
import step1_generate_bodies
import step2_train_on_one_body

args = get_args()


def main():
    env_id, dataset_path = step1_generate_bodies.generate_bodies()
    step2_train_on_one_body.train(env_id, dataset_path)
    pass


if __name__ == "__main__":
    main()
[ "sliu1@uvm.edu" ]
sliu1@uvm.edu
8e77312e78be660aea422bc6c904543e36801828
1da59a5838b2f954194b4cb1926a42ec472bed38
/Math modeling/lab3.py
4c43e1f656df7a534b7e7b56685d563207fb0611
[]
no_license
MaximShidlovski23/Math
3534f96c33764cbd94106246fdf02ac49d48a4a1
9aeed2b1d6ab178874e1a8bccd89c0dd802ee693
refs/heads/master
2023-06-07T04:33:08.431481
2021-06-22T12:23:03
2021-06-22T12:23:03
null
0
0
null
null
null
null
UTF-8
Python
false
false
10,240
py
from bs4 import BeautifulSoup
import numpy as np


def get_marking(file):
    f = open(file, 'r')
    html = f.read()
    soup = BeautifulSoup(html, 'html.parser')
    matrix = []
    for lol in soup.find_all('td'):
        matrix.append(lol.next_element)
    del matrix[0:4]
    del matrix[3::4]
    matrix_marking = []
    matrix_transition = []
    for elem in matrix:
        if not elem.isdigit():
            if elem[0] == '[':
                elem = elem.replace('[', '')
                elem = elem.replace(']', '')
                matrix_marking.append(elem.replace(' ', ''))
            else:
                matrix_transition.append(elem)
    return matrix_marking, matrix_transition


def create_diagram(matrix_marking, matrix_transition):
    matrix_diagram = []
    last_elem = matrix_marking[-1]
    del matrix_marking[1::2]
    del matrix_transition[1::2]
    matrix_transition_sym = []
    for elem in matrix_transition:
        matrix_transition_sym.append(elem.replace(' on', ''))
    matrix_marking.append(last_elem)
    count = 1
    matrix_diagram.append((count, matrix_marking[0]))
    for i in range(1, len(matrix_marking)):
        if matrix_marking[i] == matrix_marking[0]:
            count = 1
        else:
            count += 1
        matrix_diagram.append((count, matrix_marking[i]))
    sort_matrix_diagram = list(set(matrix_diagram))
    sort_matrix_diagram = sorted(sort_matrix_diagram, key=lambda x: x[0])
    max_count = max(matrix_diagram, key=lambda x: x[0])[0]
    print("Диаграмма маркировок: ")
    max_mat = list(range(1, max_count + 1))
    matrix_count = []
    for i in max_mat:
        count = 0
        for elem in sort_matrix_diagram:
            if elem[0] == i:
                count += 1
        matrix_count.append(count)
    tree = []
    for elem in sort_matrix_diagram:
        tree.append(elem[1])
    print(sort_matrix_diagram[0][0], ': ', 10 * ' ', tree[0])
    count = 0
    check = -1
    for i in matrix_count:
        count += 1
        if count > 1:
            print(sort_matrix_diagram[check + i][0], ': ', tree[check + 1:i + check + 1])
            check += i
        else:
            check += 1
    print('Все пути диаграммы маркировок:')
    prev_elem = 0
    count = 0
    for elem in matrix_diagram:
        if elem[0] == 1:
            print(elem[1])
            continue
        if elem[0] > prev_elem:
            print(round(len(elem[1]) / 2) * ' ', '|')
            print(round(len(elem[1]) / 2) * ' ', matrix_transition_sym[count])
            print(round(len(elem[1]) / 2) * ' ', '|')
            print(elem[1])
            prev_elem += 1
            count += 1
        else:
            print()
            count += 1
            print(matrix_diagram[0][1])
            print(round(len(elem[1]) / 2) * ' ', '|')
            print(round(len(elem[1]) / 2) * ' ', matrix_transition_sym[count])
            print(round(len(elem[1]) / 2) * ' ', '|')
            print(elem[1])
            count += 1
        prev_elem = elem[0]
    return tree, matrix_diagram


# k-boundedness
def k_boundedness(tree):
    num_tree = []
    for elem in tree:
        num_tree.append(list(elem))
    sum_mas = []
    max_elem = 0
    for mas in tree:
        sum_m = 0
        for i in mas:
            max_elem = max(max_elem, int(i))
            sum_m += int(i)
        sum_mas.append(sum_m)
    return max_elem, sum_mas, num_tree


def safety(max_elem):
    if max_elem == 1:
        print('сеть Петри является безопасной')
    else:
        print('сеть Петри не является безопасной')


def boundedness(sum_mas, max_elem):
    rise = 0
    not_rise = 0
    for i in range(len(sum_mas) - 1):
        if sum_mas[i] < sum_mas[i + 1]:
            rise += 1
        else:
            not_rise += 1
    if rise < not_rise:
        print('сеть Петри ограничена')
        print('сеть Петри является', max_elem, '- ограниченая')
    else:
        print('сеть Петри неограничена')


def conservative_and_stability(num_tree, matrix_diagram, sum_mas):
    tree_all = []
    for elem in matrix_diagram:
        tree_all.append(elem[1])
    num_tree_all = []
    for elem in tree_all:
        num_tree_all.append(list(elem))
    num_tree_all_int = []
    for i in range(len(num_tree_all)):
        check_mas = []
        for j in range(len(num_tree_all[i])):
            check_mas.append(int(num_tree_all[i][j]))
        num_tree_all_int.append(check_mas)
    num_tree_all_bool = []
    for i in range(len(num_tree_all_int)):
        check_mas = []
        for j in range(len(num_tree_all_int[i])):
            if num_tree_all_int[i][j] == 0:
                check_mas.append(0)
            else:
                check_mas.append(1)
        num_tree_all_bool.append(check_mas)
    check_conservative = 0
    check_stability = 0
    for i in range(len(num_tree_all_bool) - 1):
        for j in range(i + 1, len(num_tree_all_bool)):
            if num_tree_all_bool[i] == num_tree_all_bool[j]:
                if sum(num_tree_all_int[i]) <= sum(num_tree_all_int[j]):
                    check_conservative += 1
                else:
                    check_conservative += -10000
                if j + 1 != len(num_tree_all_bool):
                    if num_tree_all_bool[i + 1] != num_tree_all_bool[j + 1]:
                        check_stability += 1
    if check_conservative > 0:
        print('сеть Петри является консервативной')
        if len(set(sum_mas)) == 1:
            print('сеть Петри является 1-консервативная')
    else:
        print('сеть Петри не является консервативной')
    if check_stability == 0:
        print('сеть Петри является устойчивой')
    else:
        print('сеть Петри не является устойчивой')
    # mas_pos = np.zeros(len(num_tree_all_bool[1]))


def free_choice_net_and_marked_graph(D_input):
    mas_check = []
    for j in range(len(D_input[0])):
        check = 0
        for i in range(len(D_input)):
            check += D_input[i][j]
        mas_check.append(check)
    check = 0
    for elem in mas_check:
        if elem > 1:
            print('сеть Петри является сетью свободного выбора')
            print('сеть Петри не является маркированным графом')
            print('сеть Петри не является бесконфликтной сетью')
            break
        else:
            check += 1
    if check == len(mas_check):
        print('сеть Петри не является сетью свободного выбора')
        print('сеть Петри является маркированным графом')
        print('сеть Петри является бесконфликтной сетью')


def automatic_net(D_input, D_output):
    mas_check_input = []
    mas_check_output = []
    for i in range(len(D_input)):
        mas_check_input.append(sum(D_input[i]))
        mas_check_output.append(sum(D_output[i]))
    if max(mas_check_output) > 1 or max(mas_check_input) > 1:
        print('сеть Петри является не автоматной')
    else:
        print('сеть Петри является автоматной')


# reachability task
def task_reachabillity(tree, marker):
    if marker in tree:
        print('Достижение', marker, ' возможно')
    else:
        print('Достижение', marker, ' невозможно')


def main():
    path = 'D:\\Matlab\\toolbox\\petrinet2.4\\newnet\\'
    print('Схема №1')
    matrix_marking, matrix_transition = get_marking(path + 'Log1.html')
    tree, matrix_diagram = create_diagram(matrix_marking, matrix_transition)
    D_output_1 = np.array([[1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 1, 1, 0], [0, 0, 0, 0, 1]])
    D_input_1 = np.array([[0, 0, 0, 1, 0], [0, 0, 1, 0, 0], [1, 1, 0, 0, 0], [0, 0, 0, 1, 0]])
    D_1 = D_output_1 - D_input_1
    print('Характеристики по динамическим ограничениям:')
    max_elem, sum_mas, num_tree = k_boundedness(tree)
    boundedness(sum_mas, max_elem)
    safety(max_elem)
    conservative_and_stability(num_tree, matrix_diagram, sum_mas)
    print('Матрица инцидентности: ')
    print(D_1)
    print('Характеристики по статистическим ограничениям:')
    free_choice_net_and_marked_graph(D_input_1)
    automatic_net(D_input_1, D_output_1)
    task_reachabillity(tree, '11000')
    task_reachabillity(tree, '10001')

    print('Схема №2')
    matrix_marking, matrix_transition = get_marking(path + 'Log2.html')
    tree, matrix_diagram = create_diagram(matrix_marking, matrix_transition)
    print('Характеристики по динамическим ограничениям:')
    max_elem, sum_mas, num_tree = k_boundedness(tree)
    boundedness(sum_mas, max_elem)
    safety(max_elem)
    conservative_and_stability(num_tree, matrix_diagram, sum_mas)
    D_input_2 = np.array([[1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0], [0, 0, 0, 1, 1, 1]])
    D_output_2 = np.array([[0, 1, 1, 0, 0, 1], [0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 1, 0], [1, 0, 0, 0, 0, 0]])
    D_2 = D_output_2 - D_input_2
    print('Матрица инцидентности: ')
    print(D_2)
    print('Характеристики по статистическим ограничениям:')
    free_choice_net_and_marked_graph(D_input_2)
    automatic_net(D_input_2, D_output_2)
    task_reachabillity(tree, '010012')
    task_reachabillity(tree, '110002')


if __name__ == '__main__':
    main()
[ "m.shidlovski23@gmail.com" ]
m.shidlovski23@gmail.com
d244a2d68e725fc0ebbc61b04aa4c65c4fa4abcb
36c2e50fe8090d27c6f87671151c34eb6e5174ea
/Glue_Jobs_python_shell/glue_import_order_payments.py
e973150fc8e16d60dcaedd90a23b467ab066b2f1
[]
no_license
denzalvin/Data_Engineer_Project_ETL_BI
809a5a7bd2e5958c22fe83ec141092cb076f0454
69a754f8332a58f98e278c0fcf9a8defd812fab5
refs/heads/main
2023-03-20T20:54:23.773041
2021-01-26T12:25:14
2021-01-26T12:25:14
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,099
py
import boto3,json from pg import DB secret_name = 'secret-name' region_name ='ap-southeast-2' session = boto3.session.Session() client = session.client(service_name='secretsmanager',region_name=region_name) get_secret_value_response = client.get_secret_value(SecretId=secret_name) creds = json.loads(get_secret_value_response['SecretString']) username = creds['username'] password = creds['password'] host = creds['host'] db = DB(dbname='dev',host=host,port=5439,user=username,passwd=password) merge_qry = """ begin ; copy mysql_dwh_staging.order_payment from 's3://bucket_name/order_payments/current/order_payments.csv' iam_role 'YOUR_ARN' CSV QUOTE '\"' DELIMITER ',' acceptinvchars; delete from mysql_dwh.order_payments using mysql_dwh_staging.order_payments where mysql_dwh.order_payments.order_id = mysql_dwh_staging.order_payments.order_id ; insert into mysql_dwh.order_payments select * from mysql_dwh_staging.order_payments; truncate table mysql_dwh_staging.order_payments; end ; """ result = db.query(merge_qry) print(result)
[ "noreply@github.com" ]
denzalvin.noreply@github.com
bbe121219182e529e2cb49702216e2f7f779882d
2fef9dac4fc88a5dad089ce1df78921eb50735dc
/geodata/Test.py
9d4aed708a29bee24f235426c64e43f06a248df6
[ "BSD-2-Clause" ]
permissive
corb555/Geodata
b8b217e4814c6759a64c7c738069aabfdfd50fd4
746e532dfc4536e19ef8afa715fc728150497b8e
refs/heads/master
2023-04-29T23:33:44.561925
2023-04-20T00:30:29
2023-04-20T00:30:29
231,111,325
1
1
BSD-2-Clause
2020-03-25T04:25:20
2019-12-31T15:36:05
Python
UTF-8
Python
false
false
5,100
py
# Temporary python file for testing things import queue from geodata import GeoUtil, Normalize places = [ '12 baker st, Man!@#%^&(chester, , England', # punctuation 'department kathedrale of westphalia kommune ', # normandie 'archipel de saint augustin', 'Le Mont Saint Michel', # county of ] noise_words = [ # apply this list of regex substitutions for match scoring # (r'), '), '),'), (r'normandy american ' , 'normandie american '), (r'nouveau brunswick' , ' '), (r'westphalia' , 'westfalen'), (r'departement' , 'department'), (r'royal borough of windsor and maidenhead', 'berkshire'), (r'regional municipality' , 'county'), (r'kathedrale' , 'cathedral'), (r'citta metropolitana di ' , ' '), (r'kommune' , ''), (r"politischer bezirk " , ' '), (r'regional' , ' '), (r'region' , ' '), (r'abbey' , 'abbey'), (r'priory' , 'abbey'), (r'greater' , ' '), (r' de ' , ' '), (r' di ' , ' '), (r' du ' , ' '), (r' of ' , ' '), (r"l'" , ''), (r'erry' , 'ury'), (r'ery' , 'ury'), (r'borg' , 'burg'), (r'bourg' , 'burg'), (r'urgh' , 'urg'), (r'mound' , 'mund'), (r'ourne' , 'orn'), (r'ney' , 'ny'), ] phrase_cleanup = [ # always apply this list of regex substitutions (r' +' , ' '), # Strip multiple space to single space (r'\bmt ' , 'mount '), (r'\br\.k\. |\br k ' , 'roman catholic '), (r'\brooms katholieke\b' , 'roman catholic'), (r'sveti |saints |sainte |sint |saint |sankt |st\. ', 'st '), # Normalize Saint to St (r' co\.' , ' county'), # Normalize County (r'united states of america' , 'usa'), # Normalize to USA begraafplaats (r'united states' , 'usa'), # Normalize to USA (r'cimetiere' , 'cemetery'), # (r'begraafplaats' , 'cemetery'), # (r'town of ' , ' '), # - remove town of (r'city of ' , ' '), # - remove city of (r'county of ([^,]+)' , r'\g<1> county'), # Normalize 'Township of X' to 'X Township' (r'township of ([^,]+)' , r'\g<1> township'), # Normalize 'Township of X' to 'X Township' (r'cathedral of ([^,]+)' , r'\g<1> cathedral'), # Normalize 'Township of X' to 'X Township' (r'palace of ([^,]+)' , r'\g<1> palace'), # Normalize 'Township of X' to 'X Township' (r'castle of ([^,]+)' , r'\g<1> castle'), # Normalize 'Township of X' to 'X Township' (r"'(\w{2,})'" , r"\g<1>"), # remove single quotes around word, but leave apostrophes ] no_punc_remove_commas = [ # Regex to remove most punctuation including commas (r"[^a-z0-9 $*']+", " ") ] no_punc_keep_commas = [ # Regex to remove most punctuation but keep commas (r"[^a-z0-9 $*,']+" , " ") ] """ r"[^a-z0-9 $*,']+" , " " """ # Regex to remove most punctuation including commas # noise_rgx - Combine phrase dictionary with Noise words dictionary and compile regex (this is used for match scoring) # keys = sorted(dct.keys(), key=len, reverse=True) substitutions = queue.PriorityQueue() for val, idx in ([('blueberry',2), ('apple',1), ('cherry',3)]): substitutions.put((idx, val)) while not substitutions.empty(): idx, val = substitutions.get() print(f'idx={idx} val={val}') """ phrase_rgx_keep_commas = GeoUtil.RegexList(no_punc_keep_commas + phrase_cleanup) phrase_rgx_remove_commas = GeoUtil.RegexList(no_punc_remove_commas + phrase_cleanup) #noise_rgx = GeoUtil.MultiRegex(phrase_cleanup + noise_words) norm = Normalize.Normalize() for txt in places: print(f'==== {txt} ====') print(f' RESULT {phrase_rgx_remove_commas.sub(txt, lower=True, set_ascii=True)}') print(f' RESULT2 {norm.normalize(txt, False)}') """
[ "corb@aol.com" ]
corb@aol.com
5c01afe75fb5df16f185b2fbf7c8336f2ab97715
1fc45a47f0e540941c87b04616f3b4019da9f9a0
/src/sentry/receivers/similarity.py
c8e1b5ebcddf7e9fa97da46bdb8689f6ff2dfff7
[ "BSD-2-Clause" ]
permissive
seukjung/sentry-8.15.0
febc11864a74a68ddb97b146cc1d2438ef019241
fd3cab65c64fcbc32817885fa44df65534844793
refs/heads/master
2022-10-28T06:39:17.063333
2018-01-17T12:31:55
2018-01-17T12:31:55
117,833,103
0
0
BSD-3-Clause
2022-10-05T18:09:54
2018-01-17T12:28:13
Python
UTF-8
Python
false
false
399
py
from __future__ import absolute_import

from sentry import features as feature_flags
from sentry.signals import event_processed
from sentry.similarity import features as similarity_features


@event_processed.connect(weak=False)
def record(project, group, event, **kwargs):
    if not feature_flags.has('projects:similarity-indexing', project):
        return

    similarity_features.record(event)
[ "jeyce@github.com" ]
jeyce@github.com
c93c759d8271fb0805fb686c85dccd205e492b8b
43fe239253a88c8b9c667a762105688ef37b91e9
/pies/_utils.py
078318357b9b48a81908b5dcc8aba92e4a4ad161
[ "MIT" ]
permissive
timothycrosley/deprecated.pies
d1eb28bdd23bc5d512b8cd650033526760bfdea5
83405a8e45904e9b16b2681c15419735e654a0a6
refs/heads/develop
2021-01-08T11:29:51.666680
2016-03-27T05:17:39
2016-03-27T05:17:39
12,525,405
20
2
MIT
2019-02-24T10:48:34
2013-09-01T19:13:10
Python
UTF-8
Python
false
false
3,346
py
"""pies/_utils.py. Utils internal to the pies library and not meant for direct external usage. Copyright (C) 2013 Timothy Edmund Crosley Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import abc import sys def with_metaclass(meta, *bases): """Enables use of meta classes across Python Versions. taken from jinja2/_compat.py. Use it like this:: class BaseForm(object): pass class FormType(type): pass class Form(with_metaclass(FormType, BaseForm)): pass """ class metaclass(meta): __call__ = type.__call__ __init__ = type.__init__ def __new__(cls, name, this_bases, d): if this_bases is None: return type.__new__(cls, name, (), d) return meta(name, bases, d) return metaclass('temporary_class', None, {}) def unmodified_isinstance(*bases): """When called in the form MyOverrideClass(unmodified_isinstance(BuiltInClass)) it allows calls against passed in built in instances to pass even if there not a subclass """ class UnmodifiedIsInstance(type): if sys.version_info[0] == 2 and sys.version_info[1] <= 6: @classmethod def __instancecheck__(cls, instance): if cls.__name__ in (str(base.__name__) for base in bases): return isinstance(instance, bases) subclass = getattr(instance, '__class__', None) subtype = type(instance) instance_type = getattr(abc, '_InstanceType', None) if not instance_type: class test_object: pass instance_type = type(test_object) if subtype is instance_type: subtype = subclass if subtype is subclass or subclass is None: return cls.__subclasscheck__(subtype) return (cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)) else: @classmethod def __instancecheck__(cls, instance): if cls.__name__ in (str(base.__name__) for base in bases): return isinstance(instance, bases) return type.__instancecheck__(cls, instance) return with_metaclass(UnmodifiedIsInstance, *bases)
[ "timothy.crosley@gmail.com" ]
timothy.crosley@gmail.com
c1f4a18bd49436f047646cd987895a2c8e36c574
0d3abea172f0769587be566fb913cb042acd68dd
/gae/cartosql.py
1a05a2356a530e1e138b98f6c331cc319df0b668
[ "LicenseRef-scancode-warranty-disclaimer", "Apache-2.0" ]
permissive
GlobalFishingWatch/pelagos-cartodb-proxy
69d29afe855f05feaabefcf57ce01b986be15097
c5f88fc59db648b434d2b82836100cdd6a43f2eb
refs/heads/master
2020-05-21T10:10:52.241949
2017-01-30T17:58:51
2017-01-30T17:58:51
68,815,087
0
0
null
2017-01-30T17:58:52
2016-09-21T12:35:18
Python
UTF-8
Python
false
false
1,212
py
import json import urllib import urllib2 import operator import re import google.appengine.api.urlfetch google.appengine.api.urlfetch.set_default_fetch_deadline(60) def load_url(*arg, **kw): try: return urllib2.urlopen(*arg, **kw) except urllib2.HTTPError as e: e.msg = e.read() raise e def get_sql_url(layer): sql_url = layer["options"]["sql_api_template"].replace("{user}", layer["options"]["user_name"]) + layer["options"]["sql_api_endpoint"] return sql_url def get_sql_args(args): return "&".join(["%s=%s" % (name, urllib.quote(unicode(value))) for name, value in args.iteritems()]) def exec_sql(layer, **kw): args = dict( page=0, sort_order="asc", order_by="", filter_column="", filter_value="", sql_source="null" ) args.update(kw) if 'q' in args: args['q'] = re.sub(" +", " ", args['q']) try: return json.load( load_url( get_sql_url(layer), data=get_sql_args(args))) except urllib2.HTTPError as e: if "q" in kw: e.msg = "%s while executing %s" % (e.msg, kw["q"]) raise e
[ "egil.moller@freecode.no" ]
egil.moller@freecode.no
7a102aac00465f26707d05ca7c2dc555a824a90d
53c4ec58760768fc9073793cf17cd8c55978c3af
/annotator/uniformer/configs/_base_/models/deeplabv3_r50-d8.py
d7a43bee01422ad4795dd27874e0cd4bb6cbfecf
[ "Apache-2.0" ]
permissive
HighCWu/ControlLoRA
0b6cab829134ed8377f22800b0e1d648ddf573b0
3b8481950867f61b2cf072b1f156d84f3363ac20
refs/heads/main
2023-08-05T08:51:25.864774
2023-02-28T13:06:24
2023-02-28T13:06:24
603,359,062
421
20
Apache-2.0
2023-08-02T02:14:40
2023-02-18T09:12:15
Python
UTF-8
Python
false
false
1,273
py
# model settings norm_cfg = dict(type='SyncBN', requires_grad=True) model = dict( type='EncoderDecoder', pretrained='open-mmlab://resnet50_v1c', backbone=dict( type='ResNetV1c', depth=50, num_stages=4, out_indices=(0, 1, 2, 3), dilations=(1, 1, 2, 4), strides=(1, 2, 1, 1), norm_cfg=norm_cfg, norm_eval=False, style='pytorch', contract_dilation=True), decode_head=dict( type='ASPPHead', in_channels=2048, in_index=3, channels=512, dilations=(1, 12, 24, 36), dropout_ratio=0.1, num_classes=19, norm_cfg=norm_cfg, align_corners=False, loss_decode=dict( type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)), auxiliary_head=dict( type='FCNHead', in_channels=1024, in_index=2, channels=256, num_convs=1, concat_input=False, dropout_ratio=0.1, num_classes=19, norm_cfg=norm_cfg, align_corners=False, loss_decode=dict( type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.4)), # model training and testing settings train_cfg=dict(), test_cfg=dict(mode='whole'))
[ "highcwu@163.com" ]
highcwu@163.com
f12de33b658204e75c9c073f186efb2c3ddcd5cc
0a9015edaf9589e15429d2ba224ee763da513b7a
/src/app.py
671b9746a8c379c070357281500725ace4cbb6c7
[]
no_license
t-rad679/habitica_ga
47a09a3f98f4cb5a1e0b133358749aeab1d08049
445f582fa9a275a55d597f7ea3f7ea2c1afdf535
refs/heads/master
2023-04-23T07:41:26.220044
2021-05-06T01:53:08
2021-05-06T01:53:08
364,755,360
0
0
null
null
null
null
UTF-8
Python
false
false
272
py
from flask import Flask

from client.habitica_client import Difficulty, TaskType, create_task

server = Flask(__name__)


@server.route("/create_task")
def handle_create_task():
    response = create_task(TaskType.TODO, "test", Difficulty.TRIVIAL)
    return response.text
[ "IGoWhereI'mNeeded!23" ]
IGoWhereI'mNeeded!23
1d604523679d4dad152d28ba79dee0589334d99f
6f1034b17b49f373a41ecf3a5a8923fb4948992b
/pychron/social/google_calendar/__init__.py
f169af5aa3a8ffc66919042ffd9a893624e843c3
[ "Apache-2.0" ]
permissive
NMGRL/pychron
a6ec1854488e74eb5d3ff53eee8537ecf98a6e2f
8cfc8085393ace2aee6b98d36bfd6fba0bcb41c6
refs/heads/main
2023-08-30T07:00:34.121528
2023-06-12T17:43:25
2023-06-12T17:43:25
14,438,041
38
28
Apache-2.0
2023-08-09T22:47:17
2013-11-15T23:46:10
Python
UTF-8
Python
false
false
999
py
# ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================

# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports  ==========================
# ============= EOF =============================================
[ "jirhiker@gmail.com" ]
jirhiker@gmail.com
e946c26768fb5637935ac6e0bb625b6220179822
be2d39bbdf7f35249fc97936b0842719f720633a
/Apps/Biblioteca/views.py
37be899b8c0b662c7aa9cec0d481508f4fffb56c
[]
no_license
matias54321/Biblioteca
16f2ba639c2d7ddbe57a433657cfbb62a4f26d35
93872462aa61033f750cd5846bbb404d97a93679
refs/heads/master
2020-07-31T12:51:45.734339
2019-09-24T13:10:27
2019-09-24T13:10:27
210,609,626
0
0
null
2019-09-24T13:27:03
2019-09-24T13:26:55
null
UTF-8
Python
false
false
920
py
from django.shortcuts import render, redirect
from .forms import AutorForm
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.decorators import login_required

# Create your views here.


def Home(request):
    return render(request, 'index.html')


def crearAutor(request):
    if request.method == 'POST':
        print(request.POST)
        autor_form = AutorForm(request.POST)
        if autor_form.is_valid():
            autor_form.save()
            return redirect('index')
    else:
        autor_form = AutorForm()
    return render(request, 'Biblioteca/crear_autor.html')


def RegistroUsuario(request):
    return render(request, 'Biblioteca/registro_usuario.html')


def RegistrandoUsuario(request):
    if request.method == 'POST':
        print(request.POST)
[ "vina@duoc.acad" ]
vina@duoc.acad
cdf0447350241e897c4cd2b3f534517ed4c9dcd0
c9396af9d8effc7dc9b1d11707f1bc2eba1f8e60
/mysite/users/migrations/0044_auto_20190212_1924.py
ab11117e5a40285cb64af7fdad2b7f1b5bdabbfa
[]
no_license
nevergofullretard/easylearn
4af352db1dad480aa88adfe84ed7c2f6d0ec8102
6520bcf7d3e5dc27f512a95034105f98459d9f82
refs/heads/master
2022-01-23T04:27:50.105433
2019-07-28T08:12:29
2019-07-28T08:12:29
194,724,510
0
0
null
null
null
null
UTF-8
Python
false
false
417
py
# Generated by Django 2.1.2 on 2019-02-12 18:24

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('users', '0043_auto_20190212_1922'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profile',
            name='units_gemacht',
            field=models.ManyToManyField(blank=True, to='units.Unit_name'),
        ),
    ]
[ "jagermaxi1@gmail.com" ]
jagermaxi1@gmail.com
d9f963bf2f119a778d498521a6d41d1687864215
24bd5eaaed802de5402059404f5fa43569f4a076
/main.py
e2244ff049de37aec263f70d664322e2a38b155e
[]
no_license
oussama-talaoui/py_gui
dbc0c2662538aa5d1ee2a83609cefdc93468b1d5
cbb9bab846a75e9a4b76cd1702406195a4099258
refs/heads/master
2022-12-06T01:57:58.040371
2020-09-01T09:13:02
2020-09-01T09:13:02
291,355,840
0
0
null
null
null
null
UTF-8
Python
false
false
6,724
py
################################################################################
##
## BY: OUSSAMA TAHIRIA
## PROJECT MADE WITH: Qt Designer and PySide2
## V: 2.0.0
##
################################################################################

import sys
import platform
import psutil
import threading
import GPUtil
import re
from PySide2 import QtCore, QtGui, QtWidgets
from PySide2.QtCore import (QCoreApplication, QPropertyAnimation, QDate, QDateTime, QMetaObject, QObject, QPoint, QRect, QSize, QTime, QUrl, Qt, QEvent)
from PySide2.QtGui import (QBrush, QColor, QConicalGradient, QCursor, QFont, QFontDatabase, QIcon, QKeySequence, QLinearGradient, QPalette, QPainter, QPixmap, QRadialGradient)
from PySide2.QtWidgets import *

## ==> LOGIN SCREEN
from ui_login import Ui_Login

## ==> SPLASH SCREEN
from ui_splash_screen import Ui_SplashScreen

## GUI FILE
from ui_main import Ui_MainWindow

## IMPORT FUNCTIONS
from ui_functions import *

## ==> GLOBALS
counter = 0


# LOGIN SCREEN
class LoginWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        self.ui = Ui_Login()
        self.ui.setupUi(self)

        # BT CLOSE POPUP
        self.ui.btn_close_popup.clicked.connect(lambda: self.ui.frame_error.hide())

        # HIDE ERROR
        self.ui.frame_error.hide()

        # BT LOGIN
        self.ui.btn_connect.clicked.connect(self.checkFields)

        ## SHOW ==> LOGIN WINDOW
        ########################################################################
        self.show()

    #
    # FUNCTIONS
    #
    def checkFields(self):
        textUser = ""
        textPassword = ""

        def showMessage(message):
            self.ui.frame_error.show()
            self.ui.label_error.setText(message)

        # CHECK USER
        if not self.ui.lineEdit_user.text():
            textUser = " User Empty. "
            self.ui.lineEdit_user.setStyleSheet(self.ui.styleLineEditError)
        else:
            textUser = ""
            self.ui.lineEdit_user.setStyleSheet(self.ui.styleLineEditOk)

        # CHECK PASSWORD
        if not self.ui.lineEdit_password.text():
            textPassword = " Password Empty. "
            self.ui.lineEdit_password.setStyleSheet(self.ui.styleLineEditError)
        else:
            textPassword = ""
            self.ui.lineEdit_password.setStyleSheet(self.ui.styleLineEditOk)

        # CHECK FIELDS
        if textUser + textPassword != '':
            text = textUser + textPassword
            showMessage(text)
            self.ui.frame_error.setStyleSheet(self.ui.stylePopupError)
        else:
            text = " Login OK. "
            if self.ui.checkBox_user.isChecked():
                text = text + " | Saver user: OK "
            showMessage(text)
            self.ui.frame_error.setStyleSheet(self.ui.stylePopupOk)

            # SHOW SPLASH WINDOW
            self.main = SplashScreen()
            self.main.show()

            # CLOSE SPLASH SCREEN
            self.close()


# SPLASH SCREEN
class SplashScreen(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        self.ui = Ui_SplashScreen()
        self.ui.setupUi(self)

        ## UI ==> INTERFACE CODES
        ########################################################################

        ## REMOVE TITLE BAR
        self.setWindowFlag(QtCore.Qt.FramelessWindowHint)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground)

        ## DROP SHADOW EFFECT
        self.shadow = QGraphicsDropShadowEffect(self)
        self.shadow.setBlurRadius(20)
        self.shadow.setXOffset(0)
        self.shadow.setYOffset(0)
        self.shadow.setColor(QColor(0, 0, 0, 60))
        self.ui.drop_shadow_frame.setGraphicsEffect(self.shadow)

        ## QTIMER ==> START
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.progress)
        # TIMER IN MILLISECONDS
        self.timer.start(35)

        # CHANGE DESCRIPTION

        # Initial Text
        self.ui.label_description.setText("Zinus Corp.")

        # Change Texts
        QtCore.QTimer.singleShot(1500, lambda: self.ui.label_loading.setText("Loading Database"))
        QtCore.QTimer.singleShot(3000, lambda: self.ui.label_loading.setText("Loading user interface"))

        ## SHOW ==> MAIN WINDOW
        ########################################################################
        self.show()
        ## ==> END ##

    ## ==> APP FUNCTIONS
    ########################################################################
    def progress(self):
        global counter

        # SET VALUE TO PROGRESS BAR
        self.ui.progressBar.setValue(counter)

        # CLOSE SPLASH SCREEN AND OPEN APP
        if counter > 100:
            # STOP TIMER
            self.timer.stop()

            # SHOW MAIN WINDOW
            self.main = MainWindow()
            self.main.show()

            # CLOSE SPLASH SCREEN
            self.close()

        # INCREASE COUNTER
        counter += 1


class MainWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)

        # MOVE WINDOW
        def moveWindow(event):
            # RESTORE BEFORE MOVE
            if UIFunctions.returnStatus() == 1:
                UIFunctions.maximize_restore(self)

            # IF LEFT CLICK MOVE WINDOW
            if event.buttons() == Qt.LeftButton:
                self.move(self.pos() + event.globalPos() - self.dragPos)
                self.dragPos = event.globalPos()
                event.accept()

        # SET TITLE BAR
        self.ui.title_bar.mouseMoveEvent = moveWindow

        ## ==> SET UI DEFINITIONS
        UIFunctions.uiDefinitions(self)

        ## QTIMER ==> START
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.systemInformation)
        # TIMER IN MILLISECONDS
        self.timer.start(1000)

        ## SHOW ==> MAIN WINDOW
        ########################################################################
        self.show()

    ## APP EVENTS
    ########################################################################
    def mousePressEvent(self, event):
        self.dragPos = event.globalPos()

    ## SYSTEM INFORMATION
    def systemInformation(self):
        text = str(psutil.sensors_temperatures()['coretemp'][0])
        m = re.findall(r"\W current\D*(\d+.\d)", text)
        self.ui.label_22.setText(str(psutil.virtual_memory().percent) + "%")
        self.ui.label_18.setText(str(psutil.cpu_percent()) + "%")
        self.ui.label_20.setText("Temp: " + str(m[0]) + "C°")


if __name__ == "__main__":
    app = QApplication(sys.argv)
    window = LoginWindow()
    sys.exit(app.exec_())
[ "tahiri.oussup@gmail.com" ]
tahiri.oussup@gmail.com
2846b2395baf40fc31b5641fb2644c8af92d1c44
06ac73b42757da860e7120c841db9adfe028f626
/main.py
f7deb547986441def6572799e5a0499ddd6b3ddc
[]
no_license
sergiuiacob1/QLearning
f624455324cbd8c1e784861fd0d5007696e8f971
4294a24576a10a5328a2690606ef664057e54fbf
refs/heads/master
2020-09-24T23:37:03.519853
2019-12-04T13:12:05
2019-12-04T13:12:05
225,870,545
0
0
null
null
null
null
UTF-8
Python
false
false
253
py
from maze import Maze
from q_learning import QLearning

if __name__ == '__main__':
    maze = Maze(n=10, no_obstacles=5)
    q_learning = QLearning(maze)
    q_learning.train(epochs=1000, eta=1.5, gamma=0.1, exploration_rate=0.1)
    q_learning.solve()
[ "sergiu.iacob1@gmail.com" ]
sergiu.iacob1@gmail.com
1bc47a49ef727ab6ccde52521741bdc2364f6e3c
2aadde1fdaf3915cfd0ad73b523b8760c724ed4f
/PyPersonalCollection/urls.py
05e003f4aec68bf174ce3cde80b962658b33312e
[]
no_license
WingGao/PyPersonalCollection
cd65ed88f2cb764392b88304e5e3da2452402b74
93e8ffbb64e7cc82d460f4d58dccf1862e77ba59
refs/heads/master
2021-03-12T22:07:16.745396
2015-01-16T06:54:04
2015-01-16T06:54:04
20,069,296
0
0
null
null
null
null
UTF-8
Python
false
false
811
py
from django.conf.urls import patterns, include, url # from django.contrib import admin # admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'ppc.views.item.show'), url(r'^ppc/', include('ppc.urls')), url(r'^user/', include('wuser.urls')), # machine url(r'^machine/create', 'machine.views.create'), url(r'^machine/get', 'machine.views.get'), url(r'^machine/save', 'machine.views.save'), # Examples: # url(r'^$', 'PyPersonalCollection.views.home', name='home'), # url(r'^blog/', include('blog.urls')), # url(r'^admin/', include(admin.site.urls)), )
[ "wing.gao@live.com" ]
wing.gao@live.com
35e3195cf492e94e0fab9bf47f2e3d7d97aa2057
c99985d16a61bda4f75e2e4952f327f8209d3dd7
/manage.py
9ad71b5b6ce3245f01a948d17f1c9f0d021c12f3
[]
no_license
sindhyatodo/sindhyatodoproject
4d8b6c9c54ea264b13beafba1b128639a088dbcf
5830e4b643f8e3c13fa676aea63f833474b966d8
refs/heads/master
2023-07-17T05:36:16.167947
2021-09-10T15:41:17
2021-09-10T15:41:17
403,081,428
0
0
null
null
null
null
UTF-8
Python
false
false
690
py
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): """Run administrative tasks.""" os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'movieproject.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
[ "noreply@github.com" ]
sindhyatodo.noreply@github.com
dda520c184f9b53b56be40fe906e1db08a59b5cb
e208bd998583bbbefbb5ece714597d16b48ff557
/apwsj/make_rdvs.py
462d3436d4a1d3431f50d3f1171fef7f35d03d3e
[]
no_license
edithal-14/rdv-cnn
b321250d64fce43597c97042e37ab3882b9adc22
496b5cdf567e49e3a418230a53d39f8e116bc86a
refs/heads/master
2022-11-07T06:07:54.188575
2020-06-22T08:14:41
2020-06-22T08:14:41
135,123,433
1
2
null
null
null
null
UTF-8
Python
false
false
2,269
py
import pickle
import numpy as np
from scipy.spatial.distance import cdist
from collections import defaultdict

def build_rdv(t, s):
    return np.concatenate([t, s, np.subtract(t, s), np.multiply(t, s)], axis=0)

def rdv(tm, sm, label):
    match = np.argmin(cdist(tm, sm, metric="cosine"), axis=1)
    # np.stack needs a sequence, not a generator, so build the list explicitly
    vec = np.stack([build_rdv(tm[i], sm[match[i]]) for i in range(len(tm))])
    return [vec, label]

emb = pickle.load(open("apwsj_sentence_embeddings_512.p", "rb"))
is_key = defaultdict(int)
for key in emb:
    is_key[key] = 1

topics_allowed = "q101, q102, q103, q104, q105, q106, q107, q108, q109, q111, q112, q113, q114, q115, q116, q117, q118, q119, q120, q121, q123, q124, q125, q127, q128, q129, q132, q135, q136, q137, q138, q139, q141"
topics_allowed = topics_allowed.split(", ")

non_novel = list()
for line in open("redundancy_list_without_partially_redundant.txt", "r"):
    tokens = line.split()
    if tokens[0] in topics_allowed:
        non_novel.append([tokens[0] + "/" + tokens[i] for i in range(1, len(tokens))])

novel = list()
for line in open("novel_list_without_partially_redundant.txt", "r"):
    tokens = line.split()
    if tokens[0] in topics_allowed:
        novel.append([tokens[0] + "/" + tokens[i] for i in range(1, len(tokens))])

# labels: non_novel = 0, novel = 1
rel_doc_vecs = list()
rdv_files = list()
for case in non_novel:
    # if a target document has more than 200 sentences, we ignore it
    if emb[case[0]].shape[0] > 200:
        continue
    file = list()
    if is_key[case[0]] == 1:
        file.append(case[0])
    sources = []
    for d in case[1:]:
        if is_key[d] == 1:
            sources.append(emb[d])
            file.append(d)
    if len(sources) == 0:
        continue
    sources = tuple(sources)
    rel_doc_vecs.append(rdv(emb[case[0]], np.vstack(sources), 0))
    file.append("0")
    rdv_files.append(file)

for case in novel:
    # if a target document has more than 200 sentences, we ignore it
    if emb[case[0]].shape[0] > 200:
        continue
    file = list()
    if is_key[case[0]] == 1:
        file.append(case[0])
    sources = []
    for d in case[1:]:
        if is_key[d] == 1:
            sources.append(emb[d])
            file.append(d)
    if len(sources) == 0:
        continue
    sources = tuple(sources)
    rel_doc_vecs.append(rdv(emb[case[0]], np.vstack(sources), 1))
    file.append("1")
    rdv_files.append(file)

pickle.dump([rel_doc_vecs, rdv_files], open("rdvs_512_without_partially_redundant.pickle", "wb"), 2)
[ "vigneshedithal11031997v@gmail.com" ]
vigneshedithal11031997v@gmail.com
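build_rdv and rdv above pair every target sentence embedding with its nearest source sentence under cosine distance and stack [t, s, t - s, t * s]. A small self-contained check of that matching and of the resulting shape, with random vectors standing in for the pickled APWSJ embeddings:

import numpy as np
from scipy.spatial.distance import cdist

def build_rdv(t, s):
    # Same feature layout as the script above: [target, source, difference, elementwise product]
    return np.concatenate([t, s, t - s, t * s], axis=0)

rng = np.random.default_rng(0)
target = rng.normal(size=(4, 8))   # 4 "sentences", 8-dim embeddings
source = rng.normal(size=(6, 8))   # 6 candidate source sentences

match = np.argmin(cdist(target, source, metric="cosine"), axis=1)
rdv = np.stack([build_rdv(target[i], source[match[i]]) for i in range(len(target))])

print(match)        # index of the closest source sentence for each target sentence
print(rdv.shape)    # (4, 32): one relative-document vector row per target sentence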
c4a026e38ec01759c413fa78c3e9aee9a5cba5f6
01776becc70eafe6dcbad140eb40a862bc623341
/LeetCode/Medium/322.Coin Change.py
e0d381a9018111b461d6039977d39203baef144c
[]
no_license
AnthonyTsui/AlgoPractice
8eae4d197080c0a94b0127ed5a95198f5d2f3269
59fcb2826fb95a304cf7b4b9a77c2ae710fb5c9a
refs/heads/master
2022-12-02T18:20:58.104356
2020-08-29T23:58:17
2020-08-29T23:58:17
250,649,377
0
0
null
null
null
null
UTF-8
Python
false
false
1,770
py
# You are given coins of different denominations and a total amount of money amount. Write a function to compute the fewest number of coins that you need to make up that amount. If that amount of money cannot be made up by any combination of the coins, return -1.

# Example 1:
# Input: coins = [1, 2, 5], amount = 11
# Output: 3
# Explanation: 11 = 5 + 5 + 1

# Example 2:
# Input: coins = [2], amount = 3
# Output: -1

# Approach: We can recognize that this is a DP problem because the optimal solution for any amount i is built from the
# optimal solutions to its subproblems.
# For example, given one coin with value C and an amount N, min(N) = 1 + min(N-C): the minimum number of coins
# needed to make N is one more than the minimum number needed to make N-C, the remaining value.
# To solve this dynamically, we create a list of length amount + 1 that holds the minimum coins required to make every
# value leading up to the target amount. We then apply min(denoms[i], 1 + denoms[i-coin]) to compute the minimum
# number of coins needed for amount i.

# Time complexity: O(C*N) where C = len(coins) and N = amount + 1
# Space complexity: O(N)

class Solution(object):
    def coinChange(self, coins, amount):
        """
        :type coins: List[int]
        :type amount: int
        :rtype: int
        """
        # denoms[i] holds the fewest coins needed to make amount i; start at +inf except denoms[0] = 0.
        denoms = [float("inf") for i in range(amount + 1)]
        denoms[0] = 0
        for coin in coins:
            for i in range(coin, amount + 1):
                denoms[i] = min(denoms[i], 1 + denoms[i - coin])
        return -1 if denoms[-1] == float('inf') else denoms[-1]
[ "atsui4688@gmail.com" ]
atsui4688@gmail.com
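A quick standalone check of the bottom-up recurrence described in the comments above (denoms[i] = min(denoms[i], 1 + denoms[i - coin])); this is a plain re-implementation for illustration, separate from the LeetCode Solution class:

def coin_change(coins, amount):
    INF = float("inf")
    denoms = [0] + [INF] * amount          # denoms[i] = fewest coins summing to i
    for coin in coins:
        for i in range(coin, amount + 1):
            denoms[i] = min(denoms[i], 1 + denoms[i - coin])
    return -1 if denoms[amount] == INF else denoms[amount]

assert coin_change([1, 2, 5], 11) == 3     # 5 + 5 + 1
assert coin_change([2], 3) == -1           # 3 cannot be formed from 2s
assert coin_change([1], 0) == 0            # zero amount needs zero coins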
c4636ff70e18214193191a071dcf14a59ea059b2
8677c1191eec869bb2fa97a63f46210c6530b863
/product/migrations/0001_initial.py
109c3a41c5a9910737cec1f089f9c1cbdb4f0fa7
[]
no_license
Sukh0312/Products
8ce2ad7667a1ba333257a5a7dfa3cf5239b72cee
fdb6d3e3cca571bee142b1b01ae843a2d5fb8103
refs/heads/master
2022-12-14T22:33:46.619944
2020-09-12T10:57:34
2020-09-12T10:57:34
294,925,295
1
0
null
null
null
null
UTF-8
Python
false
false
609
py
# Generated by Django 3.1 on 2020-09-11 15:45 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Product', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('description', models.TextField()), ('mfg_date', models.DateTimeField(auto_now_add=True)), ], ), ]
[ "sudhanshumotewar2001@gmail.com" ]
sudhanshumotewar2001@gmail.com
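A migration like 0001_initial above is normally generated by makemigrations from a model definition roughly like the following; this models.py is inferred from the CreateModel operation and is not taken from the repository:

from django.db import models

class Product(models.Model):
    # Field definitions inferred from the CreateModel operation above.
    name = models.CharField(max_length=200)
    description = models.TextField()
    mfg_date = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        # Added for readability; not implied by the migration itself.
        return self.name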
0fffe8b26683512d075ac2a4c08f568778b310aa
a5c17ae4b6877f9f693cfce4857c7c81f718600a
/BLDUP/items.py
484909c0551b6d17f1a2359a97f3bfd78d232059
[]
no_license
Iron-Cow/Scrapy-Deeds
eb3471b09783318d6737823514c98adcc050ed44
ef9666f7185e982928a5e984d01364f85ff2850c
refs/heads/master
2022-11-12T22:40:49.257653
2020-04-09T13:01:58
2020-04-09T13:01:58
254,372,557
0
0
null
2022-11-04T19:40:15
2020-04-09T13:01:29
Python
UTF-8
Python
false
false
286
py
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # https://docs.scrapy.org/en/latest/topics/items.html import scrapy class BldupItem(scrapy.Item): # define the fields for your item here like: # name = scrapy.Field() pass
[ "uangeji@gmail.com" ]
uangeji@gmail.com
f5c179ee6cc0cc278512c9b34595cbd0cad42c09
afebbb07b2b4eada17a5853c1ce63b4075d280df
/marketsim/gen/_out/_any.py
650d50f7a928ec5f36aa99fef8e6bf0bbbc5aec1
[]
no_license
peter1000/marketsimulator
8c0a55fc6408b880311d3ad49defc55e9af57824
1b677200a9d5323f2970c83f076c2b83d39d4fe6
refs/heads/master
2021-01-18T01:39:04.869755
2015-03-29T17:47:24
2015-03-29T17:47:24
null
0
0
null
null
null
null
UTF-8
Python
false
false
29
py
class Any(object): pass
[ "anton.kolotaev@gmail.com" ]
anton.kolotaev@gmail.com
0d5f98ee1993327d144cdaaffd29576787ef6429
c934802edc057ce109d04c6bbb4606612beacf66
/examples/gui/gtk/simulator.py
ea0fb51e299ec6da648532224edd5607d2629d07
[ "BSD-3-Clause" ]
permissive
moltob/pymodbus
9925a4090c55c9d36ea6aa21e158b99441c54677
24cdc56d9f79593cbc1101416caa9cffc2023dd1
refs/heads/master
2021-01-15T15:04:40.058704
2015-01-22T19:32:37
2015-01-22T20:07:41
28,333,118
6
0
null
2015-10-19T10:15:07
2014-12-22T09:48:15
Python
UTF-8
Python
false
false
12,585
py
#!/usr/bin/env python #---------------------------------------------------------------------------# # System #---------------------------------------------------------------------------# import os import getpass import pickle from threading import Thread #---------------------------------------------------------------------------# # For Gui #---------------------------------------------------------------------------# from twisted.internet import gtk2reactor gtk2reactor.install() import gtk from gtk import glade #---------------------------------------------------------------------------# # SNMP Simulator #---------------------------------------------------------------------------# from twisted.internet import reactor from twisted.internet import error as twisted_error from pymodbus.server.async import ModbusServerFactory from pymodbus.datastore import ModbusServerContext,ModbusSlaveContext #--------------------------------------------------------------------------# # Logging #--------------------------------------------------------------------------# import logging log = logging.getLogger(__name__) #---------------------------------------------------------------------------# # Application Error #---------------------------------------------------------------------------# class ConfigurationException(Exception): ''' Exception for configuration error ''' def __init__(self, string): Exception.__init__(self, string) self.string = string def __str__(self): return 'Configuration Error: %s' % self.string #---------------------------------------------------------------------------# # Extra Global Functions #---------------------------------------------------------------------------# # These are extra helper functions that don't belong in a class #---------------------------------------------------------------------------# def root_test(): ''' Simple test to see if we are running as root ''' return getpass.getuser() == "root" #---------------------------------------------------------------------------# # Simulator Class #---------------------------------------------------------------------------# class Simulator(object): ''' Class used to parse configuration file and create and modbus datastore. The format of the configuration file is actually just a python pickle, which is a compressed memory dump from the scraper. 
''' def __init__(self, config): ''' Trys to load a configuration file, lets the file not found exception fall through @param config The pickled datastore ''' try: self.file = open(config, "r") except Exception: raise ConfigurationException("File not found %s" % config) def _parse(self): ''' Parses the config file and creates a server context ''' try: handle = pickle.load(self.file) dsd = handle['di'] csd = handle['ci'] hsd = handle['hr'] isd = handle['ir'] except KeyError: raise ConfigurationException("Invalid Configuration") slave = ModbusSlaveContext(d=dsd, c=csd, h=hsd, i=isd) return ModbusServerContext(slaves=slave) def _simulator(self): ''' Starts the snmp simulator ''' ports = [502]+range(20000,25000) for port in ports: try: reactor.listenTCP(port, ModbusServerFactory(self._parse())) print 'listening on port', port return port except twisted_error.CannotListenError: pass def run(self): ''' Used to run the simulator ''' reactor.callWhenRunning(self._simulator) #---------------------------------------------------------------------------# # Network reset thread #---------------------------------------------------------------------------# # This is linux only, maybe I should make a base class that can be filled # in for linux(debian/redhat)/windows/nix #---------------------------------------------------------------------------# class NetworkReset(Thread): ''' This class is simply a daemon that is spun off at the end of the program to call the network restart function (an easy way to remove all the virtual interfaces) ''' def __init__(self): Thread.__init__(self) self.setDaemon(True) def run(self): ''' Run the network reset ''' os.system("/etc/init.d/networking restart") #---------------------------------------------------------------------------# # Main Gui Class #---------------------------------------------------------------------------# # Note, if you are using gtk2 before 2.12, the file_set signal is not # introduced. 
To fix this, you need to apply the following patch #---------------------------------------------------------------------------# #Index: simulator.py #=================================================================== #--- simulator.py (revision 60) #+++ simulator.py (working copy) #@@ -158,7 +161,7 @@ # "on_helpBtn_clicked" : self.help_clicked, # "on_quitBtn_clicked" : self.close_clicked, # "on_startBtn_clicked" : self.start_clicked, #- "on_file_changed" : self.file_changed, #+ #"on_file_changed" : self.file_changed, # "on_window_destroy" : self.close_clicked # } # self.tree.signal_autoconnect(actions) #@@ -235,6 +238,7 @@ # return False # # # check input file #+ self.file_changed(self.tdevice) # if os.path.exists(self.file): # self.grey_out() # handle = Simulator(config=self.file) #---------------------------------------------------------------------------# class SimulatorApp(object): ''' This class implements the GUI for the flasher application ''' file = "none" subnet = 205 number = 1 restart = 0 def __init__(self, xml): ''' Sets up the gui, callback, and widget handles ''' #---------------------------------------------------------------------------# # Action Handles #---------------------------------------------------------------------------# self.tree = glade.XML(xml) self.bstart = self.tree.get_widget("startBtn") self.bhelp = self.tree.get_widget("helpBtn") self.bclose = self.tree.get_widget("quitBtn") self.window = self.tree.get_widget("window") self.tdevice = self.tree.get_widget("fileTxt") self.tsubnet = self.tree.get_widget("addressTxt") self.tnumber = self.tree.get_widget("deviceTxt") #---------------------------------------------------------------------------# # Actions #---------------------------------------------------------------------------# actions = { "on_helpBtn_clicked" : self.help_clicked, "on_quitBtn_clicked" : self.close_clicked, "on_startBtn_clicked" : self.start_clicked, "on_file_changed" : self.file_changed, "on_window_destroy" : self.close_clicked } self.tree.signal_autoconnect(actions) if not root_test(): self.error_dialog("This program must be run with root permissions!", True) #---------------------------------------------------------------------------# # Gui helpers #---------------------------------------------------------------------------# # Not callbacks, but used by them #---------------------------------------------------------------------------# def show_buttons(self, state=False, all=0): ''' Greys out the buttons ''' if all: self.window.set_sensitive(state) self.bstart.set_sensitive(state) self.tdevice.set_sensitive(state) self.tsubnet.set_sensitive(state) self.tnumber.set_sensitive(state) def destroy_interfaces(self): ''' This is used to reset the virtual interfaces ''' if self.restart: n = NetworkReset() n.start() def error_dialog(self, message, quit=False): ''' Quick pop-up for error messages ''' dialog = gtk.MessageDialog( parent = self.window, flags = gtk.DIALOG_DESTROY_WITH_PARENT | gtk.DIALOG_MODAL, type = gtk.MESSAGE_ERROR, buttons = gtk.BUTTONS_CLOSE, message_format = message) dialog.set_title('Error') if quit: dialog.connect("response", lambda w, r: gtk.main_quit()) else: dialog.connect("response", lambda w, r: w.destroy()) dialog.show() #---------------------------------------------------------------------------# # Button Actions #---------------------------------------------------------------------------# # These are all callbacks for the various buttons #---------------------------------------------------------------------------# def 
start_clicked(self, widget): ''' Starts the simulator ''' start = 1 base = "172.16" # check starting network net = self.tsubnet.get_text() octets = net.split('.') if len(octets) == 4: base = "%s.%s" % (octets[0], octets[1]) net = int(octets[2]) % 255 start = int(octets[3]) % 255 else: self.error_dialog("Invalid starting address!"); return False # check interface size size = int(self.tnumber.get_text()) if (size >= 1): for i in range(start, (size + start)): j = i % 255 cmd = "/sbin/ifconfig eth0:%d %s.%d.%d" % (i, base, net, j) os.system(cmd) if j == 254: net = net + 1 self.restart = 1 else: self.error_dialog("Invalid number of devices!"); return False # check input file if os.path.exists(self.file): self.show_buttons(state=False) try: handle = Simulator(config=self.file) handle.run() except ConfigurationException, ex: self.error_dialog("Error %s" % ex) self.show_buttons(state=True) else: self.error_dialog("Device to emulate does not exist!"); return False def help_clicked(self, widget): ''' Quick pop-up for about page ''' data = gtk.AboutDialog() data.set_version("0.1") data.set_name(('Modbus Simulator')) data.set_authors(["Galen Collins"]) data.set_comments(('First Select a device to simulate,\n' + 'then select the starting subnet of the new devices\n' + 'then select the number of device to simulate and click start')) data.set_website("http://code.google.com/p/pymodbus/") data.connect("response", lambda w,r: w.hide()) data.run() def close_clicked(self, widget): ''' Callback for close button ''' self.destroy_interfaces() reactor.stop() # quit twisted def file_changed(self, widget): ''' Callback for the filename change ''' self.file = widget.get_filename() #---------------------------------------------------------------------------# # Main handle function #---------------------------------------------------------------------------# # This is called when the application is run from a console # We simply start the gui and start the twisted event loop #---------------------------------------------------------------------------# def main(): ''' Main control function This either launches the gui or runs the command line application ''' debug = True if debug: try: log.setLevel(logging.DEBUG) logging.basicConfig() except Exception, e: print "Logging is not supported on this system" simulator = SimulatorApp('./simulator.glade') reactor.run() #---------------------------------------------------------------------------# # Library/Console Test #---------------------------------------------------------------------------# # If this is called from console, we start main #---------------------------------------------------------------------------# if __name__ == "__main__": main()
[ "bashwork@gmail.com" ]
bashwork@gmail.com
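start_clicked above derives the eth0 alias addresses from a starting subnet and a device count before launching the Modbus simulator. The same address arithmetic, pulled out on its own and printing instead of shelling out to ifconfig, makes the .254 rollover easy to inspect (the helper name alias_addresses is mine):

def alias_addresses(start_net="172.16.0.1", count=5):
    """Reproduce the eth0:<n> address generation from start_clicked, without os.system."""
    octets = start_net.split(".")
    base = "%s.%s" % (octets[0], octets[1])
    net = int(octets[2]) % 255
    start = int(octets[3]) % 255
    out = []
    for i in range(start, start + count):
        j = i % 255
        out.append("eth0:%d -> %s.%d.%d" % (i, base, net, j))
        if j == 254:          # same rollover rule as the GUI code
            net += 1
    return out

for line in alias_addresses("172.16.3.250", 8):
    print(line)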
8a30b7639416084d36b061b8aa6b778360dcd013
eb1f7622905878a1590347b62dec4eb1cb355adb
/treecorr/binnedcorr3.py
06573596cfdcc3ba8240eba818989da63719c002
[ "BSD-2-Clause", "BSD-2-Clause-Views" ]
permissive
sukhdeep2/TreeCorr
831143521972c86304dba1ac1d8a6b12d2189e0b
7a31800291c438c9403c512fe8070b6c37787a1d
refs/heads/master
2021-06-01T06:06:22.025622
2016-06-01T19:13:44
2016-06-01T19:13:44
null
0
0
null
null
null
null
UTF-8
Python
false
false
31,447
py
# Copyright (c) 2003-2015 by Mike Jarvis # # TreeCorr is free software: redistribution and use in source and binary forms, # with or without modification, are permitted provided that the following # conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, this # list of conditions, and the disclaimer given in the accompanying LICENSE # file. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the disclaimer given in the documentation # and/or other materials provided with the distribution. """ .. module:: binnedcorr3 """ import treecorr import math import numpy class BinnedCorr3(object): """This class stores the results of a 3-point correlation calculation, along with some ancillary data. This is a base class that is not intended to be constructed directly. But it has a few helper functions that derived classes can use to help perform their calculations. See the derived classes for more details: - :class:`~treecorr.NNNCorrelation` handles count-count-count correlation functions - :class:`~treecorr.KKKCorrelation` handles kappa-kappa-kappa correlation functions - :class:`~treecorr.GGGCorrelation` handles gamma-gamma-gamma correlation functions Three-point correlations are a bit more complicated than two-point, since the data need to be binned in triangles, not just the separation between two points. We characterize the triangles according to the following three parameters based on the three side lenghts of the triangle with d1 >= d2 >= d3. .. math:: r &= d2 \\\\ u &= \\frac{d3}{d2} \\\\ v &= \\pm \\frac{(d1 - d2)}{d3} \\\\ The orientation of the triangle is specified by the sign of v. Positive v triangles have the three sides d1,d2,d3 in counter-clockwise orientation. Negative v triangles have the three sides d1,d2,d3 in clockwise orientation. The constructor for all derived classes take a config dict as the first argument, since this is often how we keep track of parameters, but if you don't want to use one or if you want to change some parameters from what are in a config dict, then you can use normal kwargs, which take precedence over anything in the config dict. :param config: The configuration dict which defines attributes about how to read the file. Any kwargs that are not those listed here will be added to the config, so you can even omit the config dict and just enter all parameters you want as kwargs. (default: None) :param logger: If desired, a logger object for logging. (default: None, in which case one will be built according to the config dict's verbose level.) The following parameters may be given either in the config dict or as a named kwarg: :param nbins: How many bins to use for the r binning. (Exactly three of nbins, bin_size, min_sep, max_sep are required. If nbins is not given, it will be calculated from the values of the other three, rounding up to the next highest integer. In this case, max_sep will be readjusted to account for this rounding up.) :param bin_size: The width of the bins in log(separation). (Exactly three of nbins, bin_size, min_sep, max_sep are required. If bin_size is not given, it will be calculated from the values of the other three.) :param min_sep: The minimum separation in units of sep_units, if relevant. (Exactly three of nbins, bin_size, min_sep, max_sep are required. If min_sep is not given, it will be calculated from the values of the other three.) :param max_sep: The maximum separation in units of sep_units, if relevant. 
(Exactly three of nbins, bin_size, min_sep, max_sep are required. If max_sep is not given, it will be calculated from the values of the other three. If nbins is not given, then max_sep will be adjusted as needed to allow nbins to be an integer value.) :param sep_units: The units to use for the separation values, given as a string. This includes both min_sep and max_sep above, as well as the units of the output distance values. Valid options are arcsec, arcmin, degrees, hours, radians. (default: radians if angular units make sense, but for 3-d or flat 2-d positions, the default will just match the units of x,y[,z] coordinates) :param bin_slop: How much slop to allow in the placement of pairs in the bins. If bin_slop = 1, then the bin into which a particular pair is placed may be incorrect by at most 1.0 bin widths. (default: None, which means to use bin_slop=1 if bin_size <= 0.1, or 0.1/bin_size if bin_size > 0.1. This mean the error will be at most 0.1 in log(sep), which has been found to yield good results for most application. :param nubins: Analogous to nbins for the u direction. (The default is to calculate from ubin_size = binsize, min_u = 0, max_u = 1, but this can be overridden by specifying up to 3 of these four parametes.) :param ubin_size: Analogous to bin_size for the u direction. (default: bin_size) :param min_u: Analogous to min_sep for the u direction. (default: 0) :param max_u: Analogous to max_sep for the u direction. (default: 1) :param nvbins: Analogous to nbins for the v direction. (The default is to calculate from vbin_size = binsize, min_v = -1, max_v = 1, but this can be overridden by specifying up to 3 of these four parametes.) :param vbin_size: Analogous to bin_size for the v direction. (default: bin_size) :param min_v: Analogous to min_sep for the v direction. (default: -1) :param max_v: Analogous to max_sep for the v direction. (default: 1) :param verbose: If no logger is provided, this will optionally specify a logging level to use: - 0 means no logging output (default) - 1 means to output warnings only - 2 means to output various progress information - 3 means to output extensive debugging information :param log_file: If no logger is provided, this will specify a file to write the logging output. (default: None; i.e. output to standard output) :param output_dots: Whether to output progress dots during the calcualtion of the correlation function. (default: False unless verbose is given and >= 2, in which case True) :param split_method: How to split the cells in the tree when building the tree structure. Options are: - mean: Use the arithmetic mean of the coordinate being split. (default) - median: Use the median of the coordinate being split. - middle: Use the middle of the range; i.e. the average of the minimum and maximum value. - random: Use a random point somewhere in the middle two quartiles of the range. :param max_top: The maximum number of top layers to use when setting up the field. The top-level cells are the cells where each calculation job starts. There will typically be of order 2^max_top top-level cells. (default: 10) :param precision: The precision to use for the output values. This should be an integer, which specifies how many digits to write. (default: 4) :param metric: Which metric to use for distance measurements. Options are: - 'Euclidean' = straight line Euclidean distance between two points. For spherical coordinates (ra,dec without r), this is the chord distance between points on the unit sphere. 
- 'Rperp' = the perpendicular component of the distance. For two points with distance from Earth `r1, r2`, if `d` is the normal Euclidean distance and :math:`Rparallel = |r1-r2|`, then we define :math:`Rperp^2 = d^2 - Rparallel^2`. - 'Rlens' = the projected distance perpendicular to the first point in the pair (taken to be a lens) to the line of sight to the second point (e.g. a lensed source galaxy). - 'Arc' = the true great circle distance for spherical coordinates. (default: 'Euclidean') :param min_rpar: For the 'Rperp' metric, the minimum difference in Rparallel to allow for pairs being included in the correlation function. (default: None) :param max_rpar: For the 'Rperp' metric, the maximum difference in Rparallel to allow for pairs being included in the correlation function. (default: None) :param num_threads: How many OpenMP threads to use during the calculation. (default: use the number of cpu cores; this value can also be given in the constructor in the config dict.) Note that this won't work if the system's C compiler is clang prior to version 3.7. """ _valid_params = { 'nbins' : (int, False, None, None, 'The number of output bins to use for sep dimension.'), 'bin_size' : (float, False, None, None, 'The size of the output bins in log(sep).'), 'min_sep' : (float, False, None, None, 'The minimum separation to include in the output.'), 'max_sep' : (float, False, None, None, 'The maximum separation to include in the output.'), 'sep_units' : (str, False, None, treecorr.angle_units.keys(), 'The units to use for min_sep and max_sep. Also the units of the output distances'), 'bin_slop' : (float, False, None, None, 'The fraction of a bin width by which it is ok to let the pairs miss the correct bin.', 'The default is to use 1 if bin_size <= 0.1, or 0.1/bin_size if bin_size > 0.1.'), 'nubins' : (int, False, None, None, 'The number of output bins to use for u dimension.'), 'ubin_size' : (float, False, None, None, 'The size of the output bins in u.'), 'min_u' : (float, False, None, None, 'The minimum u to include in the output.'), 'max_u' : (float, False, None, None, 'The maximum u to include in the output.'), 'nvbins' : (int, False, None, None, 'The number of output bins to use for v dimension.'), 'vbin_size' : (float, False, None, None, 'The size of the output bins in v.'), 'min_v' : (float, False, None, None, 'The minimum v to include in the output.'), 'max_v' : (float, False, None, None, 'The maximum v to include in the output.'), 'verbose' : (int, False, 1, [0, 1, 2, 3], 'How verbose the code should be during processing. ', '0 = Errors Only, 1 = Warnings, 2 = Progress, 3 = Debugging'), 'log_file' : (str, False, None, None, 'If desired, an output file for the logging output.', 'The default is to write the output to stdout.'), 'output_dots' : (bool, False, None, None, 'Whether to output dots to the stdout during the C++-level computation.', 'The default is True if verbose >= 2 and there is no log_file. Else False.'), 'split_method' : (str, False, 'mean', ['mean', 'median', 'middle', 'random'], 'Which method to use for splitting cells.'), 'max_top' : (int, False, 10, None, 'The maximum number of top layers to use when setting up the field.'), 'precision' : (int, False, 4, None, 'The number of digits after the decimal in the output.'), 'num_threads' : (int, False, None, None, 'How many threads should be used. 
num_threads <= 0 means auto based on num cores.'), 'metric': (str, False, 'Euclidean', ['Euclidean', 'Rperp', 'Rlens', 'Arc'], 'Which metric to use for the distance measurements'), 'min_rpar': (float, False, None, None, 'For Rperp metric, the minimum difference in Rparallel for pairs to include'), 'max_rpar': (float, False, None, None, 'For Rperp metric, the maximum difference in Rparallel for pairs to include'), } def __init__(self, config=None, logger=None, **kwargs): self.config = treecorr.config.merge_config(config,kwargs,BinnedCorr3._valid_params) if logger is None: self.logger = treecorr.config.setup_logger( treecorr.config.get(self.config,'verbose',int,0), self.config.get('log_file',None)) else: self.logger = logger if 'output_dots' in self.config: self.output_dots = treecorr.config.get(self.config,'output_dots',bool) elif 'verbose' in self.config: self.output_dots = treecorr.config.get(self.config,'verbose',int,0) >= 2 else: self.output_dots = False self.sep_units = treecorr.config.get(self.config,'sep_units',str,'radians') self.sep_unit_name = self.config.get('sep_units','') self.log_sep_units = math.log(self.sep_units) if 'nbins' not in self.config: if 'max_sep' not in self.config: raise AttributeError("Missing required parameter max_sep") if 'min_sep' not in self.config: raise AttributeError("Missing required parameter min_sep") if 'bin_size' not in self.config: raise AttributeError("Missing required parameter bin_size") self.min_sep = float(self.config['min_sep']) self.max_sep = float(self.config['max_sep']) if self.min_sep >= self.max_sep: raise ValueError("max_sep must be larger than min_sep") self.bin_size = float(self.config['bin_size']) self.nbins = int(math.ceil(math.log(self.max_sep/self.min_sep)/self.bin_size)) # Update max_sep given this value of nbins self.max_sep = math.exp(self.nbins*self.bin_size)*self.min_sep elif 'bin_size' not in self.config: if 'max_sep' not in self.config: raise AttributeError("Missing required parameter max_sep") if 'min_sep' not in self.config: raise AttributeError("Missing required parameter min_sep") self.min_sep = float(self.config['min_sep']) self.max_sep = float(self.config['max_sep']) if self.min_sep >= self.max_sep: raise ValueError("max_sep must be larger than min_sep") self.nbins = int(self.config['nbins']) self.bin_size = math.log(self.max_sep/self.min_sep)/self.nbins elif 'max_sep' not in self.config: if 'min_sep' not in self.config: raise AttributeError("Missing required parameter min_sep") self.min_sep = float(self.config['min_sep']) self.nbins = int(self.config['nbins']) self.bin_size = float(self.config['bin_size']) self.max_sep = math.exp(self.nbins*self.bin_size)*self.min_sep else: if 'min_sep' in self.config: raise AttributeError("Only 3 of min_sep, max_sep, bin_size, nbins are allowed.") self.max_sep = float(self.config['max_sep']) self.nbins = int(self.config['nbins']) self.bin_size = float(self.config['bin_size']) self.min_sep = self.max_sep*math.exp(-self.nbins*self.bin_size) if self.sep_unit_name == '': self.logger.info("r: nbins = %d, min,max sep = %g..%g, bin_size = %g", self.nbins,self.min_sep,self.max_sep,self.bin_size) else: self.logger.info("r: nbins = %d, min,max sep = %g..%g %s, bin_size = %g", self.nbins,self.min_sep/self.sep_units,self.max_sep/self.sep_units, self.sep_unit_name,self.bin_size) # The underscore-prefixed names are in natural units (radians for angles) self._min_sep = self.min_sep * self.sep_units self._max_sep = self.max_sep * self.sep_units if 'nubins' not in self.config: self.min_u = 
float(self.config.get('min_u', 0.)) self.max_u = float(self.config.get('max_u', 1.)) self.ubin_size = float(self.config.get('ubin_size', self.bin_size)) if self.min_u >= self.max_u: raise ValueError("max_u must be larger than min_u") self.nubins = int(math.ceil((self.max_u-self.min_u)/self.ubin_size)) self.min_u = self.max_u - self.nubins*self.ubin_size if self.min_u < 0.: self.min_u = 0. self.ubin_size = (self.max_u-self.min_u)/self.nubins elif 'ubin_size' not in self.config: self.min_u = float(self.config.get('min_u', 0.)) self.max_u = float(self.config.get('max_u', 1.)) if self.min_u >= self.max_u: raise ValueError("max_u must be larger than min_u") self.nubins = int(self.config['nubins']) self.ubin_size = (self.max_u-self.min_u)/self.nubins elif 'min_u' not in self.config: self.max_u = float(self.config.get('max_u', 1.)) self.nubins = int(self.config['nubins']) self.ubin_size = float(self.config['ubin_size']) if self.ubin_size * (self.nubins-1) >= 1.: raise ValueError("Cannot specify ubin_size * nubins > 1.") self.min_u = self.max_u - self.nubins*self.ubin_size if self.min_u < 0.: self.min_u = 0. self.ubin_size = (self.max_u-self.min_u)/self.nubins else: if 'max_u' in self.config: raise AttributeError("Only 3 of min_u, max_u, ubin_size, nubins are allowed.") self.min_u = float(self.config['min_u']) self.nubins = int(self.config['nubins']) self.ubin_size = float(self.config['ubin_size']) if self.ubin_size * (self.nubins-1) >= 1.: raise ValueError("Cannot specify ubin_size * nubins > 1.") self.max_u = self.min_u + self.nubins*self.ubin_size if self.max_u > 1.: self.max_u = 1. self.ubin_size = (self.max_u-self.min_u)/self.nubins self.logger.info("u: nbins = %d, min,max = %g..%g, bin_size = %g", self.nubins,self.min_u,self.max_u,self.ubin_size) if 'nvbins' not in self.config: self.min_v = float(self.config.get('min_v', -1.)) self.max_v = float(self.config.get('max_v', 1.)) self.vbin_size = float(self.config.get('vbin_size', self.bin_size)) if self.min_v >= self.max_v: raise ValueError("max_v must be larger than min_v") self.nvbins = int(math.ceil((self.max_v-self.min_v)/self.vbin_size)) # If one of min_v or max_v is specified, keep it exact. # Otherwise expand both values out as needed. Also, make sure nvbins is even. if ('min_v' in self.config) == ('max_v' in self.config): if self.nvbins % 2 == 1: self.nvbins += 1 cen = (self.min_v + self.max_v)/2. self.min_v = cen - self.nvbins*self.vbin_size/2. self.max_v = cen + self.nvbins*self.vbin_size/2. elif 'min_v' in config: self.max_v = self.min_v + self.nvbins*self.vbin_size else: self.min_v = self.max_v - self.nvbins*self.vbin_size if self.min_v < -1.: self.min_v = -1. if self.max_v > 1.: self.max_v = 1. self.vbin_size = (self.max_v-self.min_v)/self.nvbins elif 'vbin_size' not in self.config: self.min_v = float(self.config.get('min_v', -1.)) self.max_v = float(self.config.get('max_v', 1.)) if self.min_v >= self.max_v: raise ValueError("max_v must be larger than min_v") self.nvbins = int(self.config['nvbins']) self.vbin_size = (self.max_v-self.min_v)/self.nvbins elif 'min_v' not in self.config and 'max_v' not in self.config: self.nvbins = int(self.config['nvbins']) self.vbin_size = float(self.config['vbin_size']) if self.vbin_size * (self.nvbins-1) >= 1.: raise ValueError("Cannot specify vbin_size * nvbins > 1.") self.max_v = self.nvbins*self.vbin_size if self.max_v > 1.: self.max_v = 1. 
self.min_v = -self.max_v self.vbin_size = (self.max_v-self.min_v)/self.nvbins elif 'min_v' in self.config: if 'max_v' in self.config: raise AttributeError("Only 3 of min_v, max_v, vbin_size, nvbins are allowed.") self.min_v = float(self.config['min_v']) self.nvbins = int(self.config['nvbins']) self.vbin_size = float(self.config['vbin_size']) self.max_v = self.min_v + self.nvbins*self.vbin_size if self.max_v > 1.: raise ValueError("Cannot specify min_v + vbin_size * nvbins > 1.") else: self.max_v = float(self.config['max_v']) self.nvbins = int(self.config['nvbins']) self.vbin_size = float(self.config['vbin_size']) self.min_v = self.max_v - self.nvbins*self.vbin_size if self.min_v < -1.: raise ValueError("Cannot specify max_v - vbin_size * nvbins < -1.") self.logger.info("v: nbins = %d, min,max = %g..%g, bin_size = %g", self.nvbins,self.min_v,self.max_v,self.vbin_size) self.split_method = self.config.get('split_method','mean') if self.split_method not in ['middle', 'median', 'mean', 'random']: raise ValueError("Invalid split_method %s"%self.split_method) self.logger.debug("Using split_method = %s",self.split_method) self.max_top = treecorr.config.get(self.config,'max_top',int,10) self.bin_slop = treecorr.config.get(self.config,'bin_slop',float,-1.0) if self.bin_slop < 0.0: if self.bin_size <= 0.1: self.bin_slop = 1.0 self.b = self.bin_size else: self.bin_slop = 0.1/self.bin_size # The stored bin_slop corresponds to lnr bins. self.b = 0.1 if self.ubin_size <= 0.1: self.bu = self.ubin_size else: self.bu = 0.1 if self.vbin_size <= 0.1: self.bv = self.vbin_size else: self.bv = 0.1 else: self.b = self.bin_size * self.bin_slop self.bu = self.ubin_size * self.bin_slop self.bv = self.vbin_size * self.bin_slop if self.b > 0.100001: # Add some numerical slop self.logger.warn("Using bin_slop = %g, bin_size = %g",self.bin_slop,self.bin_size) self.logger.warn("The b parameter is bin_slop * bin_size = %g",self.b) self.logger.debug("bu = %g, bv = %g",self.bu,self.bv) self.logger.warn("It is generally recommended to use b <= 0.1 for most applications.") self.logger.warn("Larger values of this b parameter may result in significant "+ "inaccuracies.") else: self.logger.debug("Using bin_slop = %g, b = %g, bu = %g, bv = %g", self.bin_slop,self.b,self.bu,self.bv) # This makes nbins evenly spaced entries in log(r) starting with 0 with step bin_size self.logr1d = numpy.linspace(start=0, stop=self.nbins*self.bin_size, num=self.nbins, endpoint=False) # Offset by the position of the center of the first bin. self.logr1d += math.log(self.min_sep) + 0.5*self.bin_size self.u1d = numpy.linspace(start=0, stop=self.nubins*self.ubin_size, num=self.nubins, endpoint=False) self.u1d += self.min_u + 0.5*self.ubin_size self.v1d = numpy.linspace(start=0, stop=self.nvbins*self.vbin_size, num=self.nvbins, endpoint=False) self.v1d += self.min_v + 0.5*self.vbin_size shape = (self.nbins, self.nubins, self.nvbins) self.logr = numpy.tile(self.logr1d[:, numpy.newaxis, numpy.newaxis], (1, self.nubins, self.nvbins)) self.u = numpy.tile(self.u1d[numpy.newaxis, :, numpy.newaxis], (self.nbins, 1, self.nvbins)) self.v = numpy.tile(self.v1d[numpy.newaxis, numpy.newaxis, :], (self.nbins, self.nubins, 1)) self.rnom = numpy.exp(self.logr) self._coords = None self._metric = None self.min_rpar = treecorr.config.get(self.config,'min_rpar',float,0.) self.max_rpar = treecorr.config.get(self.config,'min_rpar',float,0.) def _process_all_auto(self, cat1, metric, num_threads): # I'm not sure which of these is more intuitive, but both are correct... 
if True: for c1 in cat1: self.process_auto(c1, metric, num_threads) for c2 in cat1: if c2 is not c1: self.process_cross(c1,c1,c2, metric, num_threads) self.process_cross(c1,c2,c1, metric, num_threads) self.process_cross(c2,c1,c1, metric, num_threads) for c3 in cat1: if c3 is not c1 and c3 is not c2: self.process_cross(c1,c2,c3, metric, num_threads) else: for i,c1 in enumerate(cat1): self.process_auto(c1) for j,c2 in enumerate(cat1[i+1:]): self.process_cross(c1,c1,c2, metric, num_threads) self.process_cross(c1,c2,c1, metric, num_threads) self.process_cross(c2,c1,c1, metric, num_threads) self.process_cross(c1,c2,c2, metric, num_threads) self.process_cross(c2,c1,c2, metric, num_threads) self.process_cross(c2,c2,c1, metric, num_threads) for c3 in cat1[i+j+1:]: self.process_cross(c1,c2,c3, metric, num_threads) self.process_cross(c1,c3,c2, metric, num_threads) self.process_cross(c2,c1,c3, metric, num_threads) self.process_cross(c2,c3,c1, metric, num_threads) self.process_cross(c3,c1,c2, metric, num_threads) self.process_cross(c3,c2,c1, metric, num_threads) def _process_all_cross21(self, cat1, cat2, metric, num_threads): for c1 in cat1: for c2 in cat2: self.process_cross(c1,c1,c2, metric, num_threads) for c3 in cat1: if c3 is not c1: self.process_cross(c1,c3,c2, metric, num_threads) self.process_cross(c3,c1,c2, metric, num_threads) def _process_all_cross(self, cat1, cat2, cat3, metric, num_threads): for c1 in cat1: for c2 in cat2: for c3 in cat3: self.process_cross(c1,c2,c3, metric, num_threads) def _set_num_threads(self, num_threads): if num_threads is None: num_threads = self.config.get('num_threads',None) if num_threads is None: self.logger.debug('Set num_threads automatically from ncpu') else: self.logger.debug('Set num_threads = %d',num_threads) treecorr.set_omp_threads(num_threads, self.logger) def _set_metric(self, metric, coords1, coords2=None, coords3=None): if metric is None: metric = treecorr.config.get(self.config,'metric',str,'Euclidean') coords, metric = treecorr.util.parse_metric(metric, coords1, coords2, coords3) if self._coords != None or self._metric != None: if coords != self._coords: self.logger.warn("Detected a change in catalog coordinate systems. " "This probably doesn't make sense!") if metric != self._metric: self.logger.warn("Detected a change in metric. " "This probably doesn't make sense!") self._coords = coords self._metric = metric def _apply_units(self, mask): if self._coords == treecorr._lib.Sphere and self._metric == treecorr._lib.Euclidean: # Then our distances are all angles. Convert from the chord distance to a real angle. # L = 2 sin(theta/2) self.meand1[mask] = 2. * numpy.arcsin(self.meand1[mask]/2.) self.meanlogd1[mask] = numpy.log(2.*numpy.arcsin(numpy.exp(self.meanlogd1[mask])/2.)) self.meand2[mask] = 2. * numpy.arcsin(self.meand2[mask]/2.) self.meanlogd2[mask] = numpy.log(2.*numpy.arcsin(numpy.exp(self.meanlogd2[mask])/2.)) self.meand3[mask] = 2. * numpy.arcsin(self.meand3[mask]/2.) self.meanlogd3[mask] = numpy.log(2.*numpy.arcsin(numpy.exp(self.meanlogd3[mask])/2.)) self.meand1[mask] /= self.sep_units self.meanlogd1[mask] -= self.log_sep_units self.meand2[mask] /= self.sep_units self.meanlogd2[mask] -= self.log_sep_units self.meand3[mask] /= self.sep_units self.meanlogd3[mask] -= self.log_sep_units def _get_minmax_size(self): b = numpy.max( (self.b, self.bu, self.bv) ) if self._metric == treecorr._lib.Euclidean: # The minimum separation we care about is that of the smallest size, which is # min_sep * min_u. 
Do the same calculation as for 2pt to get to min_size. min_size = self._min_sep * self.min_u * b / (2.+3.*b) # This time, the maximum size is d1 * b. d1 can be as high as 2*max_sep. max_size = 2. * self._max_sep * b return min_size, max_size else: return 0., 0.
[ "michael@jarvis.net" ]
michael@jarvis.net
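The docstring above parameterizes each triangle by r = d2, u = d3/d2 and v = ±(d1 - d2)/d3 with the side lengths ordered d1 >= d2 >= d3. A small worked example for one flat 2-d triangle (the orientation-dependent sign of v is left out here and only its magnitude is computed):

import numpy as np

# One flat 2-d triangle; d1 >= d2 >= d3 are its sorted side lengths.
pts = np.array([[0.0, 0.0], [3.0, 0.0], [1.0, 2.0]])
sides = sorted(
    (np.linalg.norm(pts[i] - pts[j]) for i, j in ((0, 1), (1, 2), (0, 2))),
    reverse=True,
)
d1, d2, d3 = sides

r = d2                      # the separation that the log-r bins act on
u = d3 / d2                 # always between 0 and 1
v_mag = (d1 - d2) / d3      # |v|; the sign encodes the triangle's orientation, as described above
print(r, u, v_mag)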
871b137fe0872b36469883e5e9a349c0675255a9
34722a32572fe7b5d9f440dd90c13b9dffec036e
/deep-learning-face-detection/detect_capture_faces.py
183c261601290f71c3b4095b28994349fb79fdc8
[]
no_license
NathanDuPont/Discovery-World-Face-GAN
d6bdece248ff57a85a9a044a20568e0868e1c193
2375704d9f6670bbba32fad4add8222111e8317c
refs/heads/main
2023-04-14T22:23:34.559386
2021-04-23T01:44:15
2021-04-23T01:44:15
347,130,421
1
0
null
2021-04-01T22:26:19
2021-03-12T16:34:03
Python
UTF-8
Python
false
false
2,740
py
# python detect_capture_faces.py --video swing.mp4
from imutils.video import FileVideoStream
import numpy as np
import argparse
import imutils
import cv2
import os

# ap = argparse.ArgumentParser()
# ap.add_argument("-v", "--video", required=True,
#                 help="path to input video file")
# args = vars(ap.parse_args())

faces = []
video_file_name = 'swing.mp4'
confidence_threshold = 0.5

model = cv2.dnn.readNetFromCaffe('model/deploy.prototxt.txt', "model/opencv_face_detector.caffemodel")

file_name_no_ext = video_file_name.split('.')[0]
face_directory = f'{file_name_no_ext}-faces'
if not os.path.exists(face_directory):
    os.makedirs(face_directory)

video_stream = FileVideoStream(video_file_name).start()

while True:
    # grab the frame from the threaded video stream and resize it
    # to have a maximum width of 400 pixels
    frame = video_stream.read()
    if frame is None:
        break
    frame = imutils.resize(frame, width=400)

    # grab the frame dimensions and convert it to a blob
    (h, w) = frame.shape[:2]
    blob = cv2.dnn.blobFromImage(cv2.resize(frame, (300, 300)), 1.0, (300, 300), (104.0, 177.0, 123.0))

    # pass the blob through the network and obtain the detections and predictions
    model.setInput(blob)
    detections = model.forward()

    # we only care about detections which are greater than the threshold
    num_detections = detections.shape[2]
    detections = detections[0, 0, detections[0, 0, 0:num_detections, 2] > confidence_threshold, 0:7]

    if detections.size > 0:
        # get the size of each box as the sum of two sides of the rectangle
        num_detections = detections.shape[0]
        sizes = ((detections[0:num_detections, 5] - detections[0:num_detections, 3]) +
                 (detections[0:num_detections, 6] - detections[0:num_detections, 4]))

        # get the largest box: negate the sizes before argsort so the indices come back largest-first
        ordered_sizes = (-sizes).argsort()
        if sizes[ordered_sizes[0]] > .5:
            box = detections[ordered_sizes[0], 3:7] * np.array([w, h, w, h])
            (startX, startY, endX, endY) = box.astype("int")
            faces.append(frame[startY:endY, startX:endX])

    # show the output frame
    cv2.imshow("Frame", frame)
    key = cv2.waitKey(1) & 0xFF

    # if the `q` key was pressed, break from the loop
    if key == ord("q"):
        break

# write all the faces to a file
for i, face in enumerate(faces):
    cv2.imwrite(f'{face_directory}/face{i}.jpg', face)

# do a bit of cleanup
cv2.destroyAllWindows()
video_stream.stop()
[ "dowdlek@msoe.edu" ]
dowdlek@msoe.edu
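The frame loop above keeps only detections whose confidence exceeds the threshold and then picks the largest box by the sum of its width and height. The same filtering and selection, exercised on a small hand-made array in the (1, 1, N, 7) layout the script indexes, so the slicing is easier to follow (the numbers are made up for illustration):

import numpy as np

confidence_threshold = 0.5

# Fake net output in the layout indexed above:
# each row is [image_id, class_id, confidence, startX, startY, endX, endY] with coordinates in 0..1.
detections = np.array([[[
    [0, 1, 0.30, 0.10, 0.10, 0.20, 0.25],   # below threshold, dropped
    [0, 1, 0.90, 0.40, 0.30, 0.80, 0.90],   # largest surviving box
    [0, 1, 0.75, 0.05, 0.05, 0.15, 0.20],
]]])

num = detections.shape[2]
kept = detections[0, 0, detections[0, 0, :num, 2] > confidence_threshold, 0:7]

# Rank surviving boxes by (width + height), largest first.
sizes = (kept[:, 5] - kept[:, 3]) + (kept[:, 6] - kept[:, 4])
order = (-sizes).argsort()
print(kept[order[0], 3:7])   # -> the coordinates of the 0.90-confidence box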
4e4a8e60733074a4f13e76f417f8b65a0dd28f7d
51d1263741394ebf2c7dc6be6cc2331352b98950
/blog/urls.py
480f281eba0bb7d628a807d67bf413652b275055
[]
no_license
Charles-Wu-Chen/djangogirls
25d5db139ef699e6489eedb0e8ee1a3893358cb4
5b20c90e69ffdc62aac3967d4320fe52efe17a23
refs/heads/master
2021-05-31T19:02:30.636495
2016-01-18T03:32:33
2016-01-18T03:32:33
null
0
0
null
null
null
null
UTF-8
Python
false
false
339
py
from django.conf.urls import url from blog import views urlpatterns = [ url(r'^$', views.post_list, name='post_list_name'), url(r'^post/(?P<pk>[0-9]+)/$', views.post_detail, name='post_detail'), url(r'^post/new/$', views.post_new, name='post_new'), url(r'^post/(?P<pk>[0-9]+)/edit/$', views.post_edit, name='post_edit'), ]
[ "wuchen2010@gmail.com" ]
wuchen2010@gmail.com
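The named groups in the regexes above are what Django's URL resolver captures and passes to the views as keyword arguments. A standalone illustration of that capture using Python's re module (Django performs the equivalent match internally; the sample paths are made up):

import re

# Same pattern as the post_detail route above.
post_detail = re.compile(r'^post/(?P<pk>[0-9]+)/$')

print(post_detail.match('post/42/').groupdict())    # {'pk': '42'} -> views.post_detail(request, pk='42')
print(post_detail.match('post/abc/'))               # None: a non-numeric pk does not match this route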
e0fb0a82beb95083e8fb38de30e260d71be13f1c
5e485ce79f6bac55a9c9e603a0608f51046428f2
/testjf_UI/interface/test_多人投资并发.py
024a27b177663583f30571bb511ce18eb93e131a
[]
no_license
3058035655/zstestjf122201
27205631132b402bab68dc35d83668b7b1c382c3
6fa6b648fbd4b60898a7b332ec051b772b2e491b
refs/heads/master
2020-04-12T22:51:57.148145
2018-12-22T10:13:31
2018-12-22T10:13:31
162,801,034
0
0
null
null
null
null
UTF-8
Python
false
false
3,577
py
# -*- coding: utf-8 -*-
# Concurrent multi-user investment test (test_多人投资并发)
import unittest
import time
import sys
import re
import requests

from testjf_UI.common.db import Db
from testjf_UI.common.loginpage import LoginPage
from testjf_UI.interface.test_多人投资ok import Test_duoren_invest
from utils.config import DATA_PATH, REPORT_PATH
from utils.file_reader import ExcelReader

sys.path.append('D:\\jftest1_CG\\test1')
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By


class Test_duoren_invest(unittest.TestCase):
    excel = DATA_PATH + '/register2.xlsx'

    def test_Login(self):
        # excel = DATA_PATH + '/register.xlsx'
        datas = ExcelReader(self.excel).data
        for d in datas:
            with self.subTest(data=d):
                db = Db()
                connection = db.connection
                cursor = db.cursor
                # cursor.execute("SELECT message_note from sys_tel_message where tel='13301302026'")
                cursor.execute("SELECT code from bidd_info where title='m季季81602'")
                connection.commit()
                t = cursor.fetchall()
                # a = t['tel']
                code = t[0]['code']
                print('bid code:', code)
                tel = int(float(d['title']))
                print('user:', tel)

                # User login
                content = {'login': tel,
                           'password': '2971055a690ad019e9fc08a9971080ccfd6a8b588c69acc28383a12d9cfdcb135a60550a4df643b9967c5fab90ce4eb8e3970c2c093fefe299662ac44e868763d281e8708ab625528d55c6a777b2700bcb9daf7e7e0c6805ffd13760d4ac0120d6f43c2dc05fc38fcff485eedd8859d79200ddb7a9a606b8548fa1d8def1dacc',
                           'pwdLevel': '2',
                           'verify_code': '请输入计算结果',
                           'randCode': '请输入您的6位验证码',
                           'commendPhone': '请输入推荐码(推荐人手机号后8位)',
                           'loginregister': '请输入您的手机号',
                           'passwordresgister': '',
                           'token': '',
                           'modulus': '',
                           'exponent': '',
                           'newToken': '',
                           'phoneId': '',
                           'code': '',
                           'utype': '',
                           'csrftoken': '',
                           'pwdLevel': ''}  # note: 'pwdLevel' appears twice; this later empty value is the one actually sent
                r = requests.post('http://192.168.1.249:9901/hkjf/login.do?method=indexlogin', data=content)  # send the request
                print('login response status', r.status_code)
                c = r.cookies

                # Request the bid detail page
                content_xq = {'code': code, 'tempType': '2'}
                rxq = requests.get("http://192.168.1.249:9901/hkjf/investControllerFront.do?method=detail", params=content_xq, cookies=c)
                txt = rxq.text
                r = re.findall(r'<input name="token" type="hidden" value="(.+?)"/>', txt)
                # print("token value:", r)
                print('detail response code:', rxq.status_code)

                # User invests
                contenttz = {'code': code, 'token': r, 'couponDetailCodeK': '', 'couponDetailCodeJ': '',
                             'confirmAmount': '1000', 'useAbleMoney': '1000'}
                rtz = requests.post('http://192.168.1.249:9901/hkjf/investController.do?method=goodsOpenInvest', data=contenttz, cookies=c)  # send the request
                # print(r1.text)  # get the response body
                print('investment response status', rtz.status_code)


if __name__ == '__main__':
    unittest.main()
[ "3058035655@qq.com" ]
3058035655@qq.com
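The test above pulls the hidden token out of the bid detail page with re.findall before posting the investment. The same extraction on a tiny inline HTML sample (the markup and token value are made up) shows what that call returns: a list, which the test then places into the form data as-is:

import re

sample = '<form><input name="token" type="hidden" value="abc123DEF"/></form>'
tokens = re.findall(r'<input name="token" type="hidden" value="(.+?)"/>', sample)
print(tokens)       # ['abc123DEF']  (a list with one element, not a bare string)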