Dataset schema (column: type, observed min to max):

blob_id: string, length 40
directory_id: string, length 40
path: string, length 3 to 288
content_id: string, length 40
detected_licenses: list, length 0 to 112
license_type: string, 2 values
repo_name: string, length 5 to 115
snapshot_id: string, length 40
revision_id: string, length 40
branch_name: string, 684 values
visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
github_id: int64, 4.92k to 681M
star_events_count: int64, 0 to 209k
fork_events_count: int64, 0 to 110k
gha_license_id: string, 22 values
gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50
gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19
gha_language: string, 147 values
src_encoding: string, 25 values
language: string, 1 value
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 128 to 12.7k
extension: string, 142 values
content: string, length 128 to 8.19k
authors: list, length 1 to 1
author_id: string, length 1 to 132
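Read as a record schema, each row below pairs repository metadata with the stored source file itself. As a rough illustration only, the following sketch shows how records with these columns could be streamed with the Hugging Face datasets library; the dataset path is a placeholder (the source listing does not name the dataset), and the snippet assumes nothing beyond the column names given above.

from datasets import load_dataset

# Placeholder dataset path; the actual dataset name is not given in this listing.
ds = load_dataset("user/python-code-corpus", split="train", streaming=True)

for record in ds.take(3):
    # Repository metadata stored alongside the file contents.
    print(record["repo_name"], record["path"], record["license_type"])
    print(record["length_bytes"], "bytes, encoding:", record["src_encoding"])
    # The 'content' column holds the raw source text of the file.
    print(record["content"][:200])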
a352a0a1b1ee3450f63f90b6486011d84e3b1868
e87c04d6c2bbba383f9c75620b16f02358039ab5
/보충/미로1.py
889bc61652d663fee00031279caf1f6e9ba1bf6d
[]
no_license
yoonwoo123/Algorithm
2bf6e103009572cbcf3abfd783f6c28762529340
5d1e76f1bf6c2fc6acb25dc5296d62b2ca453ec6
refs/heads/master
2022-06-18T14:06:06.248906
2022-05-29T10:41:15
2022-05-29T10:41:15
221,483,283
0
0
null
null
null
null
UTF-8
Python
false
false
706
py
import sys
sys.stdin = open("미로1_input.txt")

dx = [1, 0, -1, 0]  # right, down, left, up
dy = [0, 1, 0, -1]


def dfs(x, y):
    maze[y][x] = 9  # mark as visited
    for i in range(4):
        global flag
        nx = x + dx[i]
        ny = y + dy[i]
        if maze[ny][nx] == 3:
            flag = 1
            break
        if maze[ny][nx] == 0:  # recurse only into cells that are still open
            dfs(nx, ny)
    if flag == 1:
        return 1
    else:
        return 0


testcases = 10
for T in range(testcases):
    tc = input()
    maze = []
    flag = 0
    for i in range(16):
        maze.append(list(map(int, list(input()))))
    dfs(1, 1)
    print("%s%d %d" % ('#', T + 1, flag))
[ "lkkjasd@korea.ac.kr" ]
lkkjasd@korea.ac.kr
8f90f53f4f8b98f34c6f21170636a11a3faf3b30
456433ac78b70cb8ae076ae166a85e349f181d7f
/systems/KURSSKLAD/KURSTERM/TRANSUNITRETURN/transunitreturn.py
014805c78aeb006058805d3a66217c259c87f717
[]
no_license
shybkoi/WMS-Demo
854c1679b121c68323445b60f3992959f922be8d
2525559c4f56654acfbc21b41b3f5e40387b89e0
refs/heads/master
2021-01-23T01:51:20.074825
2017-03-23T11:51:18
2017-03-23T11:51:18
85,937,726
0
0
null
null
null
null
WINDOWS-1251
Python
false
false
3,269
py
# -*- coding: cp1251 -*- from systems.KURSSKLAD.KURSTERM.common import TCommonTerm from systems.KURSSKLAD.KURSTERM.TRANSUNITRETURN.templates.index import index from systems.KURSSKLAD.KURSTERM.TRANSUNITRETURN.templates.auto import auto from systems.KURSSKLAD.KURSTERM.TRANSUNITRETURN.templates.autoTransUnit import autoTransUnit from kinterbasdb import ProgrammingError as FBExc from cherrypy import HTTPRedirect class TTransUnitReturn(TCommonTerm): helpSystem = True def index(self, id_system=None): TCommonTerm.index(self, id_system) self.setIfaceVar('wmsid',self.GetKSessionID()) return self.main() index.exposed = True def main(self, barcode=None, mes=None): if barcode: bcInfo = self.kBarCodeInfo(barcode) if bcInfo and bcInfo['result']==0: if bcInfo['usercode']=='AUTO': raise HTTPRedirect('auto?id=%s'%(bcInfo['recordid'])) else: mes = _('Не верный ШК') else: mes = bcInfo['mes'] return self.drawTemplate(templ=index,data=[{'mes':mes}]) main.exposed = True def auto(self, id, barcode = None, mes = None): if barcode: bcInfo = self.kBarCodeInfo(barcode) if bcInfo and bcInfo['result']==0: if bcInfo['usercode']=='AUTO': raise HTTPRedirect('auto?id=%s'%(bcInfo['recordid'])) elif bcInfo['usercode']=='TRANSUNIT': raise HTTPRedirect('autoTransUnit?aid=%s&tuid=%s'%(id,bcInfo['recordid'])) else: mes = _('Не верный ШК') else: mes = bcInfo['mes'] a = self.autoInfo(id) atu = self.dbExec(sql="select * from WH_TRANSUNITRETURN_AUTOLISTTU(?)",params=[id],fetch="all") return self.drawTemplate(templ=auto,data=[a,atu,{'backurl':'main', 'mes':mes}]) auto.exposed = True def autoTransUnit(self, aid, tuid, barcode = None, mes = None): tuid = self.kId(tuid) if barcode: bcInfo = self.kBarCodeInfo(barcode) if bcInfo and bcInfo['result']==0: if bcInfo['usercode']=='AUTO': raise HTTPRedirect('auto?id=%s'%(bcInfo['recordid'])) elif bcInfo['usercode']=='TRANSUNIT': if tuid == self.kId(bcInfo['recordid']): try: self.dbExec(sql="execute procedure WH_TRANSUNITRETURN_DO(?,?,?)",params=[aid,tuid,self.getIfaceVar('wmsid')],fetch="none") except FBExc, exc: mes = self.fbExcText(exc[1]) else: raise HTTPRedirect('auto?id=%s'%(aid)) else: raise HTTPRedirect('autoTransUnit?aid=%s&tuid=%s'%(aid,bcInfo['recordid'])) else: mes = _('Не верный ШК') else: mes = bcInfo['mes'] a = self.autoInfo(aid) tu = self.transUnitInfo(tuid) return self.drawTemplate(templ=autoTransUnit,data=[a,tu,{'backurl':'auto?id=%s'%(aid),'mes':mes}]) autoTransUnit.exposed = True
[ "s.shybkoi@gmail.com" ]
s.shybkoi@gmail.com
925ca8233ec1f869327ec17b01779d58af3a7eeb
ad9bd58a3ec8fa08dfcc994d4101ee815a9f5bc0
/02_algorithm/04.Stack/20190819/stack_practice_03/stack_practice_03.py
a2b7c341e89dea47790118435e6561dd2c2769d4
[]
no_license
wally-wally/TIL
93fc1d0e3bc7d030341ed54155294c68c48b4c7d
936783bc86f563646c0398c24e2fcaa707f0ed23
refs/heads/master
2023-04-28T08:59:48.235747
2023-04-12T12:06:52
2023-04-12T12:06:52
195,918,111
40
7
null
2020-09-29T16:20:46
2019-07-09T02:31:02
Python
UTF-8
Python
false
false
632
py
# Solve the problem recursively
# Recursive calls are a good fit when implementing a recursive definition.
# Recursive definition --> build the answer to a bigger problem from the answers to smaller ones
# Factorial example
# The problem size is expressed as a natural number
# 1,           if n = 1 or 0
# (n-1)! * n,  if n > 1
def factorial(n):
    # n (parameter): the value describing the problem (its size)
    # return value = n! (the solution to the problem)
    if n == 0 or n == 1:
        # base case
        # terminate without another recursive call
        return 1
    else:
        # recursive call
        return factorial(n - 1) * n


print(factorial(4))
[ "wallys0213@gmail.com" ]
wallys0213@gmail.com
dde95a6598f20b7d186a996cf811781d6e534b9e
9633f30e171550a5008ffe1a90d21254e7fe9c19
/0x0C-python-almost_a_circle/models/base.py
b9ce9c1c1c3eaf425240be1a6412718d8d35b1ae
[]
no_license
jj131204/holbertonschool-higher_level_programming
95e2b528bc68e7a3897c5ff49a23b1f37e9abee4
be2afa3b2a54e88d7dd4e39e5116c9bd1b941ba6
refs/heads/master
2023-08-30T16:56:37.834970
2021-09-22T22:34:37
2021-09-22T22:34:37
361,829,671
0
0
null
null
null
null
UTF-8
Python
false
false
1,010
py
#!/usr/bin/python3
"""create class Base"""
import json


class Base:
    """class base"""
    __nb_objects = 0

    def __init__(self, id=None):
        """ def __init__(self, id=None): """
        if id is not None:
            self.id = id
        else:
            Base.__nb_objects += 1
            self.id = Base.__nb_objects

    @staticmethod
    def to_json_string(list_dictionaries):
        """returns the JSON string representation of list_dictionaries"""
        if list_dictionaries is None or list_dictionaries == []:
            return "[]"
        return json.dumps(list_dictionaries)

    @classmethod
    def save_to_file(cls, list_objs):
        """writes the JSON string representation of list_objs to a file"""
        str_ = []
        name = cls.__name__ + ".json"
        if list_objs is not None:
            for i in list_objs:
                str_.append(cls.to_dictionary(i))
        with open(name, mode="w") as file:
            file.write(cls.to_json_string(str_))
[ "jj131204@gmail.com" ]
jj131204@gmail.com
c9b09b68eed4b34734399a88a6c0a55d7be9401c
95495baeb47fd40b9a7ecb372b79d3847aa7a139
/test/test_access_policy_category.py
11c2017c46bf3867b915f353ab0c59fca1768429
[]
no_license
pt1988/fmc-api
b1d8ff110e12c13aa94d737f3fae9174578b019c
075f229585fcf9bd9486600200ff9efea5371912
refs/heads/main
2023-01-07T09:22:07.685524
2020-10-30T03:21:24
2020-10-30T03:21:24
308,226,669
0
0
null
null
null
null
UTF-8
Python
false
false
1,242
py
# coding: utf-8

"""
    Cisco Firepower Management Center Open API Specification

    **Specifies the REST URLs and methods supported in the Cisco Firepower Management Center API. Refer to the version specific [REST API Quick Start Guide](https://www.cisco.com/c/en/us/support/security/defense-center/products-programming-reference-guides-list.html) for additional information.**  # noqa: E501

    OpenAPI spec version: 1.0.0
    Contact: tac@cisco.com
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import unittest

import swagger_client
from swagger_client.models.access_policy_category import AccessPolicyCategory  # noqa: E501
from swagger_client.rest import ApiException


class TestAccessPolicyCategory(unittest.TestCase):
    """AccessPolicyCategory unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testAccessPolicyCategory(self):
        """Test AccessPolicyCategory"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.access_policy_category.AccessPolicyCategory()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
[ "pt1988@gmail.com" ]
pt1988@gmail.com
67356b560bdcc2411922d7c19c8c1593edebbd9e
7cbdb437946c7c79ca514f52c352ae3bfe5b1aaf
/recipe/tests.py
ee043909e2b45e6e4aff36e23350a72f8a89a11c
[]
no_license
SamirIngley/Recipes-Blog
435311fab3cb27f0b288729805a2aed50bfb9a3f
7b12f50738b4f65557c0e1f92f905ddd83de2ebf
refs/heads/master
2020-11-26T22:27:49.239843
2019-12-26T01:21:19
2019-12-26T01:21:19
229,217,552
0
0
null
2020-06-06T01:05:46
2019-12-20T07:55:06
Python
UTF-8
Python
false
false
1,838
py
# wiki/tests.py

from django.test import TestCase
from django.contrib.auth.models import User

from recipe.models import Page


class RecipeTestCase(TestCase):
    def test_true_is_true(self):
        """ Tests if True is equal to True. Should always pass. """
        self.assertEqual(True, True)

    def test_page_slugify_on_save(self):
        """ Tests the slug generated when saving a Page. """
        # Author is a required field in our model.
        # Create a user for this test and save it to the test database.
        user = User()
        user.save()

        # Create and save a new page to the test database.
        page = Page(title="My Test Page", content="test", author=user)
        page.save()

        # Make sure the slug that was generated in Page.save()
        # matches what we think it should be.
        self.assertEqual(page.slug, "my-test-page")


class PageListViewTests(TestCase):
    def test_multiple_pages(self):
        # Make some test data to be displayed on the page.
        user = User.objects.create()
        Page.objects.create(title="My Test Page", content="test", author=user)
        Page.objects.create(title="Another Test Page", content="test", author=user)

        # Issue a GET request to the MakeWiki homepage.
        # When we make a request, we get a response back.
        response = self.client.get('/')

        # Check that the response is 200 OK.
        self.assertEqual(response.status_code, 200)

        # Check that the number of pages passed to the template
        # matches the number of pages we have in the database.
        responses = response.context['pages']
        self.assertEqual(len(responses), 2)
        self.assertQuerysetEqual(
            responses,
            ['<Page: My Test Page>', '<Page: Another Test Page>'],
            ordered=False
        )
[ "samir.ingle7@gmail.com" ]
samir.ingle7@gmail.com
90d4aab70505bc32d8460fdc76a1d6a0bc6b0724
90419da201cd4948a27d3612f0b482c68026c96f
/sdk/python/pulumi_azure_nextgen/compute/v20191201/get_log_analytic_export_throttled_requests.py
3a6f0ea7758156897c6c5bbd943793b12d7e02fe
[ "BSD-3-Clause", "Apache-2.0" ]
permissive
test-wiz-sec/pulumi-azure-nextgen
cd4bee5d70cb0d332c04f16bb54e17d016d2adaf
20a695af0d020b34b0f1c336e1b69702755174cc
refs/heads/master
2023-06-08T02:35:52.639773
2020-11-06T22:39:06
2020-11-06T22:39:06
312,993,761
0
0
Apache-2.0
2023-06-02T06:47:28
2020-11-15T09:04:00
null
UTF-8
Python
false
false
3,608
py
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs __all__ = [ 'GetLogAnalyticExportThrottledRequestsResult', 'AwaitableGetLogAnalyticExportThrottledRequestsResult', 'get_log_analytic_export_throttled_requests', ] @pulumi.output_type class GetLogAnalyticExportThrottledRequestsResult: """ LogAnalytics operation status response """ def __init__(__self__, properties=None): if properties and not isinstance(properties, dict): raise TypeError("Expected argument 'properties' to be a dict") pulumi.set(__self__, "properties", properties) @property @pulumi.getter def properties(self) -> 'outputs.LogAnalyticsOutputResponseResult': """ LogAnalyticsOutput """ return pulumi.get(self, "properties") class AwaitableGetLogAnalyticExportThrottledRequestsResult(GetLogAnalyticExportThrottledRequestsResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetLogAnalyticExportThrottledRequestsResult( properties=self.properties) def get_log_analytic_export_throttled_requests(blob_container_sas_uri: Optional[str] = None, from_time: Optional[str] = None, group_by_operation_name: Optional[bool] = None, group_by_resource_name: Optional[bool] = None, group_by_throttle_policy: Optional[bool] = None, location: Optional[str] = None, to_time: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetLogAnalyticExportThrottledRequestsResult: """ Use this data source to access information about an existing resource. :param str blob_container_sas_uri: SAS Uri of the logging blob container to which LogAnalytics Api writes output logs to. :param str from_time: From time of the query :param bool group_by_operation_name: Group query result by Operation Name. :param bool group_by_resource_name: Group query result by Resource Name. :param bool group_by_throttle_policy: Group query result by Throttle Policy applied. :param str location: The location upon which virtual-machine-sizes is queried. :param str to_time: To time of the query """ __args__ = dict() __args__['blobContainerSasUri'] = blob_container_sas_uri __args__['fromTime'] = from_time __args__['groupByOperationName'] = group_by_operation_name __args__['groupByResourceName'] = group_by_resource_name __args__['groupByThrottlePolicy'] = group_by_throttle_policy __args__['location'] = location __args__['toTime'] = to_time if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-nextgen:compute/v20191201:getLogAnalyticExportThrottledRequests', __args__, opts=opts, typ=GetLogAnalyticExportThrottledRequestsResult).value return AwaitableGetLogAnalyticExportThrottledRequestsResult( properties=__ret__.properties)
[ "noreply@github.com" ]
test-wiz-sec.noreply@github.com
288e76ce905f639a74ba721fd38b41fc04c40f45
16e498600e8e9f1c0f3f0e2c4b2df2dcb56b9adc
/registration/admin.py
ea137c1ff68e3fdf9cd248567b622cc64d6f5948
[ "BSD-3-Clause" ]
permissive
kklimonda/django-registration
b220338f0775ea7313a481438cade9341ef078c3
9d4099011c64d0e9d1a502a7b230fa2547d7f771
refs/heads/master
2021-01-17T07:28:05.795437
2013-04-14T03:06:20
2013-04-14T03:06:20
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,611
py
from django.contrib import admin
from django.contrib.sites.models import RequestSite
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _

from .models import RegistrationProfile


class RegistrationAdmin(admin.ModelAdmin):
    actions = ['activate_users', 'resend_activation_email']
    list_display = ('user', 'activation_key_expired')
    raw_id_fields = ['user']
    search_fields = ('user__username', 'user__first_name',
                     'user__last_name', 'user__email')

    def activate_users(self, request, queryset):
        """
        Activates the selected users, if they are not already activated.
        """
        for profile in queryset:
            RegistrationProfile.objects.activate_user(profile.activation_key)
    activate_users.short_description = _("Activate users")

    def resend_activation_email(self, request, queryset):
        """
        Re-sends activation emails for the selected users.

        Note that this will *only* send activation emails for users
        who are eligible to activate; emails will not be sent to users
        whose activation keys have expired or who have already activated.
        """
        if Site._meta.installed:
            site = Site.objects.get_current()
        else:
            site = RequestSite(request)

        for profile in queryset:
            if not profile.activation_key_expired():
                profile.send_activation_email(site)
    resend_activation_email.short_description = _("Re-send activation emails")


admin.site.register(RegistrationProfile, RegistrationAdmin)
[ "rochacbruno@gmail.com" ]
rochacbruno@gmail.com
64b4dc3e651ef1a0377081e5697aad87c4789d60
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/73/usersdata/214/39782/submittedfiles/triangulo.py
fc7eb86f1197185aca4df06deba4e940fa9bee35
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
475
py
# -*- coding: utf-8 -*-
import math

a = int(input('Digite a:'))
b = int(input('Digite a:'))
c = int(input('Digite a:'))
a >= b >= c > 0
if a < b + c:
    print('S')
else:
    print('N')
if a + b < c:
    if (a**2) == (b**2) + (c**2):
        print('Re')
    if (a**2) == (b**2) + (c**2):
        print('Ob')
    if (a**2) == (b**2) + (c**2):
        print('Ac')
    if a == b == c:
        print('Eq')
    if b == c != a:
        print('Is')
    if (a != b) and (b != c):
        print('Es')
[ "rafael.mota@ufca.edu.br" ]
rafael.mota@ufca.edu.br
99df5d6a98c97acee2ab5bf8db8f1a542093aa9b
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/59/usersdata/195/61824/submittedfiles/testes.py
569ee6ed3f4b3b4a00c2e1c54c88be2f1a92f096
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
296
py
# -*- coding: utf-8 -*-
import numpy as np


def transposta(b):
    a = np.zeros((b.shape[1], b.shape[0]))
    for i in range(0, b.shape[0], 1):
        for j in range(0, b.shape[1], 1):
            b[i, j] = float(input('elemento:'))
            a[j, i] = b[i, j]
    return a


n = int(input('linas:'))
m = int(input('colunas:'))
b = np.zeros((n, m))
print(transposta(b))
[ "rafael.mota@ufca.edu.br" ]
rafael.mota@ufca.edu.br
5bae0b9fbbefb10205ba1adbbadbfbd3ca07d7f8
9743d5fd24822f79c156ad112229e25adb9ed6f6
/xai/brain/wordbase/otherforms/_funnelled.py
1b5788bd13ce44f929c9be42befa4a4c2f4fd558
[ "MIT" ]
permissive
cash2one/xai
de7adad1758f50dd6786bf0111e71a903f039b64
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
refs/heads/master
2021-01-19T12:33:54.964379
2017-01-28T02:00:50
2017-01-28T02:00:50
null
0
0
null
null
null
null
UTF-8
Python
false
false
226
py
# class header
class _FUNNELLED():
    def __init__(self,):
        self.name = "FUNNELLED"
        self.definitions = ['funnel']
        self.parents = []
        self.childen = []
        self.properties = []
        self.jsondata = {}
        self.basic = ['funnel']
[ "xingwang1991@gmail.com" ]
xingwang1991@gmail.com
fdbc362014380cedc9ec820a8c28b657237f2444
dd8511e1209646823f7ec2a2ce669171f9b0a5cc
/plato/tools/common/test_symbolic_predictors.py
3dfd88b375367eee00f840f7afbef08a991bd643
[]
no_license
codeaudit/plato
9e8df28e589d6c43aef2271e9f940076ef4a143d
ba19f92a42729e9d3cf5da05746dead83db3f41c
refs/heads/master
2021-01-12T11:32:34.796675
2016-10-07T14:05:00
2016-10-07T14:05:00
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,025
py
from plato.tools.mlp.mlp import MultiLayerPerceptron from plato.tools.optimization.cost import negative_log_likelihood_dangerous from plato.tools.common.online_predictors import GradientBasedPredictor from plato.tools.optimization.optimizers import SimpleGradientDescent from utils.benchmarks.train_and_test import percent_argmax_correct from utils.tools.iteration import zip_minibatch_iterate from utils.datasets.synthetic_clusters import get_synthetic_clusters_dataset __author__ = 'peter' def test_symbolic_predicors(): """ This test is meant to serves as both a test and tutorial for how to use a symbolic predictor. It shows how to construct a symbolic predictor using a function, cost function, and optimizer. It then trains this predictor on a synthetic toy dataset and demonstrates that it has learned. """ dataset = get_synthetic_clusters_dataset() symbolic_predictor = GradientBasedPredictor( function = MultiLayerPerceptron.from_init( layer_sizes = [dataset.input_size, 100, dataset.n_categories], output_activation='softmax', w_init = 0.1, rng = 3252 ), cost_function=negative_log_likelihood_dangerous, optimizer=SimpleGradientDescent(eta = 0.1), ) predictor = symbolic_predictor.compile() # .compile() turns the symbolic predictor into an IPredictor object, which can be called with numpy arrays. init_score = percent_argmax_correct(predictor.predict(dataset.test_set.input), dataset.test_set.target) for x_m, y_m in zip_minibatch_iterate([dataset.training_set.input, dataset.training_set.target], minibatch_size=10, n_epochs=20): predictor.train(x_m, y_m) final_score = percent_argmax_correct(predictor.predict(dataset.test_set.input), dataset.test_set.target) print 'Initial score: %s%%. Final score: %s%%' % (init_score, final_score) assert init_score < 30 assert final_score > 98 if __name__ == '__main__': test_symbolic_predicors()
[ "peter.ed.oconnor@gmail.com" ]
peter.ed.oconnor@gmail.com
e8d66db9f808801439a592d691e8ab8563e3eb6b
1eb7fa8b1745d4e51cefb4eceb44621862516aa6
/Company Interview/FB/SerializeAndDeserialize.py
33ca3c4b67d0cd6cb974244469491f275d5f25f1
[]
no_license
geniousisme/CodingInterview
bd93961d728f1fe266ad5edf91adc5d024e5ca48
a64bca9c07a7be8d4060c4b96e89d8d429a7f1a3
refs/heads/master
2021-01-10T11:15:31.305787
2017-03-06T00:03:13
2017-03-06T00:03:13
43,990,453
0
1
null
null
null
null
UTF-8
Python
false
false
1,175
py
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Codec(object):
    def serialize(self, root):
        def serializer(root):
            if root is None:
                res.append('#')
                return
            res.append(str(root.val))
            serializer(root.left)
            serializer(root.right)

        res = []
        serializer(root)
        return ' '.join(res)

    def deserialize(self, data):
        def deserializer():
            node_val = next(node_vals)
            if node_val == '#':
                return None
            node = TreeNode(int(node_val))
            node.left = deserializer()
            node.right = deserializer()
            return node

        node_vals = iter(data.split())
        return deserializer()


if __name__ == "__main__":
    codec = Codec()
    t1 = TreeNode(1)
    t2 = TreeNode(2)
    t3 = TreeNode(3)
    t4 = TreeNode(4)
    t5 = TreeNode(5)
    t6 = TreeNode(6)
    t7 = TreeNode(7)
    t1.left = t2
    t1.right = t3
    t2.left = t4
    t2.right = t5
    t3.left = t6
    t3.right = t7
    print codec.serialize(t1)
[ "chia-hao.hsu@aiesec.net" ]
chia-hao.hsu@aiesec.net
3ea21ddb2ae9b03c9181387ce1af7ce6b652d1cb
e18b3cb22c09cb6b2fff7555eeaeddba1513ac1f
/python_stack/flask_fundamentals/whats_my_name/app.py
4bafe3b4c5f7d1bd350de8f8e61c352ad859b72d
[]
no_license
LawerenceLee/coding_dojo_projects
e71760850f3164fbd217004d0ea2f38c5bddd2d8
099b1f862ec520bab93c58235151680bb74c0bf6
refs/heads/master
2021-05-10T16:11:40.466412
2018-05-09T02:24:04
2018-05-09T02:24:04
118,569,970
1
1
null
2018-03-24T22:37:58
2018-01-23T06:48:41
Python
UTF-8
Python
false
false
417
py
from flask import (Flask, render_template, redirect, request, url_for)

DEBUG = True
PORT = 8000
HOST = "0.0.0.0"

app = Flask(__name__)


@app.route('/')
def index():
    return render_template('index.html')


@app.route("/process", methods=['POST'])
def process():
    print(request.form['your-name'])
    return redirect(url_for("index"))


if __name__ == "__main__":
    app.run(debug=DEBUG, host=HOST, port=PORT)
[ "lucifitz.edward@gmail.com" ]
lucifitz.edward@gmail.com
8b648420b85b3333e48cd30d97adff051fcc4d67
2a1969afe3818412140efb25921f35610dd9023d
/python/pythonGUI/PyQt5/my note/examples/3_3_box_layout_vbox_hbox.py
dedee22f54e3d8a2e9f2906c1cc1aad0231bdc2f
[]
no_license
Light2077/LightNote
149cf42089f15bbebd62e27fe5aa6afe67f25779
cd733014f8be44207d624a5fd02dfddcd776aad1
refs/heads/master
2023-09-01T07:49:05.494481
2023-08-24T10:00:09
2023-08-24T10:00:09
224,410,710
4
0
null
null
null
null
UTF-8
Python
false
false
1,036
py
import sys
from PyQt5.QtWidgets import (
    QApplication,
    QMainWindow,
    QWidget,
    QVBoxLayout,
    QHBoxLayout,
    QPushButton,
)


class Example(QMainWindow):
    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.setGeometry(200, 200, 300, 150)
        self.setWindowTitle("VBox+HBox")

        vbox = QVBoxLayout()
        btn1 = QPushButton("A", self)
        btn2 = QPushButton("B", self)
        btn3 = QPushButton("C", self)
        vbox.addWidget(btn1)
        vbox.addWidget(btn2)
        vbox.addWidget(btn3)

        hbox = QHBoxLayout()
        btn4 = QPushButton("D", self)
        btn5 = QPushButton("E", self)
        btn6 = QPushButton("F", self)
        hbox.addWidget(btn4)
        hbox.addWidget(btn5)
        hbox.addWidget(btn6)

        vbox.addLayout(hbox)

        w = QWidget()
        self.setCentralWidget(w)
        w.setLayout(vbox)

        self.show()


if __name__ == "__main__":
    app = QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())
[ "435786117@qq.com" ]
435786117@qq.com
c5aed1e631b4cec0b812399ab18123634d8a5671
c3dc08fe8319c9d71f10473d80b055ac8132530e
/challenge-165/roger-bell-west/python/ch-1.py
460191357b2828a60f49ec73f416a12739c54f30
[]
no_license
southpawgeek/perlweeklychallenge-club
d4b70d9d8e4314c4dfc4cf7a60ddf457bcaa7a1e
63fb76188e132564e50feefd2d9d5b8491568948
refs/heads/master
2023-01-08T19:43:56.982828
2022-12-26T07:13:05
2022-12-26T07:13:05
241,471,631
1
0
null
2020-02-18T21:30:34
2020-02-18T21:30:33
null
UTF-8
Python
false
false
1,280
py
#! /usr/bin/python3 import fileinput points = [] lines = [] x = [] y = [] for line in fileinput.input(): line=line.rstrip() f = [int(i) for i in line.split(',')] for i in range(len(f)): if i % 2 == 0: x.append(f[i]) else: y.append(f[i]) if len(f) == 4: lines.append(f) if len(f) == 2: points.append(f) mnx = min(x) mxx = max(x) mny = min(y) mxy = max(y) lo = [ mnx - (mxx-mnx)/10, mny - (mxy-mny)/10 ] hi = [ mxx + (mxx-mnx)/10, mxy + (mxy-mny)/10 ] w = hi[0] - lo[0] h = hi[1] - lo[1] print('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>') print('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">') print(f'<svg width="{w}" height="{h}" viewBox="{lo[0]} {lo[1]} {w} {h}" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">') if len(lines) > 0: print(' <g id="lines" stroke="#369" stroke-width="4">') for p in lines: print(f' <line x1="{p[0]}" y1="{p[1]}" x2="{p[2]}" y2="{p[3]}" />') print(' </g>') if len(points) > 0: print(' <g fill="#f73" id="points">') for p in points: print(f' <circle cx="{p[0]}" cy="{p[1]}" r="3" />') print(' </g>') print('</svg>')
[ "roger@firedrake.org" ]
roger@firedrake.org
2a2eedd00c5d7719d072f84307fedc954789ea25
32b46b0955d1abd963077c7ed6f614c8fa1403e9
/BT/pyobjc-core-3.0.4/build/lib.macosx-10.11-intel-2.7/PyObjCTest/test_inspect_signatures.py
91eb53069c7804e7efe4a7ac50b850519d493a79
[ "MIT" ]
permissive
ivanmolera/Raspberry
3771f74ce4e4667c95081bfa38a2b39ec6375a26
30d16fdb88efc2aec347047eef26da213346cd1a
refs/heads/master
2021-01-19T20:27:51.424504
2018-02-10T16:48:30
2018-02-10T16:48:30
88,509,424
2
0
null
null
null
null
UTF-8
Python
false
false
1,624
py
from PyObjCTools.TestSupport import * import objc import types import inspect class TestInspectSignatures (TestCase): @min_python_release("3.4") def test_module_functions_signature(self): for nm in dir(objc): obj = getattr(objc, nm) if isinstance(obj, types.BuiltinMethodType): try: value = inspect.signature(obj) except ValueError: value = None if value is None: self.fail("No inspect.signature for %s"%(nm,)) @min_python_release("3.4") def test_class_signature(self): class_list = [objc.ObjCPointer, objc.objc_meta_class, objc.objc_class, objc.objc_object, objc.pyobjc_unicode, objc.selector, objc.FSRef, objc.FSSpec, objc.ivar, objc.informal_protocol, objc.formal_protocol, objc.varlist, objc.function, objc.IMP, objc.super] if hasattr(objc, 'WeakRef'): class_list.append(objc.WeakRef) for cls in class_list: for nm in dir(cls): if nm in ('__new__', '__subclasshook__', '__abstractmethods__', '__prepare__'): continue obj = getattr(cls, nm) if isinstance(obj, types.BuiltinMethodType): try: value = inspect.signature(obj) except ValueError: value = None if value is None: self.fail("No inspect.signature for %s.%s"%(cls.__name__, nm,)) if __name__ == "__main__": main()
[ "ivan.molera@gmail.com" ]
ivan.molera@gmail.com
ce452c20e4b8d2186443529b945ac3386d21abc2
552320aa1aed946ac6e83a149355d495252e09f4
/rule/rule-code/oracle/DML_SORT.py
89a12a34f4f23ee4c2386b611357ac74181aa3cc
[]
no_license
kk71/sqlaudit
59bab5765a67f56f1dd2f3103812051c5acbbc49
51b4a3b188ab31da5511bb68f617933d771a3d51
refs/heads/main
2023-02-04T18:38:46.125746
2020-06-30T06:06:51
2020-06-30T06:06:51
323,559,338
1
0
null
null
null
null
UTF-8
Python
false
false
313
py
import re


def code(rule, entries, **kwargs):
    single_sql: dict = kwargs.get("single_sql")
    sql_text: str = single_sql["sql_text_no_comment"]
    dml_sort = re.compile(r"(\s)?((update )|(delete )).*order by", re.M + re.I)
    if dml_sort.search(sql_text):
        yield single_sql


# code_hole is presumably provided by the rule engine that loads and executes this file.
code_hole.append(code)
[ "kai.fang@kirintech.cn" ]
kai.fang@kirintech.cn
1841e5f59a2690cdc2fb0f3fe2a6685186d96ab9
ce7fb81204902a49786b6b9cbf29647d159f28c3
/node/SQL/mysql/tbl_vector_style_insert.py
961d1c50619e01fe8b7ec63c6907d31cdb4ffd3a
[]
no_license
seefs/Source_Insight
cddc11b80b2ffe7d01200867d9db5185521ed201
130d807d99333d11a708e59a80b06b0a5377f1a3
refs/heads/master
2023-08-16T21:16:19.543498
2023-08-06T18:17:11
2023-08-06T18:17:11
165,578,073
7
3
null
null
null
null
UTF-8
Python
false
false
1,831
py
import pymysql import re # 打开数据库连接 db = pymysql.connect("localhost","root","s0f0s0","jiebanew" ) # 使用cursor()方法获取操作游标 cursor = db.cursor() match_str = '_' dict = {} sql_rd = "SELECT * FROM tbl_base_all" sql_rd = "SELECT * FROM tbl_base_all" #sql_wr_pre = "UPDATE tbl_vector SET `VC%s` =%s where VID = %s;" sql_wr_next = "UPDATE tbl_vector SET `VC%s` =%s where VID = %s;" i_pre = 0 i_next = 0 #p_id = 'p%s_%s' n_id = 'n%s_%s' try: # 执行SQL语句 cursor.execute(sql_rd) # 获取所有记录列表 results = cursor.fetchall() for row in results: fslist = row[3] line_sents = re.split(match_str,fslist) # 保留分割符 line_len = len(line_sents) # print (line_len) if line_len == 0 : continue i_pre = line_sents[0] for i in range(line_len-1): i_next = line_sents[i + 1] # dict[p_id % (str(i_next), str(i_pre))] = dict.get(p_id % (str(i_next), str(i_pre)), 0) + 1 dict[n_id % (str(i_pre), str(i_next))] = dict.get(n_id % (str(i_pre), str(i_next)), 0) + 1 i_pre = i_next for i in range(17): for j in range(17): k = i m = j if k == 17 : k = 26 if m == 17 : m = 26 # v = dict.get(p_id % (str(m), str(k)), 0) v = dict.get(n_id % (str(k), str(m)), 0) if v > 0 : cursor.execute(sql_wr_next % (str(k), str(v), str(m))) # 提交到数据库执行 db.commit() # 打印结果 # print (sql_wr_num % (str(1), str(fcid))) except: print ("Error: unable to fetch data") # 关闭数据库连接 db.close() #print ("dict : %s" % str(dict))
[ "seefs@163.com" ]
seefs@163.com
29f4630dce6ed1e24ddcb6b1e4a73d2781516456
60325d386df03a8a5d2602139b9b6a7422c8e79c
/Python/SOP/SOP_create_gizmo.py
c0be55f579105a0634ea62df51c95a0e96d13af0
[]
no_license
jsa4000/SideFX-HOUDINI
98f5aacfda4709a721a7cadb3c61171f1b8012ae
2366b4b87c0e780dbc9ccecfc8bc04f8e59a01c9
refs/heads/master
2021-01-22T09:27:57.533881
2017-04-28T11:23:22
2017-04-28T11:23:22
81,961,380
5
3
null
null
null
null
UTF-8
Python
false
false
1,149
py
node = hou.pwd() geo = node.geometry() # Add code to modify contents of geo. # Use drop down menu to select examples. attrib = geo.addAttrib(hou.attribType.Point,"Cd",(0,0,0)) # Vector X (Side) sidevector = hou.Vector3((1.0,0.0,0.0)) sidevector.normalized() attribSide = geo.addAttrib(hou.attribType.Point,"Side",sidevector) # Vector Y (Up) upvector = hou.Vector3((0.0,1.0,0.0)) upvector.normalized() attribUp = geo.addAttrib(hou.attribType.Point,"Up",upvector) # Vector Z (Aim) aimvector = hou.Vector3((0.0,0.0,1.0)) aimvector.normalized() attribAim = geo.addAttrib(hou.attribType.Point,"Aim",aimvector) for i in range(0,3): initial_pos = (0,0,0) final_pos = [0,0,0] final_pos[i] = 3 color = [0,0,0] color[i] = 1 # Create the polygon poly = geo.createPolygon() positions = [initial_pos, final_pos ] # Create the points and the vertices for position in positions: point = geo.createPoint() point.setAttribValue(attrib, color) point.setPosition(position) poly.addVertex(point) # SEt the poly opened to display as an edge poly.setIsClosed(False)
[ "jsa4000@gmail.com" ]
jsa4000@gmail.com
2fc2533d27af5189679f1be9017cff8d67e35c0d
02e5b1240db2ef04b4f8b661a9ac4ce060144d74
/test/geweke_ct_test.py
5b96979aa97f893c391cdafaa57f3d088f12b1fa
[ "MIT" ]
permissive
jayeshchoudhari/pyhawkes
b3b143a5040730826c23a9b3703159dbeb9bf21d
f4b0e6e3ce7f74e647f0ed2254ea334c22d6e82b
refs/heads/master
2021-06-12T12:55:54.740142
2017-03-27T06:47:16
2017-03-27T06:47:16
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,149
py
import numpy as np np.random.seed(1234) import matplotlib.pyplot as plt from scipy.stats import gamma, t, probplot from pyhawkes.models import ContinuousTimeNetworkHawkesModel from pybasicbayes.util.text import progprint_xrange def test_geweke(): """ Create a discrete time Hawkes model and generate from it. :return: """ K = 1 T = 50.0 dt = 1.0 dt_max = 3.0 # network_hypers = {'C': 1, 'p': 0.5, 'kappa': 3.0, 'alpha': 3.0, 'beta': 1.0/20.0} network_hypers = {'c': np.zeros(K, dtype=np.int), 'p': 0.5, 'kappa': 10.0, 'v': 10*3.0} bkgd_hypers = {"alpha": 1., "beta": 10.} model = ContinuousTimeNetworkHawkesModel(K=K, dt_max=dt_max, network_hypers=network_hypers) model.generate(T=T) # Gibbs sample and then generate new data N_samples = 1000 samples = [] lps = [] for itr in progprint_xrange(N_samples, perline=50): # Resample the model model.resample_model() samples.append(model.copy_sample()) lps.append(model.log_likelihood()) # Geweke step model.data_list.pop() model.generate(T=T) # Compute sample statistics for second half of samples A_samples = np.array([s.weight_model.A for s in samples]) W_samples = np.array([s.weight_model.W for s in samples]) mu_samples = np.array([s.impulse_model.mu for s in samples]) tau_samples = np.array([s.impulse_model.tau for s in samples]) lambda0_samples = np.array([s.bias_model.lambda0 for s in samples]) lps = np.array(lps) offset = 0 A_mean = A_samples[offset:, ...].mean(axis=0) W_mean = W_samples[offset:, ...].mean(axis=0) mu_mean = mu_samples[offset:, ...].mean(axis=0) tau_mean = tau_samples[offset:, ...].mean(axis=0) lambda0_mean = lambda0_samples[offset:, ...].mean(axis=0) print "A mean: ", A_mean print "W mean: ", W_mean print "mu mean: ", mu_mean print "tau mean: ", tau_mean print "lambda0 mean: ", lambda0_mean # Plot the log probability over iterations plt.figure() plt.plot(np.arange(N_samples), lps) plt.xlabel("Iteration") plt.ylabel("Log probability") # Plot the histogram of bias samples plt.figure() p_lmbda0 = gamma(model.bias_model.alpha, scale=1./model.bias_model.beta) _, bins, _ = plt.hist(lambda0_samples[:,0], bins=50, alpha=0.5, normed=True) bincenters = 0.5*(bins[1:]+bins[:-1]) plt.plot(bincenters, p_lmbda0.pdf(bincenters), 'r--', linewidth=1) plt.xlabel('lam0') plt.ylabel('p(lam0)') print "Expected p(A): ", model.network.P print "Empirical p(A): ", A_samples.mean(axis=0) # Plot the histogram of weight samples plt.figure() Aeq1 = A_samples[:,0,0] == 1 # p_W1 = gamma(model.network.kappa, scale=1./model.network.v[0,0]) # p_W1 = betaprime(model.network.kappa, model.network.alpha, scale=model.network.beta) p_W1 = gamma(model.network.kappa, scale=1./model.network.v[0,0]) if np.sum(Aeq1) > 0: _, bins, _ = plt.hist(W_samples[Aeq1,0,0], bins=50, alpha=0.5, normed=True) bincenters = 0.5*(bins[1:]+bins[:-1]) plt.plot(bincenters, p_W1.pdf(bincenters), 'r--', linewidth=1) plt.xlabel('W') plt.ylabel('p(W | A=1)') # Plot the histogram of impulse precisions plt.figure() p_tau = gamma(model.impulse_model.alpha_0, scale=1./model.impulse_model.beta_0) _, bins, _ = plt.hist(tau_samples[:,0,0], bins=50, alpha=0.5, normed=True) bincenters = 0.5*(bins[1:]+bins[:-1]) plt.plot(bincenters, p_tau.pdf(bincenters), 'r--', linewidth=1) plt.xlabel('tau') plt.ylabel('p(tau)') # Plot the histogram of impulse means plt.figure() p_mu = t(df=2*model.impulse_model.alpha_0, loc=model.impulse_model.mu_0, scale=np.sqrt(model.impulse_model.beta_0/(model.impulse_model.alpha_0*model.impulse_model.lmbda_0))) _, bins, _ = plt.hist(mu_samples[:,0,0], bins=50, alpha=0.5, normed=True) 
bincenters = 0.5*(bins[1:]+bins[:-1]) plt.plot(bincenters, p_mu.pdf(bincenters), 'r--', linewidth=1) plt.xlabel('mu') plt.ylabel('p(mu)') plt.show() def test_sample_nig(): mu_0 = 0.0 lmbda_0 = 10. alpha_0 = 10. beta_0 = 10. # Directly sample nig and lookg at marginals from pyhawkes.utils.utils import sample_nig mu_samples = \ np.array([sample_nig(mu_0, lmbda_0, alpha_0, beta_0)[0] for _ in xrange(10000)]) # Plot the histogram of impulse means plt.figure() p_mu = t(df=2*alpha_0, loc=mu_0, scale=np.sqrt(beta_0/(alpha_0*lmbda_0))) _, bins, _ = plt.hist(mu_samples, bins=50, alpha=0.5, normed=True) bincenters = 0.5*(bins[1:]+bins[:-1]) plt.plot(bincenters, p_mu.pdf(bincenters), 'r--', linewidth=1) plt.xlabel('mu') plt.ylabel('p(mu)') plt.figure() probplot(mu_samples, dist=p_mu, plot=plt.gca()) plt.show() if __name__ == "__main__": # test_sample_nig() test_geweke()
[ "scott.linderman@gmail.com" ]
scott.linderman@gmail.com
454486b360222d0be71121c0c3b0766b377b6de8
4369c5a214f8c4fb1f8a286f72d57cfa9c3f02c7
/geotrek/tourism/migrations/0004_auto_20190322_1908.py
f03a350395c6ef760026fa83bddfb5dc1f73f878
[ "BSD-2-Clause" ]
permissive
GeotrekCE/Geotrek-admin
c13d251066e92359c26f22d185b8bd2e26e622ef
a91b75261a876be51ad2a693618629900bea6003
refs/heads/master
2023-08-21T12:45:25.586551
2023-08-09T12:28:33
2023-08-09T12:28:33
9,886,107
71
56
BSD-2-Clause
2023-09-13T09:40:33
2013-05-06T12:17:21
Python
UTF-8
Python
false
false
1,182
py
# Generated by Django 1.11.14 on 2019-03-22 18:08
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tourism', '0003_auto_20190306_1417'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='touristiccontenttype1',
            options={'verbose_name': 'Type1', 'verbose_name_plural': 'First list types'},
        ),
        migrations.AlterModelOptions(
            name='touristiccontenttype2',
            options={'verbose_name': 'Type2', 'verbose_name_plural': 'Second list types'},
        ),
        migrations.AlterField(
            model_name='touristiccontent',
            name='type1',
            field=models.ManyToManyField(blank=True, db_table='t_r_contenu_touristique_type1', related_name='contents1', to='tourism.TouristicContentType1', verbose_name='Type 1'),
        ),
        migrations.AlterField(
            model_name='touristiccontent',
            name='type2',
            field=models.ManyToManyField(blank=True, db_table='t_r_contenu_touristique_type2', related_name='contents2', to='tourism.TouristicContentType2', verbose_name='Type 2'),
        ),
    ]
[ "gael.utard@makina-corpus.com" ]
gael.utard@makina-corpus.com
3b9e645996132a272316f8928612a8d743e4ee43
026f12a5fdd4b3bfee00713091267aaef71047c1
/end/demo1/bookdemo/polls/forms.py
59f1ce8aebd950796aeffb0769d69f030a0e8aaa
[]
no_license
zzy0371/py1911project
64c64413ea0107926ae81479adc27da87ee04767
7ce2a2acfc1dade24e6e7f8763fceb809fabd7a1
refs/heads/master
2023-01-08T07:51:13.388203
2020-03-19T03:31:33
2020-03-19T03:31:33
239,649,431
0
1
null
2023-01-05T09:04:53
2020-02-11T01:22:35
JavaScript
UTF-8
Python
false
false
1,112
py
from django import forms

from .models import User


class LoginForm(forms.Form):
    """Defines a login form used to render the HTML login form."""
    username = forms.CharField(max_length=150, min_length=3,
                               label="输入用户名",
                               help_text="用户名最小6,最大150",)
    password = forms.CharField(min_length=3, max_length=50,
                               widget=forms.PasswordInput,
                               help_text="密码最小6,最大50", label="输入密码")


class RegistForm(forms.ModelForm):
    """Defines a registration form used to render the HTML form."""
    password2 = forms.CharField(widget=forms.PasswordInput, label="重复密码")

    class Meta:
        model = User
        fields = ["username", "password"]
        labels = {
            "username": "输入用户名",
            "password": "输入密码"
        }
        help_texts = {
            "username": "长度>3 <150",
            "password": "长度>3 <150"
        }
        widgets = {
            "password": forms.PasswordInput
        }
[ "496575233@qq.com" ]
496575233@qq.com
6eea14cd8ff6d3489b18e1c0a58b2528c6b0370c
a4deea660ea0616f3b5ee0b8bded03373c5bbfa2
/concrete_instances/register-variants/vrsqrtps_xmm_xmm/instructions/vrsqrtps_xmm_xmm/vrsqrtps_xmm_xmm.gen.vex.py
9f8d295f1eb314cbe69055e099f7208efc7b0bdf
[]
no_license
Vsevolod-Livinskij/x86-64-instruction-summary
4a43472e26f0e4ec130be9a82f7e3f3c1361ccfd
c276edab1b19e3929efb3ebe7514489f66087764
refs/heads/master
2022-02-02T18:11:07.818345
2019-01-25T17:19:21
2019-01-25T17:19:21
null
0
0
null
null
null
null
UTF-8
Python
false
false
190
py
import angr

proj = angr.Project('./instructions/vrsqrtps_xmm_xmm/vrsqrtps_xmm_xmm.o')
print proj.arch
print proj.entry
print proj.filename
irsb = proj.factory.block(proj.entry).vex
irsb.pp()
[ "sdasgup3@illinois.edu" ]
sdasgup3@illinois.edu
b63fa9d1cd2d5f05bba5cf59b8e7584e386ff9ef
25d081c82bf9adc2a8d96c254df0239a9f982a71
/tools/file_search.py
11469d4167adeef4ce416653eba9b2256b77e6d2
[ "MIT" ]
permissive
asiekierka/z2
f102de582aaa9fc51b6b598a1fb07c58be4f540f
d926408423dc98d71d5e7fc2fda3202c03c309de
refs/heads/master
2021-06-15T15:09:41.614135
2021-02-23T02:44:54
2021-02-23T02:44:54
146,348,922
1
0
MIT
2018-08-27T20:14:46
2018-08-27T20:14:46
null
UTF-8
Python
false
false
2,140
py
import django import os import re import sys import zipfile sys.path.append("/var/projects/museum") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings") django.setup() from museum_site.models import File # noqa: E402 from museum_site.constants import SITE_ROOT # noqa: E402 def main(): shortcuts = { "1": "(.*).[zZ][zZ][tT]$", "2": "(.*).[sS][zZ][tT]$", "3": "(.*).[zZ][iI][gG]$", "4": "(.*).[zZ][zZ][mM]$", "5": "(.*).[tT][xX][tT]$", "6": "(.*).[dD][oO][cC]$", "7": "(.*).[bB][rR][dD]$", "8": "(.*).[eE][xX][eE]$", "9": "(.*).[cC][oO][mM]$", } length = len(list(shortcuts.keys())) print("SHORTCUTS") for x in range(1, length + 1): print(x, shortcuts[str(x)]) contains_regex = input("Regex to list Files that do match: ") if contains_regex in shortcuts.keys(): contains_regex = shortcuts[contains_regex] lacks_regex = input("Regex to list Files that don't match: ") if lacks_regex in list(shortcuts.keys()): lacks_regex = shortcuts[lacks_regex] files = File.objects.all().order_by("letter", "title") lacks_matches = "" contains_matches = "" print("LACKS", lacks_regex) for f in files: letter = f.letter fn = f.filename # Open the zip try: zf = zipfile.ZipFile( os.path.join(SITE_ROOT, "zgames", (letter + "/" + fn)) ) except Exception as e: # The audit script should handle missing/invalid zips, not this. print(e) continue # Print files lack_fail = False try: file_list = zf.namelist() for zfn in file_list: if contains_regex: if (re.match(contains_regex, zfn)): print(fn, zfn) if lacks_regex: if (not re.match(lacks_regex, zfn)): print(fn, zfn) except Exception as e: print(e) continue return True if __name__ == "__main__": main()
[ "doctordos@gmail.com" ]
doctordos@gmail.com
de3366ed069ff8116b3bfe89dc977aaa79c406f6
ea35facf6d823e93706b5f551408250b1e089be9
/共通問題/14_12.py
d02d6ce9cf4df2e35aed12e8c7dfc19af3116257
[]
no_license
YukiNGSM/PythonStudy
7a2d24f4762e384531eadd691858296b00b6a6b3
26310d0e007745ff4920ccd0fc3e51771cb2d5f1
refs/heads/master
2023-07-19T00:06:29.061255
2021-09-22T01:29:49
2021-09-22T01:29:49
409,025,304
0
0
null
null
null
null
UTF-8
Python
false
false
482
py
address_str = input("住所を入力してください:")
index = 0

if address_str.find("東京都") != -1:
    index = address_str.find("都")
elif address_str.find("北海道") != -1:
    index = address_str.find("道")
elif address_str.find("大阪府") != -1 or address_str.find("京都府") != -1:
    index = address_str.find("府")
elif address_str.find("県") != -1:
    index = address_str.find("県")

# Print the part of the address that follows the prefecture
print(address_str[index+1:])
[ "ykh2135239@o-hara.ac.jp" ]
ykh2135239@o-hara.ac.jp
2206ac9cfa6da743114677db3d089f306b0497aa
3f9e960174cfc5c8bd6827ce5362124c467a3952
/python/prep/find-max.py
e9a8c5f30d079fb61552d3949305b9912db251da
[]
no_license
monobinab/python
f3ec6d462d7149c007ac9e14e72132eae73b4acd
265621b045969c819eb86fa7ba2a3bdfad34ecb6
refs/heads/master
2020-12-03T00:04:29.185880
2017-07-01T18:53:11
2017-07-01T20:06:16
95,982,002
0
0
null
null
null
null
UTF-8
Python
false
false
2,057
py
# some functions to work with loop def find_max(data): biggest = data[0]; print(biggest) for val in data: if val > biggest: biggest = val; return biggest; print(find_max([1000, 1, 2, 3, 7, 7, 100, 4])) def find_min(data): smallest = data[0]; for val in data: if val < smallest: smallest = val; return smallest; print(find_min([0, 2, -3, 9])) # for loop usign index def find_max_index(data): max_index = 0; for i in range(len(data)): if data[i] > data[max_index]: max_index = i; return max_index; print(find_max_index([-100, 1, 2, 3, 6, 7])) # checks if a value exist def if_exist(data, target): print(data) print(target) found = False; for val in data: if val == target: found = True; break; return found; print(if_exist("abcde", "a")) print(if_exist([0, 1, 2, 3, 4, 5], 2)) #this doesn't work yet # sort def sort_list(data): i = 0; # j = 1; # temp = 0; for i in range(len(data) - 1): temp = 0; for j in range(len(data)): print("data1 is", data[i]) # print(data[j]) if data[i] > data[i + 1]: data[i] = temp; print("data[i] is ", data[i]); print("temp is ", temp) data[i] = data[i + 1]; data[i + 1] = temp i = i + 1 # j = j+1 else: continue; return data; #print(sort_list([3, 2, 8, 6, 9, 0, 11])) #create a list with values from odd number index def odd_index_finder(data): result = []; for i in range(len(data)): if i%2 == 0: result.append(data[i]); return result; print(odd_index_finder([0, 1, 2, 3, 4, 5,6])) #create list by eliminating certain values def filter_list(data, target_value): i = 0 for val in data: print(val) if val == target_value: i = data[val] data.pop(i); return data; print(filter_list([1, 2, 6, 3, 8], 2))
[ "monobina.saha@searshc.com" ]
monobina.saha@searshc.com
259a015c82d514ec77e650e0acb5a9afebc642d2
8f48d12b88048e424ebb0d72ca6dfab5cf12ae0f
/0600_0999/923.py
0da37c61c3b5051a81a5e025ce875afd0d36cebc
[]
no_license
renjieliu/leetcode
e1caf13c18a8107ed9252588b339fb76bcb1b246
4668b64fcb9320b6c316d8608fc61911ce43b6c7
refs/heads/master
2023-03-18T18:16:06.187741
2023-03-14T20:31:59
2023-03-14T20:31:59
128,823,819
7
0
null
null
null
null
UTF-8
Python
false
false
2,542
py
class Solution: def threeSumMulti(self, arr: 'List[int]', target: int) -> int: # O(N2LogN | N) hmp = {} for a in arr: if a not in hmp: hmp[a] = 0 hmp[a] += 1 output = 0 arr = sorted(hmp.keys()) seen = set() fact = lambda x: 1 if x <= 1 else x * fact(x-1) combo = lambda x, y: fact(x)//(fact(y) * fact(x-y)) for i in range(len(arr)): for j in range(len(arr)-1, i-1, -1): a = arr[i] b = target-arr[i]-arr[j] c = arr[j] t = tuple(sorted([a, b, c])) if b in hmp and t not in seen: seen.add(t) cnt = {} #how many times the number needs to be picked. for case like (2,3,3), or (3,3,3) for x in [a, b , c]: if x not in cnt: cnt[x] = 0 cnt[x] += 1 curr = 1 for k, v in cnt.items(): curr *= combo(hmp[k], v) output += curr return output%(10**9+7) # previous solution # class Solution: # def threeSumMulti(self, arr: 'List[int]', target: int) -> int: # hmp = {} # mod = 10**9+7 # for a in arr: # if a not in hmp: # hmp[a] = 0 # hmp[a]+=1 # fact = lambda x: 1 if x <= 1 else x*fact(x-1) # combo = lambda m, n: fact(m)//(fact(n) * fact(m-n)) # sig = lambda x:'-'.join([str(_) for _ in sorted(x)]) # cnt = 0 # stk = list(hmp.keys()) # seen = set() # for i in range(len(stk)): # a = stk[i] # if target == a*3 and hmp[a] >=3: # cnt += combo(hmp[a], 3) # seen.add(sig([a,a,a])) # else: # for j in range(i+1, len(stk)): # b = stk[j] # c = target - a-b # if c in hmp and sig([a,b,c]) not in seen: # if a == c: # cnt += combo(hmp[a], 2) * combo(hmp[b], 1) # elif b == c: # cnt += combo(hmp[b], 2) * combo(hmp[a], 1) # else: # cnt += combo(hmp[a], 1) * combo(hmp[b], 1)* combo(hmp[c], 1) # seen.add(sig([a,b,c])) # return cnt % mod
[ "anlrj@qq.com" ]
anlrj@qq.com
65d45ee320837f03ec2327071590305732848200
af7a8e1fcacb1ac50ae6c8072db608ca3e80a839
/tests/test_linearize.py
7e5bb8ea9c414b953f498ee4f92ab10ba1a07ebb
[ "MIT" ]
permissive
wszhang/devito
3c497af69a8420bae00b17ad8c0b9c08ca1de704
a7dbdabe505ded73781ca06e0a1c40b4d582655d
refs/heads/master
2023-09-06T00:33:43.694995
2021-10-13T07:51:29
2021-10-13T07:51:29
416,707,398
0
0
null
null
null
null
UTF-8
Python
false
false
6,806
py
import pytest import numpy as np import scipy.sparse from devito import (Grid, Function, TimeFunction, SparseTimeFunction, Operator, Eq, MatrixSparseTimeFunction) from devito.ir import Expression, FindNodes def test_basic(): grid = Grid(shape=(4, 4)) u = TimeFunction(name='u', grid=grid) u1 = TimeFunction(name='u', grid=grid) eqn = Eq(u.forward, u + 1) op0 = Operator(eqn) op1 = Operator(eqn, opt=('advanced', {'linearize': True})) # Check generated code assert 'uL0' not in str(op0) assert 'uL0' in str(op1) op0.apply(time_M=10) op1.apply(time_M=10, u=u1) assert np.all(u.data == u1.data) @pytest.mark.parallel(mode=[(1, 'basic'), (1, 'diag2'), (1, 'full')]) def test_mpi(): grid = Grid(shape=(4, 4)) u = TimeFunction(name='u', grid=grid, space_order=2) u1 = TimeFunction(name='u', grid=grid, space_order=2) eqn = Eq(u.forward, u.dx2 + 1.) op0 = Operator(eqn) op1 = Operator(eqn, opt=('advanced', {'linearize': True})) # Check generated code assert 'uL0' not in str(op0) assert 'uL0' in str(op1) op0.apply(time_M=10) op1.apply(time_M=10, u=u1) assert np.all(u.data == u1.data) def test_cire(): grid = Grid(shape=(4, 4, 4)) u = TimeFunction(name='u', grid=grid, space_order=2) u1 = TimeFunction(name='u', grid=grid, space_order=2) eqn = Eq(u.forward, u.dy.dy + 1.) op0 = Operator(eqn, opt=('advanced', {'cire-mingain': 0})) op1 = Operator(eqn, opt=('advanced', {'linearize': True, 'cire-mingain': 0})) # Check generated code assert 'uL0' not in str(op0) assert 'uL0' in str(op1) op0.apply(time_M=10) op1.apply(time_M=10, u=u1) assert np.all(u.data == u1.data) def test_nested_indexeds(): grid = Grid(shape=(4, 4)) t = grid.stepping_dim x, y = grid.dimensions f = Function(name='f', grid=grid, dtype=np.int32) g = Function(name='g', grid=grid, dimensions=(x,), shape=(4,), dtype=np.int32) u = TimeFunction(name='u', grid=grid, space_order=2) u1 = TimeFunction(name='u', grid=grid, space_order=2) eqn = Eq(u.forward, u[t, f[g[x], g[x]], y] + 1.) op0 = Operator(eqn) op1 = Operator(eqn, opt=('advanced', {'linearize': True})) # Check generated code assert 'uL0' not in str(op0) assert 'uL0' in str(op1) op0.apply(time_M=10) op1.apply(time_M=10, u=u1) assert np.all(u.data == u1.data) def test_interpolation(): nt = 10 grid = Grid(shape=(4, 4)) src = SparseTimeFunction(name='src', grid=grid, npoint=1, nt=nt) rec = SparseTimeFunction(name='rec', grid=grid, npoint=1, nt=nt) u = TimeFunction(name="u", grid=grid, time_order=2) u1 = TimeFunction(name="u", grid=grid, time_order=2) src.data[:] = 1. 
eqns = ([Eq(u.forward, u + 1)] + src.inject(field=u.forward, expr=src) + rec.interpolate(expr=u.forward)) op0 = Operator(eqns, opt='advanced') op1 = Operator(eqns, opt=('advanced', {'linearize': True})) # Check generated code assert 'uL0' not in str(op0) assert 'uL0' in str(op1) op0.apply(time_M=nt-2) op1.apply(time_M=nt-2, u=u1) assert np.all(u.data == u1.data) def test_interpolation_msf(): grid = Grid(shape=(4, 4)) r = 2 # Because we interpolate across 2 neighbouring points in each dimension nt = 10 m0 = TimeFunction(name="m0", grid=grid, space_order=0, save=nt, time_order=0) m1 = TimeFunction(name="m1", grid=grid, space_order=0, save=nt, time_order=0) mat = scipy.sparse.coo_matrix((0, 0), dtype=np.float32) sf = MatrixSparseTimeFunction(name="s", grid=grid, r=r, matrix=mat, nt=nt) eqns = sf.inject(field=m0.forward, expr=sf.dt2) eqns += sf.inject(field=m1.forward, expr=sf.dt2) op0 = Operator(eqns) op1 = Operator(eqns, opt=('advanced', {'linearize': True})) assert 'm0L0' in str(op1) # There used to be a bug causing the jit compilation to fail because of # the writing to `const int` variables assert op0.cfunction assert op1.cfunction @pytest.mark.parallel(mode=[(1, 'diag2')]) def test_codegen_quality0(): grid = Grid(shape=(4, 4)) u = TimeFunction(name='u', grid=grid, space_order=2) eqn = Eq(u.forward, u.dx2 + 1.) op = Operator(eqn, opt=('advanced', {'linearize': True})) assert 'uL0' in str(op) # No useless exprs generated by linearize(), such as `int x_fsz5 = u_vec->size[1];`, # since the linearization only has an effect in the elemental functions exprs = FindNodes(Expression).visit(op) assert len(exprs) == 1 # Only four access macros necessary, namely `uL0`, `aL0`, `bufL0`, `bufL1` (the # other three obviously are _POSIX_C_SOURCE, START_TIMER, STOP_TIMER) assert len(op._headers) == 7 exprs = FindNodes(Expression).visit(op._func_table['compute0'].root) assert all('const int' in str(i) for i in exprs[:-1]) def test_codegen_quality1(): grid = Grid(shape=(4, 4, 4)) u = TimeFunction(name='u', grid=grid, space_order=2) eqn = Eq(u.forward, u.dy.dy + 1.) op = Operator(eqn, opt=('advanced', {'linearize': True, 'cire-mingain': 0})) assert 'uL0' in str(op) # 11 expressions in total are expected, 8 of which are for the linearized accesses exprs = FindNodes(Expression).visit(op) assert len(exprs) == 11 assert all('const int' in str(i) for i in exprs[:-3]) assert all('const int' not in str(i) for i in exprs[-3:]) # Only two access macros necessary, namely `uL0` and `r1L0` (the other five # obviously are _POSIX_C_SOURCE, MIN, MAX, START_TIMER, STOP_TIMER) assert len(op._headers) == 7 def test_pow(): grid = Grid(shape=(4, 4)) u = TimeFunction(name='u', grid=grid, space_order=2) eqn = Eq(u.forward, 1./(u*u) + 1.) op = Operator(eqn, opt=('advanced', {'linearize': True})) # Make sure linearize() doesn't cause `a*a` -> `Pow(a, 2)` assert 'uL0' in str(op) expr = FindNodes(Expression).visit(op)[-1].expr assert expr.rhs.is_Add assert expr.rhs.args[1].is_Pow assert expr.rhs.args[1].args[0].is_Mul assert expr.rhs.args[1].args[1] == -1 def test_different_halos(): grid = Grid(shape=(8, 8, 8)) f = Function(name='f', grid=grid, space_order=8) g = Function(name='g', grid=grid, space_order=16) u = TimeFunction(name='u', grid=grid, space_order=12) u1 = TimeFunction(name='u', grid=grid, space_order=12) f.data[:] = 1. g.data[:] = 2. 
eqn = Eq(u.forward, u + f + g + 1) op0 = Operator(eqn) op1 = Operator(eqn, opt=('advanced', {'linearize': True})) # Check generated code assert 'uL0' not in str(op0) assert 'uL0' in str(op1) op0.apply(time_M=4) op1.apply(time_M=4, u=u1) assert np.all(u.data == u1.data)
[ "f.luporini12@imperial.ac.uk" ]
f.luporini12@imperial.ac.uk
9934aca3276aec7f5db8547b6452a74f26e0922c
54bb9ba6d507cd25b2c2ac553665bc5fc95280d1
/tests/onegov/gazette/test_views_categories.py
f4a276ee54ff21d4de1bd1636e5d63f25c6af2d0
[ "MIT" ]
permissive
href/onegov-cloud
9ff736d968979380edba266b6eba0e9096438397
bb292e8e0fb60fd1cd4e11b0196fbeff1a66e079
refs/heads/master
2020-12-22T07:59:13.691431
2020-01-28T08:51:54
2020-01-28T08:51:54
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,759
py
from freezegun import freeze_time from tests.onegov.gazette.common import login_editor_1 from tests.onegov.gazette.common import login_publisher from pyquery import PyQuery as pq from webtest import TestApp as Client from xlrd import open_workbook def test_view_categories(gazette_app): with freeze_time("2017-10-20 12:00"): client = Client(gazette_app) login_publisher(client) # Test data: # 10 / Complaints / inactive # 11 / Education / active # 12 / Submissions / active # 13 / Commercial Register / active # 14 / Elections / active # add a category manage = client.get('/categories') manage = manage.click('Neu') manage.form['title'] = 'Rubrik XY' manage.form['active'] = True manage = manage.form.submit().maybe_follow() assert 'Rubrik hinzugefügt.' in manage assert 'Rubrik XY' in manage categories = [ [ ''.join((td.text_content(), td.attrib['class'])) for td in pq(tr)('td')[:1] ][0] for tr in manage.pyquery('table.categories tbody tr') ] assert categories == [ 'Commercial Register (13)', 'Complaints (10)inactive', 'Education (11)', 'Elections (14)', 'Rubrik XY (15)', 'Submissions (12)' ] # use the first category in a notice manage = client.get('/notices/drafted/new-notice') manage.form['title'] = 'Titel' manage.form['organization'] = '200' manage.form['category'] = '13' manage.form['issues'] = ['2017-44'] manage.form['text'] = 'Text' manage.form['author_place'] = 'Govikon' manage.form['author_name'] = 'State Chancellerist' manage.form['author_date'] = '2019-01-01' manage = manage.form.submit().maybe_follow() assert '<h2>Titel</h2>' in manage assert 'Commercial Register' in manage # edit the first category manage = client.get('/categories') manage = manage.click('Bearbeiten', index=0) manage.form['title'] = 'Rubrik Z' manage.form['active'] = False manage = manage.form.submit().maybe_follow() assert 'Rubrik geändert.' in manage assert 'Commercial Register' not in manage categories = [ [ ''.join((td.text_content(), td.attrib['class'])) for td in pq(tr)('td')[:1] ][0] for tr in manage.pyquery('table.categories tbody tr') ] assert categories == [ 'Complaints (10)inactive', 'Education (11)', 'Elections (14)', 'Rubrik XY (15)', 'Rubrik Z (13)inactive', 'Submissions (12)' ] # check if the notice has been updated manage = client.get('/notice/titel') assert 'Commercial Register' not in manage assert 'Rubrik Z' in manage # delete all but one (unused) categories manage = client.get('/categories') manage.click('Löschen', index=0).form.submit() manage.click('Löschen', index=1).form.submit() manage.click('Löschen', index=2).form.submit() manage.click('Löschen', index=3).form.submit() manage.click('Löschen', index=5).form.submit() manage = client.get('/categories') assert 'Complaints' not in manage assert 'Education' not in manage assert 'Elections' not in manage assert 'Rubrik XY' not in manage assert 'Rubrik Z' in manage assert 'Submissions' not in manage # Try to delete the used category manage = client.get('/categories') manage = manage.click('Löschen') assert 'Es können nur unbenutzte Rubriken gelöscht werden.' 
in manage assert not manage.forms def test_view_categories_permissions(gazette_app): client = Client(gazette_app) login_publisher(client) manage = client.get('/categories').click('Neu') manage.form['title'] = 'XY' manage = manage.form.submit().maybe_follow() edit_link = manage.click('Bearbeiten', index=0).request.url delete_link = manage.click('Löschen', index=0).request.url login_editor_1(client) client.get('/categories', status=403) client.get(edit_link, status=403) client.get(delete_link, status=403) def test_view_categories_export(gazette_app): client = Client(gazette_app) client.get('/categories/export', status=403) login_editor_1(client) client.get('/categories/export', status=403) login_publisher(client) response = client.get('/categories/export') book = open_workbook(file_contents=response.body) assert book.nsheets == 1 sheet = book.sheets()[0] assert sheet.ncols == 4 assert sheet.nrows == 6 assert sheet.cell(0, 0).value == 'ID' assert sheet.cell(0, 1).value == 'Name' assert sheet.cell(0, 2).value == 'Titel' assert sheet.cell(0, 3).value == 'Aktiv' assert sheet.cell(1, 1).value == '13' assert sheet.cell(1, 2).value == 'Commercial Register' assert sheet.cell(1, 3).value == 1 assert sheet.cell(2, 1).value == '10' assert sheet.cell(2, 2).value == 'Complaints' assert sheet.cell(2, 3).value == 0 assert sheet.cell(3, 1).value == '11' assert sheet.cell(3, 2).value == 'Education' assert sheet.cell(3, 3).value == 1 assert sheet.cell(4, 1).value == '14' assert sheet.cell(4, 2).value == 'Elections' assert sheet.cell(4, 3).value == 1 assert sheet.cell(5, 1).value == '12' assert sheet.cell(5, 2).value == 'Submissions' assert sheet.cell(5, 3).value == 1
[ "denis.krienbuehl@seantis.ch" ]
denis.krienbuehl@seantis.ch
8106dce1761f9380a357a6fea81f564b15eecbf6
82d588161a8f8cd27c3031c779120ea4380791b9
/minjoo/Codility/CountDiv.py
3da57e4f57e8f12ec1b46b4f62a80179f7909600
[]
no_license
Yejin6911/Algorithm_Study
3aa02a7d07169382a78c049d1de8251a52da816c
98c968bfeed17ab6b62e3a077280e0310f08190a
refs/heads/master
2023-09-01T00:31:07.212413
2021-10-24T07:56:21
2021-10-24T07:56:21
345,009,057
1
1
null
2021-09-20T13:08:33
2021-03-06T04:57:34
Python
UTF-8
Python
false
false
131
py
import math

def solution(A, B, K):
    aq = A / K
    bq = B / K
    a = math.ceil(aq)
    b = math.floor(bq)
    return b - a + 1
[ "mjson1954@gmail.com" ]
mjson1954@gmail.com
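A quick hedged check of the CountDiv solution above. The helper names below are mine, not part of the original file: the count of multiples of K in [A, B] is floor(B/K) - ceil(A/K) + 1, and an integer-only variant avoids any float-rounding concern.

import math

def count_div(A, B, K):
    # count of multiples of K in [A, B]: floor(B/K) - ceil(A/K) + 1
    return math.floor(B / K) - math.ceil(A / K) + 1

def count_div_int(A, B, K):
    # same formula with integer arithmetic only; -(-A // K) is ceil(A / K)
    return B // K - -(-A // K) + 1

def count_div_brute(A, B, K):
    return sum(1 for i in range(A, B + 1) if i % K == 0)

if __name__ == "__main__":
    for A, B, K in [(6, 11, 2), (0, 0, 11), (11, 345, 17)]:
        assert count_div(A, B, K) == count_div_int(A, B, K) == count_div_brute(A, B, K)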
a8585cd8a0146d0882917a004252adfda2b8a5ce
46853d317dcb7784dc84504c86cb0719a4fe471b
/project/settings.py
5601662d65f43cacad8db8f4973817faa03c7486
[ "MIT" ]
permissive
naritotakizawa/django-inlineformset-sample
f9d41a2f4fee5db7f0b769ddb9696ae773aae61b
f4a3e4420b20ab677cb535f7778c48dbc32ea70b
refs/heads/master
2020-05-05T13:12:34.783845
2019-04-08T04:12:39
2019-04-08T04:12:39
180,066,167
1
1
null
null
null
null
UTF-8
Python
false
false
3,186
py
""" Django settings for project project. Generated by 'django-admin startproject' using Django 2.2. For more information on this file, see https://docs.djangoproject.com/en/2.2/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.2/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'g8w(n%puz6vux51au4zic+i*-xg-f8w8-3h*cr1@992s(b%!qs' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'app.apps.AppConfig', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'project.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'project.wsgi.application' # Database # https://docs.djangoproject.com/en/2.2/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.2/topics/i18n/ LANGUAGE_CODE = 'ja' TIME_ZONE = 'Asia/Tokyo' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.2/howto/static-files/ STATIC_URL = '/static/' MEDIA_URL = '/media/' MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
[ "toritoritorina@gmail.com" ]
toritoritorina@gmail.com
be1600bc1975659320667879170d3beef2f42c17
3b3585bb12becfe72af03814cec645b0c8e6c779
/satchmo/product/factories.py
c455d7c2e2cd3167f6f5ae085bd3097dcc955e76
[ "BSD-2-Clause" ]
permissive
juderino/jelly-roll
aac548073487511c5b935d9fb20c5a995c665b9b
ccac91bf3aab06fec4f83a7f9eabfa22d41b922a
refs/heads/master
2021-01-18T21:04:15.232998
2015-07-21T20:35:26
2015-07-21T20:35:26
36,597,803
0
0
null
2015-05-31T10:16:21
2015-05-31T10:16:21
null
UTF-8
Python
false
false
1,062
py
import factory from decimal import Decimal from django.contrib.sites.models import Site from django.template.defaultfilters import slugify from satchmo.product.models import ( ConfigurableProduct, Product, Price, ) class ProductFactory(factory.django.DjangoModelFactory): class Meta: model = Product site = factory.LazyAttribute(lambda a: Site.objects.get_current()) name = factory.Sequence(lambda n: 'Product {0}'.format(n)) slug = factory.LazyAttribute(lambda a: slugify(a.name)) @factory.post_generation def create_price(obj, create, extracted, **kwargs): PriceFactory(product=obj) class TaxableProductFactory(ProductFactory): taxable = True class ConfigurableProductFactory(factory.django.DjangoModelFactory): class Meta: model = ConfigurableProduct product = factory.SubFactory(ProductFactory) class PriceFactory(factory.django.DjangoModelFactory): class Meta: model = Price product = factory.SubFactory(TaxableProductFactory) price = Decimal("5.00")
[ "tony@ynottony.net" ]
tony@ynottony.net
a26207237f873a85d24db735c0e883f1fc52fa4a
3e8234adc26292085a78418d8475ca5fe83ef92a
/jupyterhub/sample_configs/cilogon/00-preamble.py
644223ca6b98635583d6caeec1e984103bd820af
[ "MIT" ]
permissive
womullan/jupyterlabdemo
5cef774de875606df5b2d759f1da9c1e766b5195
e8da8920627d32d2d6fa60e2083a88a630ad5209
refs/heads/master
2021-08-14T06:59:29.128152
2017-11-13T21:34:23
2017-11-13T21:34:23
110,708,695
0
0
null
2017-11-14T15:32:30
2017-11-14T15:32:30
null
UTF-8
Python
false
false
571
py
""" This is the JupyterHub configuration directory that LSST DM-SQuaRE uses. Different subconfiguration files in this directory do different things. The major components are the options form, the spawner, the authenticator, and the JupyterHub environment. These files are mapped into the JupyterHub configuration as a ConfigMap. Feel free to edit them to suit your needs. The location is specified in the deployment file /opt/lsst/software/jupyterhub/config/jupyterhub_config.py and the contents of /opt/lsst/software/jupyterhub/config/jupyterhub_config.d """
[ "athornton@gmail.com" ]
athornton@gmail.com
04b790b2a3106481a3b23156f43208a786867d2a
98b9521915fc87b963344e33ebfd779b02e9c33f
/virtual/bin/symilar
3b07a38d2af2742b5df49d3a7a393f6597f2422c
[ "MIT" ]
permissive
Jeffmusa/smartFridge
33f35483ddfd310f54b4aea7ccae4f6caf57c8bf
89c5dfca68ca80a36c062aa1bb195e6cf8f1a10f
refs/heads/master
2020-04-03T15:49:34.854722
2018-11-01T12:41:07
2018-11-01T12:41:07
155,379,225
0
0
MIT
2018-10-30T12:10:30
2018-10-30T12:10:29
null
UTF-8
Python
false
false
255
#!/home/vicklyne/smartFridge/virtual/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys

from pylint import run_symilar

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_symilar())
[ "jeffmusa@gmail.com" ]
jeffmusa@gmail.com
a31b97f409e4a93874a09654a33245ce9caf47fb
6531a7c162f83cfd5e7a3c4ea8a93dae58717ff4
/le_social/openid/middleware.py
518b4a19c9f0638872c23ef170a66d27aae3753d
[]
no_license
brutasse/django-le-social
4a9103a4f24abcc054cdaaa222af9f349f24a03b
abf8a3bc57cf5f25d83b9806406cef8f9b87da63
refs/heads/master
2021-07-11T18:00:14.420981
2016-10-17T12:06:00
2016-10-17T12:06:00
1,677,246
26
4
null
2016-09-25T18:05:17
2011-04-28T19:07:01
Python
UTF-8
Python
false
false
199
py
class OpenIDMiddleware(object):
    """
    Populates request.openid and request.openids
    """
    def process_request(self, request):
        request.openids = request.session.get('openids', [])
[ "buburno@gmail.com" ]
buburno@gmail.com
57db87c9797abc2707a464d3b187ceba70140495
f3b233e5053e28fa95c549017bd75a30456eb50c
/CDK2_input/L1Q/1Q-17_MD_NVT_rerun/set_1ns_equi_1.py
6e267d5a25d2c1095b57350c91596b9b1a2e1503
[]
no_license
AnguseZhang/Input_TI
ddf2ed40ff1c0aa24eea3275b83d4d405b50b820
50ada0833890be9e261c967d00948f998313cb60
refs/heads/master
2021-05-25T15:02:38.858785
2020-02-18T16:57:04
2020-02-18T16:57:04
null
0
0
null
null
null
null
UTF-8
Python
false
false
925
py
import os

dir = '/mnt/scratch/songlin3/run/CDK2/L1Q/MD_NVT_rerun/ti_one-step/1Q_17/'
filesdir = dir + 'files/'
temp_equiin = filesdir + 'temp_equi_1.in'
temp_pbs = filesdir + 'temp_1ns_equi_1.pbs'

lambd = [0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738,
         0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]

for j in lambd:
    os.system("rm -r %6.5f" %(j))
    os.system("mkdir %6.5f" %(j))
    os.chdir("%6.5f" %(j))
    os.system("rm *")
    workdir = dir + "%6.5f" %(j) + '/'

    #equiin
    eqin = workdir + "%6.5f_equi_1.in" %(j)
    os.system("cp %s %s" %(temp_equiin, eqin))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, eqin))

    #PBS
    pbs = workdir + "%6.5f_1ns_equi_1.pbs" %(j)
    os.system("cp %s %s" %(temp_pbs, pbs))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))

    #top
    os.system("cp ../1Q-17_merged.prmtop .")
    os.system("cp ../0.5_equi_0.rst .")

    #submit pbs
    os.system("qsub %s" %(pbs))

    os.chdir(dir)
[ "songlin3@msu.edu" ]
songlin3@msu.edu
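The script above templates equilibration inputs per lambda window by shelling out to cp and sed. A hedged pure-Python sketch of the same placeholder substitution follows; only the XXX placeholder convention is taken from the script, the file names are illustrative.

import os

def instantiate_template(template_path, out_path, lambda_value):
    # copy the template and replace the XXX placeholder, like `cp` followed by `sed -i 's/XXX/.../g'`
    with open(template_path) as src:
        text = src.read()
    with open(out_path, "w") as dst:
        dst.write(text.replace("XXX", "%6.5f" % lambda_value))

if __name__ == "__main__":
    os.makedirs("demo_lambda", exist_ok=True)
    with open("demo_template.in", "w") as fh:
        fh.write("clambda = XXX\n")
    instantiate_template("demo_template.in", "demo_lambda/0.00922_equi_1.in", 0.00922)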
25d5976832ad59276a925791f17f1af25df88294
869153aa415924529a3dc739df098c5d4bb83ce4
/17_contour/2_approxpoly.py
510e1441c6bea77de31e526f8100e5df2c9a617f
[]
no_license
ccwu0918/class_opencv_python
b8c4dfb64e3fa84ba4b79b96c31b98600ae4c829
fa15bcd23eb388359180a00ce096b79ec0bdc2ec
refs/heads/main
2023-06-28T05:54:05.010580
2021-08-05T12:53:09
2021-08-05T12:53:09
null
0
0
null
null
null
null
UTF-8
Python
false
false
876
py
import cv2

RECT, HEXAGON = 0, 1

frame = cv2.imread("./images/poly.png")
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
edged = cv2.Canny(gray, 50, 150)
edged = cv2.dilate(edged, None, iterations=1)

contours, hierarchy = cv2.findContours(edged, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

print('=== Before approximation')
print('Rectangle point count: {}'.format(len(contours[RECT])))
print('Hexagon point count: {}'.format(len(contours[HEXAGON])))

approx_rect = cv2.approxPolyDP(contours[RECT], 30, True)
approx_hex = cv2.approxPolyDP(contours[HEXAGON], 30, True)

print('=== After approximation')
print('Rectangle point count: {}'.format(len(approx_rect)))
print('Hexagon point count: {}'.format(len(approx_hex)))

cv2.drawContours(frame, [approx_rect], -1, (0, 0, 255), 5)
cv2.drawContours(frame, [approx_hex], -1, (0, 0, 255), 5)

cv2.imshow('frame', frame)
cv2.waitKey(0)
cv2.destroyAllWindows()
[ "shinjia168@gmail.com" ]
shinjia168@gmail.com
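The example above passes a fixed epsilon of 30 pixels to cv2.approxPolyDP. A common alternative, added here as a hedged sketch and not something the original script does, is to scale epsilon to each contour's perimeter via cv2.arcLength.

import cv2

def approx_relative(contour, fraction=0.02):
    # epsilon proportional to the contour perimeter instead of a fixed pixel value
    epsilon = fraction * cv2.arcLength(contour, True)
    return cv2.approxPolyDP(contour, epsilon, True)

# usage with the script above (illustrative): poly = approx_relative(contours[RECT])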
27aab3a9cc7d68c1f9995b8141f2835218ef5fa5
9aaa39f200ee6a14d7d432ef6a3ee9795163ebed
/Algorithm/Python/099. Recover Binary Search Tree.py
e52e96232688e101cc8945d37d8bb4ed53c5c8a1
[]
no_license
WuLC/LeetCode
47e1c351852d86c64595a083e7818ecde4131cb3
ee79d3437cf47b26a4bca0ec798dc54d7b623453
refs/heads/master
2023-07-07T18:29:29.110931
2023-07-02T04:31:00
2023-07-02T04:31:00
54,354,616
29
16
null
null
null
null
UTF-8
Python
false
false
1,163
py
# -*- coding: utf-8 -*-
# @Author: WuLC
# @Date:   2016-07-03 20:01:22
# @Last modified by:   WuLC
# @Last Modified time: 2016-07-03 20:01:47
# @Email: liangchaowu5@gmail.com

# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

# O(n) space
class Solution(object):
    def recoverTree(self, root):
        """
        :type root: TreeNode
        :rtype: void Do not return anything, modify root in-place instead.
        """
        result, p1, p2 = [], None, None
        self.inorderTraverse(root, result)
        for i in xrange(len(result)):
            if i+1 < len(result) and result[i].val > result[i+1].val and p1 == None:
                p1 = result[i]
            elif i-1 >= 0 and result[i].val < result[i-1].val:
                p2 = result[i]
        if p1 and p2:
            p1.val, p2.val = p2.val, p1.val

    def inorderTraverse(self, root, result):
        if root == None:
            return
        self.inorderTraverse(root.left, result)
        result.append(root)
        self.inorderTraverse(root.right, result)
[ "liangchaowu5@gmail.com" ]
liangchaowu5@gmail.com
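A small self-contained check of the idea used above (inorder traversal, then swap the first and last out-of-order nodes). This is a Python 3 sketch; the class and helper names are illustrative and not part of the original file.

class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

def inorder(node, out):
    if node:
        inorder(node.left, out)
        out.append(node)
        inorder(node.right, out)

# BST for [1, 2, 3] with 1 and 3 swapped by mistake
root = TreeNode(2)
root.left, root.right = TreeNode(3), TreeNode(1)

nodes = []
inorder(root, nodes)
# first dip from the left and last dip from the right are the swapped nodes
first = next(nodes[i] for i in range(len(nodes) - 1) if nodes[i].val > nodes[i + 1].val)
last = next(nodes[i] for i in range(len(nodes) - 1, 0, -1) if nodes[i].val < nodes[i - 1].val)
first.val, last.val = last.val, first.val

nodes = []
inorder(root, nodes)
assert [n.val for n in nodes] == [1, 2, 3]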
3dce37f70d0e342a89d689ef5496ace1bf7fb07e
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_014/ch47_2020_04_13_13_29_30_615294.py
5ec09dffb565c008c5f35752896118970f26a078
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
203
py
def estritamente_crescente(lista):
    i = 0
    lista_nova = []
    # compare neighbouring elements; stop at len(lista) - 1 so that
    # lista[i + 1] never goes out of range
    while i < len(lista) - 1:
        if lista[i + 1] > lista[i]:
            lista_nova.append(lista[i + 1])
        i += 1
    return lista_nova
[ "you@example.com" ]
you@example.com
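A tiny hedged usage check for the corrected helper above; the function is repeated here so the snippet runs on its own, and the expected outputs reflect my reading of the exercise (collect each element strictly greater than its predecessor).

def estritamente_crescente(lista):
    i = 0
    lista_nova = []
    while i < len(lista) - 1:
        if lista[i + 1] > lista[i]:
            lista_nova.append(lista[i + 1])
        i += 1
    return lista_nova

assert estritamente_crescente([1, 3, 2, 5]) == [3, 5]
assert estritamente_crescente([4]) == []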
0aaa9a9fc44222dce38e3bae24286b8df567512b
16e69196886254bc0fe9d8dc919ebcfa844f326a
/edc/subject/consent_old/forms/consent_catalogue_form.py
b9b24101a14789ffb046f04fa0cfa7ea7a8d8238
[]
no_license
botswana-harvard/edc
b54edc305e7f4f6b193b4498c59080a902a6aeee
4f75336ff572babd39d431185677a65bece9e524
refs/heads/master
2021-01-23T19:15:08.070350
2015-12-07T09:36:41
2015-12-07T09:36:41
35,820,838
0
0
null
null
null
null
UTF-8
Python
false
false
206
py
from edc.base.form.forms import BaseModelForm


class ConsentCatalogueForm (BaseModelForm):

    def clean(self, consent_instance=None):
        cleaned_data = self.cleaned_data
        return cleaned_data
[ "ew2789@gmail.com" ]
ew2789@gmail.com
a07c61061a32428c289217787b8315b3a1f0900a
ab2f1f18f64d9f2d49a4eea5c6a78ee1275662de
/trex_client/external_libs/scapy-2.3.1/python2/scapy/__init__.py
443b36753f7f543c88564c4b8abe0f9bd5fd84ad
[ "MIT" ]
permissive
alwye/trex-http-proxy
d09d7fabe60add4a445e5ceb71f5f2a6d209e0a0
e30f5af03aaaad518b5def6e1804c3741dd5d0c6
refs/heads/master
2021-08-16T22:32:56.643253
2021-06-08T19:52:35
2021-06-08T19:52:35
60,734,923
4
3
MIT
2021-06-08T19:39:18
2016-06-08T22:27:35
Python
UTF-8
Python
false
false
458
py
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) Philippe Biondi <phil@secdev.org>
## This program is published under a GPLv2 license

"""
Scapy: create, send, sniff, dissect and manipulate network packets.

Usable either from an interactive console or as a Python library.
http://www.secdev.org/projects/scapy
"""

if __name__ == "__main__":
    from scapy.main import interact
    interact()
[ "alzverev@cisco.com" ]
alzverev@cisco.com
9329443d1478f64209887aa32825726f941e27ba
1b36425f798f484eda964b10a5ad72b37b4da916
/posthog/migrations/0222_fix_deleted_primary_dashboards.py
ef8d4a6cc13d5483ef0aa1cd227c44747efb0d4c
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
dorucioclea/posthog
0408baa2a7ae98e5bea352c516f741ddc17c0a3e
8848981baf237117fb22d28af0770a0165881423
refs/heads/master
2023-01-23T11:01:57.942146
2023-01-13T09:03:00
2023-01-13T09:03:00
241,222,000
0
0
MIT
2020-02-17T22:34:37
2020-02-17T22:34:36
null
UTF-8
Python
false
false
2,831
py
import structlog from django.db import connection, migrations from django.db.models import Q # 0220_set_primary_dashboard set the primary dashboard for teams, but # it didn't account for deleted dashboards. This migration fixes projects # that have a primary dashboard set to a deleted dashboard. def fix_for_deleted_primary_dashboards(apps, _): logger = structlog.get_logger(__name__) logger.info("starting 0222_fix_deleted_primary_dashboards") Team = apps.get_model("posthog", "Team") expected_team_dashboards = [] with connection.cursor() as cursor: # Fetch a list of teams and the id of the dashboard that should be set as the primary dashboard # The primary dashboard should be the oldest pinned dashboard, if one exists # or the oldest dashboard, if no pinned dashboards exist # Notes: # - We use id as a proxy for dashboard age because dashboards use a simple incrementing id # - We ignore teams that already have a primary dashboard set # - Remove deleted dashboards cursor.execute( """ SELECT posthog_team.id, COALESCE( MIN( CASE WHEN posthog_dashboard.pinned THEN posthog_dashboard.id ELSE NULL END ), MIN( CASE WHEN NOT posthog_dashboard.pinned THEN posthog_dashboard.id ELSE NULL END ) ) AS primary_dashboard_id FROM posthog_team INNER JOIN posthog_dashboard ON posthog_dashboard.team_id = posthog_team.id WHERE NOT posthog_dashboard.deleted GROUP BY posthog_team.id """ ) expected_team_dashboards = cursor.fetchall() team_to_primary_dashboard = dict(expected_team_dashboards) teams_to_update = Team.objects.filter(Q(primary_dashboard__deleted=True) | Q(primary_dashboard__isnull=True)).only( "id", "primary_dashboard_id" ) for team in teams_to_update: team.primary_dashboard_id = team_to_primary_dashboard.get(team.id, None) Team.objects.bulk_update(teams_to_update, ["primary_dashboard_id"], batch_size=500) # Because of the nature of this migration, there's no way to reverse it without potentially destroying customer data # However, we still need a reverse function, so that we can rollback other migrations def reverse(apps, _): pass class Migration(migrations.Migration): atomic = False dependencies = [ ("posthog", "0221_add_activity_log_model"), ] operations = [migrations.RunPython(fix_for_deleted_primary_dashboards, reverse)]
[ "noreply@github.com" ]
dorucioclea.noreply@github.com
ce041d314e8b73e5a06532fc7a6ea5a036ac0a5b
6547747b93196f4f4d74248de81e943cdd253d3e
/projects/views.py
6e9a7b56a0beef4fee5c92c2dfd926ef3ebfe757
[]
no_license
cyberkuroneko/PCN_CODEV_project
1551fa2768dc1a08f452636c42191e380ea602f6
66e5004ad045ad9d83f880affbf6cfa60eab2706
refs/heads/master
2022-12-18T08:55:07.879527
2020-09-12T07:06:43
2020-09-12T07:06:43
294,866,626
0
0
null
2020-09-12T04:03:16
2020-09-12T04:03:16
null
UTF-8
Python
false
false
198
py
from django.views.generic import ListView

from .models import Projects

# Create your views here.
class ProjectsHomePageView(ListView):
    template_name = 'projects_home.html'
    model = Projects
[ "vagrant@vagrant.vm" ]
vagrant@vagrant.vm
de03e9aca5ea4d28dfd81d74ba7ff87890be777f
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03015/s226920110.py
5c2c5ed57fb8a4d7c4a1620946f7cbd6703e4a3e
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
224
py
MOD = 10**9+7
L = input()
N = len(L)

ans = 0
c = 0
for i in range(N):
    if L[i] == '1':
        ans += pow(3,N-i-1,MOD)*pow(2,c,MOD)
        ans %= MOD
        c += 1
ans += pow(2,c,MOD)
ans %= MOD
print(ans)
[ "66529651+Aastha2104@users.noreply.github.com" ]
66529651+Aastha2104@users.noreply.github.com
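The bit-by-bit counting above appears to solve a task of the form "count pairs (a, b) with a + b = a XOR b and a + b <= L, for L given in binary"; that reading is my assumption. A brute-force cross-check for small inputs, with the fast formula restated as a function:

MOD = 10**9 + 7

def fast(L):
    N = len(L)
    ans, c = 0, 0
    for i in range(N):
        if L[i] == '1':
            ans = (ans + pow(3, N - i - 1, MOD) * pow(2, c, MOD)) % MOD
            c += 1
    return (ans + pow(2, c, MOD)) % MOD

def brute(L):
    limit = int(L, 2)
    # a + b == a ^ b exactly when a and b share no set bits (a & b == 0)
    return sum(1 for a in range(limit + 1)
                 for b in range(limit + 1)
                 if a & b == 0 and a + b <= limit) % MOD

for L in ['1', '10', '110', '1011']:
    assert fast(L) == brute(L)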
3aa2ceaf8175c3dc9b6b0f5c72ae1b49efbe0cc4
597c4f48332251552a602122bb3d325bc43a9d7f
/chapter11_dynamic_programming/04_recursive_memo/02_memo_recursive.py
74fcfce3fb9665a82e4dde4dd6df3caee6c34d14
[]
no_license
Kyeongrok/python_algorithm
46de1909befc7b17766a57090a7036886361fd06
f0cdc221d7908f26572ae67b5c95b12ade007ccd
refs/heads/master
2023-07-11T03:23:05.782478
2023-06-22T06:32:31
2023-06-22T06:32:31
147,303,654
0
1
null
null
null
null
UTF-8
Python
false
false
300
py
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
# => [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
n = 10
memo = [0] * n
print(memo, len(memo))

def checkMemo(memo, n):
    # n counts the remaining slots; memo[n - 1] is the slot to fill
    if n == 0 or memo[n - 1] != 0:
        return memo
    memo[n - 1] = n - 1
    # pass the whole list down and return the recursive result
    return checkMemo(memo, n - 1)

result = checkMemo(memo, n)
print(result)
[ "oceanfog1@gmail.com" ]
oceanfog1@gmail.com
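The canonical use of the check-the-memo-before-recursing pattern above is memoized Fibonacci; a short hedged sketch, not part of the original lesson files:

def fib(n, memo=None):
    if memo is None:
        memo = {}
    if n in memo:            # reuse a previously computed value
        return memo[n]
    if n < 2:
        return n
    memo[n] = fib(n - 1, memo) + fib(n - 2, memo)
    return memo[n]

assert [fib(i) for i in range(10)] == [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]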
359abbbcf3e86f4744a33647da97ba7bd44c27b7
6be845bf70a8efaf390da28c811c52b35bf9e475
/windows/Resources/Python/Core/Lib/hotshot/log.py
a8655b0a99f0d251de073b6e4842948aa55050c8
[]
no_license
kyeremalprime/ms
228194910bf2ed314d0492bc423cc687144bb459
47eea098ec735b2173ff0d4e5c493cb8f04e705d
refs/heads/master
2020-12-30T15:54:17.843982
2017-05-14T07:32:01
2017-05-14T07:32:01
91,180,709
2
2
null
null
null
null
UTF-8
Python
false
false
5,436
py
# uncompyle6 version 2.9.10 # Python bytecode 2.7 (62211) # Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10) # [GCC 6.2.0 20161005] # Embedded file name: log.py import _hotshot import os.path import parser import symbol from _hotshot import WHAT_ENTER, WHAT_EXIT, WHAT_LINENO, WHAT_DEFINE_FILE, WHAT_DEFINE_FUNC, WHAT_ADD_INFO __all__ = [ 'LogReader', 'ENTER', 'EXIT', 'LINE'] ENTER = WHAT_ENTER EXIT = WHAT_EXIT LINE = WHAT_LINENO class LogReader: def __init__(self, logfn): self._filemap = {} self._funcmap = {} self._reader = _hotshot.logreader(logfn) self._nextitem = self._reader.next self._info = self._reader.info if 'current-directory' in self._info: self.cwd = self._info['current-directory'] else: self.cwd = None self._stack = [] self._append = self._stack.append self._pop = self._stack.pop return def close(self): self._reader.close() def fileno(self): """Return the file descriptor of the log reader's log file.""" return self._reader.fileno() def addinfo(self, key, value): """This method is called for each additional ADD_INFO record. This can be overridden by applications that want to receive these events. The default implementation does not need to be called by alternate implementations. The initial set of ADD_INFO records do not pass through this mechanism; this is only needed to receive notification when new values are added. Subclasses can inspect self._info after calling LogReader.__init__(). """ pass def get_filename(self, fileno): try: return self._filemap[fileno] except KeyError: raise ValueError, 'unknown fileno' def get_filenames(self): return self._filemap.values() def get_fileno(self, filename): filename = os.path.normcase(os.path.normpath(filename)) for fileno, name in self._filemap.items(): if name == filename: return fileno raise ValueError, 'unknown filename' def get_funcname(self, fileno, lineno): try: return self._funcmap[fileno, lineno] except KeyError: raise ValueError, 'unknown function location' def next(self, index=0): while 1: what, tdelta, fileno, lineno = self._nextitem() if what == WHAT_ENTER: filename, funcname = self._decode_location(fileno, lineno) t = (filename, lineno, funcname) self._append(t) return ( what, t, tdelta) if what == WHAT_EXIT: try: return ( what, self._pop(), tdelta) except IndexError: raise StopIteration if what == WHAT_LINENO: filename, firstlineno, funcname = self._stack[-1] return ( what, (filename, lineno, funcname), tdelta) if what == WHAT_DEFINE_FILE: filename = os.path.normcase(os.path.normpath(tdelta)) self._filemap[fileno] = filename elif what == WHAT_DEFINE_FUNC: filename = self._filemap[fileno] self._funcmap[fileno, lineno] = (filename, tdelta) elif what == WHAT_ADD_INFO: if tdelta == 'current-directory': self.cwd = lineno self.addinfo(tdelta, lineno) else: raise ValueError, 'unknown event type' def __iter__(self): return self def _decode_location(self, fileno, lineno): try: return self._funcmap[fileno, lineno] except KeyError: if self._loadfile(fileno): filename = funcname = None try: filename, funcname = self._funcmap[fileno, lineno] except KeyError: filename = self._filemap.get(fileno) funcname = None self._funcmap[fileno, lineno] = (filename, funcname) return ( filename, funcname) def _loadfile(self, fileno): try: filename = self._filemap[fileno] except KeyError: print 'Could not identify fileId', fileno return 1 if filename is None: return 1 else: absname = os.path.normcase(os.path.join(self.cwd, filename)) try: fp = open(absname) except IOError: return st = parser.suite(fp.read()) fp.close() funcdef = 
symbol.funcdef lambdef = symbol.lambdef stack = [ st.totuple(1)] while stack: tree = stack.pop() try: sym = tree[0] except (IndexError, TypeError): continue if sym == funcdef: self._funcmap[fileno, tree[2][2]] = ( filename, tree[2][1]) elif sym == lambdef: self._funcmap[fileno, tree[1][2]] = ( filename, '<lambda>') stack.extend(list(tree[1:])) return
[ "kyeremalprime@gmail.com" ]
kyeremalprime@gmail.com
620f06ddbdef017d8c36b66631587e5dfd47c971
e322d01555aebbcf9f23a68fa9160e75d4397969
/YouCompleteMe/third_party/ycmd/third_party/racerd/scripts/run_wrk_benchmarks.py
85985137ca89c3ec08b6f907c789b7fd518d499d
[ "Apache-2.0", "GPL-1.0-or-later", "GPL-3.0-only" ]
permissive
liqiang0330/i3ForDebian9
3b2bb5ce104f25cadab7a57cdc7096fadeb4a9ef
37a63bdaf18dab847e57d328cdcb678668ab6207
refs/heads/master
2022-10-25T13:30:26.723690
2018-03-17T05:22:55
2018-03-17T05:22:55
162,018,419
1
1
Apache-2.0
2022-10-08T20:30:23
2018-12-16T16:11:57
Python
UTF-8
Python
false
false
3,139
py
#!/usr/bin/env python # vim: ts=4 sw=4 et cc=80 tw=79 import subprocess import tempfile import os from os import path # Support overriding RACERD. Assume racerd is on path by default. if os.environ.has_key('RACERD'): RACERD = os.environ['RACERD'] else: RACERD = 'racerd' def get_scripts_dir(): """ Return absolute path of scripts directory """ return path.abspath(path.dirname(__file__)) def write_wrk_script(completion_string): """ Read the wrk template script, replace the completion string and column number, write a temporary file, and return the file path """ # Read the file tpl_file = open(get_wrk_template_path(), 'r') tpl = tpl_file.read() # Replace template params tpl = tpl.replace('[[[completion]]]', completion_string) tpl = tpl.replace('[[[column]]]', str(len(completion_string))) # Write temp file lua_file_fd, lua_file_path = tempfile.mkstemp(suffix='.lua', text=True) lua_file = os.fdopen(lua_file_fd, 'w') lua_file.write(tpl) lua_file.close() return lua_file_path def get_wrk_template_path(): """ A template lua script for wrk is used to generate completion requests. This function returns the path to the template script """ return path.join(get_scripts_dir(), 'wrk_completion_bench.lua.tpl') def start_racerd(): """ Spawn a racerd process on random port. Returns the process and host string. # Example (process, host) = start_racerd() """ process = subprocess.Popen( [RACERD, 'serve', '--secret-file=hah', '--port=0'], stdout = subprocess.PIPE ) racerd_listen_line = process.stdout.readline() racerd_host = racerd_listen_line.split(' ')[3] return (process, racerd_host.strip()) def run_wrk(script_path, host): """ Spawn a `wrk` process with 1 thread, 1 connection, and run for 1 second. These should probably be changed to environment variables in the future. """ base_url = 'http://' + host output = subprocess.check_output( ['wrk', '-t1', '-c1', '-d1s', '-s', script_path, base_url] ) lines = output.splitlines() # Line 3 in the second column by whitespace has the average request length. latency_line = lines[3] latency_avg = latency_line.split()[1] return latency_avg def print_report(completion_str, latency_avg): """ Print a report for given run """ print 'Completion for "' + completion_str + '" averaged ' + latency_avg def bench_completion(completion_str): """ Start racerd and run wrk for a given completion string """ # Write wrk script for this completion wrk_script_path = write_wrk_script(completion_str) # Start racerd and run wrk process, host = start_racerd() latency_avg = run_wrk(wrk_script_path, host) # Print a report print_report(completion_str, latency_avg) # cleanup process.terminate() os.remove(wrk_script_path) completions = [ 'use ::std::', 'use ::std::io::', 'use ::std::path::', 'use ::std::path::P' ] for c in completions: bench_completion(c)
[ "yuan705791627@gmail.com" ]
yuan705791627@gmail.com
933df71bf9732ccc8ea0ee553ba02320d8126c6e
9a8416deb357d9d0714c3b09dc2c70e8fffa05e7
/Collect/MOD12/__init__.py
5eac4f4f7da1cb1c2bcd13bde5033ead4cf5d375
[ "Apache-2.0" ]
permissive
ali1100/wa
055d0989bb414e443319ee996a8049b5051a9e74
700e5014533c45f38a245c3abdeacc537cb307bc
refs/heads/master
2021-05-13T12:03:02.161649
2018-09-19T06:51:41
2018-09-19T06:51:41
117,149,867
0
1
Apache-2.0
2018-09-19T06:38:19
2018-01-11T20:29:13
Python
UTF-8
Python
false
false
798
py
# -*- coding: utf-8 -*-
"""
Authors: Tim Hessels
         UNESCO-IHE 2016
Contact: t.hessels@unesco-ihe.org
Repository: https://github.com/wateraccounting/wa
Module: Collect/MOD12

Description:
This module downloads MOD12 LC data from http://e4ftl01.cr.usgs.gov/.
Use the MOD12.LC_yearly function to download and create yearly LC images
in Gtiff format. The data is available between 2001-01-01 till 2014-01-01.

Examples:
from wa.Collect import MOD12
MOD12.LC_yearly(Dir='C:/Temp3/', Startdate='2003-12-01', Enddate='2003-12-20',
                latlim=[41, 45], lonlim=[-8, -5])
"""

from .LC_yearly import main as LC_yearly

__all__ = ['LC_yearly']

__version__ = '0.1'
[ "timhessels@hotmail.com" ]
timhessels@hotmail.com
ba0f5ce1975bf1f8c6ce378a443255fcb0443019
79f42fd0de70f0fea931af610faeca3205fd54d4
/base_lib/ChartDirector/pythondemo_cgi/finance.py
932f4132e25cedc156bfb1d23e9cb27aa6704e2e
[ "IJG" ]
permissive
fanwen390922198/ceph_pressure_test
a900a6dc20473ae3ff1241188ed012d22de2eace
b6a5b6d324e935915090e791d9722d921f659b26
refs/heads/main
2021-08-27T16:26:57.500359
2021-06-02T05:18:39
2021-06-02T05:18:39
115,672,998
0
0
null
null
null
null
UTF-8
Python
false
false
2,765
py
#!/usr/bin/python from FinanceChart import * # Create a finance chart demo containing 100 days of data noOfDays = 100 # To compute moving averages starting from the first day, we need to get extra data points before # the first day extraDays = 30 # In this exammple, we use a random number generator utility to simulate the data. We set up the # random table to create 6 cols x (noOfDays + extraDays) rows, using 9 as the seed. rantable = RanTable(9, 6, noOfDays + extraDays) # Set the 1st col to be the timeStamp, starting from Sep 4, 2002, with each row representing one # day, and counting week days only (jump over Sat and Sun) rantable.setDateCol(0, chartTime(2002, 9, 4), 86400, 1) # Set the 2nd, 3rd, 4th and 5th columns to be high, low, open and close data. The open value starts # from 100, and the daily change is random from -5 to 5. rantable.setHLOCCols(1, 100, -5, 5) # Set the 6th column as the vol data from 5 to 25 million rantable.setCol(5, 50000000, 250000000) # Now we read the data from the table into arrays timeStamps = rantable.getCol(0) highData = rantable.getCol(1) lowData = rantable.getCol(2) openData = rantable.getCol(3) closeData = rantable.getCol(4) volData = rantable.getCol(5) # Create a FinanceChart object of width 640 pixels c = FinanceChart(640) # Add a title to the chart c.addTitle("Finance Chart Demonstration") # Set the data into the finance chart object c.setData(timeStamps, highData, lowData, openData, closeData, volData, extraDays) # Add the main chart with 240 pixels in height c.addMainChart(240) # Add a 5 period simple moving average to the main chart, using brown color c.addSimpleMovingAvg(5, 0x663300) # Add a 20 period simple moving average to the main chart, using purple color c.addSimpleMovingAvg(20, 0x9900ff) # Add HLOC symbols to the main chart, using green/red for up/down days c.addHLOC(0x008000, 0xcc0000) # Add 20 days bollinger band to the main chart, using light blue (9999ff) as the border and # semi-transparent blue (c06666ff) as the fill color c.addBollingerBand(20, 2, 0x9999ff, 0xc06666ff) # Add a 75 pixels volume bars sub-chart to the bottom of the main chart, using green/red/grey for # up/down/flat days c.addVolBars(75, 0x99ff99, 0xff9999, 0x808080) # Append a 14-days RSI indicator chart (75 pixels high) after the main chart. The main RSI line is # purple (800080). Set threshold region to +/- 20 (that is, RSI = 50 +/- 25). The upper/lower # threshold regions will be filled with red (ff0000)/blue (0000ff). c.addRSI(75, 14, 0x800080, 20, 0xff0000, 0x0000ff) # Append a 12-days momentum indicator chart (75 pixels high) using blue (0000ff) color. c.addMomentum(75, 12, 0x0000ff) # Output the chart print("Content-type: image/png\n") binaryPrint(c.makeChart2(PNG))
[ "fanwen@sscc.com" ]
fanwen@sscc.com
fa46f790dc1f979f39ed1aee253c88fa73042a0a
b8f160d2e8c09d5fdce7171924765b74edb97141
/page/gouwuche_page.py
c0b3a147f1fc40bfc6b6c43f52c26d943ad66baf
[]
no_license
danyubiao/mryx
7bf3e0f2bae42ef2fe4c9238569194c767b3f754
2325c7854c5625babdb51b5c5e40fa860813a400
refs/heads/master
2023-01-05T01:17:28.259444
2020-10-20T02:02:20
2020-10-20T02:02:20
304,574,484
0
6
null
2020-10-20T02:02:21
2020-10-16T09:04:01
Python
UTF-8
Python
false
false
933
py
# @Time : 2020/10/19 11:00
# @Author : 白光华
# @Email : 1277987895@gmail.com
# @File : gouwuche_page
# @Project : app测试
"""Page-object wrapper for the shopping-cart locators and actions."""
from appium import webdriver
from time import sleep
from appium.webdriver.common.mobileby import MobileBy as By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.common.exceptions import TimeoutException
from appium.webdriver.common.touch_action import TouchAction
from model.driver import driver
from page.base_page import BasePage


class GouWu(BasePage):
    # locator for the shopping-cart tab
    gouwuche_location = (By.ID, "cn.missfresh.application:id/cartTab")
    # locator for the element used in assertions
    duanyan_gouwu = (By.ID, "cn.missfresh.application:id/tv_delete")

    # tap the shopping-cart tab
    def click_dianji(self):
        self.driver.find_element(*self.gouwuche_location).click()

    def duanyan_gw(self):
        text = self.text(self.duanyan_gouwu)
        return text
[ "you@example.com" ]
you@example.com
fdc5040420ae8c8fccd6fcbcfd6571e629ec3fe2
3a1fea0fdd27baa6b63941f71b29eb04061678c6
/src/ch08/instructions/references/ArrayLength.py
1e62ebf2864f856112a9c4c1871c01f914b060df
[]
no_license
sumerzhang/JVMByPython
56a7a896e43b7a5020559c0740ebe61d608a9f2a
1554cf62f47a2c6eb10fe09c7216518416bb65bc
refs/heads/master
2022-12-02T17:21:11.020486
2020-08-18T06:57:10
2020-08-18T06:57:10
null
0
0
null
null
null
null
UTF-8
Python
false
false
792
py
#!/usr/bin/env python
# encoding: utf-8
"""
@author: HuRuiFeng
@file: ArrayLength.py
@time: 2019/9/17 21:31
@desc: The arraylength instruction fetches the length of an array. It takes a
       single operand: the array reference popped from the top of the operand stack.
"""
from ch08.instructions.base.Instruction import NoOperandsInstruction
from ch08.rtda.Frame import Frame


class ARRAY_LENGTH(NoOperandsInstruction):
    def execute(self, frame: Frame):
        stack = frame.operand_stack
        arr_ref = stack.pop_ref()
        # if the array reference is null, raise a NullPointerException
        if arr_ref is None:
            raise RuntimeError("java.lang.NullPointerException")
        # otherwise fetch the array length and push it onto the operand stack
        arr_len = arr_ref.array_length()
        stack.push_numeric(arr_len)
[ "huruifeng1202@163.com" ]
huruifeng1202@163.com
38475fa87788a196aa49972f1a1cf47a600c69cf
000c243b4c30bd089867f73ca1bcfede1c3ef801
/catkin_ws/build/mapviz/mapviz/cmake/mapviz-genmsg-context.py
a2f6c76c8fcf220d76e8e24263350aab52a67984
[]
no_license
dangkhoa1210/SLAM-AND-NAVIGATION-FOR-MOBILE-ROBOT-OUTDOOR-INDOOR-
b4d9bf2757d839d9766d512c2272731300320925
7273ea9e966353440d3993dcba112bc0a2262b98
refs/heads/master
2023-07-15T14:07:17.123812
2021-09-02T10:12:30
2021-09-02T10:12:30
402,361,868
0
0
null
null
null
null
UTF-8
Python
false
false
606
py
# generated from genmsg/cmake/pkg-genmsg.context.in

messages_str = ""
services_str = "/home/khoa/catkin_ws/src/mapviz/mapviz/srv/AddMapvizDisplay.srv"
pkg_name = "mapviz"
dependencies_str = "marti_common_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "marti_common_msgs;/home/khoa/catkin_ws/src/marti_messages/marti_common_msgs/msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python2"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
[ "dangkhoaphamdang1210@gmail.com" ]
dangkhoaphamdang1210@gmail.com
905e03a56f2de81f5788eb73a86993424cd54536
4a9e5f6f2bd6f8768533bc250b7f0bb8efb9620c
/hackerearth/ALGO_Semi/A.py
ca9b80f57b8303df7bdccf36bc10c081d92073c9
[ "MIT" ]
permissive
akshaynagpal/competitive-programming
28c9a6070d82ce9140a7173ddbb0080ac5b3e1fc
0a54f43e3e0f2135c9c952400c5a628244b667d1
refs/heads/master
2021-01-18T22:40:59.078826
2017-04-22T18:44:25
2017-04-22T18:44:25
41,152,292
0
0
null
null
null
null
UTF-8
Python
false
false
2,356
py
import itertools import sys num_tests = int(raw_input()) for i in range(num_tests): x, y, z = raw_input().split() a = int(x) b = int(y) q = int(z) q_list = [int(i) for i in raw_input().split()] add = a + b sub = max(a,b) - min(a,b) adder_list = [a,b,add,sub] for query in q_list: if query < min(a,b): sys.stdout.write('0') elif query in adder_list: sys.stdout.write('1') else: two_list = list(itertools.combinations_with_replacement(adder_list,2)) sum_two_list = [] for i in range(len(two_list)): sum_two_list.append(sum(two_list[i])) if query in sum_two_list: sys.stdout.write('1') else: three_list = list(itertools.combinations_with_replacement(adder_list,3)) sum_three_list = [] for i in range(len(three_list)): sum_three_list.append(sum(three_list[i])) if query in sum_three_list: sys.stdout.write('1') else: four_list = list(itertools.combinations_with_replacement(adder_list,4)) sum_four_list = [] for i in range(len(four_list)): sum_four_list.append(sum(four_list[i])) if query in sum_four_list: sys.stdout.write('1') else: five_list = list(itertools.combinations_with_replacement(adder_list,5)) sum_five_list = [] for i in range(len(five_list)): sum_five_list.append(sum(five_list[i])) if query in sum_five_list: sys.stdout.write('1') else: six_list = list(itertools.combinations_with_replacement(adder_list,6)) sum_six_list = [] for i in range(len(six_list)): sum_six_list.append(sum(six_list[i])) if query in sum_six_list: sys.stdout.write('1') else: sys.stdout.write('0') print ""
[ "akshay2626@gmail.com" ]
akshay2626@gmail.com
a9767ff8584097a72971d1e8644b417eb926a01d
78d23de227a4c9f2ee6eb422e379b913c06dfcb8
/LeetCode/846.py
384d03f6466286d2c3fad09a5cdd6413b61dcffb
[]
no_license
siddharthcurious/Pythonic3-Feel
df145293a3f1a7627d08c4bedd7e22dfed9892c0
898b402b7a65073d58c280589342fc8c156a5cb1
refs/heads/master
2020-03-25T05:07:42.372477
2019-09-12T06:26:45
2019-09-12T06:26:45
143,430,534
1
0
null
null
null
null
UTF-8
Python
false
false
746
py
from collections import Counter


class Solution:
    def isNStraightHand(self, hand, W):
        """
        :type hand: List[int]
        :type W: int
        :rtype: bool
        """
        L = len(hand)
        if L % W != 0:
            return False
        counter = Counter(hand)
        while counter:
            tmin = min(counter)
            for k in range(tmin, tmin + W):
                v = counter.get(k)
                if not v:
                    return False
                if v == 1:
                    del counter[k]
                else:
                    counter[k] = v - 1
        return True


if __name__ == "__main__":
    s = Solution()
    hand = [1, 2, 3, 6, 2, 3, 4, 7, 8]
    W = 3
    s.isNStraightHand(hand, W)
[ "sandhyalalkumar@gmail.com" ]
sandhyalalkumar@gmail.com
2fecfee104269eac54eb1ff97981413680f0aca5
60ca69e2a4c6b05e6df44007fd9e4a4ed4425f14
/beginner_contest/183/A.py
0c3a7cb3882bd72537672f3fac59c59a477676b3
[ "MIT" ]
permissive
FGtatsuro/myatcoder
12a9daafc88efbb60fc0cd8840e594500fc3ee55
25a3123be6a6311e7d1c25394987de3e35575ff4
refs/heads/master
2021-06-13T15:24:07.906742
2021-05-16T11:47:09
2021-05-16T11:47:09
195,441,531
0
0
MIT
2021-05-16T11:47:10
2019-07-05T16:47:58
Python
UTF-8
Python
false
false
131
py
import sys
input = sys.stdin.readline
sys.setrecursionlimit(10 ** 7)

x = int(input())
if x >= 0:
    print(x)
else:
    print(0)
[ "204491+FGtatsuro@users.noreply.github.com" ]
204491+FGtatsuro@users.noreply.github.com
03873d4ea28c94d0fce1511dcf5e24a1225b1d9c
3fa7b041caf5dfe8e10d2086bc3127859e610227
/python2/2/knock12.py
5f22d6b6bfe7e69c59c395befd593b2fb8ad9add
[]
no_license
himkt/nlp-100knock
693b71e1d3ef5d65276d2694f309a7e39b5e002c
3be136307271f0aa5f46aef366bac0c53513c5ca
refs/heads/master
2020-04-16T15:14:19.962351
2016-08-19T10:30:43
2016-08-19T10:30:43
32,132,964
31
4
null
2021-03-05T14:39:46
2015-03-13T06:02:54
Jupyter Notebook
UTF-8
Python
false
false
623
py
# -*- coding: utf-8 -*-
# @author = himkt
# 2015/07/16

'''
12. Save the 1st column to col1.txt and the 2nd column to col2.txt

Extract only the first column of each line and save it as col1.txt,
and only the second column as col2.txt.
Use the cut command to verify the result.
'''

f = open('../data/hightemp.txt', 'r')
f_col1 = open('./col1.txt', 'w')
f_col2 = open('./col2.txt', 'w')

for line in f:
    col1, col2, temperature, timestamp = line.split("\t")  # .decode('utf-8').split("\t")
    f_col1.write("%s\n" % col1)  # .encode('utf-8'))
    f_col2.write("%s\n" % col2)  # .encode('utf-8'))
[ "himkt@klis.tsukuba.ac.jp" ]
himkt@klis.tsukuba.ac.jp
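The exercise asks for verification with the cut command; a hedged sketch of running that check from Python (the subprocess call is my addition, and the paths assume the same layout as the script above).

import subprocess

# `cut -f 1` extracts the first tab-separated field of each line
expected_col1 = subprocess.run(['cut', '-f', '1', '../data/hightemp.txt'],
                               capture_output=True, text=True).stdout
with open('./col1.txt') as fh:
    assert fh.read() == expected_col1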
9de9ff070cc553ba6c7017ee27c8c3c37ec80577
aa0d55b2aa22da0af6545ce0da46d04dbdc3bffc
/cpgames/core/games/breakoutclone/modules/utils.py
f8df36ce3d52ef56483f54fbabdceb11d2d70859
[ "Apache-2.0" ]
permissive
cyanghsieh/Games
19fdad463cf12cbd503a399ed2700c0dae615714
07767df6d181b9eae89ce0a8b883d19afb450cc1
refs/heads/master
2023-05-11T11:11:09.777569
2023-02-22T14:28:18
2023-02-22T14:28:18
283,113,319
0
0
MIT
2020-07-28T05:49:13
2020-07-28T05:49:12
null
UTF-8
Python
false
false
551
py
'''
Function:
    Utility functions
Author:
    Charles
WeChat official account:
    Charles的皮卡丘
'''

'''Load a level file'''
def loadLevel(levelpath):
    brick_positions = []
    fp = open(levelpath, 'r', encoding='utf-8')
    y = -1
    for line in fp.readlines():
        if (not line.strip()) or (line.startswith('#')):
            continue
        else:
            y += 1
            x = -1
            for c in line:
                x += 1
                if c == 'B':
                    brick_positions.append([x, y])
    return brick_positions
[ "1159254961@qq.com" ]
1159254961@qq.com
f6e89855368091cfa463917ef3abf2fed8677d25
e540a64d8a23ee83b3d2a5842636a2ea7486a52f
/test_petcd/test_unit/client_get_64a46cd84bc94765b8a167e7f6582eab.py
31be52dca66015fffe2eeb5d46d462c3d7b5578f
[ "Apache-2.0" ]
permissive
alunduil/petcd
27066c735667d1de2308a165b5c8dfe59f3d2dc7
a9579259b6ecc12182cc57a0549a7618c0c70a27
refs/heads/master
2021-01-22T16:26:14.682404
2015-07-25T15:26:54
2015-07-25T15:26:54
46,867,580
0
0
null
2015-11-25T14:47:20
2015-11-25T14:47:19
null
UTF-8
Python
false
false
1,495
py
# Copyright 2015 Alex Brandt # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import functools from torment import fixtures from torment import helpers from test_petcd import test_helpers from test_petcd.test_unit import AsyncEtcdClientGetFixture expected = { 'key': '/foo', 'quorum': False, 'recursive': False, 'sorted': False, 'wait': False, 'wait_index': None, } arguments = [ { 'quorum': ( True, ), }, { 'recursive': ( True, ), }, { 'sorted': ( True, ), }, { 'wait_index': ( 0, 1, ), }, { 'wait': ( True, ), }, ] for combination in test_helpers.powerset(arguments): for subset in test_helpers.evert(combination): fixtures.register(globals(), ( AsyncEtcdClientGetFixture, ), { 'parameters': { 'kwargs': functools.reduce(helpers.extend, list(subset), { 'key': '/foo', }), }, 'expected': functools.reduce(helpers.extend, [ expected ] + list(subset), { 'key': '/foo', }), })
[ "alunduil@alunduil.com" ]
alunduil@alunduil.com
0d04e6bc9a229cb2cd1c8e3d668a8821c929b8d8
223b7276b54fee31f1510145ac4c4e13c6b6a3e0
/HearthStoneSpider/tools/remote_server.py
3cb8649cedbf55d55671b9f396de4bb25c38b519
[]
no_license
CoderEnko007/HearthStoneSpider
6c2d5e63fc6ee64e4838d5b77937da66972fcd82
7bafa78c10dcdf3d17dd33a0fbf9abfe7726284a
refs/heads/master
2023-02-18T11:05:16.644753
2022-09-18T07:01:58
2022-09-18T07:01:58
144,394,353
0
0
null
2023-02-07T21:53:30
2018-08-11T15:00:25
HTML
UTF-8
Python
false
false
2,973
py
import json import requests from urllib.parse import urljoin class HSServer(object): def __init__(self, path=None): # self.host = 'http://127.0.0.1:8001' self.host = 'http://47.98.187.217' self.url = urljoin(self.host, path) def set_url_path(self, path): self.url = urljoin(self.host, path) def list(self, params): response = requests.get(self.url, params=params) re_dict = json.loads(response.text) print(response.status_code, re_dict) if response.status_code == 200: res = dict(re_dict, **{ 'status_code': 200 }) return res else: return { 'status_code': response.status_code } def get(self, id): url = "{0}{1}/".format(self.url, str(id)) response = requests.get(url) re_dict = json.loads(response.text) print('get', re_dict) if response.status_code == 200: res = dict(re_dict, **{ 'status_code': 200 }) return res else: return { 'status_code': response.status_code } def post(self, data): headers = { 'Content-Type': 'application/json' } data = json.dumps(data) response = requests.post(self.url, headers=headers, data=data) re_dict = json.loads(response.text) print('post', re_dict) if response.status_code == 200: res = dict(re_dict, **{ 'status_code': 200 }) return res else: return { 'status_code': response.status_code } def put(self, id, data): url = "{0}{1}/".format(self.url, str(id)) response = requests.put(url, json=data) re_dict = json.loads(response.text) print('put', re_dict) if response.status_code == 200: res = dict(re_dict, **{ 'status_code': 200 }) return res else: return { 'status_code': response.status_code } def delete(self, id): url = "{0}{1}/".format(self.url, id) response = requests.delete(url) status = response.status_code print(status) return 'success' if status == '204' else 'failed' if __name__ == '__main__': url = 'http://127.0.0.1:8001/winrate/' server = HSServer(url) params = { 'rank_range': 'TOP_1000_LEGEND', 'faction': 'Hunter', 'archetype': 'Highlander Hunter', 'create_time': '2020-7-19' } server.list(params=params) data = { 'rank_range': 'TOP_1000_LEGEND', 'faction': 'Hunter', 'archetype': 'test', 'winrate': '99.99' # 'create_time': str(datetime.datetime.now()) } # server.put(id=117265, data=data) # server.delete(id=117264)
[ "yf381966217@163.com" ]
yf381966217@163.com
4cb20d9c4f026d16c353ff24d766471117d06273
d4eb113c44c86322b3811513a7286d176f106eb6
/experiments/convolutional_autoencoder_perturbations/rotated+scaled-precip/autoencoder.py
9f8144f085411691ebeb36578a3dcb59a2b63136
[]
no_license
philip-brohan/Machine-Learning
67a2eb780383b3436da4fef1d763f39d255ae696
dc53b9c336d5f12272257f327abe49dec436ea04
refs/heads/master
2021-03-27T12:33:07.518279
2020-04-30T19:38:02
2020-04-30T19:38:02
56,614,781
0
0
null
null
null
null
UTF-8
Python
false
false
3,609
py
#!/usr/bin/env python # Convolutional autoencoder for 20CR prmsl fields. # This version is all-convolutional - it uses strided convolutions # instead of max-pooling, and transpose convolution instead of # upsampling. # This version uses scaled input fields that have a size (79x159) that # match the strided convolution upscaling and downscaling. # It also works on tensors with a rotated pole - so the data boundary # is the equator - this limits the problems with boundary conditions. # This version looks at precip import os import tensorflow as tf import ML_Utilities import pickle import numpy # How many epochs to train for n_epochs=50 # Create TensorFlow Dataset object from the prepared training data (tr_data,n_steps) = ML_Utilities.dataset(purpose='training', source='rotated_pole/20CR2c', variable='prate') tr_data = tr_data.repeat(n_epochs) # Also produce a tuple (source,target) for model def to_model(ict): ict=tf.reshape(ict,[79,159,1]) return(ict,ict) tr_data = tr_data.map(to_model) tr_data = tr_data.batch(1) # Similar dataset from the prepared test data (tr_test,test_steps) = ML_Utilities.dataset(purpose='test', source='rotated_pole/20CR2c', variable='prate') tr_test = tr_test.repeat(n_epochs) tr_test = tr_test.map(to_model) tr_test = tr_test.batch(1) # Input placeholder original = tf.keras.layers.Input(shape=(79,159,1,)) # Encoding layers x = tf.keras.layers.Conv2D(16, (3, 3), padding='same')(original) x = tf.keras.layers.LeakyReLU()(x) x = tf.keras.layers.Conv2D(8, (3, 3), strides= (2,2), padding='valid')(x) x = tf.keras.layers.LeakyReLU()(x) x = tf.keras.layers.Conv2D(8, (3, 3), strides= (2,2), padding='valid')(x) x = tf.keras.layers.LeakyReLU()(x) x = tf.keras.layers.Conv2D(8, (3, 3), strides= (2,2), padding='valid')(x) x = tf.keras.layers.LeakyReLU()(x) encoded = x # Decoding layers x = tf.keras.layers.Conv2DTranspose(8, (3, 3), strides= (2,2), padding='valid')(encoded) x = tf.keras.layers.LeakyReLU()(x) x = tf.keras.layers.Conv2DTranspose(8, (3, 3), strides= (2,2), padding='valid')(x) x = tf.keras.layers.LeakyReLU()(x) x = tf.keras.layers.Conv2DTranspose(8, (3, 3), strides= (2,2), padding='valid')(x) x = tf.keras.layers.LeakyReLU()(x) decoded = tf.keras.layers.Conv2D(1, (3, 3), padding='same')(x) # Model relating original to output autoencoder = tf.keras.models.Model(original,decoded) # Choose a loss metric to minimise (RMS) # and an optimiser to use (adadelta) autoencoder.compile(optimizer='adadelta', loss='mean_squared_error') # Train the autoencoder history=autoencoder.fit(x=tr_data, epochs=n_epochs, steps_per_epoch=n_steps, validation_data=tr_test, validation_steps=test_steps, verbose=2) # One line per epoch # Save the model save_file=("%s/Machine-Learning-experiments/"+ "convolutional_autoencoder_perturbations/"+ "rotated+scaled_precip/saved_models/Epoch_%04d") % ( os.getenv('SCRATCH'),n_epochs) if not os.path.isdir(os.path.dirname(save_file)): os.makedirs(os.path.dirname(save_file)) tf.keras.models.save_model(autoencoder,save_file) history_file=("%s/Machine-Learning-experiments/"+ "convolutional_autoencoder_perturbations/"+ "rotated+scaled_precip/saved_models/history_to_%04d.pkl") % ( os.getenv('SCRATCH'),n_epochs) pickle.dump(history.history, open(history_file, "wb"))
[ "philip@brohan.org" ]
philip@brohan.org
ea167fb8ab273f422a67ad5fd1577b556d59819c
04a540847c1333c987a1957fd8d31197c594f6bb
/programmers/64063_1_2.py
bb87f123a44803709df076a015cef03e5372985c
[]
no_license
k8440009/Algorithm
fd148269b264b580876c7426e19dbe2425ddc1ab
a48eba0ac5c9f2e10f3c509ce9d349c8a1dc3f0c
refs/heads/master
2023-04-02T16:06:10.260768
2023-04-02T11:04:32
2023-04-02T11:04:32
200,506,643
0
0
null
null
null
null
UTF-8
Python
false
false
376
py
# Hotel room assignment (fails the efficiency tests)
# https://programmers.co.kr/learn/courses/30/lessons/64063


def solution(k, room_number):
    answer = []
    room = dict()
    for room_key in room_number:
        key = room_key
        while(key in room):
            key += 1
        room[key] = 1
        answer.append(key)
    #print(answer)
    return answer


solution(10, [1,3,4,1,3,1])
[ "k8440009@gmail.com" ]
k8440009@gmail.com
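The header notes the linear-scan version above fails the efficiency tests; the usual fix, added here as a hedged sketch rather than anything in the original file, is a union-find style "next free room" lookup with path compression.

import sys

def assign_rooms(k, room_number):
    sys.setrecursionlimit(10**6)
    parent = {}

    def find(room):
        # next free room at or after `room`, with path compression
        if room not in parent:
            return room
        parent[room] = find(parent[room])
        return parent[room]

    answer = []
    for want in room_number:
        got = find(want)
        answer.append(got)
        parent[got] = got + 1   # this room is now taken; point to the next one
    return answer

assert assign_rooms(10, [1, 3, 4, 1, 3, 1]) == [1, 3, 4, 2, 5, 6]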
1f7e64d2a7a01b64856c7764d77116c496b38347
4f4776eb69cbea9ee1c87a22732c5d778855c83a
/leetcode/Number_of_1_Bits.py
836df79c90ca983c9a66722751cf1ec02cb6fddf
[]
no_license
k4u5h4L/algorithms
4a0e694109b8aadd0e3b7a66d4c20692ecdef343
b66f43354792b1a6facff90990a7685f5ed36a68
refs/heads/main
2023-08-19T13:13:14.931456
2021-10-05T13:01:58
2021-10-05T13:01:58
383,174,341
1
0
null
null
null
null
UTF-8
Python
false
false
797
py
'''
Number of 1 Bits
Easy

Write a function that takes an unsigned integer and returns the number of '1' bits
it has (also known as the Hamming weight).

Note:
Note that in some languages, such as Java, there is no unsigned integer type.
In this case, the input will be given as a signed integer type. It should not
affect your implementation, as the integer's internal binary representation is
the same, whether it is signed or unsigned.
In Java, the compiler represents the signed integers using 2's complement
notation. Therefore, in Example 3, the input represents the signed integer. -3.
'''


class Solution:
    def hammingWeight(self, n: int) -> int:
        res = []
        while n > 0:
            res.append(n % 2)
            n = int(n/2)
        return res.count(1)
[ "kaushal.v.bhat@gmail.com" ]
kaushal.v.bhat@gmail.com
a6f1ee88d3be933a37a62eb2d070d8032201a492
24f664aa2344d4f5d5e7b048ac4e85231715c4c8
/datasets/me_db/tests/integration_tests/conftest.py
205f1f009297ae600813291058172b6b97e9018b
[ "MIT" ]
permissive
speycode/clfuzz
79320655e879d1e0a06a481e8ec2e293c7c10db7
f2a96cf84a7971f70cb982c07b84207db407b3eb
refs/heads/master
2020-12-05T13:44:55.486419
2020-01-03T14:14:03
2020-01-03T14:15:31
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,358
py
# Copyright 2018, 2019 Chris Cummins <chrisc.101@gmail.com> # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of # the Software, and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """Pytest fixtures for me.db tests.""" import tempfile from datasets.me_db import me_db from labm8.py import app from labm8.py import bazelutil from labm8.py import test FLAGS = app.FLAGS app.DEFINE_string( "integration_tests_inbox", None, "If set, this sets the inbox path to be used by the " "integration tests. This overrides the default in " "//datasets/me_db/integration_tests/inbox.", ) TEST_INBOX_PATH = bazelutil.DataPath("phd/datasets/me_db/tests/test_inbox") @test.Fixture(scope="function") def mutable_db() -> me_db.Database: """Returns a populated database for the scope of the function.""" with tempfile.TemporaryDirectory(prefix="phd_") as d: db = me_db.Database(f"sqlite:///{d}/me.db") db.ImportMeasurementsFromInboxImporters(TEST_INBOX_PATH) yield db @test.Fixture(scope="session") def db() -> me_db.Database: """Returns a populated database that is reused for all tests. DO NOT MODIFY THE TEST DATABASE. This will break other tests. For a test that modifies the database, use the `mutable_db` fixture. """ with tempfile.TemporaryDirectory(prefix="phd_") as d: db = me_db.Database(f"sqlite:///{d}/me.db") db.ImportMeasurementsFromInboxImporters(TEST_INBOX_PATH) yield db
[ "chrisc.101@gmail.com" ]
chrisc.101@gmail.com
7def0c19f5d2bf3212b2a2cf63953f42fc2debb6
2fb7188adb6f52023485d8a775e4ecbf96331b81
/PixelGeometry/calculateRMS.py
eaba86ec104f0376ab2dd280fa35e239332310ec
[]
no_license
amassiro/dEdxCalibration
f5c580247041b4477a118772bbda9b924ae1103a
837eb440711983ddd0a283ec0cf3582c4a454d24
refs/heads/master
2020-03-24T03:31:45.099672
2019-11-08T13:09:40
2019-11-08T13:09:40
142,422,991
0
0
null
null
null
null
UTF-8
Python
false
false
3,185
py
import os # example of file name: # plots_run/cc_add_layer_3_eta_5_BPIX.root list_files = [] for root, dirs, files in os.walk("plots_run/"): for filename in files: if "cc_add_" in filename and ".root" in filename : #print(filename) list_files.append(filename) #print "list_files = " , list_files eta_edges = {} eta_edges[0] = [0.0, 0.3] eta_edges[1] = [0.3, 0.6] eta_edges[2] = [0.6, 1.0] eta_edges[3] = [1.0, 1.3] eta_edges[4] = [1.3, 1.6] eta_edges[5] = [1.6, 2.1] eta_edges[6] = [2.1, 2.5] print " writing results into : ", "smear_for_cmssw.txt" file_out = open("smear_for_cmssw.txt","w") file_out.write("# " + "\n") file_out.write("# pix layerorside etaMin etaMax value iedge " + "\n") file_out.write("# " + "\n") import ROOT for name_file in list_files: f = ROOT.TFile.Open("plots_run/" + name_file) canvas_name = name_file.replace('.root', '') canvas = f.Get(canvas_name) #print " histograms = ", canvas.GetListOfPrimitives() histograms = canvas.GetListOfPrimitives() rms_mc = 0.0 rms_data = 0.0 scale_rms = -1 ilayer = -1 iedge = -1 isBPIX = -1 for histo in histograms: #print histo.GetName() if "_mc" in histo.GetName(): # this is the MC histogram if histo.GetEntries() > 10: rms_mc = histo.GetRMS() if "_data" in histo.GetName(): if histo.GetEntries() > 10: rms_data = histo.GetRMS() # example name histogram: h_ilayer_1__iEdge_2__dedxById_BPIX_data name_histo = histo.GetName() " ilayer_1 --> 1" position_layer = name_histo.find("ilayer") if position_layer != -1 : #print " name_histo = ", name_histo, " --> position_layer = ", position_layer ilayer = name_histo[position_layer+7] #print " name_histo = ", name_histo, " --> ", ilayer position_edge = name_histo.find("iEdge") if position_edge != -1 : iedge = name_histo[position_edge+6] isBPIX = name_histo.find("BPIX") if isBPIX == -1 : isBPIX = 2 # FPIX = 2 ! else : isBPIX = 1 # pix = 1 (BPIX), 2 (FPIX) if rms_mc != 0 and rms_data != 0: scale_rms = rms_data/rms_mc - 1.0 print " file = ", name_file, " name_histo = ", name_histo, " --> data = ", rms_data, " ; mc = ", rms_mc #print " name_histo = ", name_histo, " --> ", ilayer, " ; ", iedge, " : ", isBPIX #print " ---> " + " " + str(isBPIX) + " " + str(ilayer) + " " + str(eta_edges[iedge][0]) + " " + str(eta_edges[iedge][1]) + " " + str(scale_rms) + " " + str(iedge) + "\n" #print " ---> iedge = ", iedge, " => ", eta_edges[iedge] #print " iedge = ", iedge #print eta_edges[int(iedge)][1] file_out.write(" " + str(isBPIX) + " " + str(ilayer) + " " + str(eta_edges[int(iedge)][0]) + " " + str(eta_edges[int(iedge)][1]) + " " + str(scale_rms) + " " + str(iedge) + "\n") #print eta_edges #print eta_edges[0] #print eta_edges[0][1] #print str(eta_edges[0][1]) # pix layerorside etaMin etaMax value # # 1 1 0.000 0.300 0.01 # file_out.close()
[ "massironi.andrea@gmail.com" ]
massironi.andrea@gmail.com
7a567594c1e7adc2858272c848dcbaa3f69ad25b
a9c3db07c29a46baf4f88afe555564ed0d8dbf2e
/src/1018-largest-perimeter-triangle/largest-perimeter-triangle.py
0a77f6046ef72192ef87f5b340d5bb1cb593a6eb
[]
no_license
HLNN/leetcode
86d2f5b390be9edfceadd55f68d94c78bc8b7644
35010d67341e6038ae4ddffb4beba4a9dba05d2a
refs/heads/master
2023-03-13T16:44:58.901326
2023-03-03T00:01:05
2023-03-03T00:01:05
165,402,662
6
6
null
null
null
null
UTF-8
Python
false
false
1,073
py
# Given an integer array nums, return the largest perimeter of a triangle with
# a non-zero area, formed from three of these lengths. If it is impossible to
# form any triangle of a non-zero area, return 0.
#
# Example 1:
#
# Input: nums = [2,1,2]
# Output: 5
# Explanation: You can form a triangle with three side lengths: 1, 2, and 2.
#
# Example 2:
#
# Input: nums = [1,2,1,10]
# Output: 0
# Explanation:
# You cannot use the side lengths 1, 1, and 2 to form a triangle.
# You cannot use the side lengths 1, 1, and 10 to form a triangle.
# You cannot use the side lengths 1, 2, and 10 to form a triangle.
# As we cannot use any three side lengths to form a triangle of non-zero area, we return 0.
#
# Constraints:
#
# 3 <= nums.length <= 10**4
# 1 <= nums[i] <= 10**6


class Solution:
    def largestPerimeter(self, nums: List[int]) -> int:
        nums.sort()
        for i in range(len(nums) - 3, -1, -1):
            if nums[i] + nums[i + 1] > nums[i + 2]:
                return nums[i] + nums[i + 1] + nums[i + 2]
        return 0
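# Illustrative usage sketch (not part of the original solution). On LeetCode
# the judging harness supplies the `List` annotation; to run this file on its
# own you would also need `from typing import List` above the class. With that
# in place, the examples from the problem statement give:
#
# if __name__ == "__main__":
#     s = Solution()
#     print(s.largestPerimeter([2, 1, 2]))      # 5  (Example 1)
#     print(s.largestPerimeter([1, 2, 1, 10]))  # 0  (Example 2)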
[ "Huangln555@gmail.com" ]
Huangln555@gmail.com
a40ca002ab664eb7d143fa43e4065c311d103456
8c7be58d2ddb6d1d10b93cb1139d2376be9fd07d
/naive_chatbot/predictor_pb2.py
c5bb06e1718c3cad08cebb4bfb529397d2cb7a66
[]
no_license
zhouziqunzzq/MiraiGo-DD-naive-chatbot
c47e630cf38dd1b67ec4ccda2769a5db4a49608c
a69143c306a7e91e9262cec82ea1a8af8afecb45
refs/heads/main
2023-04-27T19:43:57.272470
2021-05-16T23:40:53
2021-05-16T23:40:53
343,130,542
1
0
null
null
null
null
UTF-8
Python
false
true
7,834
py
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: predictor.proto """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='predictor.proto', package='', syntax='proto2', serialized_options=None, create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x0fpredictor.proto\"o\n\x0ePredictRequest\x12\x0b\n\x03msg\x18\x01 \x02(\t\x12\x17\n\x0cn_prediction\x18\x02 \x01(\x03:\x01\x35\x12 \n\x13time_offset_seconds\x18\x03 \x01(\x03:\x03\x33\x30\x30\x12\x15\n\nsim_cutoff\x18\x04 \x01(\x02:\x01\x30\"l\n\x0cPredictReply\x12.\n\x06result\x18\x01 \x03(\x0b\x32\x1e.PredictReply.PredictReplyElem\x1a,\n\x10PredictReplyElem\x12\x0b\n\x03msg\x18\x01 \x02(\t\x12\x0b\n\x03sim\x18\x02 \x02(\x02\x32?\n\rChatPredictor\x12.\n\nPredictOne\x12\x0f.PredictRequest\x1a\r.PredictReply\"\x00' ) _PREDICTREQUEST = _descriptor.Descriptor( name='PredictRequest', full_name='PredictRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='msg', full_name='PredictRequest.msg', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='n_prediction', full_name='PredictRequest.n_prediction', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=True, default_value=5, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='time_offset_seconds', full_name='PredictRequest.time_offset_seconds', index=2, number=3, type=3, cpp_type=2, label=1, has_default_value=True, default_value=300, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='sim_cutoff', full_name='PredictRequest.sim_cutoff', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=19, serialized_end=130, ) _PREDICTREPLY_PREDICTREPLYELEM = _descriptor.Descriptor( name='PredictReplyElem', full_name='PredictReply.PredictReplyElem', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='msg', full_name='PredictReply.PredictReplyElem.msg', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='sim', full_name='PredictReply.PredictReplyElem.sim', index=1, number=2, type=2, cpp_type=6, label=2, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=196, serialized_end=240, ) _PREDICTREPLY = _descriptor.Descriptor( name='PredictReply', full_name='PredictReply', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='PredictReply.result', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[_PREDICTREPLY_PREDICTREPLYELEM, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=132, serialized_end=240, ) _PREDICTREPLY_PREDICTREPLYELEM.containing_type = _PREDICTREPLY _PREDICTREPLY.fields_by_name['result'].message_type = _PREDICTREPLY_PREDICTREPLYELEM DESCRIPTOR.message_types_by_name['PredictRequest'] = _PREDICTREQUEST DESCRIPTOR.message_types_by_name['PredictReply'] = _PREDICTREPLY _sym_db.RegisterFileDescriptor(DESCRIPTOR) PredictRequest = _reflection.GeneratedProtocolMessageType('PredictRequest', (_message.Message,), { 'DESCRIPTOR': _PREDICTREQUEST, '__module__': 'predictor_pb2' # @@protoc_insertion_point(class_scope:PredictRequest) }) _sym_db.RegisterMessage(PredictRequest) PredictReply = _reflection.GeneratedProtocolMessageType('PredictReply', (_message.Message,), { 'PredictReplyElem': _reflection.GeneratedProtocolMessageType('PredictReplyElem', (_message.Message,), { 'DESCRIPTOR': _PREDICTREPLY_PREDICTREPLYELEM, '__module__': 'predictor_pb2' # @@protoc_insertion_point(class_scope:PredictReply.PredictReplyElem) }) , 'DESCRIPTOR': _PREDICTREPLY, '__module__': 'predictor_pb2' # @@protoc_insertion_point(class_scope:PredictReply) }) _sym_db.RegisterMessage(PredictReply) _sym_db.RegisterMessage(PredictReply.PredictReplyElem) _CHATPREDICTOR = _descriptor.ServiceDescriptor( name='ChatPredictor', full_name='ChatPredictor', file=DESCRIPTOR, index=0, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=242, serialized_end=305, methods=[ _descriptor.MethodDescriptor( name='PredictOne', full_name='ChatPredictor.PredictOne', index=0, containing_service=None, input_type=_PREDICTREQUEST, output_type=_PREDICTREPLY, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_CHATPREDICTOR) DESCRIPTOR.services_by_name['ChatPredictor'] = _CHATPREDICTOR # @@protoc_insertion_point(module_scope)
[ "zhouziqun@cool2645.com" ]
zhouziqun@cool2645.com
6ec8eb0c7925f1935b91bc24f5fc9ba37f80f94e
3a4549470cb0e6e55c98522ba08ce629d60960ea
/froide/foirequest/migrations/0014_auto_20180111_0738.py
6ac2c84671c22abd5063ab69c08f7dae915f3e1f
[ "MIT" ]
permissive
lanmarc77/froide
4e28d3e33017b3e776a7eb13d63c7b71bdb3bc68
bddc8bb27c8a7c2a959003dda724194948bc381a
refs/heads/main
2023-03-17T03:02:01.277465
2021-03-06T16:37:26
2021-03-06T16:37:26
345,137,125
0
0
MIT
2021-03-06T16:13:09
2021-03-06T16:13:09
null
UTF-8
Python
false
false
779
py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-01-11 06:38
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('team', '0001_initial'),
        ('foirequest', '0013_auto_20171220_1718'),
    ]

    operations = [
        migrations.AddField(
            model_name='foiproject',
            name='team',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='team.Team', verbose_name='Team'),
        ),
        migrations.AlterField(
            model_name='foiproject',
            name='last_update',
            field=models.DateTimeField(auto_now=True),
        ),
    ]
[ "mail@stefanwehrmeyer.com" ]
mail@stefanwehrmeyer.com
a9f89df989abb980a1262104c411c0d54f5ed4f1
f75609812d20d46a9f94ee0cfdb91c321d26b63d
/_python/python_OOP/ReverseList.py
bce461e45a9c3d243043de9f60d6131bff3880a7
[]
no_license
IanAranha/Python2021
eff47a20451f61b144b17f48321a7b06308aadca
d9769b8b387b77753b77f6efe3a9a270a1f158d3
refs/heads/main
2023-04-02T08:20:24.382913
2021-04-10T22:27:10
2021-04-10T22:27:10
345,918,060
0
0
null
null
null
null
UTF-8
Python
false
false
220
py
def reverseList(list):
    mid = int(len(list) / 2)
    for i in range(0, mid):
        print("Swapping", list[i], list[len(list) - 1 - i])
        list[i], list[len(list) - 1 - i] = list[len(list) - 1 - i], list[i]
    return list
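# Minimal usage example (added for illustration; not part of the original
# exercise). reverseList swaps elements in place and returns the same list:
if __name__ == "__main__":
    print(reverseList([1, 2, 3, 4, 5]))  # prints each swap, then [5, 4, 3, 2, 1]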
[ "ianorama@gmail.com" ]
ianorama@gmail.com
9a1921c0fab1929c7b32e8be96af2137df25ab32
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_014/ch21_2020_03_04_16_45_58_943718.py
00d24fdcc27af6720298b24527260aeea05098a5
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
135
py
a = input('Quantos dias?')
b = input('Quantas horas?')
c = input('Quantos minutos?')
d = input('Quantos segundos?')
print(a, b, c, d)
[ "you@example.com" ]
you@example.com
271dec9f5adcad66f6210b0577ceeb4e167480f9
3474b315da3cc5cb3f7823f19a18b63a8da6a526
/scratch/KRAMS/src/apps/scratch/jakub/xdomain/examples/2D_sep_f.py
1084bb4e78de58c3c36540538e88662be0ba3cde
[]
no_license
h4ck3rm1k3/scratch
8df97462f696bc2be00f1e58232e1cd915f0fafd
0a114a41b0d1e9b2d68dbe7af7cf34db11512539
refs/heads/master
2021-01-21T15:31:38.718039
2013-09-19T10:48:24
2013-09-19T10:48:24
29,173,525
0
0
null
2015-01-13T04:58:57
2015-01-13T04:58:56
null
UTF-8
Python
false
false
4,901
py
''' Created on Sep 23, 2009 @author: jakub ''' if __name__ == '__main__': from ibvpy.api import FEDomain, FERefinementGrid, FEGrid, TStepper as TS,\ BCDofGroup,BCDof, RTraceDomainListField from ibvpy.core.tloop import TLoop, TLine from ibvpy.mesh.xfe_subdomain import XFESubDomain from ibvpy.mats.mats2D.mats2D_elastic.mats2D_elastic import MATS2DElastic from ibvpy.fets.fets2D.fets2D4q import FETS2D4Q from ibvpy.fets.fets2D.fets2D4q8u import FETS2D4Q8U from ibvpy.fets.fets2D.fets2D4q9u import FETS2D4Q9U from ibvpy.fets.fets2D.fets2D9q import FETS2D9Q from ibvpy.fets.fets_ls.fets_crack import FETSCrack fets_eval = FETS2D4Q(mats_eval = MATS2DElastic(E= 1.,nu=0.)) xfets_eval = FETSCrack( parent_fets = fets_eval, int_order = 5 ) # Discretization fe_domain = FEDomain() fe_level1 = FERefinementGrid( domain = fe_domain, fets_eval = fets_eval ) fe_grid1 = FEGrid( coord_max = (1.,1.,0.), shape = (1,1), fets_eval = fets_eval, level = fe_level1 ) # fe_grid1.deactivate( (1,0) ) # fe_grid1.deactivate( (1,1) ) fe_xdomain = XFESubDomain( domain = fe_domain, fets_eval = xfets_eval, #fe_grid_idx_slice = fe_grid1[1,0], fe_grid_slice = fe_grid1['X - 0.5 '] ) ts = TS( dof_resultants = True, sdomain = fe_domain, bcond_list = [BCDofGroup(var='u', value = 0., dims = [0,1], get_dof_method = fe_grid1.get_left_dofs ), BCDofGroup(var='u', value = 0., dims = [0], get_dof_method = fe_grid1.get_right_dofs ), BCDofGroup(var='u', value = 0., dims = [1], get_dof_method = fe_grid1.get_bottom_right_dofs ), BCDofGroup(var='f', value = -.2, dims = [1], get_dof_method = fe_grid1.get_top_right_dofs ), BCDof(var='u', value = 0., dof = 9 ), BCDof(var='f', value = -0.4/3, dof = 11 ), ], rtrace_list = [ # RTraceGraph(name = 'Fi,right over u_right (iteration)' , # var_y = 'F_int', idx_y = 0, # var_x = 'U_k', idx_x = 1), RTraceDomainListField(name = 'Stress' , var = 'sig_app', idx = 0, warp = True ), RTraceDomainListField(name = 'Displacement' , var = 'u', idx = 0, warp = True), # RTraceDomainField(name = 'N0' , # var = 'N_mtx', idx = 0, # record_on = 'update') ] ) # # # Add the time-loop control tloop = TLoop( tstepper = ts, # tolerance = 1e-4, KMAX = 4, # debug = True, RESETMAX = 2, tline = TLine( min = 0.0, step = 1., max = 1.0 )) #print "elements ",fe_xdomain.elements[0] fe_xdomain.deactivate_sliced_elems() print 'parent elems ',fe_xdomain.fe_grid_slice.elems print 'parent dofs ',fe_xdomain.fe_grid_slice.dofs print "dofmap ",fe_xdomain.elem_dof_map print "ls_values ", fe_xdomain.dots.dof_node_ls_values print 'intersection points ',fe_xdomain.fe_grid_slice.r_i print "triangles ", fe_xdomain.dots.rt_triangles print "vtk points ", fe_xdomain.dots.vtk_X print "vtk data ", fe_xdomain.dots.get_vtk_cell_data('blabla',0,0) print 'ip_triangles', fe_xdomain.dots.int_division print 'ip_coords', fe_xdomain.dots.ip_coords print 'ip_weigths', fe_xdomain.dots.ip_weights print 'ip_offset', fe_xdomain.dots.ip_offset print 'ip_X_coords', fe_xdomain.dots.ip_X print 'ip_ls', fe_xdomain.dots.ip_ls_values print 'vtk_ls', fe_xdomain.dots.vtk_ls_values print 'J_det ',fe_xdomain.dots.J_det_grid print tloop.eval() # #ts.setup() from ibvpy.plugins.ibvpy_app import IBVPyApp ibvpy_app = IBVPyApp( ibv_resource = ts ) ibvpy_app.main()
[ "Axel@Axel-Pc" ]
Axel@Axel-Pc
cff565e39aba802e39091d4bd9ce442f20ad1c56
87ad372898e793faf1ad89f4bb3b6e84a8002131
/tests/unit/FundManager/test_set_next_time_lock.py
8ce922c052d511d6829b68fa62421a68b8fab166
[]
no_license
atsignhandle/unagii-vault-v2
6a9a96c11d34257bc3fdae57455ec3b2f9c0029a
548f715f34329eb5abebffe40acbeb56a31cb6f3
refs/heads/main
2023-08-27T00:59:48.080152
2021-09-28T02:47:36
2021-09-28T02:47:36
413,448,825
0
0
null
2021-10-04T14:07:37
2021-10-04T14:07:36
null
UTF-8
Python
false
false
405
py
import brownie
import pytest


def test_set_next_time_lock(fundManager, user):
    timeLock = fundManager.timeLock()

    # not time lock
    with brownie.reverts("!time lock"):
        fundManager.setNextTimeLock(user, {"from": user})

    tx = fundManager.setNextTimeLock(user, {"from": timeLock})

    assert fundManager.nextTimeLock() == user
    assert tx.events["SetNextTimeLock"].values() == [user]
[ "tsk.nakamura@gmail.com" ]
tsk.nakamura@gmail.com
152a0575c29496fc966528cfe473fc4a02bb05aa
a4364a0ee3dd9aa057c049771319c628b88b1b8d
/week5/file.py
90250ed1abed55b0f11a3670aecbae07c9106e37
[]
no_license
lsh931125/altole
22e69c98af2fd673fa2dc40d58b64a6ebc0ce577
15afc3d511ed5be56ea091e9842408b362ad79e3
refs/heads/main
2023-04-10T16:15:14.865549
2021-04-12T14:13:59
2021-04-12T14:13:59
336,302,892
0
0
null
null
null
null
UTF-8
Python
false
false
155
py
# writedata.py
f = open("새파일.txt", 'w', encoding='utf-8')
for i in range(1, 11):
    data = "%d번째 줄입니다.\n" % i
    f.write(data)
f.close()
[ "you@example.com" ]
you@example.com
88cf61b2fac3023bb2a72c2fb2afef1483328050
29da2ca6def1270be13a3096685a8e5d82828dff
/CIM15/IEC61970/Informative/InfWork/WorkStatusEntry.py
e3719f0ca36369573702570e2383844ff6dcbd92
[ "MIT" ]
permissive
rimbendhaou/PyCIM
75eb3bcd3729b2410c03f3d5c66d6f1e05e21df3
d578bb0bf1af344342bd23344385ed9c06c2d0ee
refs/heads/master
2022-04-28T01:16:12.673867
2020-04-16T02:19:09
2020-04-16T02:19:09
256,085,381
0
0
MIT
2020-04-16T02:15:20
2020-04-16T02:08:14
null
UTF-8
Python
false
false
2,108
py
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

from CIM15.IEC61968.Common.ActivityRecord import ActivityRecord


class WorkStatusEntry(ActivityRecord):
    """A type of ActivityRecord that records information about the status of an item, such as a Work or WorkTask, at a point in time.A type of ActivityRecord that records information about the status of an item, such as a Work or WorkTask, at a point in time.
    """

    def __init__(self, percentComplete=0.0, *args, **kw_args):
        """Initialises a new 'WorkStatusEntry' instance.

        @param percentComplete: Estimated percentage of completion of this individual work task or overall work order.
        """
        #: Estimated percentage of completion of this individual work task or overall work order.
        self.percentComplete = percentComplete

        super(WorkStatusEntry, self).__init__(*args, **kw_args)

    _attrs = ["percentComplete"]
    _attr_types = {"percentComplete": float}
    _defaults = {"percentComplete": 0.0}
    _enums = {}
    _refs = []
    _many_refs = []
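# Illustrative usage sketch (not part of the original file). It assumes the
# inherited ActivityRecord initialiser can be called without further required
# arguments, which is not shown here, so the example is left commented out:
#
# entry = WorkStatusEntry(percentComplete=42.0)
# entry.percentComplete  # -> 42.0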
[ "rwl@thinker.cable.virginmedia.net" ]
rwl@thinker.cable.virginmedia.net
a65c657ece078bdf1546ffad0971554575ffe0ec
3d947e8502aa677398f4f8e102f6dc10dfa4a8ae
/apps/workpoint/admin.py
1dd68861a9eef366fa3b43767a789c59b15a8e05
[]
no_license
wd5/3gspeed
400f65996a168efb2322e736f47de7d5667bb568
9451605898541bd8f912ed84d23c74e500b49595
refs/heads/master
2016-08-08T22:02:08.798728
2012-09-11T01:50:37
2012-09-11T01:50:37
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,548
py
# -*- coding: utf-8 -*- from django.contrib import admin from django import forms from apps.utils.widgets import Redactor from sorl.thumbnail.admin import AdminImageMixin from models import Ability, Distinct, City, Point, SpeedAtPoint, ModemType, Operator class AbilityAdmin(AdminImageMixin, admin.ModelAdmin): list_display = ('id','title','download_speed',) list_display_links = ('id','title',) list_editable = ('download_speed',) search_fields = ('title','download_speed',) list_filter = ('download_speed',) admin.site.register(Ability, AbilityAdmin) class CityAdminForm(forms.ModelForm): class Meta: model = City class Media: js = ( '/media/js/jquery.js', 'http://api-maps.yandex.ru/2.0/?load=package.full&mode=debug&lang=ru-RU', '/media/js/ymaps_form.js', ) class DistinctInline(admin.TabularInline): fields = ('title', 'is_published') model = Distinct class CityAdmin(admin.ModelAdmin): list_display = ('id','title','coord','is_published',) list_display_links = ('id','title','coord',) list_editable = ('is_published',) search_fields = ('title',) list_filter = ('is_published',) form = CityAdminForm inlines = [DistinctInline,] admin.site.register(City, CityAdmin) class SpeedAtPointInline(admin.TabularInline): fields = ('operator', 'modem_type', 'internet_speed') model = SpeedAtPoint class PointAdminForm(forms.ModelForm): class Meta: model = Point class Media: js = ( '/media/js/jquery.js', 'http://api-maps.yandex.ru/2.0/?load=package.full&mode=debug&lang=ru-RU', '/media/js/ymaps_form.js', '/media/js/js_loads.js', ) class PointAdmin(admin.ModelAdmin): list_display = ('id','distinct','coord','datetime_create',) list_display_links = ('id','distinct','coord','datetime_create',) list_filter = ('datetime_create','distinct',) inlines = [SpeedAtPointInline,] form = PointAdminForm admin.site.register(Point, PointAdmin) class ModemTypeInline(admin.TabularInline): fields = ('vendor', 'model', 'download_speed') model = ModemType class OperatorAdmin(AdminImageMixin, admin.ModelAdmin): list_display = ('id','title','order', 'is_published',) list_display_links = ('id','title',) list_editable = ('order', 'is_published',) search_fields = ('title',) list_filter = ('is_published',) inlines = [ModemTypeInline,] admin.site.register(Operator, OperatorAdmin)
[ "steeg.xs@gmail.com" ]
steeg.xs@gmail.com
72934dd176732ebd3730bd794e8e20805878afb9
3ab5cc24ce16f937fbc0d8906b5a3a3e95fd08f2
/Kapittel-03/fasit_3.py
1ec0549290b8def11279486f0b77f7251cdb2572
[]
no_license
magnusoy/Python-Grunnleggende
8aaf4ed5b373f17233bdcf38f52f5a1fa90db5bd
984e04e285b46d920c51c92095223fee0766fa29
refs/heads/master
2023-02-24T21:38:35.823047
2021-01-19T19:37:02
2021-01-19T19:37:02
295,389,253
2
0
null
null
null
null
UTF-8
Python
false
false
633
py
""" Oppgave 1 Gjør om all teksten i variabelen til store bokstaver. """ txt = "vI LæReR om PYTon i dETte kurSEt" txt2 = txt.upper() print(txt2) """ Oppgave 2 Slutter teksten med: pappesker? """ txt = "Her var det veldig mange tomme pappesker" result = txt.endswith("pappesker") print(result) """ Oppgave 3 Finn ut om teksten inneholder ordet "is" og print ut indeksen. """ txt = "På tivoli er det mange som spiser is, og tar karuseller." result = txt.find("is") print(result) """ Oppgave 4 Bytt ut alle komma med mellomrom. """ txt = "Hund,Katt,Geit,Bjørn,Gaupe,Ørn,Spurv" result = txt.replace(",", " ") print(result)
[ "magnus.oye@gmail.com" ]
magnus.oye@gmail.com
11cc25e0f718e43bfcb3c39dc12f902e4724bdf2
c5be6a92f216957d340474b58507606a38c10f5f
/course-files/projects/project_01/option1_graphics/main.py
5b91603f485c3f9fdd9db91d718b32be8446b708
[]
no_license
eecs110/winter2019
0b314c35e886b8099368ed7dfd51b707ab73c0c2
f4107207ca1c9c10b78bdbb74fd82410b00ee363
refs/heads/master
2020-04-11T10:09:28.100445
2019-03-21T18:00:25
2019-03-21T18:00:25
161,705,160
3
0
null
null
null
null
UTF-8
Python
false
false
646
py
from tkinter import Canvas, Tk
import helpers
import utilities
import helpers
import time

gui = Tk()
gui.title('My Terrarium')

# initialize canvas:
window_width = gui.winfo_screenwidth()
window_height = gui.winfo_screenheight()
canvas = Canvas(gui, width=window_width, height=window_height, background='white')
canvas.pack()

########################## YOUR CODE BELOW THIS LINE ##############################

# sample code to make a creature:
helpers.make_creature(canvas, (200, 200), fill='white')

########################## YOUR CODE ABOVE THIS LINE ##############################

# makes sure the canvas keeps running:
canvas.mainloop()
[ "vanwars@gmail.com" ]
vanwars@gmail.com
fb6cac8ca7336dff94c7a117a60d6edc63f4e026
19316c08712a502b1124f2b55cb98bfcbcca7af5
/dev/docs-old/advanced/v1 cookbook/uses/2017-12-19 search abf for comments/go.py
742202ac1771508baccbc0fe5b6b22965932ae34
[ "MIT" ]
permissive
swharden/pyABF
49a50d53015c50f1d5524242d4192718e6f7ccfa
06247e01ca3c19f5419c3b9b2207ee544e30dbc5
refs/heads/main
2023-08-28T02:31:59.540224
2023-08-17T16:34:48
2023-08-17T16:34:48
109,707,040
92
39
MIT
2023-04-06T00:37:29
2017-11-06T14:39:21
Jupyter Notebook
UTF-8
Python
false
false
405
py
import glob
import sys
import os
sys.path.insert(0, os.path.abspath("../../../src/"))
import pyabf

if __name__ == "__main__":
    PATH = R"X:\Data\projects\2017-01-09 AT1-Cre mice\2017-01-09 global expression NTS\data"
    for fname in sorted(glob.glob(PATH + "/*.abf")):
        abf = pyabf.ABF(fname)
        if not abf.commentsExist:
            continue
        print(abf.ID, abf.commentTags)
    print("DONE")
[ "swharden@gmail.com" ]
swharden@gmail.com
ddd8b19f908ab3f029b6724dfada7b82a7cf029c
19af5a32468f39e8fa198901b6585ca485aca0ca
/sketches/particle/particlesystem.py
669112dca09c760ea2c1c201df79b2ce2046e4dd
[ "MIT" ]
permissive
kantel/nodebox1
9bf8f4bd5cd8fde569cb235a60be2bda4607ab76
9f9e8fe3cb7f033e55c4a52fe106c6b3fd849406
refs/heads/master
2022-05-18T12:12:14.314978
2022-04-09T16:04:00
2022-04-09T16:04:00
92,529,020
1
0
null
null
null
null
UTF-8
Python
false
false
1,441
py
# Particle system
# Based on the Processing (Java) sketch by Daniel Shiffman
# from: The Nature of Code, n.p. 2012, p. 149ff

size(560, 315)
speed(30)
colormode(RGB)

from pvector import PVector

# --------------------- Class definitions ------------------------

class Particle(object):

    def __init__(self, l):
        self.acceleration = PVector(0, 0.05)
        self.velocity = PVector(random(-1.5, 1.5), random(-2.0, 2.0))
        self.location = l.get()
        self.lifespan = 255

    def run(self):
        self.update()
        self.display()

    def update(self):
        self.velocity.add(self.acceleration)
        self.location.add(self.velocity)
        self.lifespan -= 2

    def display(self):
        colorrange(255)
        stroke(0, 0, 0)
        fill(255, 140, 0, self.lifespan)
        ellipse(self.location.x, self.location.y, 20, 20)

    def isDead(self):
        if self.lifespan <= 0:
            return True
        else:
            return False

# ----------------------------------------------------------------------

particles = []

def setup():
    global loc
    loc = PVector(WIDTH / 2, 50)

def draw():
    global loc
    background("#1f2838")
    particles.append(Particle(loc))
    for i in range(len(particles) - 1, 0, -1):
        particles[i].run()
        if particles[i].isDead():
            particles.pop(i)
    # print(len(particles))
[ "joerg@kantel.de" ]
joerg@kantel.de
952ec57240ec0cd076b3210096804a21425ada68
839ed24e7ecf66b6983f4f2629ef87b90c02d41e
/C/python3/c025.py
babcf7bbffb550765c70a42db073bacaff42438b
[]
no_license
ht0919/ProgrammingExercises
86634153be11d08ba45c8d06bf2574b48974a2a6
5a9dc356603e3a54d7fdd9d8780c8851f7f37928
refs/heads/master
2020-04-04T07:32:09.083569
2018-10-17T04:28:37
2018-10-17T04:28:37
25,193,626
0
0
null
null
null
null
UTF-8
Python
false
false
793
py
M = int(input())  # number of sheets that can be carried per trip
N = int(input())  # number of times a fax arrives

first = True
cnt = 0
for i in range(N):
    tmp = input().rstrip().split(' ')
    x = int(tmp[0])  # hour
    y = int(tmp[1])  # minute
    c = int(tmp[2])  # number of sheets
    if first == True:  # first record
        h = x
        p = c
        first = False
    else:  # second and later records
        if h == x:  # same hour as the previous record
            p += c
        else:  # the hour changed
            # tally the previous hour
            cnt += int(p / M)
            if p % M != 0:
                cnt += 1
            # reset the accumulators
            h = x
            p = c

# process the data that has not been tallied yet
cnt += int(p / M)
if p % M != 0:
    cnt += 1

print(cnt)
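# A small worked example with made-up input, to illustrate the per-hour tally
# above (the values are illustrative only, not from the original problem):
# with M = 3, N = 3 and the arrivals
#   "9 10 4"  -> hour 9 holds 4 sheets
#   "9 40 2"  -> hour 9 holds 6 sheets -> 6 / 3 = 2 trips
#   "10 5 1"  -> hour 10 holds 1 sheet -> 1 trip (remainders round up)
# the program prints 3.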
[ "ht0919@gmail.com" ]
ht0919@gmail.com
a7b89ed666f90f741189709714a0d769e456603a
9dc496421cc79235c6ad4a5207de52ec2298b0dd
/post/migrations/0005_auto_20190117_1443.py
1eab8ae327ec6f5836545c39540e0aa3ad2f1b77
[]
no_license
m0nte-cr1st0/my-first-blog
f9494189566984c62f16e579830074e539d19bd8
56aa3804253ea6c80560b6b5fcc6534614a0ac5f
refs/heads/master
2020-04-17T14:21:11.979436
2019-01-22T11:36:20
2019-01-22T11:36:20
155,852,519
0
0
null
null
null
null
UTF-8
Python
false
false
534
py
# Generated by Django 2.1.5 on 2019-01-17 12:43

from django.db import migrations, models
import post.models


class Migration(migrations.Migration):

    dependencies = [
        ('post', '0004_auto_20190117_1429'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='logo',
            field=models.ImageField(blank=True, default='media/def-avatar.png', height_field='height_field', null=True, upload_to=post.models.upload_location, width_field='width_field'),
        ),
    ]
[ "dinamo.mutu111@gmail.com" ]
dinamo.mutu111@gmail.com
261b1e853ef38fe2040e5be44aa5017d5c600f5c
f76b743338f48bef09bdf9447cf230719b0a3c76
/深度学习/分数阶微分/赵骞/mnist_loader.py
e0a9c01d325e95807daebd1d75c81bde7434d8c0
[]
no_license
shao1chuan/regression
ca71a165067f7407a598f593d61e1df24632a42e
4ec9e40b1eafb697f89e956d1c625d8bb8c10ada
refs/heads/master
2023-08-23T01:30:05.924203
2021-10-13T14:32:43
2021-10-13T14:32:43
329,070,549
2
0
null
null
null
null
UTF-8
Python
false
false
3,480
py
""" mnist_loader ~~~~~~~~~~~~ A library to load the MNIST image data. For details of the data structures that are returned, see the doc strings for ``load_data`` and ``load_data_wrapper``. In practice, ``load_data_wrapper`` is the function usually called by our neural network mycode. """ #### Libraries # Standard library import pickle import gzip # Third-party libraries import numpy as np def load_data(): """Return the MNIST data as a tuple containing the training data, the validation data, and the test data. The ``training_data`` is returned as a tuple with two entries. The first entry contains the actual training images. This is a numpy ndarray with 50,000 entries. Each entry is, in turn, a numpy ndarray with 784 values, representing the 28 * 28 = 784 pixels in a single MNIST image. The second entry in the ``training_data`` tuple is a numpy ndarray containing 50,000 entries. Those entries are just the digit values (0...9 卷积神经网络CNN) for the corresponding images contained in the first entry of the tuple. The ``validation_data`` and ``test_data`` are similar, except each contains only 10,000 images. This is a nice data format, but for use in neural networks it's helpful to modify the format of the ``training_data`` a little. That's done in the wrapper function ``load_data_wrapper()``, see below. """ f = gzip.open('data/mnist.pkl.gz', 'rb') training_data, validation_data, test_data = pickle.load(f, encoding='latin1') f.close() return (training_data, validation_data, test_data) def load_data_wrapper(): """Return a tuple containing ``(training_data, validation_data, test_data)``. Based on ``load_data``, but the format is more convenient for use in our implementation of neural networks. In particular, ``training_data`` is a list containing 50,000 2-tuples ``(x, y)``. ``x`` is a 784-dimensional numpy.ndarray containing the input image. ``y`` is a 10-dimensional numpy.ndarray representing the unit vector corresponding to the correct digit for ``x``. ``validation_data`` and ``test_data`` are lists containing 10,000 2-tuples ``(x, y)``. In each case, ``x`` is a 784-dimensional numpy.ndarry containing the input image, and ``y`` is the corresponding classification, i.e., the digit values (integers) corresponding to ``x``. Obviously, this means we're using slightly different formats for the training data and the validation / test data. These formats turn out to be the most convenient for use in our neural network mycode.""" tr_d, va_d, te_d = load_data() training_inputs = [np.reshape(x, (784, 1)) for x in tr_d[0]] training_results = [vectorized_result(y) for y in tr_d[1]] training_data = list(zip(training_inputs, training_results)) validation_inputs = [np.reshape(x, (784, 1)) for x in va_d[0]] validation_data = list(zip(validation_inputs, va_d[1])) test_inputs = [np.reshape(x, (784, 1)) for x in te_d[0]] test_data = list(zip(test_inputs, te_d[1])) return (training_data, validation_data, test_data) def vectorized_result(j): """Return a 10-dimensional unit vector with a 1.0 in the jth position and zeroes elsewhere. This is used to convert a digit (0...9 卷积神经网络CNN) into a corresponding desired output from the neural network.""" e = np.zeros((10, 1)) e[j] = 1.0 return e
[ "4448445@qq.com" ]
4448445@qq.com
4ac51470ec597cda7528073e0c5c0b5d8d5a926f
4a50774fb00196e0bb57edf47a442e30dc1aa5be
/ฝึกเขียน/For_loop/mainloop.py
614d7b696686ddbf50faf163e35196e13cb38ec3
[]
no_license
Sittisukintaruk/learning-Coding-Self-Python
1124581aba4ebb01bcdee85e328a67d9c061931f
b0d2cad76a54ab94ceed3b456eca48eb09225aa3
refs/heads/master
2023-02-16T21:48:57.310708
2021-01-13T18:10:57
2021-01-13T18:10:57
298,890,199
0
0
null
null
null
null
UTF-8
Python
false
false
527
py
import math


def fact(n):
    if n >= 0:
        if n == 0 or n == 1:
            return 1
        else:
            f = 1
            for i in range(2, n + 1):
                f = f * i
            return f
    else:
        return math.nan


def permut(n, k):
    return fact(n) / (fact(n - k))


def combition(n, k):
    # return fact(n) / (fact(k) * fact(n - k))
    return permut(n, k) / fact(k)


if __name__ == '__main__':
    print(fact(1))
    print(fact(5))
    print(fact(-7))
    print(fact(0))
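# For completeness (added for illustration; not part of the original file):
# the main block above only exercises fact(). The other helpers behave like
#   permut(5, 2)    -> 20.0   (5! / 3!)
#   combition(5, 2) -> 10.0   (permutations divided by fact(k))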
[ "fah78963214@gmail.com" ]
fah78963214@gmail.com
16786f8c9d2bef2bd6ac9518fcf57cc7fc19e607
73770ddb5441f589742dd600545c555dd9922ae8
/db_scripts/versions/d2c4a25c3c1d_v30_course_tree.py
824db58eae92c5499013a31689236ed782979526
[]
no_license
annndrey/trainingsite
c200a203c7a5fe832de70e9497a93df3745d63ef
6dce55dc25b2a6f5285a8cfba1fbd7d83dbbe96b
refs/heads/master
2020-03-29T16:11:06.336532
2018-09-24T12:53:55
2018-09-24T12:53:55
150,102,082
0
0
null
null
null
null
UTF-8
Python
false
false
7,792
py
"""V30_course_tree Revision ID: d2c4a25c3c1d Revises: a24e5727c770 Create Date: 2017-08-11 17:37:18.038419 """ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. revision = 'd2c4a25c3c1d' down_revision = 'a24e5727c770' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('elements', sa.Column('id', sa.Integer(), nullable=False), sa.Column('parent_id', sa.Integer(), nullable=True), sa.Column('name', sa.String(length=200), nullable=True), sa.Column('author_id', sa.Integer(), nullable=True), sa.Column('is_published', sa.Integer(), nullable=True), sa.Column('is_archived', sa.Integer(), nullable=True), sa.Column('lastchanged', sa.DateTime(), nullable=False), sa.Column('headerimage', sa.Text(), nullable=True), sa.Column('preview', sa.Text(), nullable=True), sa.Column('elemtype', sa.Enum('course', 'week', 'workout', 'excercise'), nullable=True), sa.Column('weektype', sa.Enum('power', 'endurance', 'fingers', 'projecting', 'rest', 'test'), nullable=True), sa.Column('descr', sa.Text(), nullable=True), sa.Column('timetotal', sa.String(length=200), nullable=True), sa.Column('numreps', sa.Integer(), nullable=True), sa.Column('numsets', sa.Integer(), nullable=True), sa.Column('resttime', sa.Text(), nullable=True), sa.Column('perftime', sa.String(length=200), nullable=True), sa.Column('comments', sa.Text(), nullable=True), sa.ForeignKeyConstraint(['author_id'], ['users.id'], name=op.f('fk_elements_author_id_users')), sa.ForeignKeyConstraint(['parent_id'], [u'elements.id'], name=op.f('fk_elements_parent_id_elements')), sa.PrimaryKeyConstraint('id', name=op.f('pk_elements')) ) op.drop_table('excersise_workout_association') op.drop_table('courses') op.drop_table('workout_week_association') op.drop_table('weeks') op.drop_table('excersises') op.drop_table('workouts') op.drop_table('week_course_association') op.drop_constraint(u'fk_media_excercise_id_excersises', 'media', type_='foreignkey') op.create_foreign_key(op.f('fk_media_excercise_id_elements'), 'media', 'elements', ['excercise_id'], ['id']) op.drop_constraint(u'fk_subscriptions_course_id_courses', 'subscriptions', type_='foreignkey') op.create_foreign_key(op.f('fk_subscriptions_course_id_elements'), 'subscriptions', 'elements', ['course_id'], ['id']) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.drop_constraint(op.f('fk_subscriptions_course_id_elements'), 'subscriptions', type_='foreignkey') op.create_foreign_key(u'fk_subscriptions_course_id_courses', 'subscriptions', 'courses', ['course_id'], ['id']) op.drop_constraint(op.f('fk_media_excercise_id_elements'), 'media', type_='foreignkey') op.create_foreign_key(u'fk_media_excercise_id_excersises', 'media', 'excersises', ['excercise_id'], ['id']) op.create_table('week_course_association', sa.Column('weeks_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('courses_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['courses_id'], [u'courses.id'], name=u'fk_week_course_association_courses_id_courses'), sa.ForeignKeyConstraint(['weeks_id'], [u'weeks.id'], name=u'fk_week_course_association_weeks_id_weeks'), mysql_collate=u'utf8_unicode_ci', mysql_default_charset=u'utf8', mysql_engine=u'InnoDB' ) op.create_table('workouts', sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), sa.Column('name', mysql.VARCHAR(collation=u'utf8_unicode_ci', length=200), nullable=True), sa.Column('descr', mysql.MEDIUMTEXT(collation=u'utf8_unicode_ci'), nullable=True), sa.Column('time', mysql.VARCHAR(collation=u'utf8_unicode_ci', length=200), nullable=True), sa.PrimaryKeyConstraint('id'), mysql_collate=u'utf8_unicode_ci', mysql_default_charset=u'utf8', mysql_engine=u'InnoDB' ) op.create_table('excersises', sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), sa.Column('name', mysql.VARCHAR(collation=u'utf8_unicode_ci', length=200), nullable=True), sa.Column('descr', mysql.MEDIUMTEXT(collation=u'utf8_unicode_ci'), nullable=True), sa.Column('numreps', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('numsets', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('comments', mysql.MEDIUMTEXT(collation=u'utf8_unicode_ci'), nullable=True), sa.Column('rest', mysql.TEXT(collation=u'utf8_unicode_ci'), nullable=True), sa.Column('perftime', mysql.VARCHAR(collation=u'utf8_unicode_ci', length=200), nullable=True), sa.Column('time', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.PrimaryKeyConstraint('id'), mysql_collate=u'utf8_unicode_ci', mysql_default_charset=u'utf8', mysql_engine=u'InnoDB' ) op.create_table('weeks', sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), sa.Column('name', mysql.VARCHAR(collation=u'utf8_unicode_ci', length=200), nullable=True), sa.Column('comments', mysql.MEDIUMTEXT(collation=u'utf8_unicode_ci'), nullable=True), sa.PrimaryKeyConstraint('id'), mysql_collate=u'utf8_unicode_ci', mysql_default_charset=u'utf8', mysql_engine=u'InnoDB' ) op.create_table('workout_week_association', sa.Column('workouts_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('weeks_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['weeks_id'], [u'weeks.id'], name=u'fk_workout_week_association_weeks_id_weeks'), sa.ForeignKeyConstraint(['workouts_id'], [u'workouts.id'], name=u'fk_workout_week_association_workouts_id_workouts'), mysql_collate=u'utf8_unicode_ci', mysql_default_charset=u'utf8', mysql_engine=u'InnoDB' ) op.create_table('courses', sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), sa.Column('author_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('is_published', mysql.INTEGER(display_width=11), autoincrement=False, 
nullable=True), sa.Column('headerimage', mysql.TEXT(collation=u'utf8_unicode_ci'), nullable=True), sa.Column('preview', mysql.TEXT(collation=u'utf8_unicode_ci'), nullable=True), sa.Column('is_archived', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('lastchanged', mysql.DATETIME(), nullable=False), sa.Column('header', mysql.VARCHAR(collation=u'utf8_unicode_ci', length=200), nullable=True), sa.ForeignKeyConstraint(['author_id'], [u'users.id'], name=u'fk_courses_author_id_users'), sa.PrimaryKeyConstraint('id'), mysql_collate=u'utf8_unicode_ci', mysql_default_charset=u'utf8', mysql_engine=u'InnoDB' ) op.create_table('excersise_workout_association', sa.Column('excersise_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('workout_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['excersise_id'], [u'excersises.id'], name=u'fk_excersise_workout_association_excersise_id_excersises'), sa.ForeignKeyConstraint(['workout_id'], [u'workouts.id'], name=u'fk_excersise_workout_association_workout_id_workouts'), mysql_collate=u'utf8_unicode_ci', mysql_default_charset=u'utf8', mysql_engine=u'InnoDB' ) op.drop_table('elements') # ### end Alembic commands ###
[ "annndrey@trololo.info" ]
annndrey@trololo.info
6c833c96e9b2d68d9022c12b7ad1c058fa95fa67
4252102a1946b2ba06d3fa914891ec7f73570287
/pylearn2/datasets/tfd.py
2a8cff8eaf7353dfa2b022826fe526b825b96b87
[]
no_license
lpigou/chalearn2014
21d487f314c4836dd1631943e20f7ab908226771
73b99cdbdb609fecff3cf85e500c1f1bfd589930
refs/heads/master
2020-05-17T00:08:11.764642
2014-09-24T14:42:00
2014-09-24T14:42:00
24,418,815
2
3
null
null
null
null
UTF-8
Python
false
false
4,795
py
import numpy as np from pylearn2.datasets import dense_design_matrix from pylearn2.utils.serial import load from pylearn2.utils.rng import make_np_rng class TFD(dense_design_matrix.DenseDesignMatrix): """ Pylearn2 wrapper for the Toronto Face Dataset. http://aclab.ca/users/josh/TFD.html Parameters ---------- which_set : WRITEME Dataset to load. One of ['train','valid','test','unlabeled']. fold : WRITEME TFD contains 5 official folds for train, valid and test. image_size : WRITEME One of [48,96]. Load smaller or larger dataset variant. example_range : WRITEME Array_like. Load only examples in range [example_range[0]:example_range[1]]. center : WRITEME Move data from range [0.,255.] to [-127.5,127.5] scale : WRITEME shuffle : WRITEME one_hot : WRITEME rng : WRITEME seed : WRITEME preprocessor : WRITEME axes : WRITEME """ mapper = {'unlabeled': 0, 'train': 1, 'valid': 2, 'test': 3, 'full_train': 4} def __init__(self, which_set, fold = 0, image_size = 48, example_range = None, center = False, scale = False, shuffle=False, one_hot = False, rng=None, seed=132987, preprocessor = None, axes = ('b', 0, 1, 'c')): """ Creates a DenseDesignMatrix object for the Toronto Face Dataset. Parameters ---------- which_set : str dataset to load. One of ['train','valid','test','unlabeled']. center : Bool if True, move data from range [0.,255.] to [-127.5,127.5] example_range : array_like. Load only examples in range [example_range[0]:example_range[1]]. fold : int in {0,1,2,3,4} TFD contains 5 official folds for train, valid and test. image_size : one of [48,96]. Load smaller or larger dataset variant. """ if which_set not in self.mapper.keys(): raise ValueError("Unrecognized which_set value: %s. Valid values are %s." % (str(which_set), str(self.mapper.keys()))) assert (fold >=0) and (fold <5) # load data path = '${PYLEARN2_DATA_PATH}/faces/TFD/' if image_size == 48: data = load(path + 'TFD_48x48.mat') elif image_size == 96: data = load(path + 'TFD_96x96.mat') else: raise ValueError("image_size should be either 48 or 96.") # retrieve indices corresponding to `which_set` and fold number if self.mapper[which_set] == 4: set_indices = (data['folds'][:, fold] == 1) + (data['folds'][:,fold] == 2) else: set_indices = data['folds'][:, fold] == self.mapper[which_set] assert set_indices.sum() > 0 # limit examples returned to `example_range` ex_range = slice(example_range[0], example_range[1]) \ if example_range else slice(None) # get images and cast to float32 data_x = data['images'][set_indices] data_x = np.cast['float32'](data_x) data_x = data_x[ex_range] # create dense design matrix from topological view data_x = data_x.reshape(data_x.shape[0], image_size ** 2) if center: data_x -= 127.5 if scale: assert not center data_x /= 255. if shuffle: rng = make_np_rng(rng, seed, which_method='permutation') rand_idx = rng.permutation(len(data_x)) data_x = data_x[rand_idx] # get labels if which_set != 'unlabeled': data_y = data['labs_ex'][set_indices] data_y = data_y[ex_range] -1 data_y_identity = data['labs_id'][set_indices] data_y_identity = data_y_identity[ex_range] if shuffle: data_y = data_y[rand_idx] data_y_identity = data_y_identity[rand_idx] self.one_hot = one_hot if one_hot: one_hot = np.zeros((data_y.shape[0], 7), dtype = 'float32') for i in xrange(data_y.shape[0]): one_hot[i, data_y[i]] = 1. 
data_y = one_hot else: data_y = None data_y_identity = None # create view converting for retrieving topological view view_converter = dense_design_matrix.DefaultViewConverter((image_size, image_size, 1), axes) # init the super class super(TFD, self).__init__(X = data_x, y = data_y, view_converter = view_converter) assert not np.any(np.isnan(self.X)) self.y_identity = data_y_identity self.axes = axes if preprocessor is not None: preprocessor.apply(self)
[ "lionelpigou@gmail.com" ]
lionelpigou@gmail.com
7445fa91b3c0c18b7a57868865369d11968c26c3
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_196/ch25_2020_03_11_11_18_42_760637.py
290e11d5e107eaf55948946794c96f9f115582db
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
273
py
from math import sin

a = float(input("Qual a velocidade?"))
b = float(input("Qual o ângulo de lançamento?"))
g = 9.8
# Fixed: the original expression was `sin*(2*b)`, which multiplies the sin
# function object itself and raises a TypeError. The projectile-range formula
# is d = v**2 * sin(2*theta) / g, with the angle given in radians.
d = ((a**2) * (sin(2*b))) / g
if (100-d) == 2 or (100-d) < 2:
    print("Acertou!")
elif d < 98:
    print("Muito perto")
else:
    print("Muito longe")
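# A worked check of the fixed formula, with illustrative numbers (not part of
# the original exercise): for v = 31.3 m/s and an angle of pi/4 rad (45 deg),
# d = 31.3**2 * sin(pi/2) / 9.8 is roughly 99.97 m, so 100 - d < 2 and the
# program prints "Acertou!".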
[ "you@example.com" ]
you@example.com
3860aeb9a171e0c0407f824c7a3b97b4b7518ba3
ed682f663f29d43f45ece61afe8925055ecec161
/wsconnections/counter/views.py
0621dfc7e6f7a03bad8cbd26459756653f7f3035
[]
no_license
aaronbassett/django-ws-connections
0f24493c157748d809d623d610f6f21e27dbdba2
675b2431ee80bf76aee744a904eee0e284324678
refs/heads/master
2020-05-09T13:28:11.611876
2019-04-10T19:56:55
2019-04-10T19:56:55
181,153,357
5
0
null
null
null
null
UTF-8
Python
false
false
393
py
from django.views.generic import TemplateView
from nanoid import generate


class CountersWrapperView(TemplateView):
    template_name = "counter/index.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['nanoid'] = generate(size=6)
        return context


class CountView(TemplateView):
    template_name = "counter/count.html"
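# Hedged sketch (not part of the original file) of how these views might be
# routed; the paths and URL names below are assumptions for illustration only
# and are not taken from the project's real urls.py:
#
# from django.urls import path
# from .views import CountersWrapperView, CountView
#
# urlpatterns = [
#     path("", CountersWrapperView.as_view(), name="counters"),  # assumed
#     path("count/", CountView.as_view(), name="count"),         # assumed
# ]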
[ "me@aaronbassett.com" ]
me@aaronbassett.com
1a4049be4c964247c6ed69d9dbb126d7e5729c14
48e124e97cc776feb0ad6d17b9ef1dfa24e2e474
/sdk/python/pulumi_azure_native/deviceupdate/v20200301preview/__init__.py
ed784172151ec8f045708ca4679c6f49841e4023
[ "BSD-3-Clause", "Apache-2.0" ]
permissive
bpkgoud/pulumi-azure-native
0817502630062efbc35134410c4a784b61a4736d
a3215fe1b87fba69294f248017b1591767c2b96c
refs/heads/master
2023-08-29T22:39:49.984212
2021-11-15T12:43:41
2021-11-15T12:43:41
null
0
0
null
null
null
null
UTF-8
Python
false
false
615
py
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

from ... import _utilities
import typing

# Export this package's modules as members:
from ._enums import *
from .account import *
from .get_account import *
from .get_instance import *
from .get_private_endpoint_connection import *
from .get_private_endpoint_connection_proxy import *
from .instance import *
from .private_endpoint_connection import *
from .private_endpoint_connection_proxy import *
from ._inputs import *
from . import outputs
[ "noreply@github.com" ]
bpkgoud.noreply@github.com
3557914301b9c6b30c03271309e557c40ac31c24
90419da201cd4948a27d3612f0b482c68026c96f
/sdk/python/pulumi_azure_nextgen/devices/v20200301/iot_dps_resource.py
24ff4aa176709c49c612e62feff015417f73edda
[ "BSD-3-Clause", "Apache-2.0" ]
permissive
test-wiz-sec/pulumi-azure-nextgen
cd4bee5d70cb0d332c04f16bb54e17d016d2adaf
20a695af0d020b34b0f1c336e1b69702755174cc
refs/heads/master
2023-06-08T02:35:52.639773
2020-11-06T22:39:06
2020-11-06T22:39:06
312,993,761
0
0
Apache-2.0
2023-06-02T06:47:28
2020-11-15T09:04:00
null
UTF-8
Python
false
false
7,270
py
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs from ._inputs import * __all__ = ['IotDpsResource'] class IotDpsResource(pulumi.CustomResource): def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, etag: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, properties: Optional[pulumi.Input[pulumi.InputType['IotDpsPropertiesDescriptionArgs']]] = None, provisioning_service_name: Optional[pulumi.Input[str]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, sku: Optional[pulumi.Input[pulumi.InputType['IotDpsSkuInfoArgs']]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, __props__=None, __name__=None, __opts__=None): """ The description of the provisioning service. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] etag: The Etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal ETag convention. :param pulumi.Input[str] location: The resource location. :param pulumi.Input[pulumi.InputType['IotDpsPropertiesDescriptionArgs']] properties: Service specific properties for a provisioning service :param pulumi.Input[str] provisioning_service_name: Name of provisioning service to create or update. :param pulumi.Input[str] resource_group_name: Resource group identifier. :param pulumi.Input[pulumi.InputType['IotDpsSkuInfoArgs']] sku: Sku info for a provisioning Service. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The resource tags. 
""" if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() __props__['etag'] = etag if location is None: raise TypeError("Missing required property 'location'") __props__['location'] = location if properties is None: raise TypeError("Missing required property 'properties'") __props__['properties'] = properties if provisioning_service_name is None: raise TypeError("Missing required property 'provisioning_service_name'") __props__['provisioning_service_name'] = provisioning_service_name if resource_group_name is None: raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name if sku is None: raise TypeError("Missing required property 'sku'") __props__['sku'] = sku __props__['tags'] = tags __props__['name'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:devices/latest:IotDpsResource"), pulumi.Alias(type_="azure-nextgen:devices/v20170821preview:IotDpsResource"), pulumi.Alias(type_="azure-nextgen:devices/v20171115:IotDpsResource"), pulumi.Alias(type_="azure-nextgen:devices/v20180122:IotDpsResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200101:IotDpsResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200901preview:IotDpsResource")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(IotDpsResource, __self__).__init__( 'azure-nextgen:devices/v20200301:IotDpsResource', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'IotDpsResource': """ Get an existing IotDpsResource resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() return IotDpsResource(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter def etag(self) -> pulumi.Output[Optional[str]]: """ The Etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal ETag convention. """ return pulumi.get(self, "etag") @property @pulumi.getter def location(self) -> pulumi.Output[str]: """ The resource location. """ return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ The resource name. 
""" return pulumi.get(self, "name") @property @pulumi.getter def properties(self) -> pulumi.Output['outputs.IotDpsPropertiesDescriptionResponse']: """ Service specific properties for a provisioning service """ return pulumi.get(self, "properties") @property @pulumi.getter def sku(self) -> pulumi.Output['outputs.IotDpsSkuInfoResponse']: """ Sku info for a provisioning Service. """ return pulumi.get(self, "sku") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ The resource tags. """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> pulumi.Output[str]: """ The resource type. """ return pulumi.get(self, "type") def translate_output_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
[ "public@paulstack.co.uk" ]
public@paulstack.co.uk
2976ea7ae45914e9623b137b7d0c8dbd805a632f
f71273e5a2c4382ad990e350dfd58b702db56fdf
/app/migrations/0002_hashtag.py
79996242ffd39f4eb93d96ffb79c5a15687fff46
[]
no_license
jangheeseung/Django_pr
dcb6c4596b61a8f524964f735b726087c763ad7c
94dfe789b361ddb6cf28575d42238f85f8d25129
refs/heads/master
2023-04-27T08:07:16.398124
2019-06-02T07:19:07
2019-06-02T07:19:07
189,816,258
0
0
null
2022-11-22T03:33:52
2019-06-02T07:15:27
Python
UTF-8
Python
false
false
497
py
# Generated by Django 2.2 on 2019-05-21 15:49

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('app', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Hashtag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
            ],
        ),
    ]
[ "gmltmd121@gmail.com" ]
gmltmd121@gmail.com
bb4c204fd838cabab66d96bd22ffe3b75aba2054
2bba4782f9085d2c0c324f6668709a81e482e095
/secao06/ex39.py
60905c0de2ffaf50db15faaf89b89564324145f2
[]
no_license
Saccha/Exercicios_Python
886ae07392f006226688b8817bf17a7a52020ef9
e54203cb8754180f0fe120ee60c462c2e74c86e3
refs/heads/main
2023-04-18T06:44:42.243579
2021-04-18T03:49:30
2021-04-18T03:49:30
346,230,383
0
0
null
null
null
null
UTF-8
Python
false
false
570
py
""" 39. Faça um programa que calcule a área de um triângulo, cuja base e altura são fornecida pelo usuário. Esse programa não pode permitir a entrada de dados inválidos, ou seja, medidas menores ou iguais a 0. """ base = int(input("Digite o tamanho da base do triângulo: ")) if base > 0: altura = int(input("Digite o tamanho da altura do triângulo: ")) if altura > 0: area_triangulo = (base * altura) / 2 print(f"A aréa do triângulo é {area_triangulo}") else: print("Altura inválida!") else: print("Base inválida!")
[ "noreply@github.com" ]
Saccha.noreply@github.com
421b77d86e1828dc04ab6239fe78cafe93a062c5
d6e53e37c394acab024696b15c2da0a53a00d317
/zanhu/news/tests/test_urls.py
d11cf8b1a85d2fb9e98a5ab9fc6d0c7b5a5636e7
[]
no_license
3pacccccc/zanhu
04943ebefb32bac5668287c75dd1e6793010760d
c4ee76ef6018ae98cab7ea53255feeae83c8dfaf
refs/heads/master
2022-12-13T21:24:18.136698
2019-07-21T17:19:15
2019-07-21T17:19:15
191,209,450
4
0
null
2022-12-08T05:17:22
2019-06-10T16:57:02
CSS
UTF-8
Python
false
false
636
py
from test_plus import TestCase
from django.urls import reverse, resolve


class TestUserURLs(TestCase):

    def setUp(self):
        self.user = self.make_user()

    def test_detail_reverse(self):
        self.assertEqual(reverse('users:detail', kwargs={'username': 'testuser'}), '/users/testuser/')

    def test_detail_resolve(self):
        self.assertEqual(resolve('/users/testuser/').view_name, 'users:detail')

    def test_update_reverse(self):
        self.assertEqual(reverse('users:update'), '/users/update/')

    def test_update_resolve(self):
        self.assertEqual(resolve('/users/update/').view_name, 'users:update')
[ "351489917@qq.com" ]
351489917@qq.com
8fbf4dbd5b542b886e1093c883ec70240afca7aa
3be42b83a15d022f5863c96ec26e21bac0f7c27e
/tensorflow_probability/python/bijectors/categorical_to_discrete_test.py
9ed4b7e54dc20c538567c8a95c95853e66ff5711
[ "Apache-2.0" ]
permissive
ogrisel/probability
846f5c13cddee5cf167b215e651b7479003f15d2
8f67456798615f9bf60ced2ce6db5d3dba3515fe
refs/heads/master
2022-11-09T10:53:23.000918
2020-07-01T23:16:03
2020-07-01T23:17:25
276,580,359
2
1
Apache-2.0
2020-07-02T07:37:58
2020-07-02T07:37:57
null
UTF-8
Python
false
false
5,842
py
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests CategoricalToDiscrete bijector."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp

from tensorflow_probability.python.bijectors import bijector_test_util
from tensorflow_probability.python.bijectors import categorical_to_discrete
from tensorflow_probability.python.internal import test_util


@test_util.test_all_tf_execution_regimes
class CategoricalToDiscreteTest(test_util.TestCase):

  def testUnsortedValuesRaises(self):
    with self.assertRaisesOpError('map_values is not strictly increasing'):
      bijector = categorical_to_discrete.CategoricalToDiscrete(
          map_values=[1, 3, 2], validate_args=True)
      self.evaluate(bijector.forward([0, 1, 2]))

  def testMapValuesRankNotEqualToOneRaises(self):
    with self.assertRaisesWithPredicateMatch(ValueError,
                                             'Rank of map_values must be 1'):
      bijector = categorical_to_discrete.CategoricalToDiscrete(
          map_values=[[1, 2], [3, 4]], validate_args=True)
      self.evaluate(bijector.map_values)

  def testMapValuesSizeZeroRaises(self):
    with self.assertRaisesWithPredicateMatch(
        ValueError, 'Size of map_values must be greater than 0'):
      bijector = categorical_to_discrete.CategoricalToDiscrete(
          map_values=[], validate_args=True)
      self.evaluate(bijector.map_values)

  def testBijectorForward(self):
    bijector = categorical_to_discrete.CategoricalToDiscrete(
        map_values=[0.1, 0.2, 0.3, 0.4], validate_args=True)
    self.assertAllClose([[0.1, 0.2, 0.3, 0.4], [0.4, 0.3, 0.2, 0.1]],
                        self.evaluate(
                            bijector.forward([[0, 1, 2, 3], [3, 2, 1, 0]])))

  def testBijectorForwardOutOfBoundIndicesRaises(self):
    with self.assertRaisesOpError('indices out of bound'):
      bijector = categorical_to_discrete.CategoricalToDiscrete(
          map_values=[0.1, 0.2, 0.3, 0.4], validate_args=True)
      self.evaluate(bijector.forward([5]))

  def testBijectorInverse(self):
    bijector = categorical_to_discrete.CategoricalToDiscrete(
        map_values=[0.1, 0.2, 0.3, 0.4], validate_args=True)
    self.assertAllEqual([[3, 3, 3], [0, 1, 2]],
                        self.evaluate(
                            bijector.inverse([[0.400001, 0.4, 0.399999],
                                              [0.1, 0.2, 0.3]])))

  def testBijectorInverseValueNotFoundRaises(self):
    with self.assertRaisesOpError('inverse value not found'):
      bijector = categorical_to_discrete.CategoricalToDiscrete(
          map_values=[0.1, 0.2, 0.3, 0.4], validate_args=True)
      self.evaluate(bijector.inverse([0.21, 0.4]))

  def testInverseLogDetJacobian(self):
    bijector = categorical_to_discrete.CategoricalToDiscrete(
        map_values=[0.1, 0.2], validate_args=True)
    self.assertAllClose(
        0,
        self.evaluate(
            bijector.inverse_log_det_jacobian([0.1, 0.2], event_ndims=0)))

  def testBijectiveAndFinite32bit(self):
    x = np.arange(100).astype(np.int32)
    y = np.logspace(-10, 10, 100).astype(np.float32)
    bijector = categorical_to_discrete.CategoricalToDiscrete(map_values=y)
    bijector_test_util.assert_bijective_and_finite(
        bijector, x, y, eval_func=self.evaluate, event_ndims=0)

  def testBijectiveAndFinite16bit(self):
    x = np.arange(100).astype(np.int32)
    y = np.logspace(-5, 4, 100).astype(np.float16)
    bijector = categorical_to_discrete.CategoricalToDiscrete(map_values=y)
    bijector_test_util.assert_bijective_and_finite(
        bijector, x, y, eval_func=self.evaluate, event_ndims=0)

  @test_util.jax_disable_variable_test
  @test_util.numpy_disable_gradient_test
  def testVariableGradients(self):
    map_values = tf.Variable([0.3, 0.5])
    b = categorical_to_discrete.CategoricalToDiscrete(map_values=map_values,
                                                      validate_args=True)
    with tf.GradientTape() as tape:
      y = tf.reduce_sum(b.forward([0, 1]))
    grads = tape.gradient(y, [map_values])
    self.assertAllNotNone(grads)

  @test_util.numpy_disable_gradient_test
  def testNonVariableGradients(self):
    map_values = tf.convert_to_tensor([0.3, 0.5])

    def _func(map_values):
      b = categorical_to_discrete.CategoricalToDiscrete(map_values=map_values,
                                                        validate_args=True)
      return tf.reduce_sum(b.forward([0, 1]))

    grads = tfp.math.value_and_gradient(_func, [map_values])
    self.assertAllNotNone(grads)

  def testModifiedMapValuesIncreasingAssertion(self):
    map_values = tf.Variable([0.1, 0.2])
    b = categorical_to_discrete.CategoricalToDiscrete(map_values=map_values,
                                                      validate_args=True)
    self.evaluate(map_values.initializer)
    with self.assertRaisesOpError('map_values is not strictly increasing.'):
      with tf.control_dependencies([map_values.assign([0.2, 0.1])]):
        self.evaluate(b.forward([0, 1]))


if __name__ == '__main__':
  tf.test.main()
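A minimal usage sketch of the bijector exercised above, restricted to the calls the tests themselves demonstrate (forward maps category indices to map_values, inverse maps values back to indices). The concrete values are illustrative only, and eager execution (TF2) is assumed.

# Sketch based on the forward/inverse behaviour shown in the tests above.
from tensorflow_probability.python.bijectors import categorical_to_discrete

bijector = categorical_to_discrete.CategoricalToDiscrete(
    map_values=[0.1, 0.2, 0.3, 0.4], validate_args=True)
print(bijector.forward([0, 3]))      # indices -> values: [0.1, 0.4]
print(bijector.inverse([0.2, 0.3]))  # values -> indices: [1, 2]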
[ "gardener@tensorflow.org" ]
gardener@tensorflow.org
d1110e2cb826366becbe3ad092ac71f92eaa8b9b
bb3c7f1af8520ec1ba117353819af1066d098b35
/Backend/ShipEzy/settings.py
eead43054d37953888167f2c0e1321fc750a47f1
[]
no_license
RitikaSingh02/ShipEzyy
fc36cab264848d61090d4a0824cde7f453b8fcc6
a00e86afd39efcfe99a2e770cbde5d15ff8fdae0
refs/heads/master
2023-07-11T21:50:58.788964
2021-07-30T15:56:05
2021-07-30T15:56:05
394,031,670
0
0
null
null
null
null
UTF-8
Python
false
false
2,777
py
import os
from dotenv import load_dotenv

load_dotenv('./.env')

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SECRET_KEY = os.environ.get('secret_key')

DEBUG = True

ALLOWED_HOSTS = ['*']

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'Main',
    'Admin',
    'Customer',
    'Driver',
    'Rides',
    'Vehicles'
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    # 'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'ShipEzy.urls'

DATABASES = {
    'default': {
        'ENGINE': 'djongo',
        'NAME': 'ShipEzy',
        'CLIENT': {
            'host': os.environ.get('host'),
            'port': int(os.environ.get('port')),
            'username': os.environ.get('username'),
            'password': os.environ.get('password'),
            'authSource': os.environ.get('authSource'),
            'authMechanism': 'SCRAM-SHA-1'
        },
    }
}

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'ShipEzy.wsgi.application'

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'

SESSION_COOKIE_SECURE = True

MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
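Since the settings above pull their database credentials from the environment via load_dotenv, a small sanity-check sketch can make the required keys explicit. The key names are taken from the os.environ.get calls in the settings; the check itself is an illustrative addition, not part of the project.

# Hypothetical startup check for the .env keys settings.py expects.
import os
from dotenv import load_dotenv

load_dotenv('./.env')

required = ['secret_key', 'host', 'port', 'username', 'password', 'authSource']
missing = [key for key in required if not os.environ.get(key)]
if missing:
    raise RuntimeError(f"Missing .env keys: {', '.join(missing)}")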
[ "ritika2002singh@gmail.com" ]
ritika2002singh@gmail.com
9e0d5240635da3a56ff4da7586e70ff3ae30616e
84abce44bd0278fa99e9556168290675f399834c
/EcalAlCaRecoProducers/config/reRecoTags/Cal_Nov2016_ped_v1.py
d5be47190da438134c7f8e99b01ad280f2f89499
[]
no_license
ECALELFS/ECALELF
7c304c6b544b0f22a4b62cf942f47fa8b58abef0
62a046cdf59badfcb6281a72923a0f38fd55e183
refs/heads/master
2021-01-23T13:36:31.574985
2017-06-22T12:26:28
2017-06-22T12:26:28
10,385,620
1
9
null
2017-06-30T12:59:05
2013-05-30T15:18:55
C++
UTF-8
Python
false
false
725
py
import FWCore.ParameterSet.Config as cms

# Official GT for september re-reco
# pedestal tags produced by Jean for pedestal studies:
# 6a9a2818932fce79d8222768ba4f2ad3f60f894c payload is used (first Bon pedestal run of 2016, Apr)
from CondCore.ESSources.CondDBESSource_cfi import *
#CondDBConnection.connect = cms.string( 'frontier://FrontierProd/CMS_CONDITIONS' )

RerecoGlobalTag = GlobalTag.clone(
    globaltag = cms.string('80X_dataRun2_2016SeptRepro_v4'),
    toGet = cms.VPSet(
        cms.PSet(record = cms.string("EcalPedestalsRcd"),
                 tag = cms.string("EcalPedestals_laser_2016"),
                 connect = cms.string("frontier://FrontierPrep/CMS_CONDITIONS"),
                 ),
        ),
    )
[ "shervin.nourbakhsh@cern.ch" ]
shervin.nourbakhsh@cern.ch
1f802b207229f09d76b481f39de935bcd569ef13
6db8aba817161dc573f16cde185f4a1c02c753e0
/KadaneAlgo.py
b019a9517f32915d97e6d71d177e4028b4821033
[]
no_license
Prakashchater/Leetcode-array-easy-questions
456153a13397c895acae6550dad8f1b1851ff854
7c5d40f9d68dbf61f4a61a33d9b54f769473b057
refs/heads/main
2023-06-19T14:01:52.483440
2021-07-22T19:44:40
2021-07-22T19:44:40
354,926,404
0
0
null
null
null
null
UTF-8
Python
false
false
752
py
""" # Time: O(N) Space: O(1) def kadaneAlgo(arr): maxsum = 0 currsum = 0 for i in range(len(arr)): currsum = currsum + arr[i] if currsum > maxsum: maxsum = currsum if currsum < 0: currsum = 0 return maxsum if __name__ == '__main__': arr = [5, -4, -2, 6, -1] print(kadaneAlgo(arr)) """ # Time : O(N) Space: O(1) def kadaneAlgorithm(arr): maxEnding = arr[0] maxSoFar = arr[0] for num in arr[1:]: maxEnding = max(num, maxEnding + num) maxSoFar = max(maxSoFar, maxEnding) return maxSoFar if __name__ == '__main__': # arr = [3, 5, -9, 1, 3, -2, 3, 4, 7, 2, -9, 6, 3, 1, -5, 4] arr = [5, -4, -2, 6, -1] print(kadaneAlgorithm(arr))
[ "prakashchater@gmail.com" ]
prakashchater@gmail.com
1331240bbd0b83d5db30d30dc807f04821e2ffaa
927b50cdaf1c384c8bbf6f13816d0ba465852fd8
/main/admin.py
28531a2a583252a8277b2b1e07b6cbe1b50faae0
[ "MIT" ]
permissive
jhabarsingh/DOCMED
f37d336483cffd874b0a7db43677c08a47bd639c
8a831886d3dd415020699491687fb73893e674c5
refs/heads/main
2023-04-26T06:45:10.409633
2021-05-19T14:37:53
2021-05-19T14:37:53
316,683,855
3
5
MIT
2021-02-21T13:32:33
2020-11-28T07:51:22
JavaScript
UTF-8
Python
false
false
341
py
from django.contrib import admin
from main.models import UserCategory, Patient, Doctor, Appointment, Prescription, Contact

# Register your models here.
admin.site.register(UserCategory)
admin.site.register(Patient)
admin.site.register(Contact)
admin.site.register(Doctor)
admin.site.register(Appointment)
admin.site.register(Prescription)
[ "jhabarsinghbhati23@gmail.com" ]
jhabarsinghbhati23@gmail.com
f485063b77515a897b7551e37b28a77deb3059e6
700577285824a21df647aba584d51420db59c598
/OpenColibri/allauth/socialaccount/providers/oauth2/views.py
669a16b17622bb341dac4351bf60494a8bf28ae9
[ "LicenseRef-scancode-warranty-disclaimer" ]
no_license
epu-ntua/opencolibri
2c05acc43ef1b1c86608f6e729a4f83773b01b73
78e2411f78a0213b3961145cfe67cd52398cea70
refs/heads/master
2016-09-11T02:39:43.798777
2014-04-06T11:30:39
2014-04-06T11:30:39
15,764,540
2
0
null
null
null
null
UTF-8
Python
false
false
2,973
py
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect

from allauth.socialaccount.helpers import render_authentication_error
from allauth.socialaccount import providers
from allauth.socialaccount.providers.oauth2.client import (OAuth2Client,
                                                           OAuth2Error)
from allauth.socialaccount.helpers import complete_social_login
from allauth.socialaccount.models import SocialToken, SocialLogin


class OAuth2Adapter(object):
    def get_provider(self):
        return providers.registry.by_id(self.provider_id)

    def complete_login(self, request, app, access_token):
        """
        Returns a SocialLogin instance
        """
        raise NotImplementedError


class OAuth2View(object):
    @classmethod
    def adapter_view(cls, adapter):
        def view(request, *args, **kwargs):
            self = cls()
            self.request = request
            self.adapter = adapter()
            return self.dispatch(request, *args, **kwargs)
        return view

    def get_client(self, request, app):
        callback_url = reverse(self.adapter.provider_id + "_callback")
        callback_url = request.build_absolute_uri(callback_url)
        client = OAuth2Client(self.request, app.client_id, app.secret,
                              self.adapter.authorize_url,
                              self.adapter.access_token_url,
                              callback_url,
                              self.adapter.get_provider().get_scope())
        return client


class OAuth2LoginView(OAuth2View):
    def dispatch(self, request):
        app = self.adapter.get_provider().get_app(self.request)
        client = self.get_client(request, app)
        client.state = SocialLogin.marshall_state(request)
        try:
            return HttpResponseRedirect(client.get_redirect_url())
        except OAuth2Error:
            return render_authentication_error(request)


class OAuth2CallbackView(OAuth2View):
    def dispatch(self, request):
        if 'error' in request.GET or not 'code' in request.GET:
            # TODO: Distinguish cancel from error
            return render_authentication_error(request)
        app = self.adapter.get_provider().get_app(self.request)
        client = self.get_client(request, app)
        try:
            access_token = client.get_access_token(request.GET['code'])
            token = SocialToken(app=app, token=access_token)
            login = self.adapter.complete_login(request, app, token)
            token.account = login.account
            login.token = token
            login.state = SocialLogin.unmarshall_state(request.REQUEST.get('state'))
            return complete_social_login(request, login)
        except OAuth2Error:
            return render_authentication_error(request)
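A hypothetical provider adapter showing how the classes above are meant to be combined: the adapter supplies provider_id, authorize_url, and access_token_url (the attributes get_client reads), and adapter_view wires it into the login and callback views. The "example" names and URLs are placeholders, and complete_login is left unimplemented because building a SocialLogin depends on the specific provider.

# Sketch of an adapter subclass; not part of allauth itself.
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
                                                          OAuth2LoginView,
                                                          OAuth2CallbackView)


class ExampleOAuth2Adapter(OAuth2Adapter):
    provider_id = 'example'
    authorize_url = 'https://example.com/oauth/authorize'
    access_token_url = 'https://example.com/oauth/token'

    def complete_login(self, request, app, access_token):
        # A real adapter would fetch the provider's profile endpoint here and
        # return a SocialLogin built from the response; omitted in this sketch.
        raise NotImplementedError


oauth2_login = OAuth2LoginView.adapter_view(ExampleOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(ExampleOAuth2Adapter)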
[ "smouzakitis@epu.ntua.gr" ]
smouzakitis@epu.ntua.gr
c738ca9f29e6ac11acd68e45f8559240446bfa5b
b7da8d949067a6a7c3a177693ba1bc1f1fe4d533
/quantdigger/branches/quantweb/demo/backup/dstushare.py
5a9cb6ddd582847d0365cd348ccd8339f0c35624
[]
no_license
sunlandli/stock
422722d14405e368f7f295ef2ef7c40e880cd4ef
0112f70335d5ae17b05093ac7c36824a41caf88c
refs/heads/master
2021-07-15T15:21:45.834055
2017-10-19T07:05:13
2017-10-19T07:05:13
100,480,563
2
0
null
null
null
null
UTF-8
Python
false
false
1,510
py
# -*- coding: utf-8 -*-
import pandas as pd
import tushare as ts
import cache as cc

# tushare's get_h_data interface only fetches one year by default,
# so a very early start date is used here to force fetching all history.
_VERY_EARLY_START = '1988-12-12'


def _process_dt(dt):
    return str(pd.to_datetime(dt).date()) if dt else None


def _process_tushare_data(data):
    data.open = data.open.astype(float)
    data.close = data.close.astype(float)
    data.high = data.high.astype(float)
    data.low = data.low.astype(float)
    ## @todo bug: data.volume contains float values!
    data.volume = data.volume.astype(int)
    data.amount = data.amount.astype(float)
    data.index.names = ['datetime']
    data.index = pd.to_datetime(data.index)
    return data


class StockTsSource(object):
    '''tushare stock data source'''
    def __init__(self):
        pass

    def load_data(self, pcontract, dt_start=None, dt_end=None):
        dt_start = _process_dt(dt_start)
        if not dt_start:
            dt_start = _VERY_EARLY_START
        dt_end = _process_dt(dt_end)
        data = ts.get_h_data(pcontract.contract.code, start=dt_start, end=dt_end)
        if data is None:
            return None
        else:
            return _process_tushare_data(data.iloc[::-1])


class CachedStockTsSource(cc.CachedDatasource):
    def __init__(self, base_path):
        datasource = StockTsSource()
        cache = cc.LocalFsCache(base_path)
        super(CachedStockTsSource, self).__init__(datasource, cache)
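A hedged usage sketch: load_data only reads pcontract.contract.code, so any object exposing that attribute will do. The namedtuples and the stock code below are placeholders standing in for quantdigger's real contract types, and the call requires network access to tushare.

# Hypothetical call site for StockTsSource.load_data.
from collections import namedtuple

Contract = namedtuple('Contract', ['code'])
PContract = namedtuple('PContract', ['contract'])

source = StockTsSource()
df = source.load_data(PContract(Contract('600848')), '2016-01-01', '2016-06-30')
print(df.head() if df is not None else 'no data returned')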
[ "sunlandli@tom.com" ]
sunlandli@tom.com
667ab42b912802cfb95982deef3a8ed88a2b595b
0a88cb38439a3e19dd4d4f052895c94f19979649
/box/Scripts/migrate-script.py
ceadc22465e8cc343d82766ee1a183e9d804415d
[]
no_license
pace-noge/online_shop
a9427fbb2eff205af87f03c0bbb3585a37bb697f
ac20010d4046c86b399217cab09267c890a777c2
refs/heads/master
2020-05-18T03:03:16.364749
2013-02-15T06:28:28
2013-02-15T06:28:28
null
0
0
null
null
null
null
UTF-8
Python
false
false
315
py
#!C:\EmeraldBox\box\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'sqlalchemy-migrate==0.7.2','console_scripts','migrate'
__requires__ = 'sqlalchemy-migrate==0.7.2'
import sys
from pkg_resources import load_entry_point

sys.exit(
    load_entry_point('sqlalchemy-migrate==0.7.2', 'console_scripts', 'migrate')()
)
[ "nasa.freaks@gmail.com" ]
nasa.freaks@gmail.com