blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
3
288
content_id
stringlengths
40
40
detected_licenses
listlengths
0
112
license_type
stringclasses
2 values
repo_name
stringlengths
5
115
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringclasses
684 values
visit_date
timestamp[us]date
2015-08-06 10:31:46
2023-09-06 10:44:38
revision_date
timestamp[us]date
1970-01-01 02:38:32
2037-05-03 13:00:00
committer_date
timestamp[us]date
1970-01-01 02:38:32
2023-09-06 01:08:06
github_id
int64
4.92k
681M
star_events_count
int64
0
209k
fork_events_count
int64
0
110k
gha_license_id
stringclasses
22 values
gha_event_created_at
timestamp[us]date
2012-06-04 01:52:49
2023-09-14 21:59:50
gha_created_at
timestamp[us]date
2008-05-22 07:58:19
2023-08-21 12:35:19
gha_language
stringclasses
147 values
src_encoding
stringclasses
25 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
length_bytes
int64
128
12.7k
extension
stringclasses
142 values
content
stringlengths
128
8.19k
authors
listlengths
1
1
author_id
stringlengths
1
132
efcfc1b4ef74378b813bd4f0312e79c71328f77a
3767e31f1c3a53d388cdc6fc1244bf980dfe039a
/pepysdiary/membership/forms.py
c846a8714a7a3e3ca2e7b01a686abf0ec14c79c8
[]
no_license
philgyford/pepysdiary
73749f389c226a35876e55e108a9f39e6afece5e
c6d99f39046eb5309f3292bfb4edb8b008f37aeb
refs/heads/main
2023-09-01T21:27:41.762431
2023-08-30T08:49:44
2023-08-30T08:49:44
7,092,491
16
6
null
2023-09-11T15:06:09
2012-12-10T11:55:11
Python
UTF-8
Python
false
false
7,794
py
# coding: utf-8
from django import forms
from django.contrib.auth import password_validation
from django.contrib.auth.forms import (
    AuthenticationForm,
    PasswordResetForm,
    SetPasswordForm,
)
from django.utils.translation import gettext_lazy as _
from hcaptcha.fields import hCaptchaField

from pepysdiary.common.models import Config
from pepysdiary.membership.models import Person
from pepysdiary.membership.utilities import validate_person_name

from .utilities import send_email

#  Much of this based on django-registration.

# Shared widget attrs so every field picks up the same Bootstrap styling.
attrs_dict = {"class": "required form-control"}


class LoginForm(AuthenticationForm):
    """Login form keyed on email address, gated by the site-wide
    ``allow_login`` config flag."""

    username = forms.EmailField(
        widget=forms.EmailInput(attrs=attrs_dict),
        max_length=254,
        label="Email address",
        error_messages={"invalid": "Please enter a valid email address."},
    )
    password = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict))

    def clean(self):
        # Reject every login attempt while the feature is disabled in config.
        config = Config.objects.get_site_config()
        if config is not None:
            if config.allow_login is False:
                raise forms.ValidationError("Sorry, logging in is currently disabled.")
        return super().clean()


class PersonEditForm(forms.ModelForm):
    """Lets a Person edit their own email address and personal URL."""

    class Meta:
        model = Person
        fields = ("email", "url")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Replace the auto-generated widgets so they carry the shared attrs.
        self.fields["email"].widget = forms.TextInput(attrs=attrs_dict)
        self.fields["url"].widget = forms.TextInput(attrs=attrs_dict)


class PasswordResetForm(PasswordResetForm):
    # NOTE: intentionally shadows the Django form of the same name imported above.
    email = forms.EmailField(
        widget=forms.EmailInput(attrs=attrs_dict),
        max_length=254,
        label="Email address",
        error_messages={"invalid": "Please enter a valid email address."},
    )

    def send_mail(
        self,
        subject_template_name,
        email_template_name,
        context,
        from_email,
        to_email,
        html_email_template_name=None,
    ):
        """
        Overriding the default so that we can use our custom send_email()
        method which includes the headers we want.
        """
        send_email(
            to_email, from_email, subject_template_name, email_template_name, context
        )


class RegistrationForm(forms.Form):
    """
    Form for registering a new user account.

    Validates that the requested name and email are not already in use, and
    requires the password to be entered twice to catch typos.

    Subclasses should feel free to add any additional validation they
    need, but should avoid defining a ``save()`` method -- the actual
    saving of collected user data is delegated to the active
    registration backend.
    """

    name = forms.CharField(
        widget=forms.TextInput(attrs=attrs_dict),
        max_length=50,
        validators=[validate_person_name],
        required=True,
        label=_("Your name"),
        help_text="How people will know you. Can use spaces, eg “Samuel Pepys”.",
    )
    email = forms.EmailField(
        required=True,
        label=_("Email address"),
        max_length=254,
        widget=forms.EmailInput(attrs=attrs_dict),
        help_text="This will not be visible to others.",
    )
    password1 = forms.CharField(
        widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
        required=True,
        label=_("Password"),
        help_text="At least 8 characters.",
    )
    password2 = forms.CharField(
        widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
        required=True,
        label=_("Repeat password"),
    )
    url = forms.URLField(
        widget=forms.URLInput(attrs=attrs_dict),
        label=_("Personal URL"),
        max_length=255,
        required=False,
        help_text="Optional. eg, the web address of your blog, Facebook page, "
        "Twitter page, etc.",
    )
    # Anti-spam honeypot: humans leave it empty, bots that fill it are rejected
    # in clean_honeypot().
    honeypot = forms.CharField(
        required=False,
        label=_(
            "If you enter anything in this field "
            "your registration will be treated as spam"
        ),
    )

    def __init__(self, *args, **kwargs):
        """
        We might need to add captcha and question/answer anti-spam fields,
        depending on our site config.
        """
        super().__init__(*args, **kwargs)
        config = Config.objects.get_site_config()
        if config is not None:
            if config.use_registration_captcha is True:
                self.fields["captcha"] = hCaptchaField(label=_("Anti-spam test"))
            # Only add the question field when both question and answer are set.
            if (
                config.use_registration_question is True
                and config.registration_question != ""
                and config.registration_answer != ""
            ):
                self.fields["answer"] = forms.CharField(
                    widget=forms.TextInput(attrs=attrs_dict),
                    max_length=255,
                    required=True,
                    label=_(config.registration_question),
                )

    def clean_name(self):
        """
        Validate that the name is alphanumeric and is not already in use.
        """
        existing = Person.objects.filter(name__iexact=self.cleaned_data["name"])
        if existing.exists():
            raise forms.ValidationError(_("That name has already been used."))
        else:
            return self.cleaned_data["name"]

    def clean_email(self):
        """
        Validate that the email is not already in use.
        """
        existing = Person.objects.filter(email__iexact=self.cleaned_data["email"])
        if existing.exists():
            raise forms.ValidationError(_("That email address has already been used."))
        else:
            return self.cleaned_data["email"]

    def clean_honeypot(self):
        """Check that nothing's been entered into the honeypot."""
        value = self.cleaned_data["honeypot"]
        if value:
            raise forms.ValidationError(self.fields["honeypot"].label)
        return value

    def clean_answer(self):
        """
        Validate that the anti-spam question was answered successfully.
        """
        # Comparison is case-insensitive on both sides.
        config = Config.objects.get_site_config()
        if config is not None:
            if (
                self.cleaned_data["answer"].lower()
                == config.registration_answer.lower()
            ):
                return self.cleaned_data["answer"]
            else:
                raise forms.ValidationError(_("Please try again."))

    def clean_password1(self):
        """Check the password is OK by Django >=1.9's validators."""
        password1 = self.cleaned_data["password1"]
        password_validation.validate_password(password1)
        return password1

    def clean(self):
        """
        Verify that the values entered into the two password fields match.

        Note that an error here will end up in ``non_field_errors()`` because
        it doesn't apply to a single field.
        """
        config = Config.objects.get_site_config()
        if config is not None:
            if config.allow_registration is False:
                raise forms.ValidationError(
                    "Sorry, new registrations aren't allowed at the moment."
                )
        if "password1" in self.cleaned_data and "password2" in self.cleaned_data:
            if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
                raise forms.ValidationError(_("The two password fields didn't match."))
        return self.cleaned_data


class SetPasswordForm(SetPasswordForm):
    # NOTE: intentionally shadows the Django form of the same name imported above.
    new_password1 = forms.CharField(
        label="New password", widget=forms.PasswordInput(attrs=attrs_dict)
    )
    new_password2 = forms.CharField(
        label="Repeat password", widget=forms.PasswordInput(attrs=attrs_dict)
    )
[ "phil@gyford.com" ]
phil@gyford.com
1f441384521bb711ae6fdce8fef88f9fae846dec
891371ebcb696b91df1d00f98cb595608b580d34
/documents/migrations/0015_requestarchive_approved_by.py
80acd3afe27f687758cfbe9bb0993586f1b4c45a
[]
no_license
NiiColeman/law-firm
867df413e9ca30c57afad9d2743fe19ab96ba586
f41ee5ac88d3f640dce720c90cbfefb93a267400
refs/heads/master
2022-09-23T22:52:25.312903
2020-06-01T14:30:24
2020-06-01T14:30:24
241,650,134
0
0
null
null
null
null
UTF-8
Python
false
false
418
py
# Generated by Django 2.2.6 on 2020-02-18 11:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('documents', '0014_documentarchive_location'), ] operations = [ migrations.AddField( model_name='requestarchive', name='approved_by', field=models.CharField(default='', max_length=250), ), ]
[ "nii.cole@outlook.com" ]
nii.cole@outlook.com
012a8170f648836e33bbe603989e95ffb9215a03
3f13885fdb0649374d866d24a43f86ccc6b4c782
/apps/tools/views/qizhi.py
ca388f32f00be53c879448e92b8d76df70c1fcbd
[]
no_license
linkexf/oneops
426b271c00c5b4b4c55d1d91bf42030dab29623a
64a9c7fd949b6220234a276614ab6555dc8cc17c
refs/heads/master
2020-12-10T04:45:55.681731
2019-11-28T09:02:30
2019-11-28T09:02:30
null
0
0
null
null
null
null
UTF-8
Python
false
false
442
py
from django.views.generic import TemplateView
from django.contrib.auth.mixins import LoginRequiredMixin


class QiZhiCreateHostView(LoginRequiredMixin, TemplateView):
    """Login-protected page for registering hosts with the QiZhi bastion."""

    template_name = "tools/qizhi_create_host.html"

    def get(self, request, **kwargs):
        # Breadcrumb labels, plus anything captured from the URL kwargs.
        context = dict(path1='小工具', path2='堡垒机录入')
        context.update(**kwargs)
        return self.render_to_response(context)
[ "andykaiyu@163.com" ]
andykaiyu@163.com
f1e366f27ad1885260c92fa6e048d493ea794f29
597b82737635e845fd5360e191f323669af1b2ae
/08_full_django/products/products/urls.py
6e148c5162f8e91d1ea8758a9a4ef402749fc16d
[]
no_license
twknab/learning-python
1bd10497fbbe181a26f2070c147cb2fed6955178
75b76b2a607439aa2d8db675738adf8d3b8644df
refs/heads/master
2021-08-08T08:50:04.337490
2017-11-10T00:28:45
2017-11-10T00:28:45
89,213,845
0
1
null
null
null
null
UTF-8
Python
false
false
326
py
"""products URL Configuration

Sets up URL configuration for each application in the `products` project.

Current Applications:

-`products`: a simple app to play with Django Models and creating and
retrieving data.
"""
from django.conf.urls import url, include

# Every URL is delegated to the `products` app's own urls module.
urlpatterns = [
    url(r'^', include("apps.products.urls")),
]
[ "natureminded@users.noreply.github.com" ]
natureminded@users.noreply.github.com
69db29490d01dc933aed028527cca2bbcb2a80a9
0d0afd1dce972b4748ce8faccd992c019794ad9e
/integra/seguranca/models/sale_defeito_os.py
484544383c69811435d08f76d682a553b9c6d529
[]
no_license
danimaribeiro/odoo-erp
e2ca2cfe3629fbedf413e85f7c3c0453fd16941e
d12577bf7f5266b571cbedeb930720d653320e96
refs/heads/master
2020-01-23T21:32:16.149716
2016-11-05T15:35:40
2016-11-05T15:35:40
67,892,809
0
1
null
null
null
null
UTF-8
Python
false
false
699
py
# -*- coding: utf-8 -*-
#from __future__ import division, print_function, unicode_literals

from osv import osv, orm, fields


class sale_defeito_os(osv.Model):
    """OpenERP model for the defects ("defeitos") recorded on a service
    order (OS)."""

    _description = u'Defeito da OS'
    _name = 'sale.defeito.os'
    _rec_name = 'nome'
    _order = 'id'

    def _codigo(self, cr, uid, ids, nome_campo, args=None, context=None):
        """Functional field: display code derived from the record id,
        zero-padded to four digits (e.g. id 7 -> '0007').

        BUG FIX: ``context`` previously defaulted to a mutable ``{}`` shared
        across every call; it now defaults to ``None`` (context is unused
        here, so behaviour is otherwise identical).
        """
        if context is None:
            context = {}
        res = {}
        for os_obj in self.browse(cr, uid, ids):
            res[os_obj.id] = str(os_obj.id).zfill(4)
        return res

    _columns = {
        # Computed, non-stored code shown to users; searchable (select=True).
        'codigo': fields.function(_codigo, type='char', method=True,
                                  string=u'Código', size=20, store=False,
                                  select=True),
        'nome': fields.char(u'Nome', size=180),
    }


sale_defeito_os()
[ "danimaribeiro@gmail.com" ]
danimaribeiro@gmail.com
117788556929425f430585c961bb4ff7d6162fee
8d753bb8f19b5b1f526b0688d3cb199b396ed843
/osp_sai_2.1.8/system/apps/web/api/web_sessions.py
146b4f0981fe26b5dc00f9568af90c121d95faf4
[]
no_license
bonald/vim_cfg
f166e5ff650db9fa40b564d05dc5103552184db8
2fee6115caec25fd040188dda0cb922bfca1a55f
refs/heads/master
2023-01-23T05:33:00.416311
2020-11-19T02:09:18
2020-11-19T02:09:18
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,073
py
#!/usr/bin/python
#-*- coding: utf-8 -*-

from flask import url_for
import os
import base
from vcl import vcmd
import types


def get(file_type):
    """
    API: GET:
    Retrun:
        {
            sess: [
                {id: string, user: string, expire: string, client: string,},
                ...
            ],
        }
    """
    # Response envelope; `sess` holds one entry per active web session.
    obj = {'error': False, 'err_code': 0, 'err_reason': ''}
    res = []

    # Read the session table from the CDB.
    cmd = 'cdbctl read/cdb/app/login/web | grep web'
    lines = vcmd.get_lines(cmd)
    if not lines:
        # BUG FIX: previously returned the bare list `res` here, breaking the
        # documented dict contract; now returns the envelope with an empty list.
        obj['sess'] = res
        return obj

    # One session record per line; usernames are stored encoded and must be
    # decoded via fnconvert before display.
    for line in lines:
        key = vcmd.cmd_get_token(line, "key")
        user = vcmd.cmd_get_token(line, "user")
        deccmd = 'fnconvert -c decoding -m "%s"' % (user)
        decuser = vcmd.get_lines(deccmd)
        ipaddr = vcmd.cmd_get_token(line, "ipaddr")
        etime = base.relative_time.get(int(vcmd.cmd_get_token(line, "expire_time")))
        res.append({
            'id': key,
            'user': decuser[0],
            'expire': etime,
            'client': ipaddr,
        })
    obj['sess'] = res
    return obj


def delete(req_data):
    """
    API: DELETE:
        {
            file_arr: [ { 'id': string, }, ... ... ],
        }
    Retrun:
        {
            error: bool, err_code: int, err_reason: string
        }
    """
    _err_reason = [
        '',               # err_code: 0
        'bad request',    # err_code: 1
        'delete failed',  # err_code: 2
    ]
    obj = {'error': False, 'err_code': 0, 'err_reason': ''}

    # Parameter check: `sess_arr` must be a list of session ids.
    # (isinstance(..., list) replaces the Python-2-only types.ListType test;
    # behaviour is identical on Py2 and it also works on Py3.)
    sess_arr = req_data.get('sess_arr')
    if not isinstance(sess_arr, list):
        obj['error'] = True
        obj['err_code'] = 1
        obj['err_reason'] = _err_reason[1]
        return obj

    # Delete each requested session from the CDB; best-effort, the per-id
    # status is not surfaced to the caller.
    for sid in sess_arr:
        exec_str = 'cdbctl delete/cdb/app/login/%s' % (str(sid))
        status, output = vcmd.get_status_lines(exec_str)
    return obj
[ "zhwwan@gmail.com" ]
zhwwan@gmail.com
3c2b44b45e010b4843d1e27a86696ab4d83f9802
13a13b4f93ef6664dcf610a556c53a1f6c3c8bc4
/ques9.py
d41b7a7f7fccd0c92d0dca3ec26dbf55f564b6f0
[]
no_license
BhagyashreeKarale/more-exercise
54ea9f6fa2f4f007278631535362959446980ea9
c0f34d345d2e5bef19f872861fafb3f8a0233e43
refs/heads/main
2023-08-02T14:49:44.403733
2021-09-11T19:07:51
2021-09-11T19:07:51
405,454,804
0
0
null
null
null
null
UTF-8
Python
false
false
2,176
py
# A Harshad number is divisible by the sum of its own digits: for example
# 42 is one because 4 + 2 = 6 and 42 % 6 == 0; so are 18, 21 and 24, and
# every single-digit number divides its own digit sum trivially.
#
# Print every Harshad number from 1 to 1000, then a closing message.

i = 1
while i <= 1000:
    # Digit sum via the string form of the number.
    digit_total = sum(int(digit) for digit in str(i))
    if i % digit_total == 0:
        print(i)
    i = i + 1
print("These are the harshad numbers from 1-1000")
[ "noreply@github.com" ]
BhagyashreeKarale.noreply@github.com
3b907f1a9e78e5f2499489ec4a96db16c6a67c09
45b8e141f762b95edec36ce40809ea4b89e3d287
/mahkalastore/mahkalastore/settings.py
5838b8a0373f9a932a716f814882743e16317924
[]
no_license
nimanoori22/mys
73d7a0ad141e1c6208e776a15d079a2599c46a7f
0122586a4d69f80219ad25e42ef89f3052f5cb81
refs/heads/master
2022-11-28T22:24:44.947703
2020-08-13T14:52:19
2020-08-13T14:52:19
279,652,903
0
0
null
null
null
null
UTF-8
Python
false
false
3,679
py
"""
Django settings for mahkalastore project.

Generated by 'django-admin startproject' using Django 3.0.8.

For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""

import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# The key lives in a non-versioned secrets module, not in this file.
from mahkalastore import secrets
SECRET_KEY = secrets.seckey

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'product.apps.ProductConfig',
    'mptt',
    'ckeditor',
    'home',
    'user',
    'order',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'mahkalastore.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'mahkalastore.wsgi.application'


# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = '/static/'

# User-uploaded files are served from /uploads/.
MEDIA_URL = '/uploads/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'uploads')

#.........
SITE_ID = 1

####################################
##  CKEDITOR CONFIGURATION ##
####################################

CKEDITOR_JQUERY_URL = 'https://ajax.googleapis.com/ajax/libs/jquery/2.2.4/jquery.min.js'

CKEDITOR_UPLOAD_PATH = 'images/'
CKEDITOR_IMAGE_BACKEND = "pillow"

CKEDITOR_CONFIGS = {
    'default': {
        'toolbar': None,
    },
}

###################################
[ "nimanoori000@gmail.com" ]
nimanoori000@gmail.com
5ef6346413ae628ba03088b2d210c9ae42298bd9
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03352/s946533946.py
12fad2670ffb01c55aa659302df6c57fe72d8080
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
173
py
# Find the largest perfect power b**p (p >= 2) that does not exceed X.
X = int(input())
res = 1
for base in range(X):
    for exponent in range(2, X):
        power = base ** exponent
        if power <= X:
            res = max(res, power)
        else:
            # Powers only grow from here, so move on to the next base.
            break
print(res)
[ "66529651+Aastha2104@users.noreply.github.com" ]
66529651+Aastha2104@users.noreply.github.com
5438414fe8e44e712ecbfe276212b0d08e32fd70
e3b9aa9b17ebb55e53dbc4fa9d1f49c3a56c6488
/samanage/komand_samanage/triggers/new_incidents/trigger.py
470860710f58f767235a820a10dff1bf13aa4beb
[ "MIT" ]
permissive
OSSSP/insightconnect-plugins
ab7c77f91c46bd66b10db9da1cd7571dfc048ab7
846758dab745170cf1a8c146211a8bea9592e8ff
refs/heads/master
2023-04-06T23:57:28.449617
2020-03-18T01:24:28
2020-03-18T01:24:28
248,185,529
1
0
MIT
2023-04-04T00:12:18
2020-03-18T09:14:53
null
UTF-8
Python
false
false
2,192
py
import komand
import time
from .schema import NewIncidentsInput, NewIncidentsOutput

# Custom imports below


class NewIncidents(komand.Trigger):
    """Polling trigger that emits an event for every incident id not seen
    before, persisting seen ids in a cache file (one id per line)."""

    def __init__(self):
        super(self.__class__, self).__init__(
            name='new_incidents',
            description='Check for new incidents',
            input=NewIncidentsInput(),
            output=NewIncidentsOutput())

    def run(self, params={}):
        # Poll interval in seconds; defaults to 10.
        frequency = params.get('frequency', 10)
        cache_file_name = 'cached_incidents_ids'

        with komand.helper.open_cachefile(cache_file_name) as cache_file:
            self.logger.info(
                'Found or created cache file: {}'.format(cache_file_name)
            )
            # One id per line; strip() drops the trailing newline.
            cached_ids = {l.strip() for l in cache_file.readlines()}
            self.logger.info('Cached IDs: {}'.format(cached_ids))

        while True:
            try:
                incidents = self.connection.api.list_incidents()
                new_ids = set()
                for incident in incidents:
                    incident_id = str(incident['id'])
                    if incident_id not in cached_ids:
                        cached_ids.add(incident_id)
                        new_ids.add(incident_id)
                        self.logger.info(
                            'New incident found: {}'.format(incident_id)
                        )
                        self.send({'incident': incident})
                with komand.helper.open_cachefile(
                    cache_file_name, append=True
                ) as cache_file:
                    for incident_id in new_ids:
                        self.logger.info(
                            'Writing incident {} to cache file'.format(
                                incident_id
                            )
                        )
                        # BUG FIX: the id must be newline-terminated; it was
                        # previously written bare, so successive ids ran
                        # together on one line and the readlines()/strip()
                        # parse above could never match them after a restart.
                        cache_file.write(incident_id + '\n')
                time.sleep(frequency)
            except Exception as e:
                raise Exception(
                    'An error occurred while reading incidents: {}'.format(e)
                )

    def test(self):
        # Connectivity check only; returns an empty incident payload.
        self.connection.api.list_incidents()
        return {'incident': {}}
[ "jonschipp@gmail.com" ]
jonschipp@gmail.com
516fab03d860d4df846fb70176bd0d16d5007d2f
ddc6e402758c364d25ce9caeda7b3cd94dbcd546
/Medium/535_EncodeandDecodeTinyURL.py
2cda233163cdee5989cbaf60070c314a400fbc4d
[]
no_license
J-pcy/Jffery_Leetcode_Python
f01cdbb31a114bc6ed91139d0bd2cdddda35a503
f34c370fbb9fb171d5ec33337116a764c25cd2dd
refs/heads/master
2020-03-20T20:20:02.931776
2018-11-02T19:41:36
2018-11-02T19:41:36
137,682,076
0
0
null
null
null
null
UTF-8
Python
false
false
1,105
py
"""
TinyURL is a URL shortening service where you enter a URL such as
https://leetcode.com/problems/design-tinyurl and it returns a short URL
such as http://tinyurl.com/4e9iAk.

Design the encode and decode methods for the TinyURL service. There is no
restriction on how your encode/decode algorithm should work. You just need
to ensure that a URL can be encoded to a tiny URL and the tiny URL can be
decoded to the original URL.
"""


class Codec:
    """Shortener backed by a counter: the Nth encoded URL becomes
    http://tinyurl.com/N, and decoding looks N up again."""

    def __init__(self):
        self.index = 0
        self.map = {}

    def encode(self, longUrl):
        """Encodes a URL to a shortened URL.

        :type longUrl: str
        :rtype: str
        """
        self.index += 1
        self.map[self.index] = longUrl
        return "http://tinyurl.com/{}".format(self.index)

    def decode(self, shortUrl):
        """Decodes a shortened URL to its original URL.

        :type shortUrl: str
        :rtype: str
        """
        key = shortUrl.rsplit('/', 1)[-1]
        return self.map[int(key)]


# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.decode(codec.encode(url))
[ "chenyupe@usc.edu" ]
chenyupe@usc.edu
d4aa33ef9af10d78de2c1ed7a52823bdcaaae7c9
31cf77b4c0342c6148b35ae2613d5e2501d5e755
/src/encoded/tests/fixtures/schemas/organism_development_series.py
1a85ac543daf46cc0080b5b5fe5361773c6e4447
[ "MIT" ]
permissive
ENCODE-DCC/encoded
096de8a6d60c959a783cc9517f1d60bd6c21b71f
80e05610c79b46d0890228555bb03e436b2fef11
refs/heads/dev
2023-08-08T15:45:07.493187
2023-08-03T20:01:24
2023-08-03T20:01:24
7,045,549
110
69
MIT
2023-09-12T23:59:45
2012-12-07T00:52:21
JavaScript
UTF-8
Python
false
false
405
py
import pytest


@pytest.fixture
def organism_development_series_17(testapp, lab, base_experiment_submitted, award):
    # Minimal valid payload for a schema-version-17 OrganismDevelopmentSeries,
    # assembled from the lab/award/experiment fixtures injected by pytest.
    item = {
        'award': award['uuid'],
        'lab': lab['uuid'],
        'related_datasets': [base_experiment_submitted['@id']],
        'schema_version': '17',
        'internal_tags': ['ENCYCLOPEDIAv3', 'ENCYCLOPEDIAv4', 'ENCYCLOPEDIAv5', 'ENCYCLOPEDIAv6']
    }
    return item
[ "noreply@github.com" ]
ENCODE-DCC.noreply@github.com
c047613e9ed2f92e9bf284726158a743c18d970f
a5747577f1f4b38823f138ec0fbb34a0380cd673
/17/mc/ExoDiBosonResonances/EDBRTreeMaker/test/crab3_analysisnewnewST_s-channel_4f_leptonDecays.py
f1704df01dc11d1aad6690e6981b899f1585d06c
[]
no_license
xdlyu/fullRunII_ntuple
346fc1da4cec9da4c404aa1ec0bfdaece6df1526
aa00ca4ce15ae050c3096d7af779de44fc59141e
refs/heads/master
2020-08-03T07:52:29.544528
2020-01-22T14:18:12
2020-01-22T14:18:12
211,673,739
0
3
null
null
null
null
UTF-8
Python
false
false
2,396
py
# CRAB3 job configuration for the single-top s-channel MC sample.
from WMCore.Configuration import Configuration
config = Configuration()

config.section_("General")
config.General.requestName = 'newnewST_s-channel_4f_leptonDecays'
config.General.transferLogs = True

config.section_("JobType")
config.JobType.pluginName = 'Analysis'
# Jet-energy-correction text files shipped alongside each job.
config.JobType.inputFiles = ['Fall17_17Nov2017_V8_MC_L1FastJet_AK4PFchs.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK4PFchs.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK4PFchs.txt','Fall17_17Nov2017_V8_MC_L1FastJet_AK8PFchs.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK8PFchs.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK8PFchs.txt','Fall17_17Nov2017_V8_MC_L1FastJet_AK8PFPuppi.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK8PFPuppi.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK8PFPuppi.txt','Fall17_17Nov2017_V8_MC_L1FastJet_AK4PFPuppi.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK4PFPuppi.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK4PFPuppi.txt']
#config.JobType.inputFiles = ['PHYS14_25_V2_All_L1FastJet_AK4PFchs.txt','PHYS14_25_V2_All_L2Relative_AK4PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK4PFchs.txt','PHYS14_25_V2_All_L1FastJet_AK8PFchs.txt','PHYS14_25_V2_All_L2Relative_AK8PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK8PFchs.txt']
# Name of the CMSSW configuration file
#config.JobType.psetName = 'bkg_ana.py'
config.JobType.psetName = 'analysis.py'
#config.JobType.allowUndistributedCMSSW = True
config.JobType.sendExternalFolder = True
config.JobType.allowUndistributedCMSSW = True

config.section_("Data")
#config.Data.inputDataset = '/WJetsToLNu_13TeV-madgraph-pythia8-tauola/Phys14DR-PU20bx25_PHYS14_25_V1-v1/MINIAODSIM'
config.Data.inputDataset = '/ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'
config.Data.inputDBS = 'global'
#config.Data.inputDBS = 'phys03'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob =5
config.Data.totalUnits = -1
config.Data.publication = False
# Output path components under the group STEAM area.
name = 'idle'
steam_dir = 'xulyu'
config.Data.outLFNDirBase = '/store/group/dpg_trigger/comm_trigger/TriggerStudiesGroup/STEAM/' + steam_dir + '/' + name + '/'

# This string is used to construct the output dataset name
# NOTE(review): sendExternalFolder is already set above in the JobType
# section; this second assignment is redundant.
config.JobType.sendExternalFolder = True
config.Data.outputDatasetTag = 'newnewST_s-channel_4f_leptonDecays'

config.section_("Site")
# Where the output files will be transmitted to
config.Site.storageSite = 'T2_CH_CERN'
[ "XXX@cern.ch" ]
XXX@cern.ch
bcca40aea327335a5be27121afa6c83aad3e0049
3267fb38696d7b114a22f476f2c60425d6ee349a
/src/tests/test_api/test_auth_endpoint.py
b2aa2759373a6cb2b92f4a901b98debd44f51dfe
[]
no_license
marcinowski/github-adapter
c0092e3f817f9dc1d97691e81b1c247ae281b2c7
2d7c6b9601da082de246450cc840412f0c4331b5
refs/heads/master
2022-12-10T00:53:39.386198
2017-09-06T10:57:09
2017-09-06T10:57:09
100,716,960
0
0
null
2021-06-01T22:02:20
2017-08-18T13:55:02
Python
UTF-8
Python
false
false
1,805
py
"""
:created on: 2017-08-22
:author: Marcin Muszynski
:contact: marcinowski007@gmail.com
"""
from flask import session
from unittest.mock import MagicMock

from api.auth import AuthLogin, AuthLogout
from ..generic import GenericTestCase
from ..github_responses import AUTH_RESPONSE, AUTH_STATUS, ERROR_RESPONSE_401


class TestAuthResource(GenericTestCase):
    # Exercises login/logout resources; GitHub calls are replaced with
    # MagicMock so no network traffic occurs.

    def test_login(self):
        """ Simple workflow test for fetching credentials from request """
        with self.app.test_request_context('/', data={'username': 'test', 'password': 'test'}):
            username, password = AuthLogin()._get_credentials_from_request()
            self.assertEqual(username, 'test')

    def test_storing_credentials(self):
        """ Simple test for keeping credentials in session while authenticating """
        a = AuthLogin()
        mock = MagicMock()
        # Simulate a successful GitHub auth response.
        mock.return_value = AUTH_RESPONSE, AUTH_STATUS
        a.fetch_from_github = mock
        with self.app.test_request_context('/', data={'username': 't_username', 'password': 'test'}):
            a.post()
            self.assertTrue(a.is_authenticated())
            self.assertEqual(session['username'], 't_username')

    def test_nok_response(self):
        """ Error GitHub response handling """
        a = AuthLogin()
        mock = MagicMock()
        # Simulate GitHub rejecting the credentials with a 401.
        mock.return_value = ERROR_RESPONSE_401, 401
        a.fetch_from_github = mock
        resp, status_code = a.post()
        self.assertEqual(status_code, 401)

    def test_logout(self):
        """ Simple workflow test for logging out """
        a = AuthLogout()
        with self.app.test_request_context('/'):
            session['authenticated'] = True
            a.get()
            self.assertFalse(a.is_authenticated())
            self.assertFalse('username' in session)
[ "muszynskimarcin@wp.pl" ]
muszynskimarcin@wp.pl
af0406bc37dc6322179122bb2b34e46c5407bf26
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
/BZ4mMcEz3aqosEtbC_7.py
e3f9da38350d7784a4a4a0d20383b509a28ee3da
[]
no_license
daniel-reich/ubiquitous-fiesta
26e80f0082f8589e51d359ce7953117a3da7d38c
9af2700dbe59284f5697e612491499841a6c126f
refs/heads/master
2023-04-05T06:40:37.328213
2021-04-06T20:17:44
2021-04-06T20:17:44
355,318,759
0
0
null
null
null
null
UTF-8
Python
false
false
144
py
def mean(num):
    """Return the truncated integer average of the decimal digits of num."""
    digits = str(num)
    total = sum(int(ch) for ch in digits)
    return int(total / len(digits))
[ "daniel.reich@danielreichs-MacBook-Pro.local" ]
daniel.reich@danielreichs-MacBook-Pro.local
5e47baa390d8d2f117b9ae64bc39ef1cfc413d28
27acb207b21b4572561de4a5f7dfb9740318c0b8
/Python-Programming-Essentials/Week3/Ex10_W3_smaller_root.py
be67b001047ad7f7053dfbcba4458612dc558b08
[]
no_license
iamieht/intro-scripting-in-python-specialization
ee836ef05b62f6c74fe8da3ee137687b4d0035cf
8ea4f85f0ed3dcd541f89521c013335e9eb32980
refs/heads/master
2021-01-16T05:35:51.616276
2020-06-08T18:39:45
2020-06-08T18:39:45
242,993,577
0
0
null
null
null
null
UTF-8
Python
false
false
1,723
py
"""
Compute the smaller root of a quadratic equation.
"""

###################################################
# Smaller quadratic root formula
# Student should enter function on the next lines.


def smaller_root(a, b, c):
    """Return the smaller real root of a*x^2 + b*x + c = 0, or None
    (after printing an error) when a == 0 or no real solution exists."""
    discriminant = b ** 2 - 4 * a * c
    if discriminant < 0 or a == 0:
        print("Error: No real solutions")
        return None
    root = discriminant ** 0.5
    # For a > 0 the smaller root comes from subtracting the square root;
    # for a < 0 the division by a negative flips the comparison.
    offset = -root if a > 0 else root
    return (-b + offset) / (2 * a)


###################################################
# Tests
# Student should not change this code.

coeff_a, coeff_b, coeff_c = 1, 2, 3
print("The smaller root of " + str(coeff_a) + "x^2 + "
      + str(coeff_b) + "x + " + str(coeff_c) + " is: ")
print(str(smaller_root(coeff_a, coeff_b, coeff_c)))

coeff_a, coeff_b, coeff_c = 2, 0, -10
print("The smaller root of " + str(coeff_a) + "x^2 + "
      + str(coeff_b) + "x + " + str(coeff_c) + " is: ")
print(str(smaller_root(coeff_a, coeff_b, coeff_c)))

coeff_a, coeff_b, coeff_c = 6, -3, 5
print("The smaller root of " + str(coeff_a) + "x^2 + "
      + str(coeff_b) + "x + " + str(coeff_c) + " is: ")
print(str(smaller_root(coeff_a, coeff_b, coeff_c)))

###################################################
# Expected output
# Student should look at the following comments and compare to printed output.

#The smaller root of 1x^2 + 2x + 3 is:
#Error: No real solutions
#None
#The smaller root of 2x^2 + 0x + -10 is:
#-2.2360679775
#The smaller root of 6x^2 + -3x + 5 is:
#Error: No real solutions
#None
[ "iamieht@gmail.com" ]
iamieht@gmail.com
6783bf9ea4a5df08ea547298358e38c0ad0d7867
2ac13c73340e5f4126e1dc394cdca45e3b2b223b
/utils/time_now.py
76025e95be078f76beee9634e373b4a500d4b4d8
[]
no_license
EgbieAndersonUku1/price_alerter_app
36074fc32aedde1aee0524a271e98d3da18126d1
87e1d6ac05a19b0255c43003e957190f597d4655
refs/heads/master
2020-03-17T07:04:50.299396
2018-06-06T00:06:45
2018-06-06T00:06:45
133,375,739
0
0
null
null
null
null
UTF-8
Python
false
false
444
py
from datetime import datetime from datetime import timedelta def time_passed_since_current_time(minutes): """time_passed_since_current_time(int) -> returns time obj Returns the number of minutes that has elapsed between the current time and the passed in parameter minutes. """ return time_now() - timedelta(minutes=minutes) def time_now(): """Returns the current time object""" return datetime.utcnow()
[ "jayunderwood2011@hotmail.com" ]
jayunderwood2011@hotmail.com
773c3cc121a7313a3dce5a3dbaea4cc5f874ba89
29f6b4804f06b8aabccd56fd122b54e4d556c59a
/CodeAnalysis/SourceMeter_Interface/SourceMeter-8.2.0-x64-linux/Python/Demo/ceilometer/ceilometer/network/statistics/port.py
157c6686776787237e2dcaa610abe36336383467
[ "Apache-2.0" ]
permissive
obahy/Susereum
6ef6ae331c7c8f91d64177db97e0c344f62783fa
56e20c1777e0c938ac42bd8056f84af9e0b76e46
refs/heads/master
2020-03-27T11:52:28.424277
2018-12-12T02:53:47
2018-12-12T02:53:47
146,511,286
3
2
Apache-2.0
2018-12-05T01:34:17
2018-08-28T21:57:59
HTML
UTF-8
Python
false
false
2,937
py
# # Copyright 2014 NEC Corporation. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer.network import statistics from ceilometer import sample class PortPollster(statistics._Base): meter_name = 'switch.port' meter_type = sample.TYPE_GAUGE meter_unit = 'port' class PortPollsterReceivePackets(statistics._Base): meter_name = 'switch.port.receive.packets' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterTransmitPackets(statistics._Base): meter_name = 'switch.port.transmit.packets' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterReceiveBytes(statistics._Base): meter_name = 'switch.port.receive.bytes' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'B' class PortPollsterTransmitBytes(statistics._Base): meter_name = 'switch.port.transmit.bytes' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'B' class PortPollsterReceiveDrops(statistics._Base): meter_name = 'switch.port.receive.drops' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterTransmitDrops(statistics._Base): meter_name = 'switch.port.transmit.drops' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterReceiveErrors(statistics._Base): meter_name = 'switch.port.receive.errors' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterTransmitErrors(statistics._Base): meter_name = 'switch.port.transmit.errors' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class 
PortPollsterReceiveFrameErrors(statistics._Base): meter_name = 'switch.port.receive.frame_error' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterReceiveOverrunErrors(statistics._Base): meter_name = 'switch.port.receive.overrun_error' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterReceiveCRCErrors(statistics._Base): meter_name = 'switch.port.receive.crc_error' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet' class PortPollsterCollisionCount(statistics._Base): meter_name = 'switch.port.collision.count' meter_type = sample.TYPE_CUMULATIVE meter_unit = 'packet'
[ "abelgomezr45@gmail.com" ]
abelgomezr45@gmail.com
0c8e9163f2888dad87748866da7f0d5bbc0f96f5
212d39dd0e12d42ce9b830de7e8738504dda2428
/ipc/fork2.py
121bd21a75495360d20399dd7b2d622483bf3c29
[ "CC0-1.0", "LicenseRef-scancode-public-domain" ]
permissive
waveform80/presentations
a0c7869f5acd699922f84ed1b510519c00472887
9e8d9f63d4e841e573d5b9b01c234128d49c29c5
refs/heads/master
2023-05-12T21:29:29.083191
2023-05-04T07:29:59
2023-05-04T07:29:59
21,940,196
0
0
null
null
null
null
UTF-8
Python
false
false
270
py
import io import os from time import sleep f = io.open('my_data', 'w+') pid = os.fork() if pid: print("I'm the master: sending data") f.write("hello") else: print("I'm the slave: waiting for data") sleep(1) f.seek(0) print("Received", f.read())
[ "dave@waveform.org.uk" ]
dave@waveform.org.uk
a641b6ef567ff86587e0d3453127a1d2d44f66f2
1dd687bdb3bb964383c3f4dde7e9eae8a09be5f5
/pyleecan/Methods/Machine/Machine/plot_anim_rotor.py
4524207e96adf3c8153a8d9be9ce90a1feec4933
[ "Apache-2.0" ]
permissive
Kelos-Zhu/pyleecan
4daa2c8738cfe8a721ac2bdf883c59a1b52d8570
368f8379688e31a6c26d2c1cd426f21dfbceff2a
refs/heads/master
2022-11-18T14:30:29.787005
2020-07-09T16:55:02
2020-07-09T16:55:02
278,112,321
0
0
Apache-2.0
2020-07-08T14:31:39
2020-07-08T14:31:38
null
UTF-8
Python
false
false
2,121
py
from numpy import pi import matplotlib.pyplot as plt import matplotlib.animation as animation from matplotlib.pyplot import axis, subplots from matplotlib.patches import Polygon, Circle def plot_anim_rotor(self, Nframe, Tanim, Nrot=1, is_loop=True): """Plot the machine with an animation of the rotor (inner rotor for now ?) Parameters ---------- self : Machine Machine object Nframe: int Number of frame for the animation Tanim : float Duration of the animation [ms] Nrot : float Number of rotation is_loop : bool True to activate the loop animation """ # Display fig, axes = subplots() axes.set_xlabel("(m)") axes.set_ylabel("(m)") axes.set_title("Machine") # Axis Setup axis("equal") # The Lamination is centered in the figure Lim = (self.stator.Rext) * 1.5 # Axes limit for plot Rsurf = self.rotor.build_geometry(sym=1, alpha=0, delta=0) # Rotation angle between each frame Dalpha = 2 * pi * Nrot / Nframe def init(): """Create the patches for the first image """ Spatches = self.stator.plot(is_display=False) Rpatches = self.rotor.plot(is_display=False) for patch in Spatches: axes.add_patch(patch) for patch in Rpatches: axes.add_patch(patch) return [] def update_rotor(ii): """Rotate and update the rotor patches """ for ii in range(len(Rsurf)): Rsurf[ii].rotate(Dalpha) patches = Rsurf[ii].get_patches() for patch in patches: if type(patch) is Polygon: axes.patches[-len(Rsurf) + ii].xy = patch.xy # elif type(patch) is Circle: # pass axes.set_xlim(-Lim, Lim) axes.set_ylim(-Lim, Lim) return [] # Animation definition anim = animation.FuncAnimation( fig, update_rotor, init_func=init, frames=Nframe, interval=Tanim / Nframe, blit=True, repeat=is_loop, ) plt.show()
[ "pierre.bonneel@gmail.com" ]
pierre.bonneel@gmail.com
318bb13a3661a89b40f876c3b2d72dfee56d1365
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_176/ch27_2019_03_15_04_30_11_986374.py
c83630adaec1d3021c42b98cdc52c9fc5978453f
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
173
py
f= int(input('quantos cigarros fuma por dia?')) a= int(input('há quantos anos fuma?')) print ('a quantidade de tempo perdido em dias é: {0:.2f}'.format(a*n))
[ "you@example.com" ]
you@example.com
0e2a2e57dcde388e4acef2b5ce123becfe52f7be
08ee36e0bb1c250f7f2dfda12c1a73d1984cd2bc
/src/mnistk/networks/linearbias_11.py
2dcc5515834c7e7014dcd4a2738d353cae809eab
[]
no_license
ahgamut/mnistk
58dadffad204602d425b18549e9b3d245dbf5486
19a661185e6d82996624fc6fcc03de7ad9213eb0
refs/heads/master
2021-11-04T07:36:07.394100
2021-10-27T18:37:12
2021-10-27T18:37:12
227,103,881
2
1
null
2020-02-19T22:07:24
2019-12-10T11:33:09
Python
UTF-8
Python
false
false
802
py
# -*- coding: utf-8 -*- """ linearbias_11.py :copyright: (c) 2019 by Gautham Venkatasubramanian. :license: MIT """ import torch from torch import nn class LinearBias_11(nn.Module): def __init__(self): nn.Module.__init__(self) self.f0 = nn.Linear(in_features=784, out_features=79, bias=True) self.f1 = nn.Linear(in_features=79, out_features=79, bias=True) self.f2 = nn.Linear(in_features=79, out_features=65, bias=True) self.f3 = nn.Linear(in_features=65, out_features=10, bias=True) self.f4 = nn.LogSoftmax(dim=1) def forward(self, *inputs): x = inputs[0] x = x.view(x.shape[0],784) x = self.f0(x) x = self.f1(x) x = self.f2(x) x = self.f3(x) x = self.f4(x) return x
[ "41098605+ahgamut@users.noreply.github.com" ]
41098605+ahgamut@users.noreply.github.com
f29d716cc9eccd1a742ce10c9deb5c7d563cfdf5
d210853ba6d1f3b5383a09e1b553c19083d78014
/server/cart/utils.py
c5c47f24bfe9d396766e798b67c8a78dc7232248
[]
no_license
Hagen013/presidentwatches
f252c7995e39f6cffb6608e43f555abc32f6a9fc
b9ca72aef1db01262675274c83a5c5dff4d6e2da
refs/heads/master
2022-12-17T08:45:15.541869
2019-12-29T17:48:56
2019-12-29T17:48:56
162,160,435
0
0
null
2022-12-08T01:49:45
2018-12-17T16:36:05
HTML
UTF-8
Python
false
false
680
py
from .models import Promocode from .constants import (PICKPOINT_TO_CDEK_STATUS_CODE_MAPPING, RUPOST_TO_CDEK_STATUS_CODE_MAPPING, RUPOST_TEXT_TO_STATUS_MAPPING) def get_promocode_by_brand(brand): return Promocode.objects.filter(brands__in=self.brand).order_by(sale_amount) def pickpoint_to_cdek_code(code): return PICKPOINT_TO_CDEK_STATUS_CODE_MAPPING.get(code, code) def rupost_to_cdek_code(code): cdek_code = RUPOST_TO_CDEK_STATUS_CODE_MAPPING.get(str(code), None) if cdek_code is None: return code return cdek_code def rupost_msg_to_code(msg): return RUPOST_TEXT_TO_STATUS_MAPPING.get(msg, 0)
[ "=" ]
=
0097f56446dc96fbe0812c302ca72860b5a374e9
6fbb1f5625aaa6381ec09e76e0f52fc48e2af9b0
/alternatingSort.py
ab89cf927f4447ebee51ce2c9a00680f8bec26e8
[]
no_license
hemal507/CS-Algorithms
273341383bb00d843d95fe26a07d47555c2d7728
154c21982485a1c9b94ed54df6e6e12be9c54f98
refs/heads/master
2021-01-01T20:02:32.734198
2020-05-24T12:06:51
2020-05-24T12:06:51
98,743,842
0
0
null
null
null
null
UTF-8
Python
false
false
323
py
def alternatingSort(a): if len(a) != len(set(a)) : return False b = [None]*(len(a)) for i in range(len(a)) : if i % 2 == 0 : b[i] = a[i/2] else : b[i] = a[-i/2] s = sorted(b) if b == s : return True else : return False
[ "noreply@github.com" ]
hemal507.noreply@github.com
6642866cf5ff1d95a5d9e5c2d5f549797601a25f
e75cb799000d3120d4a93a2826fe101228d1e1a7
/03-python practical examples/01a-beautiful soup - coreyms dot com - brief.py
ad181e5f815eca3b4abe9fc50e2cf30c6332ff6d
[]
no_license
atrox3d/python-corey-schafer-tutorials
89fb410f1e13fd6910dc42401782b1406ffb87e8
9d991e3dd9647adc55ae1f343fedfc3faa202b01
refs/heads/master
2022-12-14T17:55:01.566427
2022-09-02T15:48:20
2022-09-02T15:48:20
221,426,141
0
0
null
null
null
null
UTF-8
Python
false
false
2,154
py
from modules import utils import os.path from bs4 import BeautifulSoup import requests ######################################################################################################################## utils.banner( 'https://www.youtube.com/watch?v=ng2o98k983k', 'web scraping with beautiful soup', 'http://coreyms.com' ) request = requests.get('http://coreyms.com') # execute request print(f'request to http://coreyms.com: {request}') # print request http status source = request.text # extract request text (html) ######################################################################################################################## utils.banner('parse source with BeautifulSoup (prettify)') soup = BeautifulSoup(source, 'lxml') # parse html with BeautifulSoup via lxml ######################################################################################################################## for article in soup.find_all('article'): # find tag article inside soup obj headline = article.h2.a.text # navigate to link text headline2 = article.a.text # same, different syntax print(f'{headline2!r}') summary = article.find('div', class_='entry-content') # extract div from article obj print(f'{summary.p.text!r}') video = article.find('iframe') # get iframe inside article if video is not None: video_src = video['src'] # display video src video_url = video_src.split('?')[0] # get left part of url before ? videourl_parts = video_url.split('/') # get url tokens split by / video_id = videourl_parts[-1] # finally, get id yt_link = f'https://youtube.com/watch?v={video_id}' # create link print(yt_link) print()
[ "atrox3d@gmail.com" ]
atrox3d@gmail.com
c3f55696585346c21eb70a605ee293b6d94b03f0
109a3ed4e5355e0ba5ef9454ff46ee2acc58e013
/background/alert_handler.py
71da5910b10fe5608f511b2407091804a0870aea
[]
no_license
zhaobin022/monitor
9ff828b8316995a6cf930ae3643d25ff627d4bdb
fe5aacf49bf6961147030312a114986fda7f04ab
refs/heads/master
2020-07-10T21:04:24.024708
2016-09-08T03:35:46
2016-09-08T03:35:46
66,442,685
0
1
null
null
null
null
UTF-8
Python
false
false
6,896
py
__author__ = 'zhaobin022' from background import models import json import copy import time import operator class AlertHandler(object): def __init__(self,client_id,redis_obj,mq_conn): self.client_id = client_id self.redis_obj = redis_obj self.mq_conn = mq_conn self.mq_channel = self.mq_conn.channel() def get_host_triggers(self): triggers = [] host_obj = models.Host.objects.get(id=self.client_id) for t in host_obj.templates.select_related(): triggers.extend(t.triggers.select_related()) for g in host_obj.host_groups.select_related(): for t in g.templates.select_related(): triggers.extend(t.triggers.select_related()) return set(triggers) def load_data_from_redis(self,time_in_second,interval,redis_key): data_point_count = time_in_second/interval+5 redis_slice = self.redis_obj.lrange(redis_key,-data_point_count,-1) ret = [] redis_slice.reverse() for p in redis_slice: p = json.loads(p) update_time = p[1] if time.time() - update_time < time_in_second: ret.append(p) return ret def deal_expression(self,expression): time_range = expression.data_calc_args.split(',')[0] time_in_second = int(time_range) * 60 interval = expression.service.interval redis_key = "StatusData_%s_%s_latest" %(self.client_id,expression.service.name) data_set = self.load_data_from_redis(time_in_second,interval,redis_key) data_calc_func = getattr(self,'get_%s' % expression.data_calc_func) ret = data_calc_func(data_set,expression) return ret def get_avg(self,data_set,expression): temp_dic = {} if data_set: data_point = data_set[0] if 'data' not in data_point[0].keys(): ret_list = [] for p in data_set: val = p[0][expression.service_index.key] ret_list.append(float(val)) avg_num = sum(ret_list)/len(ret_list) f = getattr(operator,expression.operator_type) ret = [f(avg_num,expression.threshold),round(float(avg_num),2),None] return ret else: ret_dic = {} for key,val in data_point[0]['data'].items(): if key == expression.specified_index_key: for sub_key , sub_val in val.items(): if sub_key == 
expression.service_index.key: if not ret_dic.has_key(key): ret_dic[key] = {} if not ret_dic[key].has_key(sub_key): ret_dic[key][sub_key] = [] for p in data_set: data_point,time_stamp = p for key , val in data_point['data'].items(): if key == expression.specified_index_key: for sub_key , sub_val in val.items(): if sub_key == expression.service_index.key: ret_dic[key][sub_key].append(float(sub_val)) avg_num = sum(ret_dic[expression.specified_index_key][expression.service_index.key])/len(ret_dic[expression.specified_index_key][expression.service_index.key]) if hasattr(operator,expression.operator_type): func = getattr(operator,expression.operator_type) status = func(avg_num,expression.threshold) return [status,round(avg_num,2),expression.specified_index_key] def process(self): print 'in alert process ' triggers = self.get_host_triggers() for t in triggers: positive_expressions = [] expression_ret_str = '' redis_alert_key = 'host_%s_trigger_%s' %(self.client_id,t.id) alert_data_in_redis = self.redis_obj.get(redis_alert_key) redis_key_flag = False if alert_data_in_redis: redis_key_flag = True for expression in t.triggerexpression_set.select_related().order_by('id'): expression_ret = self.deal_expression(expression) if expression_ret: expression_ret_str += str(expression_ret[0]) if expression_ret[0]: expression_ret.insert(1,expression.service_index.key) expression_ret.insert(1,expression.data_calc_func) expression_ret.insert(1,expression.service.name) positive_expressions.append(expression_ret) if expression.logic_type: expression_ret_str += " "+expression.logic_type+" " notify_flag = eval(expression_ret_str) recover_data = '' if notify_flag: if redis_key_flag: notify_data = json.loads(alert_data_in_redis) else: notify_data = {} notify_data['client_id'] = self.client_id notify_data['trigger_id'] = t.id notify_data['trigger_name'] = t.name notify_data['status'] = True notify_data['notify_detail'] = positive_expressions 
self.redis_obj.set(redis_alert_key,json.dumps(notify_data)) print notify_data,'notify_data' self.mq_channel.queue_declare(queue='trigger_notify') self.mq_channel.basic_publish(exchange='', routing_key='trigger_notify', body=json.dumps(notify_data)) else: if redis_key_flag: # alert_data_in_redis = self.redis_obj.get(redis_alert_key) alert_data_in_redis = json.loads(alert_data_in_redis) alert_data_in_redis['status'] = False self.redis_obj.set(redis_alert_key,json.dumps(alert_data_in_redis)) self.mq_channel.queue_declare(queue='trigger_notify') self.mq_channel.basic_publish(exchange='', routing_key='trigger_notify', body=json.dumps(alert_data_in_redis)) # self.redis_obj.delete(redis_alert_key) alert_dic = {} alert_dic['client_id'] = self.client_id print 'alert_dic for host alive ..........................' self.mq_channel.queue_declare(queue='host_alive_notify') self.mq_channel.basic_publish(exchange='', routing_key='host_alive_notify', body=json.dumps(alert_dic))
[ "zhaobin022@qq.com" ]
zhaobin022@qq.com
00f11b662e6c46b1f0cc977d2fbb0f97ed91a934
45844683ca61f6f1a3c70d4a82d50ade067b9de7
/posts/migrations/0005_auto_20171210_0017.py
2de7bde5d668694a271554db585276150129e942
[]
no_license
PHironaka/bi-fun
5a8faf89d57ecb021eb4de9fcbb5f29bd7efd0fa
cb961678c938f0704c6c6127585c0a3c4044bbc8
refs/heads/master
2023-01-10T06:48:42.744321
2019-09-03T23:00:42
2019-09-03T23:00:42
112,222,207
0
0
null
2022-12-29T11:38:41
2017-11-27T16:45:57
HTML
UTF-8
Python
false
false
454
py
# -*- coding: utf-8 -*- # Generated by Django 1.9 on 2017-12-10 00:17 from __future__ import unicode_literals from django.db import migrations import markdownx.models class Migration(migrations.Migration): dependencies = [ ('posts', '0004_post_tags'), ] operations = [ migrations.AlterField( model_name='post', name='content', field=markdownx.models.MarkdownxField(), ), ]
[ "peterhironaka@mac.com" ]
peterhironaka@mac.com
58c2012eb70833208bfab5762221cf5e8d53507f
cac9c211a4eeb55cfd61d8e5c54a1d4082c4de33
/survol/sources_types/sqlserver/dsn/sqlserver_dsn_sessions.py
3077d7ca241faa34737ee30d6296b68d5e35fe70
[ "BSD-3-Clause" ]
permissive
vchateauneu/survol
8c8b5db67f81c6400c3e2f4b84b57fb83d69fb1f
2b5be9d28115f8f9b1dd91bf05449c92bf9a9926
refs/heads/master
2020-03-21T09:11:37.765314
2018-07-03T20:40:16
2018-07-03T20:40:16
138,387,051
1
0
null
2018-06-23T09:05:45
2018-06-23T09:05:45
null
UTF-8
Python
false
false
3,054
py
#!/usr/bin/python """ Sessions in SQL Server database (ODBC) """ import sys import lib_common from lib_properties import pc from sources_types.odbc import dsn as survol_odbc_dsn from sources_types.sqlserver import dsn as survol_sqlserver_dsn from sources_types.sqlserver import session try: import pyodbc except ImportError: lib_common.ErrorMessageHtml("pyodbc Python library not installed") def Main(): cgiEnv = lib_common.CgiEnv() grph = cgiEnv.GetGraph() dsnNam = survol_odbc_dsn.GetDsnNameFromCgi(cgiEnv) sys.stderr.write("dsn=(%s)\n" % dsnNam) nodeDsn = survol_sqlserver_dsn.MakeUri(dsnNam) ODBC_ConnectString = survol_odbc_dsn.MakeOdbcConnectionString(dsnNam) try: cnxn = pyodbc.connect(ODBC_ConnectString) sys.stderr.write("Connected: %s\n" % dsnNam) cursorSessions = cnxn.cursor() qrySessions = """ SELECT host_name,host_process_id,session_id,program_name,client_interface_name,original_login_name,nt_domain,nt_user_name FROM sys.dm_exec_sessions """ propSqlServerSession = lib_common.MakeProp("SqlServer session") propSqlServerHostProcess = lib_common.MakeProp("Host process") propSqlServerProgramName = lib_common.MakeProp("Program name") propSqlServerClientInterface = lib_common.MakeProp("Client Interface") propSqlServerOriginalLoginName = lib_common.MakeProp("original_login_name") propSqlServerNTDomain = lib_common.MakeProp("nt_domain") propSqlServerNTUserName = lib_common.MakeProp("nt_user_name") for rowSess in cursorSessions.execute(qrySessions): sys.stderr.write("rowSess.session_id=(%s)\n" % rowSess.session_id) nodeSession = session.MakeUri(dsnNam, rowSess.session_id) grph.add((nodeDsn, propSqlServerSession, nodeSession)) if rowSess.host_process_id: node_process = lib_common.RemoteBox(rowSess.host_name).PidUri(rowSess.host_process_id) grph.add((node_process, pc.property_pid, lib_common.NodeLiteral(rowSess.host_process_id))) grph.add((nodeSession, propSqlServerHostProcess, node_process)) if rowSess.program_name: grph.add((nodeSession, propSqlServerProgramName, 
lib_common.NodeLiteral(rowSess.program_name))) if rowSess.client_interface_name: grph.add((nodeSession, propSqlServerClientInterface, lib_common.NodeLiteral(rowSess.client_interface_name))) # TODO: Make nodes with these: if rowSess.original_login_name: grph.add((nodeSession, propSqlServerOriginalLoginName, lib_common.NodeLiteral(rowSess.original_login_name))) if rowSess.nt_domain: grph.add((nodeSession, propSqlServerNTDomain, lib_common.NodeLiteral(rowSess.nt_domain))) if rowSess.nt_user_name: grph.add((nodeSession, propSqlServerNTUserName, lib_common.NodeLiteral(rowSess.nt_user_name))) except Exception: exc = sys.exc_info()[0] lib_common.ErrorMessageHtml( "nodeDsn=%s Unexpected error:%s" % (dsnNam, str(sys.exc_info()))) # cgiEnv.OutCgiRdf() cgiEnv.OutCgiRdf("LAYOUT_RECT",[propSqlServerSession,propSqlServerHostProcess]) if __name__ == '__main__': Main() # http://www.easysoft.com/developer/languages/python/pyodbc.html
[ "remi.chateauneu@gmail.com" ]
remi.chateauneu@gmail.com
bfc091a392d75815015f901cd1b8fe44eb78dd3a
c4c159a21d2f1ea0d7dfaa965aeff01c8ef70dce
/flask/flaskenv/Lib/site-packages/tensorflow/contrib/distributions/python/ops/bijectors/ordered.py
acb356d50e500ad649f626bb57b53dd2254739bd
[]
no_license
AhsonAslam/webapi
54cf7466aac4685da1105f9fb84c686e38f92121
1b2bfa4614e7afdc57c9210b0674506ea70b20b5
refs/heads/master
2020-07-27T06:05:36.057953
2019-09-17T06:35:33
2019-09-17T06:35:33
208,895,450
0
0
null
null
null
null
UTF-8
Python
false
false
129
py
version https://git-lfs.github.com/spec/v1 oid sha256:6bf5b33839d1904f951bf6c90d6121fb8d3fc6788c52baa16e30a171aca54574 size 4965
[ "github@cuba12345" ]
github@cuba12345
3a9ec80a07429d3fb5044b350f7ded7d8eb73c1c
660b2e940ccee2b729aa7d00ef0453cdac9dbf6a
/student_and_information/student_and_information/settings.py
e11cea4a0923eda94711f365b8cd761791263ded
[]
no_license
iversongit/20180427
68c251b6acfef91252d7e622cffae8450dbcdb3f
9a9f1f1abaa3b620a000b2c8f5f91d8acfd77d8a
refs/heads/master
2020-03-13T17:51:55.044089
2018-04-27T13:30:31
2018-04-27T13:30:31
131,225,513
0
0
null
null
null
null
UTF-8
Python
false
false
3,391
py
""" Django settings for student_and_information project. Generated by 'django-admin startproject' using Django 1.11. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '(i)-_gb_!rp+*ul5&xjauxgs*)q#a142neptc4$$w()vnle3mn' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'student', 'information', 'uauth' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', # 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'student_and_information.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR,"templates")], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'student_and_information.wsgi.application' # Database # 
https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'HOST': 'localhost', 'NAME': 'student_and_information', 'USER': 'root', 'PASSWORD': '5201314', 'PORT': '3306' } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = [ os.path.join(BASE_DIR,'static') ]
[ "1564329410@qq.com" ]
1564329410@qq.com
953e6c31eaa9ae2ad009a56edbd9f5e7f5e4d829
05ad6d839ba95001c8f861a5b1cd619eef8ae9b1
/tseo/Programmers/level2/42577_전화번호 목록-1.py
0686a079eb4905d2bdb2f715c4114ce89982efc4
[]
no_license
Raziel-JKM/ps_study
cdcacdabf14e3236af96d20276459e51a0c09100
07602a8af7e23ca3d406ee1db2a5deab01087268
refs/heads/master
2023-08-11T12:20:19.830183
2021-09-28T23:57:03
2021-09-28T23:57:03
null
0
0
null
null
null
null
UTF-8
Python
false
false
548
py
def solution(phone_book): phone_book.sort() # 일단 사전 순으로 정렬해준다 for i in range(len(phone_book) - 1): # 사전순으로 정렬했으니 바로 뒤에것만 체크하면 된다 if phone_book[i] == phone_book[i + 1][: len(phone_book[i])]: return False return True phone_book_1 = ["119", "97674223", "1195524421"] phone_book_2 = ["123", "456", "789"] phone_book_3 = ["12", "123", "1235", "567", "88"] print(solution(phone_book_1)) print(solution(phone_book_2)) print(solution(phone_book_3))
[ "t1won.seo@gmail.com" ]
t1won.seo@gmail.com
ed5c57419016c681c918b6ecaadad48709a3df39
c97ae1cc922a037484c5d4794d0a657561cf47f3
/migrations/versions/ec38b22c3a6d_init.py
35756ee88ac5529887eac689e3424ce5aef72002
[]
no_license
AlenAlic/clubpromoters
3059078b02b77745e7a1e49d998f9d24554082e8
f44b3b20c20d5669c1658036cea35fb9a4f223fc
refs/heads/master
2022-12-11T14:38:37.824769
2019-09-08T19:02:49
2019-09-08T19:02:49
190,430,315
0
0
null
2022-12-09T22:02:49
2019-06-05T16:29:25
JavaScript
UTF-8
Python
false
false
5,333
py
"""init Revision ID: ec38b22c3a6d Revises: Create Date: 2019-07-15 15:30:44.791994 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'ec38b22c3a6d' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('configuration', sa.Column('lock_id', sa.Integer(), nullable=False), sa.Column('mollie_api_key', sa.String(length=128), nullable=True), sa.PrimaryKeyConstraint('lock_id') ) op.create_table('party', sa.Column('party_id', sa.Integer(), nullable=False), sa.Column('title', sa.String(length=128), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('party_start_datetime', sa.DateTime(), nullable=True), sa.Column('party_end_datetime', sa.DateTime(), nullable=True), sa.Column('status', sa.String(length=128), nullable=False), sa.Column('num_available_tickets', sa.Integer(), nullable=False), sa.Column('ticket_price', sa.Integer(), nullable=False), sa.PrimaryKeyConstraint('party_id') ) op.create_index(op.f('ix_party_is_active'), 'party', ['is_active'], unique=False) op.create_index(op.f('ix_party_status'), 'party', ['status'], unique=False) op.create_index(op.f('ix_party_title'), 'party', ['title'], unique=False) op.create_table('users', sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('reset_index', sa.Integer(), nullable=False), sa.Column('username', sa.String(length=64), nullable=True), sa.Column('email', sa.String(length=128), nullable=True), sa.Column('password_hash', sa.String(length=128), nullable=True), sa.Column('access', sa.Integer(), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('last_seen', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('user_id') ) op.create_index(op.f('ix_users_access'), 'users', ['access'], unique=False) op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) 
op.create_index(op.f('ix_users_is_active'), 'users', ['is_active'], unique=False) op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True) op.create_table('code', sa.Column('code_id', sa.Integer(), nullable=False), sa.Column('code', sa.String(length=8), nullable=False), sa.Column('active', sa.Boolean(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], ), sa.PrimaryKeyConstraint('code_id') ) op.create_table('purchase', sa.Column('purchase_id', sa.Integer(), nullable=False), sa.Column('price', sa.Integer(), nullable=False), sa.Column('status', sa.String(length=128), nullable=False), sa.Column('name', sa.String(length=128), nullable=False), sa.Column('email', sa.String(length=128), nullable=False), sa.Column('hash', sa.String(length=160), nullable=False), sa.Column('mollie_payment_id', sa.String(length=128), nullable=False), sa.Column('purchase_datetime', sa.DateTime(), nullable=True), sa.Column('party_id', sa.Integer(), nullable=True), sa.Column('code_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['code_id'], ['code.code_id'], ), sa.ForeignKeyConstraint(['party_id'], ['party.party_id'], ), sa.PrimaryKeyConstraint('purchase_id') ) op.create_index(op.f('ix_purchase_email'), 'purchase', ['email'], unique=False) op.create_table('refund', sa.Column('refund_id', sa.Integer(), nullable=False), sa.Column('price', sa.Integer(), nullable=False), sa.Column('refund_datetime', sa.DateTime(), nullable=True), sa.Column('purchase_id', sa.Integer(), nullable=True), sa.Column('mollie_refund_id', sa.String(length=128), nullable=False), sa.ForeignKeyConstraint(['purchase_id'], ['purchase.purchase_id'], ), sa.PrimaryKeyConstraint('refund_id') ) op.create_table('ticket', sa.Column('ticket_id', sa.Integer(), nullable=False), sa.Column('used', sa.Boolean(), nullable=False), sa.Column('purchase_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['purchase_id'], 
['purchase.purchase_id'], ), sa.PrimaryKeyConstraint('ticket_id') ) op.create_index(op.f('ix_ticket_used'), 'ticket', ['used'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_index(op.f('ix_ticket_used'), table_name='ticket') op.drop_table('ticket') op.drop_table('refund') op.drop_index(op.f('ix_purchase_email'), table_name='purchase') op.drop_table('purchase') op.drop_table('code') op.drop_index(op.f('ix_users_username'), table_name='users') op.drop_index(op.f('ix_users_is_active'), table_name='users') op.drop_index(op.f('ix_users_email'), table_name='users') op.drop_index(op.f('ix_users_access'), table_name='users') op.drop_table('users') op.drop_index(op.f('ix_party_title'), table_name='party') op.drop_index(op.f('ix_party_status'), table_name='party') op.drop_index(op.f('ix_party_is_active'), table_name='party') op.drop_table('party') op.drop_table('configuration') # ### end Alembic commands ###
[ "aalic89@gmail.com" ]
aalic89@gmail.com
a2e3d46a76ecffe410096ef5678a5b238a26f353
fee03d6cfbea0803ce0bddb0beb9d447def2a59f
/crypten/mpc/primitives/beaver.py
21758b97dbfbcf97cc53cf87688751d72b088d06
[ "MIT" ]
permissive
QQ1230/CrypTen
548c83a57da8570aeb5f7072e2373e98a2302314
e11c8bfafee6b1d2ebdc43328c2fb487d48070e3
refs/heads/master
2023-06-25T21:30:20.988374
2021-07-26T16:24:06
2021-07-26T16:25:26
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,546
py
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import crypten import crypten.communicator as comm import torch from crypten.common.util import count_wraps class IgnoreEncodings: """Context Manager to ignore tensor encodings""" def __init__(self, list_of_tensors): self.list_of_tensors = list_of_tensors self.encodings_cache = [tensor.encoder.scale for tensor in list_of_tensors] def __enter__(self): for tensor in self.list_of_tensors: tensor.encoder._scale = 1 def __exit__(self, exc_type, exc_value, exc_traceback): for i, tensor in enumerate(self.list_of_tensors): tensor.encoder._scale = self.encodings_cache[i] def __beaver_protocol(op, x, y, *args, **kwargs): """Performs Beaver protocol for additively secret-shared tensors x and y 1. Obtain uniformly random sharings [a],[b] and [c] = [a * b] 2. Additively hide [x] and [y] with appropriately sized [a] and [b] 3. Open ([epsilon] = [x] - [a]) and ([delta] = [y] - [b]) 4. 
Return [z] = [c] + (epsilon * [b]) + ([a] * delta) + (epsilon * delta) """ assert op in { "mul", "matmul", "conv1d", "conv2d", "conv_transpose1d", "conv_transpose2d", } if x.device != y.device: raise ValueError(f"x lives on device {x.device} but y on device {y.device}") provider = crypten.mpc.get_default_provider() a, b, c = provider.generate_additive_triple( x.size(), y.size(), op, device=x.device, *args, **kwargs ) from .arithmetic import ArithmeticSharedTensor if crypten.mpc.config.active_security: """ Reference: "Multiparty Computation from Somewhat Homomorphic Encryption" Link: https://eprint.iacr.org/2011/535.pdf """ f, g, h = provider.generate_additive_triple( x.size(), y.size(), op, device=x.device, *args, **kwargs ) t = ArithmeticSharedTensor.PRSS(a.size(), device=x.device) t_plain_text = t.get_plain_text() rho = (t_plain_text * a - f).get_plain_text() sigma = (b - g).get_plain_text() triples_check = t_plain_text * c - h - sigma * f - rho * g - rho * sigma triples_check = triples_check.get_plain_text() if torch.any(triples_check != 0): raise ValueError("Beaver Triples verification failed!") # Vectorized reveal to reduce rounds of communication with IgnoreEncodings([a, b, x, y]): epsilon, delta = ArithmeticSharedTensor.reveal_batch([x - a, y - b]) # z = c + (a * delta) + (epsilon * b) + epsilon * delta c._tensor += getattr(torch, op)(epsilon, b._tensor, *args, **kwargs) c._tensor += getattr(torch, op)(a._tensor, delta, *args, **kwargs) c += getattr(torch, op)(epsilon, delta, *args, **kwargs) return c def mul(x, y): return __beaver_protocol("mul", x, y) def matmul(x, y): return __beaver_protocol("matmul", x, y) def conv1d(x, y, **kwargs): return __beaver_protocol("conv1d", x, y, **kwargs) def conv2d(x, y, **kwargs): return __beaver_protocol("conv2d", x, y, **kwargs) def conv_transpose1d(x, y, **kwargs): return __beaver_protocol("conv_transpose1d", x, y, **kwargs) def conv_transpose2d(x, y, **kwargs): return __beaver_protocol("conv_transpose2d", x, y, 
**kwargs) def square(x): """Computes the square of `x` for additively secret-shared tensor `x` 1. Obtain uniformly random sharings [r] and [r2] = [r * r] 2. Additively hide [x] with appropriately sized [r] 3. Open ([epsilon] = [x] - [r]) 4. Return z = [r2] + 2 * epsilon * [r] + epsilon ** 2 """ provider = crypten.mpc.get_default_provider() r, r2 = provider.square(x.size(), device=x.device) with IgnoreEncodings([x, r]): epsilon = (x - r).reveal() return r2 + 2 * r * epsilon + epsilon * epsilon def wraps(x): """Privately computes the number of wraparounds for a set a shares To do so, we note that: [theta_x] = theta_z + [beta_xr] - [theta_r] - [eta_xr] Where [theta_i] is the wraps for a variable i [beta_ij] is the differential wraps for variables i and j [eta_ij] is the plaintext wraps for variables i and j Note: Since [eta_xr] = 0 with probability 1 - |x| / Q for modulus Q, we can make the assumption that [eta_xr] = 0 with high probability. """ provider = crypten.mpc.get_default_provider() r, theta_r = provider.wrap_rng(x.size(), device=x.device) beta_xr = theta_r.clone() beta_xr._tensor = count_wraps([x._tensor, r._tensor]) with IgnoreEncodings([x, r]): z = x + r theta_z = comm.get().gather(z._tensor, 0) theta_x = beta_xr - theta_r # TODO: Incorporate eta_xr if x.rank == 0: theta_z = count_wraps(theta_z) theta_x._tensor += theta_z return theta_x def AND(x, y): """ Performs Beaver protocol for binary secret-shared tensors x and y 1. Obtain uniformly random sharings [a],[b] and [c] = [a & b] 2. XOR hide [x] and [y] with appropriately sized [a] and [b] 3. Open ([epsilon] = [x] ^ [a]) and ([delta] = [y] ^ [b]) 4. 
Return [c] ^ (epsilon & [b]) ^ ([a] & delta) ^ (epsilon & delta) """ from .binary import BinarySharedTensor provider = crypten.mpc.get_default_provider() a, b, c = provider.generate_binary_triple(x.size(), y.size(), device=x.device) # Stack to vectorize reveal eps_del = BinarySharedTensor.reveal_batch([x ^ a, y ^ b]) epsilon = eps_del[0] delta = eps_del[1] return (b & epsilon) ^ (a & delta) ^ (epsilon & delta) ^ c def B2A_single_bit(xB): """Converts a single-bit BinarySharedTensor xB into an ArithmeticSharedTensor. This is done by: 1. Generate ArithmeticSharedTensor [rA] and BinarySharedTensor =rB= with a common 1-bit value r. 2. Hide xB with rB and open xB ^ rB 3. If xB ^ rB = 0, then return [rA], otherwise return 1 - [rA] Note: This is an arithmetic xor of a single bit. """ if comm.get().get_world_size() < 2: from .arithmetic import ArithmeticSharedTensor return ArithmeticSharedTensor(xB._tensor, precision=0, src=0) provider = crypten.mpc.get_default_provider() rA, rB = provider.B2A_rng(xB.size(), device=xB.device) z = (xB ^ rB).reveal() rA = rA * (1 - 2 * z) + z return rA
[ "facebook-github-bot@users.noreply.github.com" ]
facebook-github-bot@users.noreply.github.com
25152339dccd4089309cb91e8af60d00c3605f34
e2e08d7c97398a42e6554f913ee27340226994d9
/pyautoTest-master(ICF-7.5.0)/test_case/scg_old/scg_Route/test_c37571.py
1158f49307441ecff7d1f09b3e73a6a8e5c50136
[]
no_license
lizhuoya1111/Automated_testing_practice
88e7be512e831d279324ad710946232377fb4c01
b3a532d33ddeb8d01fff315bcd59b451befdef23
refs/heads/master
2022-12-04T08:19:29.806445
2020-08-14T03:51:20
2020-08-14T03:51:20
287,426,498
0
0
null
null
null
null
UTF-8
Python
false
false
1,261
py
import pytest import time import sys from page_obj.scg.scg_def import * from page_obj.scg.scg_def_log import * from page_obj.common.rail import * from os.path import dirname, abspath from page_obj.scg.scg_def_multi_isp import * sys.path.insert(0, dirname(dirname(abspath(__file__)))) test_id = 37571 # ISP导入目的ip file文件名包含空格、$#&?> def test_route_wxw(browser): try: login_web(browser, url="10.2.2.82") add_multi_isp_save_wxw(browser, name='isp571', desc='miaoshu') import_ip_config_file_wxw(browser, name='isp571', save='yes', cancel='no', file='isp_37571.txt') time.sleep(1) alert = browser.find_element_by_xpath('//*[@id="box"]/div[3]/ul/li[2]').text # print(alert) del_multi_isp_byname(browser, name='isp571') try: assert "导入IP格式错误" in alert rail_pass(test_run_id, test_id) except: rail_fail(test_run_id, test_id) assert "导入IP格式错误" in alert except Exception as err: # 如果上面的步骤有报错,重新设备,恢复配置 reload(hostip="10.2.2.82") print(err) rail_fail(test_run_id, test_id) time.sleep(70) assert False if __name__ == '__main__': pytest.main(["-v", "-s", "test_c"+str(test_id)+".py"])
[ "15501866985@163.com" ]
15501866985@163.com
e29a6774ea3bcc2d0740acaac977a47a5a39885f
f2dde3b0cc30ebce0210fd2e69a6ee0d91274d6f
/semana_09/aula_01/ascii.py
b9988602b70cd063262fb133fcd23786f0ce98b6
[]
no_license
valeriacavalcanti/IP-2021.1
26ecbe187b32d067666e732f7dd918375e6152f8
39da35eaceec0a34cccfcc1731ffa9b94b5231f7
refs/heads/main
2023-08-06T16:00:09.393942
2021-09-27T14:16:07
2021-09-27T14:16:07
383,632,386
0
1
null
null
null
null
UTF-8
Python
false
false
226
py
print('Símbolos numéricos') for i in range(48, 58): print(i, chr(i)) print('Alfabeto maiúsculo') for i in range(65, 91): print(i, chr(i)) print('Alfabeto minúsculo') for i in range(97, 123): print(i, chr(i))
[ "valeria.cavalcanti@ifpb.edu.br" ]
valeria.cavalcanti@ifpb.edu.br
b8d21479535f1a8d006151ded11a93eee5587ff8
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2221/60720/297812.py
8a81748de60eb70d69e1f8fa57deb946c91feb4c
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
530
py
lst=input().split() n=int(lst[0]) m=int(lst[1]) gra=[[0 for i in range(n)]for j in range(n)] for i in range(m): list0=input().split() list0=list(map(int,list0)) gra[list0[0]-1][list0[1]-1]=1 for k in range(n): for i in range(n): for j in range(n): if gra[i][k]==1 and gra[k][j]==1: gra[i][j]=1 count=0 for i in range(n): isF=False for j in range(n): if gra[j][i]==0 and j!=i: isF=True break if not isF: count+=1 print(count)
[ "1069583789@qq.com" ]
1069583789@qq.com
84079750838c6620e35b81b29ccde5c2c4f1b3cd
0f9ffc69d45fdbfcb2cfac7b674cf2260efaf11a
/prediction/src/tests/test_crop_patches.py
24b09006b80edb7d42073d5ab7a2323a388e951c
[ "LicenseRef-scancode-generic-cla", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
aslam/concept-to-clinic
5835a8f6edb0abcd6aa700ac94878dab804f76ee
b69a6631ad007c5eca5280169c1db96444fd39ff
refs/heads/master
2021-07-16T22:42:47.041110
2017-10-23T21:27:41
2017-10-24T12:36:14
106,923,326
0
1
null
2017-10-14T11:45:02
2017-10-14T11:45:01
null
UTF-8
Python
false
false
656
py
import pytest from src.preprocess import load_ct, crop_patches @pytest.fixture def ct_path(): return '../images/LUNA-0001/'\ + '1.3.6.1.4.1.14519.5.2.1.6279.6001.102133688497886810253331438797' def test_patches_from_ct(ct_path): centroids = [[556, 101, -70], [556, 121, -20], [556, 221, -77]] centroids = [{'z': centroid[0], 'y': centroid[1], 'x': centroid[2]} for centroid in centroids] patches = crop_patches.patches_from_ct(*load_ct.load_ct(ct_path), patch_shape=12, centroids=centroids) assert isinstance(patches, list) assert len(patches) == 3 assert all([patch.shape == (12, 12, 12) for patch in patches])
[ "chris@chris-lamb.co.uk" ]
chris@chris-lamb.co.uk
6a07c28a16fa5a2def63726f6197bdc76895054f
46544b5f01eed38d69be41aabe83c6d6089cad52
/classifier_system/model_training/BERT/bert3.py
64e889dc04f05420ecf77958a43d8933b265a129
[]
no_license
eirikdahlen/MSc-Computer-Science-2021
042a7c5a5b9bb19567ca301b427c872a209c25ee
bbb1264bbc3305b1357772f4e434ff987ad2c919
refs/heads/main
2023-05-15T01:47:13.907667
2021-06-05T09:05:32
2021-06-05T09:05:32
374,064,083
0
0
null
null
null
null
UTF-8
Python
false
false
6,894
py
import pandas as pd import numpy as np import time from matplotlib import pyplot import tensorflow as tf from transformers import BertTokenizerFast, TFBertForSequenceClassification from sklearn.model_selection import train_test_split from sklearn.metrics import classification_report import os def load_and_setup_dataset(filename: str): df = pd.read_csv(filename) return df['text'].values.tolist(), df["label"].values.tolist() def create_test_val_dataset(X, y, train_size: float, test_size=None, random_state: int = 42): X_train, X_val, y_train, y_val = train_test_split(X, y, train_size=train_size, test_size=test_size, random_state=random_state, stratify=y) return X_train, X_val, y_train, y_val def to_categorical_labels(y_train, y_val, y_test, binary: bool = False): labels_dict = {'unrelated': 0, 'proED': 1, 'prorecovery': 0 if binary else 2} for i in range(len(y_train)): y_train[i] = labels_dict[y_train[i]] for i in range(len(y_val)): y_val[i] = labels_dict[y_val[i]] for i in range(len(y_test)): y_test[i] = labels_dict[y_test[i]] return np.array(y_train), np.array(y_val), np.array(y_test) def tokenize(tokenizer, X_train, X_val, X_test, truncation: bool = True, padding: bool = True): train_encodings = tokenizer(X_train, truncation=truncation, padding=padding) val_encodings = tokenizer(X_val, truncation=truncation, padding=padding) test_encodings = tokenizer(X_test, truncation=truncation, padding=padding) train_encodings = np.array(list(dict(train_encodings).values())) val_encodings = np.array(list(dict(val_encodings).values())) test_encodings = np.array(list(dict(test_encodings).values())) return train_encodings, val_encodings, test_encodings def train_model(model, train_encodings, y_train, val_encodings, y_val, batch_size: int, learning_rate: float, epochs: int, checkpoint_path: str, save_model_weights: bool = True): if save_model_weights: cp_callback = tf.keras.callbacks.ModelCheckpoint( filepath=checkpoint_path, verbose=1, save_weights_only=True) optimizer = 
tf.keras.optimizers.Adam(learning_rate=learning_rate) model.compile(optimizer=optimizer, loss=model.compute_loss, metrics=['accuracy']) history = model.fit( x=train_encodings[0], y=y_train, validation_data=(val_encodings[0], y_val), epochs=epochs, batch_size=batch_size, callbacks=[cp_callback] if save_model_weights else None ) print(history.history) return model, history def load_weights(model, cp_dir: str): latest = tf.train.latest_checkpoint(cp_dir) model.load_weights(latest) return model def predict(model, test_data, test_labels, softmax: bool = False): print("Performing predictions...") logits = model.predict(test_data[0])["logits"] if softmax: predictions_probabilities = tf.nn.softmax(logits, axis=1) print(predictions_probabilities) classes = np.argmax(logits, axis=-1) score = classification_report(test_labels, classes, digits=3) print(score) return predictions_probabilities if softmax else logits def plot_stats(history, should_show=True): # plot loss during training pyplot.figure(1) pyplot.title('Loss') pyplot.plot(history.history['loss'], label='train') pyplot.plot(history.history['val_loss'], label='validation') pyplot.legend() pyplot.savefig('loss_bert3.png') # plot accuracy during training pyplot.figure(2) pyplot.title('Accuracy') pyplot.plot(history.history['accuracy'], label='train') pyplot.plot(history.history['val_accuracy'], label='validation') pyplot.legend() pyplot.savefig('acc_bert3.png') if should_show: pyplot.show() def main(args): start = time.time() use_idun = args.idun load_model = args.loadmodel training_data_path = '/cluster/home/eirida/masteroppgave/Masteroppgave/data/dataset_training.csv' if use_idun else '../../data/dataset_training.csv' test_data_path = '/cluster/home/eirida/masteroppgave/Masteroppgave/data/dataset_test.csv' if use_idun else '../../data/dataset_test.csv' X, y = load_and_setup_dataset(training_data_path) X_test, y_test = load_and_setup_dataset(test_data_path) X_train, X_val, y_train, y_val = create_test_val_dataset(X, y, 
train_size=0.95) y_train, y_val, y_test = to_categorical_labels(y_train, y_val, y_test) train_encodings, val_encodings, test_encodings = tokenize( tokenizer=BertTokenizerFast.from_pretrained('bert-base-uncased'), X_train=X_train, X_val=X_val, X_test=X_test ) model = TFBertForSequenceClassification.from_pretrained('bert-base-uncased', num_labels=3, return_dict=True) checkpoint_path = "bert3_ckpt/cp-{epoch:04d}.ckpt" checkpoint_dir = os.path.dirname(checkpoint_path) if not load_model: trained_model, history = train_model(model=model, train_encodings=train_encodings, y_train=y_train, val_encodings=val_encodings, y_val=y_val, batch_size=16, learning_rate=2e-5, epochs=4, checkpoint_path=checkpoint_path, save_model_weights=True) plot_stats(history, should_show=not use_idun) trained_model.summary() else: trained_model = load_weights(model=model, cp_dir=checkpoint_dir) predictions = predict(model=trained_model, test_data=test_encodings, test_labels=y_test, softmax=False) print(f"Used {time.time() - start} seconds") if __name__ == "__main__": print("Starting run...") os.environ['TF_XLA_FLAGS'] = '--tf_xla_enable_xla_devices' print(f"Number of GPUs Avail: {len(tf.config.list_physical_devices('GPU'))}") print(f"GPU Name: {tf.test.gpu_device_name()}") print(f"Cuda: {tf.test.is_built_with_cuda()}") from argparse import ArgumentParser parser = ArgumentParser() parser.add_argument("--idun", default=False, type=bool) parser.add_argument("--loadmodel", default=False, type=bool) args = parser.parse_args() main(args)
[ "dahleneirik@gmail.com" ]
dahleneirik@gmail.com
293d7a125b5c351ae3078beeb9539e8d7d5fcc6c
551b75f52d28c0b5c8944d808a361470e2602654
/huaweicloud-sdk-cce/huaweicloudsdkcce/v3/model/update_node_response.py
7b48171cdbec154f61e09ae2d0a43a66df3320fe
[ "Apache-2.0" ]
permissive
wuchen-huawei/huaweicloud-sdk-python-v3
9d6597ce8ab666a9a297b3d936aeb85c55cf5877
3683d703f4320edb2b8516f36f16d485cff08fc2
refs/heads/master
2023-05-08T21:32:31.920300
2021-05-26T08:54:18
2021-05-26T08:54:18
370,898,764
0
0
NOASSERTION
2021-05-26T03:50:07
2021-05-26T03:50:07
null
UTF-8
Python
false
false
5,575
py
# coding: utf-8 import pprint import re import six from huaweicloudsdkcore.sdk_response import SdkResponse class UpdateNodeResponse(SdkResponse): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ sensitive_list = [] openapi_types = { 'kind': 'str', 'api_version': 'str', 'metadata': 'NodeMetadata', 'spec': 'V3NodeSpec', 'status': 'V3NodeStatus' } attribute_map = { 'kind': 'kind', 'api_version': 'apiVersion', 'metadata': 'metadata', 'spec': 'spec', 'status': 'status' } def __init__(self, kind=None, api_version=None, metadata=None, spec=None, status=None): """UpdateNodeResponse - a model defined in huaweicloud sdk""" super(UpdateNodeResponse, self).__init__() self._kind = None self._api_version = None self._metadata = None self._spec = None self._status = None self.discriminator = None if kind is not None: self.kind = kind if api_version is not None: self.api_version = api_version if metadata is not None: self.metadata = metadata if spec is not None: self.spec = spec if status is not None: self.status = status @property def kind(self): """Gets the kind of this UpdateNodeResponse. API类型,固定值“Node”,该值不可修改。 :return: The kind of this UpdateNodeResponse. :rtype: str """ return self._kind @kind.setter def kind(self, kind): """Sets the kind of this UpdateNodeResponse. API类型,固定值“Node”,该值不可修改。 :param kind: The kind of this UpdateNodeResponse. :type: str """ self._kind = kind @property def api_version(self): """Gets the api_version of this UpdateNodeResponse. API版本,固定值“v3”,该值不可修改。 :return: The api_version of this UpdateNodeResponse. :rtype: str """ return self._api_version @api_version.setter def api_version(self, api_version): """Sets the api_version of this UpdateNodeResponse. API版本,固定值“v3”,该值不可修改。 :param api_version: The api_version of this UpdateNodeResponse. 
:type: str """ self._api_version = api_version @property def metadata(self): """Gets the metadata of this UpdateNodeResponse. :return: The metadata of this UpdateNodeResponse. :rtype: NodeMetadata """ return self._metadata @metadata.setter def metadata(self, metadata): """Sets the metadata of this UpdateNodeResponse. :param metadata: The metadata of this UpdateNodeResponse. :type: NodeMetadata """ self._metadata = metadata @property def spec(self): """Gets the spec of this UpdateNodeResponse. :return: The spec of this UpdateNodeResponse. :rtype: V3NodeSpec """ return self._spec @spec.setter def spec(self, spec): """Sets the spec of this UpdateNodeResponse. :param spec: The spec of this UpdateNodeResponse. :type: V3NodeSpec """ self._spec = spec @property def status(self): """Gets the status of this UpdateNodeResponse. :return: The status of this UpdateNodeResponse. :rtype: V3NodeStatus """ return self._status @status.setter def status(self, status): """Sets the status of this UpdateNodeResponse. :param status: The status of this UpdateNodeResponse. 
:type: V3NodeStatus """ self._status = status def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: if attr in self.sensitive_list: result[attr] = "****" else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, UpdateNodeResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
[ "hwcloudsdk@huawei.com" ]
hwcloudsdk@huawei.com
1125c5f8f204bacc054f119057d2ae918dd56e1c
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2686/61132/297976.py
adbfd0fc47f662b3b54e6cb919a6f0d04f66126e
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
326
py
def mmgp(l,k_2): if k_2==0 or not l:return 0 ans=[] for i in range(len(l)): ans.append((l[i] if k_2%2==1 else -l[i])+max(0,mmgp(l[i+1:],k_2-1))) return max(0,max(ans)) t = int(input()) for j in range(t): k=int(input()) m=int(input()) l=list(map(int,input().split())) print(mmgp(l,k*2))
[ "1069583789@qq.com" ]
1069583789@qq.com
7ff3024f8e259f0946690e895e06a842fa367d9c
898f547bbeb7d1da27bc40e2d594a363c0d1a75a
/Advert of code 2020/day_15_rambunctious_recitation_2.py
d2571e70b1d689a3e7d57b3c97e366efd177ca3b
[]
no_license
TerryLun/Code-Playground
4e069e28c457309329f003ea249be83d7578a4a3
708ad69594cf5b9edc9ff1189716cad70916574c
refs/heads/master
2023-06-20T14:03:43.924472
2021-07-23T05:27:48
2021-07-23T05:27:48
237,375,279
0
0
null
null
null
null
UTF-8
Python
false
false
336
py
def memory_game(start, end_turn):
    """Play the 'memory game' (AoC 2020 day 15) and return the number
    spoken on turn `end_turn` (1-indexed).

    `start` is the list of starting numbers; afterwards each turn speaks 0 if
    the previous number was new, otherwise the gap since it was last spoken.

    The original kept every spoken number in a 30-million-element list; only
    the last number and a last-seen map are actually needed, so this runs in
    O(len distinct numbers) memory instead.
    """
    # last_seen[n] -> 0-indexed turn n was most recently spoken (excluding the
    # number just spoken, which is tracked separately in `last`).
    last_seen = {n: turn for turn, n in enumerate(start[:-1])}
    last = start[-1]
    for turn in range(len(start), end_turn):
        prev = last_seen.get(last)
        spoken = 0 if prev is None else turn - 1 - prev
        last_seen[last] = turn - 1
        last = spoken
    return last


def main():
    # Original puzzle input and part-2 turn count.
    print(memory_game([6, 13, 1, 15, 2, 0], 30000000))


# Guarded so importing this module doesn't run the 30M-turn simulation.
if __name__ == "__main__":
    main()
[ "tianweilun@yahoo.com" ]
tianweilun@yahoo.com
53e678355a32e3cece4761413ca5260da6ca75a9
ac0894b411507bfd027696b6bf11b5e384ed68fc
/need-to-do/python3------download-problem--of--leetcode/740.delete-and-earn.py
cd2f4072af4ce906fcecd6fd076b6305f25f9559
[]
no_license
mkzpd/leetcode-solution
1d19554628c34c74012fa52582c225e6dccb345c
60c9b218683bcdee86477a910c58ec702185c726
refs/heads/master
2020-05-31T05:56:48.985529
2019-09-20T09:10:49
2019-09-20T09:10:49
190,128,627
0
0
null
null
null
null
UTF-8
Python
false
false
1,265
py
#
# @lc app=leetcode id=740 lang=python3
#
# [740] Delete and Earn
#
# https://leetcode.com/problems/delete-and-earn/description/
#
# Given an array nums of integers, you can perform operations on the array.
# In each operation, you pick any nums[i] and delete it to earn nums[i]
# points.  After, you must delete every element equal to nums[i] - 1 or
# nums[i] + 1.  Return the maximum number of points you can earn.
#
# Example 1: nums = [3, 4, 2] -> 6   (take 4, then 2)
# Example 2: nums = [2, 2, 3, 3, 3, 4] -> 9   (take all three 3's)
#
# Constraints: len(nums) <= 20000, 1 <= nums[i] <= 10000.
#
class Solution:
    def deleteAndEarn(self, nums: List[int]) -> int:
        """Maximum points earnable by the delete-and-earn rules.

        Taking one copy of value v forces deleting all v-1 and v+1, so it is
        always optimal to take either every copy of v or none.  That reduces
        the problem to House Robber over the value axis: gains[v] = v * count(v)
        and adjacent values are mutually exclusive.
        O(n + max(nums)) time, O(max(nums)) space.
        """
        if not nums:
            return 0
        # gains[v] = total points available from taking every copy of v.
        gains = [0] * (max(nums) + 1)
        for v in nums:
            gains[v] += v
        # House-robber scan: `take` = best with current value taken,
        # `skip` = best with current value skipped.
        take, skip = 0, 0
        for g in gains:
            take, skip = skip + g, max(take, skip)
        return max(take, skip)
[ "sodgso262@gmail.com" ]
sodgso262@gmail.com
7b3b70fb89631556f950e1f7dfb7b72bcd1be568
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/46/usersdata/112/17793/submittedfiles/funcoes1.py
3d6b39846fd6d1dbca5db4e88c86720de38cd906
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
1,287
py
# -*- coding: utf-8 -*- from __future__ import division n=input('Digite a quantidade de números das listas:') a=[] b=[] c=[] for i in range (0,n,1): a.append(input('Digite um valor da lista A:')) def crescente(lista): p=0 cont=0 for i in range (0,len(lista)-1,1): if lista[i]<lista[i+1]: cont=cont+1 if lista[i]==lista[i+1]: p=p+1 if cont>0 and p==0: return True else: return False def decrescente(lista): d=0 cont2=0 for i in range (0,len(lista)-1,1): if lista[i]>lista[i+1]: cont2=cont2+1 if lista[i]==lista[i+1]: d=d+1 if cont2>0 and d==0: return True else: return False def elementosiguais(lista): cont3=0 for i in range (0,len(lista)-1,1): if lista[i]==lista[i+1] or lista[i]==lista[i-1]: cont3=cont3+1 if cont3>0: return True else: return False if crescente(a): print 'S' else: print 'N' if decrescente(a): print 'S' else: print 'N' if elementosiguais(a): print 'S' else: print 'N'
[ "rafael.mota@ufca.edu.br" ]
rafael.mota@ufca.edu.br
f9b6c72db9ba6a1cccfb7d004703079c2ee42d1d
2c5073c0140b3366b94866d50f8b975c926a529b
/venv/lib/python3.9/site-packages/mediapipe/calculators/core/concatenate_vector_calculator_pb2.py
6b016b1af3cf664281fd9ea73b2cb9b9b1874a9c
[]
no_license
geekboi777/Volumegesture
435c2752d107ac6915919e79bcb63fb0b85f6e9e
3cc35f74533e26588a606154897f9ded4801f0ce
refs/heads/master
2023-06-24T19:09:07.138900
2021-07-30T23:22:18
2021-07-30T23:22:18
390,512,309
1
0
null
null
null
null
UTF-8
Python
false
true
3,970
py
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: mediapipe/calculators/core/concatenate_vector_calculator.proto
# NOTE(review): everything below is emitted by protoc from the .proto file
# above; manual changes will be lost on regeneration — edit the .proto instead.
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from mediapipe.framework import calculator_pb2 as mediapipe_dot_framework_dot_calculator__pb2
try:
  mediapipe_dot_framework_dot_calculator__options__pb2 = mediapipe_dot_framework_dot_calculator__pb2.mediapipe_dot_framework_dot_calculator__options__pb2
except AttributeError:
  mediapipe_dot_framework_dot_calculator__options__pb2 = mediapipe_dot_framework_dot_calculator__pb2.mediapipe.framework.calculator_options_pb2

# File descriptor holding the serialized .proto definition.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='mediapipe/calculators/core/concatenate_vector_calculator.proto',
  package='mediapipe',
  syntax='proto2',
  serialized_options=b'\242\002\tMediaPipe',
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n>mediapipe/calculators/core/concatenate_vector_calculator.proto\x12\tmediapipe\x1a$mediapipe/framework/calculator.proto\"\xaa\x01\n\"ConcatenateVectorCalculatorOptions\x12\'\n\x18only_emit_if_all_present\x18\x01 \x01(\x08:\x05\x66\x61lse2[\n\x03\x65xt\x12\x1c.mediapipe.CalculatorOptions\x18\xcf\xb1\xd8{ \x01(\x0b\x32-.mediapipe.ConcatenateVectorCalculatorOptionsB\x0c\xa2\x02\tMediaPipe'
  ,
  dependencies=[mediapipe_dot_framework_dot_calculator__pb2.DESCRIPTOR,])


# Message descriptor for ConcatenateVectorCalculatorOptions: a single optional
# bool field plus an extension slot on mediapipe.CalculatorOptions.
_CONCATENATEVECTORCALCULATOROPTIONS = _descriptor.Descriptor(
  name='ConcatenateVectorCalculatorOptions',
  full_name='mediapipe.ConcatenateVectorCalculatorOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='only_emit_if_all_present',
      full_name='mediapipe.ConcatenateVectorCalculatorOptions.only_emit_if_all_present', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
    _descriptor.FieldDescriptor(
      name='ext', full_name='mediapipe.ConcatenateVectorCalculatorOptions.ext', index=0,
      number=259397839, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=True, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=116,
  serialized_end=286,
)

DESCRIPTOR.message_types_by_name['ConcatenateVectorCalculatorOptions'] = _CONCATENATEVECTORCALCULATOROPTIONS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message class synthesized at import time from the descriptor.
ConcatenateVectorCalculatorOptions = _reflection.GeneratedProtocolMessageType('ConcatenateVectorCalculatorOptions', (_message.Message,), {
  'DESCRIPTOR' : _CONCATENATEVECTORCALCULATOROPTIONS,
  '__module__' : 'mediapipe.calculators.core.concatenate_vector_calculator_pb2'
  # @@protoc_insertion_point(class_scope:mediapipe.ConcatenateVectorCalculatorOptions)
  })
_sym_db.RegisterMessage(ConcatenateVectorCalculatorOptions)

# Register the 'ext' extension on mediapipe.CalculatorOptions so calculator
# graphs can embed these options.
_CONCATENATEVECTORCALCULATOROPTIONS.extensions_by_name['ext'].message_type = _CONCATENATEVECTORCALCULATOROPTIONS
mediapipe_dot_framework_dot_calculator__options__pb2.CalculatorOptions.RegisterExtension(_CONCATENATEVECTORCALCULATOROPTIONS.extensions_by_name['ext'])

DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
[ "geekboi777@github.com" ]
geekboi777@github.com
339cec3cf7dc028d22cebf879426d42bccc5791f
b49a162f6a3d9983d0254f4154a1542774800654
/dreamrs/dreamrs/urls.py
dfa7198e2a674b3fa2074c07bd35eee300bbcaa8
[]
no_license
franckeric96/dreamrs_template
16b254b61938a4acd11e54cdeb98b0a90f9e24e2
762999ee73ee9cdea8b3058f417c5aa22de467b0
refs/heads/master
2022-10-16T23:32:11.307371
2020-06-16T19:41:57
2020-06-16T19:41:57
272,797,078
0
0
null
null
null
null
UTF-8
Python
false
false
1,156
py
"""dreamrs URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.2/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.urls import path, include from django.conf import settings from django.conf.urls.static import static urlpatterns = [ path('admin/', admin.site.urls), path('', include('website.urls')), path('blog/', include('blog.urls')), path('dreamr/', include('dreamr.urls')) ] if settings.DEBUG : urlpatterns += static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT) urlpatterns += static(settings.STATIC_URL, document_root = settings.STATIC_ROOT)
[ "franck@gmail.com" ]
franck@gmail.com
ccf7a8892ce2951ca0d34da4186b761f109b75ee
57d67ed3f24279e3da746192a7852870afb8b726
/utils/writeLog.py
604fe5cac8e97b66927a597d382ecc70f88ebc04
[]
no_license
Gaoyang0/BMS
febbdb457a8bc03620069d41d0a9d787d2b00f2f
d4563f41d68a51c1b81eb3d84fc91dcc8d363d69
refs/heads/master
2020-03-22T08:38:26.086386
2018-07-05T01:31:50
2018-07-05T01:31:50
139,780,199
0
0
null
null
null
null
UTF-8
Python
false
false
336
py
# -*- coding:utf-8 -*-
# Author:DaoYang


def log(str, type):
    """Append one entry to the log file matching *type*.

    :param str: message text; written prefixed with a newline, exactly as the
                original did.
    :param type: 'book' -> log/books.log, 'user' -> log/users.log; any other
                 value is a silent no-op (original behaviour preserved).

    NOTE(review): the parameter names shadow the builtins ``str``/``type``;
    they are kept so existing keyword callers don't break.
    """
    # Table lookup replaces the duplicated if/elif branches.
    paths = {
        'book': 'log/books.log',
        'user': 'log/users.log',
    }
    path = paths.get(type)
    if path is None:
        return
    # ``with`` guarantees the file handle is closed even if the write raises.
    with open(path, 'a', encoding='utf-8') as f:
        f.write('\n' + str)
[ "=" ]
=
b3e881b1e0cd83e2b37ae3ce8706c5822dec27c8
624155f764a54b78a73c9e830a6b27cd9945a3e9
/selling1/selling1/doctype/quotation_info/quotation_info.py
c841d16a1b6dbc5387c3576704de3e708d3d261a
[]
no_license
reddymeghraj/selling1
f94400c9b3fa4c65fdcbae33fbea7f9dbad66dfa
5a8154699ed824a71948f927ae4ae026854dea3c
refs/heads/master
2020-05-30T10:24:42.376586
2015-04-18T06:12:39
2015-04-18T06:12:39
33,288,830
0
0
null
null
null
null
UTF-8
Python
false
false
232
py
# Copyright (c) 2013, Wayzon and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document class QuotationInfo(Document): pass
[ "reddymeghraj@gmail.com" ]
reddymeghraj@gmail.com
65bccc73ed6d22219f439804dbfc24af418827a8
f13acd0d707ea9ab0d2f2f010717b35adcee142f
/AtCoder_Virtual_Contest/green_diff_400_500_points/abc296/d/main.py
6a57b4cf333b63023933d82e1878c93a8861beba
[ "CC0-1.0", "LicenseRef-scancode-public-domain" ]
permissive
KATO-Hiro/AtCoder
126b9fe89fa3a7cffcbd1c29d42394e7d02fa7c7
bf43320bc1af606bfbd23c610b3432cddd1806b9
refs/heads/master
2023-08-18T20:06:42.876863
2023-08-17T23:45:21
2023-08-17T23:45:21
121,067,516
4
0
CC0-1.0
2023-09-14T21:59:38
2018-02-11T00:32:45
Python
UTF-8
Python
false
false
458
py
# -*- coding: utf-8 -*-


def solve(n, m):
    """Return the minimum a*b with 1 <= a, b <= n and a*b >= m, or -1.

    For each candidate a, the smallest admissible b is ceil(m / a); scan a up
    to 10**6 as the original did (matching the contest constraints), skipping
    any pair that falls outside [1, n].
    """
    INF = 10 ** 30
    best = INF
    # a > n would be skipped anyway, so cap the scan at min(n, 10**6).
    for a in range(1, min(n, 10 ** 6) + 1):
        # Exact integer ceiling division: math.ceil(m / a) goes through float
        # and can be off by one for very large m due to rounding.
        b = -(-m // a)
        if not (1 <= b <= n):
            continue
        best = min(best, a * b)
    return -1 if best == INF else best


def main():
    import sys

    input = sys.stdin.readline

    n, m = map(int, input().split())
    print(solve(n, m))


if __name__ == "__main__":
    main()
[ "k.hiro1818@gmail.com" ]
k.hiro1818@gmail.com
607042d8083df04551809e6a17ecf223f81a393b
89284da682f723c6aaad8ef6bba37ac31cd30c92
/PythonTutorial/Advance/decorator/code/decorator_with_return_value.py
8ecc0cb2700bb83f49939f1a459ff2a4f2471ac3
[]
no_license
Danielyan86/Python-Study
9d9912e0385c5b4d2b7272e9eaca542ff556dc1a
782c1638eb9733a4be4acbc4c805a78f0fe77546
refs/heads/master
2023-03-17T13:26:31.865927
2023-03-05T12:30:07
2023-03-05T12:30:07
26,902,349
28
25
null
null
null
null
UTF-8
Python
false
false
317
py
import functools


def do_twice(func):
    """Decorator that calls *func* twice and returns the second call's result.

    The first call's return value is intentionally discarded; both calls
    receive the same arguments.
    """

    # functools.wraps preserves the wrapped function's __name__, __doc__ and
    # other metadata — without it every decorated function reported itself
    # as 'wrapper_do_twice'.
    @functools.wraps(func)
    def wrapper_do_twice(*args, **kwargs):
        func(*args, **kwargs)
        return func(*args, **kwargs)

    return wrapper_do_twice


@do_twice
def return_greeting(name):
    """Print a progress message and return a greeting (runs twice via @do_twice)."""
    print("Creating greeting")
    return f"Hi {name}"


if __name__ == '__main__':
    print(return_greeting('adam'))
[ "516495459@qq.com" ]
516495459@qq.com
90b1ee7827958b4eed671bd126633cbfe34ab5ca
7bededcada9271d92f34da6dae7088f3faf61c02
/pypureclient/flasharray/FA_2_17/models/replica_link_performance_replication.py
e11661abb181788226a8aa744f4ad12d88df9607
[ "BSD-2-Clause" ]
permissive
PureStorage-OpenConnect/py-pure-client
a5348c6a153f8c809d6e3cf734d95d6946c5f659
7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e
refs/heads/master
2023-09-04T10:59:03.009972
2023-08-25T07:40:41
2023-08-25T07:40:41
160,391,444
18
29
BSD-2-Clause
2023-09-08T09:08:30
2018-12-04T17:02:51
Python
UTF-8
Python
false
false
6,750
py
# coding: utf-8

"""
    FlashArray REST API

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version: 2.17

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""


import pprint
import re

import six
import typing

from ....properties import Property

if typing.TYPE_CHECKING:
    from pypureclient.flasharray.FA_2_17 import models


class ReplicaLinkPerformanceReplication(object):
    """Swagger-generated model for per-replica-link replication throughput.

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Declared Swagger type of each attribute (consumed by to_dict()).
    swagger_types = {
        'bytes_per_sec_from_remote': 'int',
        'bytes_per_sec_to_remote': 'int',
        'bytes_per_sec_total': 'int',
        'direction': 'str',
        'local_pod': 'FixedReference',
        'remote_pod': 'FixedReference',
        'remotes': 'list[FixedReference]',
        'time': 'int'
    }

    # Python attribute name -> JSON key in the REST payload (identical here).
    attribute_map = {
        'bytes_per_sec_from_remote': 'bytes_per_sec_from_remote',
        'bytes_per_sec_to_remote': 'bytes_per_sec_to_remote',
        'bytes_per_sec_total': 'bytes_per_sec_total',
        'direction': 'direction',
        'local_pod': 'local_pod',
        'remote_pod': 'remote_pod',
        'remotes': 'remotes',
        'time': 'time'
    }

    # No constructor argument is mandatory for this model.
    required_args = {
    }

    def __init__(
        self,
        bytes_per_sec_from_remote=None,  # type: int
        bytes_per_sec_to_remote=None,  # type: int
        bytes_per_sec_total=None,  # type: int
        direction=None,  # type: str
        local_pod=None,  # type: models.FixedReference
        remote_pod=None,  # type: models.FixedReference
        remotes=None,  # type: List[models.FixedReference]
        time=None,  # type: int
    ):
        """
        Keyword args:
            bytes_per_sec_from_remote (int): The number of bytes received per second from a remote array.
            bytes_per_sec_to_remote (int): The number of bytes transmitted per second to a remote array.
            bytes_per_sec_total (int): Total bytes transmitted and received per second.
            direction (str): The direction of replication. Valid values are `inbound` and `outbound`.
            local_pod (FixedReference): Reference to a local pod.
            remote_pod (FixedReference): Reference to a remote pod.
            remotes (list[FixedReference]): Reference to a remote array.
            time (int): Sample time in milliseconds since the UNIX epoch.
        """
        # Only explicitly supplied values are set, so unset attributes remain
        # genuinely absent (see __getattribute__ below).
        if bytes_per_sec_from_remote is not None:
            self.bytes_per_sec_from_remote = bytes_per_sec_from_remote
        if bytes_per_sec_to_remote is not None:
            self.bytes_per_sec_to_remote = bytes_per_sec_to_remote
        if bytes_per_sec_total is not None:
            self.bytes_per_sec_total = bytes_per_sec_total
        if direction is not None:
            self.direction = direction
        if local_pod is not None:
            self.local_pod = local_pod
        if remote_pod is not None:
            self.remote_pod = remote_pod
        if remotes is not None:
            self.remotes = remotes
        if time is not None:
            self.time = time

    def __setattr__(self, key, value):
        # Reject attributes that aren't part of the model, and enforce the
        # spec's non-negativity constraints on the throughput counters.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ReplicaLinkPerformanceReplication`".format(key))
        if key == "bytes_per_sec_from_remote" and value is not None:
            if value < 0:
                raise ValueError("Invalid value for `bytes_per_sec_from_remote`, must be a value greater than or equal to `0`")
        if key == "bytes_per_sec_to_remote" and value is not None:
            if value < 0:
                raise ValueError("Invalid value for `bytes_per_sec_to_remote`, must be a value greater than or equal to `0`")
        if key == "bytes_per_sec_total" and value is not None:
            if value < 0:
                raise ValueError("Invalid value for `bytes_per_sec_total`, must be a value greater than or equal to `0`")
        self.__dict__[key] = value

    def __getattribute__(self, item):
        # Class-level Property placeholders represent values never set on this
        # instance; surface them as missing attributes instead of returning
        # the placeholder object.
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            raise AttributeError
        else:
            return value

    def __getitem__(self, key):
        # dict-style read access, restricted to declared model attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ReplicaLinkPerformanceReplication`".format(key))
        return object.__getattribute__(self, key)

    def __setitem__(self, key, value):
        # dict-style write access; bypasses __setattr__'s range checks.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ReplicaLinkPerformanceReplication`".format(key))
        object.__setattr__(self, key, value)

    def __delitem__(self, key):
        # dict-style delete, again limited to declared attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ReplicaLinkPerformanceReplication`".format(key))
        object.__delattr__(self, key)

    def keys(self):
        # Declared attribute names (whether or not set on this instance).
        return self.attribute_map.keys()

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    # Recursively serialize list elements that are models.
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    # Recursively serialize dict values that are models.
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        # Generator boilerplate: this class does not subclass dict, so the
        # branch below never runs here.
        if issubclass(ReplicaLinkPerformanceReplication, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ReplicaLinkPerformanceReplication):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
[ "noreply@github.com" ]
PureStorage-OpenConnect.noreply@github.com
fa4b50adc69c5c509df058204821cc4fad6cfbc3
fa9500caa931204fffd16634528b475fe0372170
/translate/translate.py
f617048700d70a9089f7790e50bfb13d13705cf0
[ "MIT" ]
permissive
monospacedmagic/Trusty-cogs
7e2c36d1da2055b34ea222894f399054d0697ee4
764c9282775575f5791d38b744a5df932acac156
refs/heads/master
2020-05-05T07:40:10.787031
2019-04-03T22:32:27
2019-04-03T22:32:27
179,833,321
1
0
MIT
2019-04-06T12:54:59
2019-04-06T12:54:59
null
UTF-8
Python
false
false
6,889
py
import logging

from redbot.core import commands, Config, checks
from redbot.core.i18n import Translator, cog_i18n

from .api import GoogleTranslateAPI, FlagTranslation
from .errors import GoogleTranslateAPIError

"""
Translator cog

Cog credit to aziz#5919 for the idea and Links

Wiki https://goo.gl/3fxjSA
Github https://goo.gl/oQAQde
Support the developer https://goo.gl/Brchj4
Invite the bot to your guild https://goo.gl/aQm2G7
Join the official development guild https://discord.gg/uekTNPj
"""

BASE_URL = "https://translation.googleapis.com"
_ = Translator("Translate", __file__)
log = logging.getLogger("red.Translate")


@cog_i18n(_)
class Translate(GoogleTranslateAPI, commands.Cog):
    """
    Translate messages using google translate
    """

    __version__ = "2.0.1"

    def __init__(self, bot):
        self.bot = bot
        # Per-guild toggles plus global API key / cooldown state, persisted
        # through redbot's Config with this cog's unique identifier.
        self.config = Config.get_conf(self, 156434873547, force_registration=True)
        default_guild = {
            "reaction": False,
            "text": False
        }
        default = {
            "api_key": None,
            "cooldown": {
                "past_flags": [],
                "timeout": 0,
                "multiple": False,
            }
        }
        self.config.register_guild(**default_guild)
        self.config.register_global(**default)
        # In-memory cache of recent translations; pruned by cleanup_cache()
        # (defined in GoogleTranslateAPI), started as a background task here.
        self.cache = {"translations": []}
        self.clear_cache = self.bot.loop.create_task(self.cleanup_cache())

    @commands.command()
    async def translate(self, ctx, to_language: FlagTranslation, *, message: str):
        """
        Translate messages with google translate

        `to_language` is the language you would like to translate
        `message` is the message to translate
        """
        # Translation costs money, so bail out early if no API key is set.
        if await self.config.api_key() is None:
            msg = _("The bot owner needs to set an api key first!")
            await ctx.send(msg)
            return
        try:
            detected_lang = await self.detect_language(message)
        except GoogleTranslateAPIError as e:
            await ctx.send(str(e))
            return
        # NOTE(review): from_lang and original_lang are assigned the same
        # value — presumably a legacy duplication; kept as-is.
        from_lang = detected_lang[0][0]["language"]
        original_lang = detected_lang[0][0]["language"]
        # Translating a language into itself is refused.
        if to_language == original_lang:
            return await ctx.send(
                _("I cannot translate `{from_lang}` to `{to}`").format(
                    from_lang=from_lang, to=to_language
                )
            )
        try:
            translated_text = await self.translate_text(original_lang, to_language, message)
        except GoogleTranslateAPIError as e:
            await ctx.send(str(e))
            return
        author = ctx.message.author
        # Prefer an embed when the bot has permission, else plain text.
        if ctx.channel.permissions_for(ctx.me).embed_links:
            translation = (translated_text, from_lang, to_language)
            em = await self.translation_embed(author, translation)
            await ctx.send(embed=em)
        else:
            await ctx.send(translated_text)

    @commands.group()
    @checks.mod_or_permissions(manage_channels=True)
    async def translateset(self, ctx):
        """
        Toggle the bot auto translating
        """
        pass

    @translateset.command(aliases=["reaction", "reactions"])
    @checks.mod_or_permissions(manage_channels=True)
    @commands.guild_only()
    async def react(self, ctx):
        """
        Toggle translations to flag emoji reactions
        """
        guild = ctx.message.guild
        # Flip the stored per-guild toggle and report the new state.
        toggle = not await self.config.guild(guild).reaction()
        if toggle:
            verb = _("on")
        else:
            verb = _("off")
        await self.config.guild(guild).reaction.set(toggle)
        msg = _("Reaction translations have been turned ")
        await ctx.send(msg + verb)

    @translateset.command(aliases=["multi"])
    @checks.is_owner()
    @commands.guild_only()
    async def multiple(self, ctx):
        """
        Toggle multiple translations for the same message

        This will also ignore the translated message from
        being translated into another language
        """
        # Global (owner-only) toggle, not per-guild.
        toggle = not await self.config.cooldown.multiple()
        if toggle:
            verb = _("on")
        else:
            verb = _("off")
        await self.config.cooldown.multiple.set(toggle)
        msg = _("Multiple translations have been turned ")
        await ctx.send(msg + verb)

    @translateset.command(aliases=["cooldown"])
    @checks.is_owner()
    @commands.guild_only()
    async def timeout(self, ctx, time: int):
        """
        Set the cooldown before a message can be reacted to again
        for translation

        `<time>` Number of seconds until that message can be reacted to again
        Note: If multiple reactions are not allowed the timeout setting
        is ignored until the cache cleanup ~10 minutes.
        """
        await self.config.cooldown.timeout.set(time)
        msg = _("Translation timeout set to {time}s.").format(time=time)
        await ctx.send(msg)

    @translateset.command(aliases=["flags"])
    @checks.mod_or_permissions(manage_channels=True)
    @commands.guild_only()
    async def flag(self, ctx):
        """
        Toggle translations with flag emojis in text
        """
        guild = ctx.message.guild
        toggle = not await self.config.guild(guild).text()
        if toggle:
            verb = _("on")
        else:
            verb = _("off")
        await self.config.guild(guild).text.set(toggle)
        msg = _("Flag emoji translations have been turned ")
        await ctx.send(msg + verb)

    @translateset.command()
    @checks.is_owner()
    async def creds(self, ctx, api_key):
        """
        You must get an API key from google to set this up

        Note: Using this cog costs money, current rates are $20 per 1 million characters.

        1. Go to Google Developers Console and log in with your Google account.
        (https://console.developers.google.com/)
        2. You should be prompted to create a new project (name does not matter).
        3. Click on Enable APIs and Services at the top.
        4. In the list of APIs choose or search for Cloud Translate API and click on it.
        Choose Enable.
        5. Click on Credentials on the left navigation bar.
        6. Click on Create Credential at the top.
        7. At the top click the link for \"API key\".
        8. No application restrictions are needed. Click Create at the bottom.
        9. You now have a key to add to `[p]translateset`
        """
        await self.config.api_key.set(api_key)
        await ctx.send(_("API key set."))

    def __unload(self):
        # Cancel the background cache-cleanup task when the cog is unloaded.
        self.clear_cache.cancel()
[ "TrustyJAID@gmail.com" ]
TrustyJAID@gmail.com
1a4e3c696ae9c091199a668fa86295965e325d04
132c7b0c8ba606a249fbdfe24f9d73e7e224d260
/pages/urls.py
98943af34a40c86efdabd88a747edabf6f65f189
[]
no_license
sanyuOnline/sanyu-webapp
dafa3505d7f3d6eca225ca6b4dce3fa683d5e9fe
c8e3824146bb9eb4dcf971a1cdef2bc4475385f1
refs/heads/main
2023-08-31T12:52:06.104078
2021-10-27T07:03:09
2021-10-27T07:03:09
406,589,964
0
0
null
null
null
null
UTF-8
Python
false
false
1,761
py
"""URL routes for the pages app: core site pages plus 19 project detail pages."""
from django.urls import path

from .views import *

# Top-level site pages.
_site_patterns = [
    path('', HomeView.as_view(), name='home'),
    path('about/', AboutView.as_view(), name='about'),
    path('contact-us/', ContactView.as_view(), name='contact_us'),
    path('projects/', ProjectsView.as_view(), name='projects'),
    path('donate/', DonateView.as_view(), name='donate'),
]

# One (url slug, view class, route name) row per project detail page,
# listed in the original registration order.
_PROJECT_PAGES = (
    ('human-rights', P1View, 'p1'),
    ('rule-of-law', P2View, 'p2'),
    ('advocacy', P3View, 'p3'),
    ('lgbti-rights', P4View, 'p4'),
    ('transitional-justice', P5View, 'p5'),
    ('migrant-rights', P6View, 'p6'),
    ('human-trafficking', P7View, 'p7'),
    ('international-advocacy', P8View, 'p8'),
    ('civic-education', P9View, 'p9'),
    ('civil-society-building', P10View, 'p10'),
    ('community-mobilization', P11View, 'p11'),
    ('local-governance', P12View, 'p12'),
    ('displaced-persons', P13View, 'p13'),
    ('independent-media', P14View, 'p14'),
    ('sme-development', P15View, 'p15'),
    ('local-economic-development', P16View, 'p16'),
    ('agriculture-development', P17View, 'p17'),
    ('womens-rights', P18View, 'p18'),
    ('electoral-processes', P19View, 'p19'),
)

urlpatterns = _site_patterns + [
    path('projects/{}/'.format(slug), view.as_view(), name=route_name)
    for slug, view, route_name in _PROJECT_PAGES
]
[ "jordanrob709@gmail.com" ]
jordanrob709@gmail.com
746d034aa1d78b642514f31c81a889e12510a073
5963c12367490ffc01c9905c028d1d5480078dec
/homeassistant/components/dexcom/__init__.py
68622a2335080cb4aa4533d90e766a20a7c9031c
[ "Apache-2.0" ]
permissive
BenWoodford/home-assistant
eb03f73165d11935e8d6a9756272014267d7d66a
2fee32fce03bc49e86cf2e7b741a15621a97cce5
refs/heads/dev
2023-03-05T06:13:30.354545
2021-07-18T09:51:53
2021-07-18T09:51:53
117,122,037
11
6
Apache-2.0
2023-02-22T06:16:51
2018-01-11T16:10:19
Python
UTF-8
Python
false
false
2,550
py
"""The Dexcom integration.""" from datetime import timedelta import logging from pydexcom import AccountError, Dexcom, SessionError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_SERVER, COORDINATOR, DOMAIN, MG_DL, PLATFORMS, SERVER_OUS, UNDO_UPDATE_LISTENER, ) _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=180) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Dexcom from a config entry.""" try: dexcom = await hass.async_add_executor_job( Dexcom, entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], entry.data[CONF_SERVER] == SERVER_OUS, ) except AccountError: return False except SessionError as error: raise ConfigEntryNotReady from error if not entry.options: hass.config_entries.async_update_entry( entry, options={CONF_UNIT_OF_MEASUREMENT: MG_DL} ) async def async_update_data(): try: return await hass.async_add_executor_job(dexcom.get_current_glucose_reading) except SessionError as error: raise UpdateFailed(error) from error hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = { COORDINATOR: DataUpdateCoordinator( hass, _LOGGER, name=DOMAIN, update_method=async_update_data, update_interval=SCAN_INTERVAL, ), UNDO_UPDATE_LISTENER: entry.add_update_listener(update_listener), } await hass.data[DOMAIN][entry.entry_id][ COORDINATOR ].async_config_entry_first_refresh() hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]() if unload_ok: 
hass.data[DOMAIN].pop(entry.entry_id) return unload_ok async def update_listener(hass, entry): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id)
[ "noreply@github.com" ]
BenWoodford.noreply@github.com
e9d92ec8c95f8b20f9de59c693ea7978caa23203
f0c402d3858f0643561886797578b1e64655b1b3
/utils/builder/shared/ctrl_file_template_strings.py
fa3d11192a6fc72636b8b5a9e473ff13560d179b
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
Leo-Wang-JL/force-riscv
39ad2a72abd814df4b63879ce9825b6b06a9391a
deee6acaaee092eb90ac2538de122303334e5be3
refs/heads/master
2023-01-28T00:06:58.135651
2020-11-18T02:54:10
2020-11-18T02:54:10
271,873,013
0
0
NOASSERTION
2020-06-28T00:51:26
2020-06-12T19:15:26
C++
UTF-8
Python
false
false
1,102
py
# # Copyright (C) [2020] Futurewei Technologies, Inc. # # FORCE-RISCV is licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR # FIT FOR A PARTICULAR PURPOSE. # See the License for the specific language governing permissions and # limitations under the License. # #ctrl file template strings ctrl_items_template = 'control_items = [ ' fname_template = '"fname":"{}*_force.py"' generator_template = ',"generator":{{{}}}' options_template = ',"options":{{{}}}' performance_template = ',"performance":{{{}}}' #option strings noiss = '"--noiss":None,' nosim = '"no-sim":True,' group = '"group":"{}",' options = '"--options":"\\"{}\\"",' #misc strings ctrl_item_separator ='\n ' #arch specific strings arch_genopts ='MMU=1,all_cacheable=1'
[ "jwang1@futurewei.com" ]
jwang1@futurewei.com
447168b09a1040cc0c9136e96eeca844264de0f3
7d90d2ce27c6ee0af74391b09909edbd45fdc2f0
/renix_py_api/api_gen/Y1564WriteDbCommand_Autogen.py
9ed8e710f2aa1c0339673ae3a451c12aa7ad3772
[]
no_license
gaoxingyu-hub/54testframework-master-e284
d7ea0d4a715b65c8652430e963a86b9522a7237a
57dd2197e7d91b8ad8fb2bd0e3503f10afa08544
refs/heads/master
2023-04-30T05:50:41.542402
2021-05-28T09:19:37
2021-05-28T09:19:37
309,922,838
0
0
null
null
null
null
UTF-8
Python
false
false
1,057
py
""" Auto-generated File Create Time: 2019-12-27 02:33:28 """ from .ROMEnum_Autogen import * from renix_py_api.renix_common_api import * from renix_py_api import rom_manager from .BenchmarkWriteDbCommand_Autogen import BenchmarkWriteDbCommand @rom_manager.rom class Y1564WriteDbCommand(BenchmarkWriteDbCommand): def __init__(self, LoadUnit=None, **kwargs): self._LoadUnit = LoadUnit # Load Unit properties = kwargs.copy() if LoadUnit is not None: properties['LoadUnit'] = LoadUnit # call base class function, and it will send message to renix server to create a class. super(Y1564WriteDbCommand, self).__init__(**properties) @property def LoadUnit(self): """ get the value of property _LoadUnit """ return self._LoadUnit @LoadUnit.setter def LoadUnit(self, value): self._LoadUnit = value def _set_loadunit_with_str(self, value): seperate = value.find(':') exec('self._LoadUnit = EnumLoadUnit.%s' % value[:seperate])
[ "gaoxingyu@example.com" ]
gaoxingyu@example.com
d2470fc2125cc86a23962c50483ae31d9ed0dfda
6c584706e6eab645e11357bde8f393013c69e4c9
/Ingestão de Dados/Aula 3/gerador_log_web.py
3f51aeffdcf1a17eef5f714f422ad73ba2f38853
[]
no_license
charlesartbr/fiap-mba-big-data-data-science
cce1b64c301187a049cd9929d5fafd7e6985503e
de4d8372a7ce26ac8e4556925416e5c9e1932020
refs/heads/master
2022-09-05T00:33:21.367281
2022-08-09T14:01:28
2022-08-09T14:01:28
185,289,505
0
1
null
2021-01-15T16:47:00
2019-05-07T00:10:35
Jupyter Notebook
UTF-8
Python
false
false
1,772
py
#!/usr/bin/python import time import datetime import random timestr = time.strftime("%Y%m%d-%H%M%S") f = open('/tmp/access_log','w',0) ips=["123.221.14.56","16.180.70.237","10.182.189.79","218.193.16.244","198.122.118.164","114.214.178.92","233.192.62.103","244.157.45.12","81.73.150.239","237.43.24.118"] referers=["-","http://www.casualcyclist.com","http://bestcyclingreviews.com/top_online_shops","http://bleater.com","http://searchengine.com"] resources=["/handle-bars","/stems","/wheelsets","/forks","/seatposts","/saddles","/shifters","/Store/cart.jsp?productID="] useragents=["Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)","Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36","Mozilla/5.0 (Linux; U; Android 2.3.5; en-us; HTC Vision Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1","Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25","Mozilla/5.0 (Windows; U; Windows NT 6.1; rv:2.2) Gecko/20110201","Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0","Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US))"] otime = datetime.datetime(2013,10,10) while 1: time.sleep(0.1) increment = datetime.timedelta(seconds=random.randint(30,300)) otime += increment uri = random.choice(resources) if uri.find("Store")>0: uri += `random.randint(1000,1500)` ip = random.choice(ips) useragent = random.choice(useragents) referer = random.choice(referers) f.write('%s - - [%s] "GET %s HTTP/1.0" 200 %s "%s" "%s"\n' % (random.choice(ips),otime.strftime('%d/%b/%Y:%H:%M:%S %z'),uri,random.randint(2000,5000),referer,useragent))
[ "e-mail@charles.art.br" ]
e-mail@charles.art.br
e7599f9ae060d3fafce863bf73c9090ed1280934
5a7b15eb2a3453475ee70bb56e19a7bb2751db89
/code/analysis/NOT_USED/quantiles/yStudyTradeoff_Bootstrap_Parallel_OnlyWordForms_BoundedVocab_BinomialTest_Single_UnimodalBoundOnQuantile_BothDirections_NoAssumption_All.py
ccb97759647aaf85ec002925f489aef6d4a151e4
[]
no_license
m-hahn/memory-surprisal
8db19bc86ada9c352feb66859f718749623700b6
1b3d680836ba87fb9186741a8d4f184fda35b122
refs/heads/master
2022-04-30T16:01:39.323884
2022-03-25T04:10:12
2022-03-25T04:10:12
156,466,125
1
0
null
null
null
null
UTF-8
Python
false
false
743
py
from ud_languages import languages
import subprocess

# Collects the per-language binomial confidence bounds into one TSV by
# shelling out to the single-language analysis script once per corpus.
# (Companion script producing histograms:
#  ./python27 yStudyTradeoff_Bootstrap_Parallel_OnlyWordForms_BoundedVocab_HistogramsByMem_All.py
#      > ../results/tradeoff/listener-curve-histogram_byMem.tsv)

_OUT_PATH = "../../results/tradeoff/listener-curve-binomial-confidence-bound-quantile-noAssumption.tsv"
_HEADER = ["Language", "Type", "Position", "LowerConfidenceBound", "Level", "Memory"]

with open(_OUT_PATH, "w") as sink:
    sink.write("\t".join(_HEADER) + "\n")
    for language in languages:
        print(language)  # progress indicator on stdout
        row = subprocess.check_output(
            ["./python27",
             "yStudyTradeoff_Bootstrap_Parallel_OnlyWordForms_BoundedVocab_BinomialTest_Single_UnimodalBoundOnQuantile_BothDirections_NoAssumption.py",
             "--language", language,
             "--level", "0.001"]).strip()
        sink.write(row + "\n")
[ "mhahn29@gmail.com" ]
mhahn29@gmail.com
3b15e2c536dd3217d18951bca339e724dc79bc96
250b997d715c168315a927e28124cf24c77048c0
/python3基础/3.Python修炼第三层/day3_预习/文件处理_预习.py
7df721ad73b651c2138d41d3c9be75f122680a17
[]
no_license
cuitianfeng/Python
c78077e5dcad01ee5fe44c0aa8b61bbc2fa388cf
9c9f10f13311116ce0bc60ec128f765ff2ca3078
refs/heads/master
2023-01-10T23:25:57.158141
2020-11-17T15:39:36
2020-11-17T15:39:36
null
0
0
null
null
null
null
UTF-8
Python
false
false
7,724
py
# python基础之文件处理 # http://www.cnblogs.com/linhaifeng/articles/5984922.html # 文件处理 # 读 # f=open('a.txt','r',encoding='utf-8') #打开文件 # res=f.read() #read 以光标读取文件所有内容 # print(res) # print('第二次',f.read()) # 第一次read光标已经读到文件末尾,所以光标无法继续读取 # print(f.readline(),end='') #一行一行的读取文件 # print(f.readlines(),end='') #把文件所有行读取出来 组成列表 #,end='' 取消读取文件末尾的换行符 # f.close() #打开文件操作后,如果不关闭 ,会一直占用资源,一定要关闭文件 #close掉的是操作系统的资源 f 变量依然存在,但是不能发起读写操作了 # del f #回收应用程序 中的 f , 并没有关闭操作系统的打开文件 #打开 操作 单个或者多个文件,操作完后 自动 close 文件 # with open('a.txt','r',encoding='utf-8') as f,open('b.txt') as f1: # pass # r 文本模式的读,在文件不存在,不会创建新文件 # f=open('a.txt','r',encoding='utf-8') #应用程序指定一个f 变量(对象), 操作系统打开一个文件 # f.read() #应用程序发起读取文件指令,操作系统来操作去硬盘读取内容然后返回给 f # print(f.read()) # f.readline() #读取一行 # f.readlines() #把所有行取出来,放入列表 # print(f.readable()) #判断文件是否 可读 # print(f.writable()) #判断问价是否 可写 # f.close() # 写 # f=open('a.txt','w',encoding='utf-8') # w 写 如果文件不存在就创建文件,如果存在就清空文件 # f.write('1111\n') #每次写都是先清空在 重新写 # f.write('2222\n') #每次写都是先清空在 重新写 # f.write('3333\n4444\n') #每次写都是先清空在 重新写 # f.writelines(['a\n','b\n','c\n']) # f.write() # f.close() # w 文本模式的写,文件存在则清空,不存在则创建 # f=open('a.txt','w',encoding='utf-8') # print(f.readable()) #判断文件是否 可读 # print(f.writable()) #判断问价是否 可写 # f.write('哈哈哈哈\n') #一行 添加 # f.write('哈哈哈哈\n') # # f.writelines(['1111\n','2222\n']) #列表添加 # f.close() # 追加 # a 文本模式的追加,文件存在光标跳到文件末尾,文件不存在创建, # f=open('b.txt','a',encoding='utf-8') # # print(f.writable()) #判断问价是否 可写 # # print(f.tell()) #查看文件光标的位置 # f.write('333\n') # f.write('4444\n') # f.close() # r+,w+,a+ 不常用的模式 # rb # rb 模式即直接从硬盘中读取bytes ,不用指定编码 # f=open('a.txt','rb') # # print(f.read()) # print(f.read().decode('utf-8')) # f.close() # wb # wb 模式,一定不用指定编码 # f=open('a.txt','wb') # f.write('你好啊'.encode('utf-8')) # f.close() # ab 模式,每次写都要 encode #回收、关闭文件 # f.close() #打开文件操作后,如果不关闭 ,会一直占用资源,一定要关闭文件 #close掉的是操作系统的资源 f 变量依然存在,但是不能发起读写操作了 # del f #回收应用程序 中的 f , 并没有关闭操作系统打开的文件 # f.close() #关闭操作系统打开的文件 # close之后 f 依然存在 # close之后 f.read 
是无法操作的,因为read是往操作系统发请求,而系统已经关闭这个文件 #打开 操作 单个或者多个文件,操作完后 自动 close 文件 # with open('a.txt','r',encoding='utf-8') as f,open('b.txt') as f1: # pass # with open('file.txt','w',encoding='utf-8') as f: # f.write('111\n') #文本格式以外的文件 # f=open('test.jpg','rb') # print(f.read()) # # with open('test.jpg','rb') as read_f,open('test1.jpg','wb') as write_f: # # write_f.write(read_f.read()) # for line in read_f: # write_f.write(line) # 修改文件 # Vim 原理 修改文件 vim就是一次全部读取文件 # import os # with open('old.txt','r',encoding='utf-8') as read_f,\ # open('.old.txt.swap','w',encoding='utf-8') as write_f: # msg=read_f.read() # # print(msg,type(msg)) # msg=msg.replace('alex','SB') # # print(msg) # write_f.write(msg) # os.remove('old.txt') # os.rename('.old.txt.swap','old.txt') # 如果文件过大 推荐一行一行的读取 # 换成读取文件时 一行一行读取文件 再修改 # import os # with open('old.txt','r',encoding='utf-8') as read_f,\ # open('.old.txt.swap','w',encoding='utf-8') as write_f: # for line in read_f: # if 'SB' in line: # line=line.replace('SB','alex') # write_f.write(line) # os.remove('old.txt') # os.rename('.old.txt.swap','old.txt') # 文件读取 写入列表 转成字典 # l={} # f = open('a.txt','r',encoding='utf-8') # u = f.readlines() # print(u,type(u)) # for i in u: # i = i.strip() # print(i) # # print(i.split(' ')[0]) # # print(i.split(' ')[1]) # l[i.split(' ')[0]]={'金额':i.split(' ')[1]} # print(l) # print(l['www']['金额']) # 脚本传参实现拷贝文件 # import sys # # #python3 copy.py source.file target.file # if len(sys.argv) < 3: # print('usage:python3 copy.py source.file target.file') # sys.exit() # # #r'D:\python编码\py_s19\day3\old.txt' windows路径问题加r r是原生字符串 # with open(r'%s' %sys.argv[1],'rb') as read_f,\ # open(r'%s' %sys.argv[2],'wb') as write_f: # # for line in read_f: # write_f.write(line) # 文件其他操作 # f=open('a.txt','r',encoding='utf-8') # print(f.read(3)) # 读3个字符 # f=open('a.txt','rb') # print(f.read(3)) # 读3个字节 # print(f.read(3).decode('utf-8')) # 解码读 unicode 3个字节存的中文 # f=open('a.txt','r',encoding='utf-8') # print(f.read()) # # f.seek(0) # 定义光标位置 重置到0 # 
f.seek(3) # print(f.tell()) #以字节显示光标位置 # print(f.read()) # seek有三种移动方式0,1,2,其中1和2必须在b模式下进行,但无论哪种模式,都是以bytes为单位移动的 # 0 # f=open('a.txt','rb') # print(f.read(3)) # print(f.tell()) # f.seek(3,0) # print(f.tell()) # print(f.read(3).decode('utf-8')) # 1 # f=open('a.txt','rb') # print(f.read(3)) # print(f.tell()) # f.seek(3,1) # print(f.tell()) # print(f.read().decode('utf-8')) # 2 # f=open('a.txt','rb') # f.seek(0,2) #光标移动至末尾 # print(f.tell()) # python3 tail.py -f access.log # import time # import sys # # with open(r'%s' %sys.argv[2],'rb') as f: # f.seek(0,2) # # while True: # line=f.readline() # if line: # print(line.decode('utf-8'),end='') # else: # time.sleep(0.2) #模拟文件追加 # with open('access.log','a') as f: # f.write('1111\n') # truncate是截断文件,所以文件的打开方式必须可写,但是不能用w或w+等方式打开,因为那样直接清空文件了,所以truncate要在r+或a或a+等模式下测试效果 # with open('a.txt','r+',encoding='utf-8') as f: # f.truncate(9) #以字节 截取 截取从0到9以内 # 直接循环 一行一行读 内存里面只会存一行 # with open('a.txt','r',encoding='utf-8') as f: # # l=f.readlines() # # print(l) # # for line in l: # # print(line,end='') # for line in f: # print(line) # l=[1,2,3,4,5] # for index in range(len(l)): # # print(index) # print(l[index]) # for itme in l: # # print(index) # print(itme) l=[1,2,3,'a','b'] print(l[7],'123')
[ "zhang.hongyang@mydreamplus.com" ]
zhang.hongyang@mydreamplus.com
1635473539c093851ee9aa249ab11f4706c48386
963676ebcbb74cb40344aba93960ab288b75e4cd
/migrations/versions/e6f60380eb4c_add_post_id_columnin_comment_table.py
12032564a5da2387ff7e7abee4c9dc8999d72478
[ "MIT" ]
permissive
Anabella1109/MeBlog
6f93255b550322861cc020fe686beab85b68ac8f
4e3425a9558926208ad5ce66f83da55bb944f59e
refs/heads/master
2020-04-25T23:13:20.525790
2019-03-05T06:54:35
2019-03-05T06:54:35
173,137,672
0
1
null
null
null
null
UTF-8
Python
false
false
678
py
"""add post_id columnin comment table Revision ID: e6f60380eb4c Revises: a002d432a443 Create Date: 2019-03-02 18:05:32.082839 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'e6f60380eb4c' down_revision = 'a002d432a443' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('posts', sa.Column('image', sa.String(length=255), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('posts', 'image') # ### end Alembic commands ###
[ "bellaxbx1109@gmail.com" ]
bellaxbx1109@gmail.com
1ae2355c6ac4a55ad285c5f227692ebf7fea72a8
e1fada3a9846a5593e3d3d2fdc32b23b832e38b4
/tests/unit/algorithms/segmentation/adapters/mmseg/test_pipelines.py
d59987c3820cd54ceb70f18d63ed75cf203e1fe8
[ "Apache-2.0" ]
permissive
GalyaZalesskaya/openvino_training_extensions
fd1ebb189900008b16b85568449e5c62d8edbad5
6116639caeff100b06a6c10a96c7e7f5951f20c7
refs/heads/develop
2023-09-03T19:32:44.702497
2023-03-15T06:48:24
2023-03-15T06:48:24
202,568,309
0
0
Apache-2.0
2019-10-28T16:16:27
2019-08-15T15:41:59
Python
UTF-8
Python
false
false
4,386
py
# Copyright (C) 2023 Intel Corporation # SPDX-License-Identifier: Apache-2.0 # import numpy as np import PIL.Image import pytest from otx.algorithms.segmentation.adapters.mmseg.data.pipelines import ( LoadAnnotationFromOTXDataset, NDArrayToPILImage, PILImageToNDArray, RandomResizedCrop, RandomSolarization, ) from otx.api.entities.annotation import ( Annotation, AnnotationSceneEntity, AnnotationSceneKind, ) from otx.api.entities.dataset_item import DatasetItemEntity from otx.api.entities.image import Image from otx.api.entities.label import Domain, LabelEntity from otx.api.entities.scored_label import ScoredLabel from otx.api.entities.shapes.rectangle import Rectangle from tests.test_suite.e2e_test_system import e2e_pytest_unit def label_entity(name="test label") -> LabelEntity: return LabelEntity(name=name, domain=Domain.SEGMENTATION) def dataset_item() -> DatasetItemEntity: image: Image = Image(data=np.random.randint(low=0, high=255, size=(10, 16, 3))) annotation: Annotation = Annotation(shape=Rectangle.generate_full_box(), labels=[ScoredLabel(label_entity())]) annotation_scene: AnnotationSceneEntity = AnnotationSceneEntity( annotations=[annotation], kind=AnnotationSceneKind.ANNOTATION ) return DatasetItemEntity(media=image, annotation_scene=annotation_scene) class TestLoadAnnotationFromOTXDataset: @pytest.fixture(autouse=True) def setUp(self) -> None: self.dataset_item: DatasetItemEntity = dataset_item() self.results: dict = { "dataset_item": self.dataset_item, "ann_info": {"labels": [label_entity("class_1")]}, "seg_fields": [], } self.pipeline: LoadAnnotationFromOTXDataset = LoadAnnotationFromOTXDataset() @e2e_pytest_unit def test_call(self) -> None: loaded_annotations: dict = self.pipeline(self.results) assert "gt_semantic_seg" in loaded_annotations assert loaded_annotations["dataset_item"] == self.dataset_item class TestNDArrayToPILImage: @pytest.fixture(autouse=True) def setUp(self) -> None: self.results: dict = {"img": np.random.randint(0, 255, (3, 3, 3), 
dtype=np.uint8)} self.nd_array_to_pil_image: NDArrayToPILImage = NDArrayToPILImage(keys=["img"]) @e2e_pytest_unit def test_call(self) -> None: converted_img: dict = self.nd_array_to_pil_image(self.results) assert "img" in converted_img assert isinstance(converted_img["img"], PIL.Image.Image) @e2e_pytest_unit def test_repr(self) -> None: assert str(self.nd_array_to_pil_image) == "NDArrayToPILImage" class TestPILImageToNDArray: @pytest.fixture(autouse=True) def setUp(self) -> None: self.results: dict = {"img": PIL.Image.new("RGB", (3, 3))} self.pil_image_to_nd_array: PILImageToNDArray = PILImageToNDArray(keys=["img"]) @e2e_pytest_unit def test_call(self) -> None: converted_array: dict = self.pil_image_to_nd_array(self.results) assert "img" in converted_array assert isinstance(converted_array["img"], np.ndarray) @e2e_pytest_unit def test_repr(self) -> None: assert str(self.pil_image_to_nd_array) == "PILImageToNDArray" class TestRandomResizedCrop: @pytest.fixture(autouse=True) def setUp(self) -> None: self.results: dict = {"img": PIL.Image.new("RGB", (10, 16)), "img_shape": (10, 16), "ori_shape": (10, 16)} self.random_resized_crop: RandomResizedCrop = RandomResizedCrop((5, 5), (0.5, 1.0)) @e2e_pytest_unit def test_call(self) -> None: cropped_img: dict = self.random_resized_crop(self.results) assert cropped_img["img_shape"] == (5, 5) assert cropped_img["ori_shape"] == (10, 16) class TestRandomSolarization: @pytest.fixture(autouse=True) def setUp(self) -> None: self.results: dict = {"img": np.random.randint(0, 255, (3, 3, 3), dtype=np.uint8)} self.random_solarization: RandomSolarization = RandomSolarization(p=1.0) @e2e_pytest_unit def test_call(self) -> None: solarized: dict = self.random_solarization(self.results) assert "img" in solarized assert isinstance(solarized["img"], np.ndarray) @e2e_pytest_unit def test_repr(self) -> None: assert str(self.random_solarization) == "RandomSolarization"
[ "noreply@github.com" ]
GalyaZalesskaya.noreply@github.com
b5d111e5042cf79311a90c8070f79acc4fb07e98
49e04eaa863f16f23378d628d8c8da3e2f4328ec
/types.py
6bbf5e9a8e558f93f9b1391cbcd8965f32cf5817
[]
no_license
cheery/hindley-milner-tutorial
5f58af47fff9686f5dfd23130a7bdf97ccae19ed
40debaa36b2cc2dd82a73cf8580c8e810698afb4
refs/heads/master
2021-01-01T18:38:32.396766
2013-07-23T14:14:08
2013-07-23T14:14:08
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,551
py
class Variable(object):
    """A type variable; `instance` points at the term it was unified with (or None)."""
    def __init__(self):
        self.instance = None


class Type(object):
    """A type-constructor application, e.g. Type('->', [arg, ret])."""
    def __init__(self, name, types):
        self.name = name
        self.types = types


class Generic(object):
    """A quantified (generalised) variable; fresh() instantiates it anew per kind."""
    def __init__(self, kind):
        self.kind = kind


def prune(t):
    """Follow instance links and return the current representative of t."""
    while isinstance(t, Variable) and t.instance:
        t = t.instance
    return t


def inside(v, t):
    """Occurs check: return True if variable v appears anywhere inside term t."""
    # Bug fix: prune first so subterms bound through instance links are
    # inspected too; the original only compared unbound structure.
    t = prune(t)
    if isinstance(t, Type):
        return any(inside(v, x) for x in t.types)
    return v == t


def fresh(t, mappings=None):
    """Instantiate t, replacing each Generic with a fresh Variable.

    Generics of the same kind map to the same fresh Variable (tracked in
    `mappings`), so sharing within a type scheme is preserved.
    """
    mappings = {} if mappings is None else mappings
    t = prune(t)
    if isinstance(t, Generic):
        if t.kind not in mappings:
            mappings[t.kind] = Variable()
        return mappings[t.kind]
    if isinstance(t, Variable):
        return t
    if isinstance(t, Type):
        return Type(t.name, [fresh(x, mappings) for x in t.types])


def unify_var(v, t):
    """Bind variable v to term t (no-op if identical; occurs check first)."""
    if v != t:
        if inside(v, t):
            raise Exception("recursive unification")
        v.instance = t


def unify_types(a, b):
    """Unify two constructor applications component-wise."""
    if a.name != b.name or len(a.types) != len(b.types):
        # Bug fix: the original raise was missing its closing parenthesis,
        # which made this whole module a syntax error.
        raise Exception("type mismatch %s/%i != %s/%i"
                        % (a.name, len(a.types), b.name, len(b.types)))
    for p, q in zip(a.types, b.types):
        unify(p, q)


def unify(a, b):
    """Unify two terms in place."""
    a = prune(a)
    b = prune(b)
    # Bug fix: these branches must be mutually exclusive.  With independent
    # `if`s, unifying two unbound variables linked each to the other
    # (a.instance = b AND b.instance = a), so prune() looped forever.
    if isinstance(a, Variable):
        unify_var(a, b)
    elif isinstance(b, Variable):
        unify_var(b, a)
    elif isinstance(a, Type) and isinstance(b, Type):
        unify_types(a, b)

###Integer = op('Integer', [])
###Boolean = op('Boolean', [])
[ "cheery@boxbase.org" ]
cheery@boxbase.org
f015bf5f10b97ea6b3dda3c8716fabd25c42417d
3ffdea18c24ed90672cc8414cba6b769d757667d
/tests/test_api.py
a73f70aead6d4354ff8fb63f5dc4ab817ea93f8d
[ "BSD-3-Clause", "BSD-2-Clause" ]
permissive
msauria/cooler
0b26a07e3b42b0bddbc9fc15aef18dee65010b9b
d9257919d3aee1bbbe700e8154f9f73354e416cb
refs/heads/master
2021-01-22T06:18:27.266357
2017-05-25T04:15:31
2017-05-25T04:15:31
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,322
py
# -*- coding: utf-8 -*- from __future__ import division, print_function from scipy import sparse import numpy as np import pandas import h5py from nose.tools import assert_raises import cooler.api import mock class MockHDF5(dict): file = mock.Mock(['mode']) binsize = 100 n_bins = 20 r = sparse.random(n_bins, n_bins, density=1, random_state=1) r = sparse.triu(r, k=1).tocsr() r_full = r.toarray() + r.toarray().T mock_cooler = MockHDF5({ 'chroms': { 'name': np.array(['chr1', 'chr2'], dtype='S'), 'length': np.array([1000, 1000], dtype=np.int32), }, 'bins': { 'chrom': np.array([0,0,0,0,0,0,0,0,0,0, 1,1,1,1,1,1,1,1,1,1], dtype=int), 'start': np.array([0,100,200,300,400,500,600,700,800,900, 0,100,200,300,400,500,600,700,800,900], dtype=int), 'end': np.array([100,200,300,400,500,600,700,800,900,1000, 100,200,300,400,500,600,700,800,900,1000], dtype=int), 'mask': np.array([1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1], dtype=bool), 'bias': np.array([1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1], dtype=float), 'E1': np.zeros(20, dtype=float), }, 'pixels': { 'bin1_id': r.tocoo().row, 'bin2_id': r.indices, 'count': r.data, 'mask': np.ones(r.nnz, dtype=bool), }, 'indexes': { 'chrom_offset': np.array([0, 10, 20], dtype=np.int32), # nchroms + 1 'bin1_offset': r.indptr, # nbins + 1 }, }) mock_cooler.attrs = { 'bin-size': binsize, 'bin-type': 'fixed', 'nchroms': 2, 'nbins': n_bins, 'nnz': r.nnz, 'metadata': '{}', } mock_cooler.file = mock_cooler mock_cooler.file.mode = 'r' mock_cooler.file.filename = 'mock.cool' mock_cooler.name = '/' mock_cooler['/'] = mock_cooler chromID_lookup = pandas.Series({'chr1': 0, 'chr2': 1}) def test_get(): table = cooler.api.get(mock_cooler['chroms']) assert np.all(table['length'] == mock_cooler['chroms']['length']) def test_chromtable(): table = cooler.api.chroms(mock_cooler) assert np.all(table['length'] == mock_cooler['chroms']['length']) def test_bintable(): lo, hi = 2, 10 table = cooler.api.bins(mock_cooler, lo, hi) assert 
np.all(chromID_lookup[table['chrom']] == mock_cooler['bins']['chrom'][lo:hi]) assert np.all(table['start'] == mock_cooler['bins']['start'][lo:hi]) assert np.all(table['end'] == mock_cooler['bins']['end'][lo:hi]) def test_pixeltable(): lo, hi = 2, 10 table = cooler.api.pixels(mock_cooler, lo, hi, join=False) assert np.all(table['bin1_id'] == mock_cooler['pixels']['bin1_id'][lo:hi]) assert np.all(table['bin2_id'] == mock_cooler['pixels']['bin2_id'][lo:hi]) table = cooler.api.pixels(mock_cooler, lo, hi, join=True) assert table.shape == (hi-lo, len(mock_cooler['pixels']) + 4) def test_info(): pass def test_cooler(): c = cooler.Cooler(mock_cooler) # bin table table = c.bins().fetch('chr1') assert np.all(table['start'] == mock_cooler['bins']['start'][0:10]) assert np.all(table['end'] == mock_cooler['bins']['end'][0:10]) # offsets assert c.offset('chr1') == 0 assert c.extent('chr1') == (0, 10) # 2D range queries as rectangular or triangular A1 = np.triu(c.matrix(balance=False).fetch('chr2')) df = c.matrix(as_pixels=True, join=False, balance=False).fetch('chr2') i0 = c.offset('chr2') i, j, v = df['bin1_id'], df['bin2_id'], df['count'] mat = sparse.coo_matrix((v, (i-i0, j-i0)), (A1.shape)) A2 = np.triu(mat.toarray()) assert np.all(A1 == A2) def test_annotate(): c = cooler.Cooler(mock_cooler) # works with full bin table / view or only required bins df = c.matrix(as_pixels=True, balance=False).fetch('chr1') df1 = cooler.annotate(df, c.bins()[:]) df2 = cooler.annotate(df, c.bins()) df3 = cooler.annotate(df, c.bins().fetch('chr1')) assert np.all(df1 == df2) assert np.all(df1 == df3) # works on empty dataframe df4 = cooler.annotate(df[0:0], c.bins()[:]) assert np.all(df4.columns == df3.columns) assert len(df4) == 0
[ "nabdennur@gmail.com" ]
nabdennur@gmail.com
fe917e43a825ab3b35bc5f39d38ccea00d17fe35
64bf39b96a014b5d3f69b3311430185c64a7ff0e
/intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/cloudscale_ch/cloud/plugins/modules/server_group.py
ce68a6bcdeeb1be48c536fd6592cbd6dac7f9382
[ "GPL-1.0-or-later", "GPL-3.0-only", "MIT" ]
permissive
SimonFangCisco/dne-dna-code
7072eba7da0389e37507b7a2aa5f7d0c0735a220
2ea7d4f00212f502bc684ac257371ada73da1ca9
refs/heads/master
2023-03-10T23:10:31.392558
2021-02-25T15:04:36
2021-02-25T15:04:36
342,274,373
0
0
MIT
2021-02-25T14:39:22
2021-02-25T14:39:22
null
UTF-8
Python
false
false
4,241
py
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (c) 2019, René Moser <mail@renemoser.net> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type DOCUMENTATION = ''' --- module: server_group short_description: Manages server groups on the cloudscale.ch IaaS service description: - Create, update and remove server groups. author: - René Moser (@resmo) - Denis Krienbühl (@href) version_added: "1.0.0" options: name: description: - Name of the server group. - Either I(name) or I(uuid) is required. These options are mutually exclusive. type: str uuid: description: - UUID of the server group. - Either I(name) or I(uuid) is required. These options are mutually exclusive. type: str type: description: - Type of the server group. default: anti-affinity type: str zone: description: - Zone slug of the server group (e.g. C(lgp1) or C(rma1)). type: str state: description: - State of the server group. choices: [ present, absent ] default: present type: str tags: description: - Tags assosiated with the server groups. Set this to C({}) to clear any tags. 
type: dict extends_documentation_fragment: cloudscale_ch.cloud.api_parameters ''' EXAMPLES = ''' --- - name: Ensure server group exists cloudscale_ch.cloud.server_group: name: my-name type: anti-affinity api_token: xxxxxx - name: Ensure server group in a specific zone cloudscale_ch.cloud.server_group: name: my-rma-group type: anti-affinity zone: lpg1 api_token: xxxxxx - name: Ensure a server group is absent cloudscale_ch.cloud.server_group: name: my-name state: absent api_token: xxxxxx ''' RETURN = ''' --- href: description: API URL to get details about this server group returned: if available type: str sample: https://api.cloudscale.ch/v1/server-group/cfde831a-4e87-4a75-960f-89b0148aa2cc uuid: description: The unique identifier for this server returned: always type: str sample: cfde831a-4e87-4a75-960f-89b0148aa2cc name: description: The display name of the server group returned: always type: str sample: load balancers type: description: The type the server group returned: if available type: str sample: anti-affinity zone: description: The zone of the server group returned: success type: dict sample: { 'slug': 'rma1' } servers: description: A list of servers that are part of the server group. returned: if available type: list sample: [] state: description: State of the server group. returned: always type: str sample: present tags: description: Tags assosiated with the server group. 
returned: success type: dict sample: { 'project': 'my project' } ''' from ansible.module_utils.basic import AnsibleModule from ..module_utils.api import ( AnsibleCloudscaleBase, cloudscale_argument_spec, ) def main(): argument_spec = cloudscale_argument_spec() argument_spec.update(dict( name=dict(type='str'), uuid=dict(type='str'), type=dict(type='str', default='anti-affinity'), zone=dict(type='str'), tags=dict(type='dict'), state=dict(default='present', choices=['absent', 'present']), )) module = AnsibleModule( argument_spec=argument_spec, required_one_of=(('name', 'uuid'),), required_if=(('state', 'present', ('name',),),), supports_check_mode=True, ) cloudscale_server_group = AnsibleCloudscaleBase( module, resource_name='server-groups', resource_create_param_keys=[ 'name', 'type', 'zone', 'tags', ], resource_update_param_keys=[ 'name', 'tags', ], ) cloudscale_server_group.query_constraint_keys = [ 'zone', ] if module.params['state'] == 'absent': result = cloudscale_server_group.absent() else: result = cloudscale_server_group.present() module.exit_json(**result) if __name__ == '__main__': main()
[ "sifang@cisco.com" ]
sifang@cisco.com
dfeed478a0ba3d78ed18820bc893a7854be0d037
87278e84fb4cd4b8dedd4a42cf3a51d48e749ec4
/ch11/findall1.py
78647c0739f421f77730c7b484524a8eb6d5d006
[]
no_license
dykim822/Python
083a6fb9be51e6cb3725a73ea8184f813f572abc
f6bd67d14e3a3a16934650cff6234e9cbad9ebce
refs/heads/main
2023-07-09T23:50:05.059533
2021-07-28T05:07:35
2021-07-28T05:07:35
369,375,985
1
0
null
null
null
null
UTF-8
Python
false
false
261
py
# Demo of re.findall vs re.search on a digit pattern.
import re

match1 = re.findall('[0-9]', ' 1234')  # findall returns every match in the string
print(match1)
match1 = re.findall('\s[0-9]', ' 1234')  # \s requires the char before the digit to be whitespace
print(match1)
match1 = re.search('[0-9]', ' 1234')  # search returns only the first match (a Match object)
print(match1)
[ "dykim822@gmail.com" ]
dykim822@gmail.com
4b1eb2a9839f0bc71f07b4f2671320c5b7184044
bd55c7d73a95caed5f47b0031264ec05fd6ff60a
/apps/core/migrations/0027_auto_20180609_1100.py
73a5cbc1c8a09ce2cd89c76ee508f42436010ea0
[]
no_license
phonehtetpaing/ebdjango
3c8610e2d96318aff3b1db89480b2f298ad91b57
1b77d7662ec2bce9a6377690082a656c8e46608c
refs/heads/main
2023-06-26T13:14:55.319687
2021-07-21T06:04:58
2021-07-21T06:04:58
381,564,118
0
0
null
null
null
null
UTF-8
Python
false
false
3,373
py
# Generated by Django 2.0.5 on 2018-06-09 02:00 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('core', '0026_auto_20180609_1038'), ] operations = [ migrations.CreateModel( name='VendorEventSettings', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('work_start_time', models.TimeField(null=True, verbose_name='work start time')), ('work_end_time', models.TimeField(null=True, verbose_name='work end time')), ('day_off_csv', models.CharField(max_length=32, null=True, verbose_name='off day')), ('buffer_period', models.IntegerField(null=True, verbose_name='buffer minutes')), ('is_google_calender_oauth', models.BooleanField(default=0, verbose_name='google_calender_oauth_flg')), ('admin_text', models.TextField(null=True, verbose_name='memo for admin')), ('regist_dt', models.DateTimeField(auto_now_add=True, null=True, verbose_name='regist datetime')), ('update_dt', models.DateTimeField(auto_now=True, null=True, verbose_name='update datetime')), ('is_delete', models.BooleanField(default=0, verbose_name='delete flg')), ('vendor_branch', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='vendoreventsettings_vendor_branch', to='core.VendorBranch', verbose_name='vendor_branch')), ], options={ 'verbose_name': 'VendorEventSettings', 'permissions': (), }, ), migrations.RemoveField( model_name='vendorreservationsettings', name='vendor_branch', ), migrations.AlterField( model_name='automessagecontroller', name='auto_message_trigger', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='automessagecontroller_auto_message_trigger', to='core.AutoMessageTrigger', verbose_name='auto_message_trigger'), ), migrations.AlterField( model_name='automessagehistory', name='auto_message_condition', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, 
related_name='automessagehistory_auto_message_condition', to='core.AutoMessageCondition', verbose_name='auto_message_condition'), ), migrations.AlterField( model_name='automessagehistory', name='auto_message_trigger', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='automessagehistory_auto_message_trigger', to='core.AutoMessageTrigger', verbose_name='auto_message_trigger'), ), migrations.AlterField( model_name='automessagetrigger', name='auto_message_condition', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='automessagetrigger_auto_message_condition', to='core.AutoMessageCondition', verbose_name='auto_message_condition'), ), migrations.DeleteModel( name='VendorReservationSettings', ), ]
[ "phonehtetpaing1221@gmail.com" ]
phonehtetpaing1221@gmail.com
8ea056e89d91ca1179dcefd67ff5980681224ffb
446d9c9e98bac9bb7d6ba9d6f2639fd1ab0e68af
/pythonBook/chapter07/exercise7-11.py
7b56a91556b954028aedbaff335d986266f8c8bb
[]
no_license
thiagofb84jp/python-exercises
062d85f4f95332549acd42bf98de2b20afda5239
88ad7365a0f051021034ac6f0683b3df2de57cdb
refs/heads/main
2023-07-19T21:15:08.689041
2021-08-17T10:59:09
2021-08-17T10:59:09
308,311,723
0
0
null
null
null
null
UTF-8
Python
false
false
1,542
py
# 7.11. Jogo da forca utilizando lista de strings para desenhar boneco da forca palavras = ["casa", "bola", "mangueira", "uva", "quiabo", "computador", "cobra", "lentilha", "arroz"] indice = int(input("Digite um número: ")) palavra = palavras[(indice * 776) % len(palavras)] for x in range(100): print() digitadas = [] acertos = [] erros = 0 linhasTxt = """ X==:== X : X X X X ====== """ linhas = [] for linha in linhasTxt.splitlines(): linhas.append(list(linha)) while True: senha = "" for letra in palavra: senha += letra if letra in acertos else "." print(senha) if senha == palavra: print("Você acertou!") break tentativa = input("\nDigite uma letra: ").lower().strip() if tentativa in digitadas: print("Você já tentou esta letra!") continue else: digitadas += tentativa if tentativa in palavra: acertos += tentativa else: erros += 1 print("Você errou!") print("X==:==\nX : ") print("X 0 " if erros >= 1 else "X") linha2 = "" if erros == 2: linha2 = r" | " elif erros == 3: linha2 = r" \| " elif erros >= 4: linha2 = r" \|/ " print(f"X{linha2}") linha3 = "" if erros == 5: linha3 += r" / " elif erros >= 6: linha3 += r" / \ " print(f"X{linha3}") print("X\n==========") if erros == 6: print("Enforcado!") print(f"A palavra secreta era: {palavra}")
[ "thiagofb84jp@gmail.com" ]
thiagofb84jp@gmail.com
5c3cd8ad47b46a000dbb5da79b49de3fc8d4f40c
0cdcee391e178092d7073734957075c72681f037
/hackerrank/si/si-finding-frequency.py
fd7b5d044d2333049511cc5159fc814aaea62281
[]
no_license
hrishikeshtak/Coding_Practises_Solutions
6b483bbf19d5365e18f4ea1134aa633ff347a1c1
86875d7436a78420591a60b716acd2780287b4a8
refs/heads/master
2022-10-06T18:44:56.992451
2022-09-25T03:29:03
2022-09-25T03:29:03
125,744,102
0
0
null
null
null
null
UTF-8
Python
false
false
1,573
py
#!/usr/bin/python3
# Sort the data once with quick sort, then answer each frequency query
# with two binary searches: O(N log N) preprocessing + O(log N) per query.

def partition(arr, lo, hi):
    # Lomuto partition of the half-open slice arr[lo:hi], pivot = arr[hi-1].
    # Returns the pivot's final index.
    pivot = arr[hi - 1]
    boundary = lo - 1  # last index of the "<= pivot" region
    for scan in range(lo, hi - 1):
        if arr[scan] <= pivot:
            boundary += 1
            arr[boundary], arr[scan] = arr[scan], arr[boundary]
    arr[boundary + 1], arr[hi - 1] = arr[hi - 1], arr[boundary + 1]
    return boundary + 1

def quick_sort(arr, lo, hi):
    # In-place quick sort of the half-open slice arr[lo:hi].
    if lo >= hi:
        return
    split = partition(arr, lo, hi)
    quick_sort(arr, lo, split)
    quick_sort(arr, split + 1, hi)

def BS1(arr, K):
    # Index of the FIRST occurrence of K in sorted arr, or -1 if absent.
    found = -1
    left, right = 0, len(arr) - 1
    while left <= right:
        middle = (left + right) // 2
        value = arr[middle]
        if value < K:
            left = middle + 1
        else:
            if value == K:
                found = middle
            right = middle - 1  # keep looking further left
    return found

def BS2(arr, K):
    # Index of the LAST occurrence of K in sorted arr, or -1 if absent.
    found = -1
    left, right = 0, len(arr) - 1
    while left <= right:
        middle = (left + right) // 2
        value = arr[middle]
        if value > K:
            right = middle - 1
        else:
            if value == K:
                found = middle
            left = middle + 1  # keep looking further right
    return found

def finding_frequency(arr, N, K):
    # How many times K occurs in sorted arr (N kept for interface parity).
    first = BS1(arr, K)
    last = BS2(arr, K)
    return 0 if first == -1 else last - first + 1

if __name__ == "__main__":
    N = int(input())
    arr = list(map(int, input().split()))
    quick_sort(arr, 0, N)
    for _ in range(int(input())):
        print(finding_frequency(arr, N, int(input())))
[ "hrishikesh.tak@oneconvergence.com" ]
hrishikesh.tak@oneconvergence.com
78ad07c06308d98770e27a8af86b6748553e3938
4b80b53d42cf3c303a58d6234291aaf5a8bc7a4f
/examples/webcam/webcam.py
dba875abb57bae78263cbc399a5fdd8ae8a32869
[ "BSD-3-Clause" ]
permissive
caboteria/aiortc
8ea9e869cbc7bc9ef677e4e2f5bf30bc94d259f3
f85f7133435b54ce9de5f2f391c0c0ef0014e820
refs/heads/master
2020-09-12T19:37:07.477686
2019-11-04T14:50:47
2019-11-04T14:50:47
222,529,347
3
0
BSD-3-Clause
2019-11-18T19:38:13
2019-11-18T19:38:12
null
UTF-8
Python
false
false
3,137
py
import argparse
import asyncio
import json
import logging
import os
import platform
import ssl

from aiohttp import web
from aiortc import RTCPeerConnection, RTCSessionDescription
from aiortc.contrib.media import MediaPlayer

# Directory containing the static assets served below (index.html, client.js).
ROOT = os.path.dirname(__file__)


async def index(request):
    """Serve the demo HTML page."""
    content = open(os.path.join(ROOT, "index.html"), "r").read()
    return web.Response(content_type="text/html", text=content)


async def javascript(request):
    """Serve the browser-side WebRTC client script."""
    content = open(os.path.join(ROOT, "client.js"), "r").read()
    return web.Response(content_type="application/javascript", text=content)


async def offer(request):
    """Handle an SDP offer POSTed by the browser.

    Creates a peer connection, attaches the webcam (or a file given via
    --play-from) as media tracks, and returns the SDP answer as JSON.
    """
    params = await request.json()
    offer = RTCSessionDescription(sdp=params["sdp"], type=params["type"])

    pc = RTCPeerConnection()
    # Track the connection so it can be closed on server shutdown.
    pcs.add(pc)

    @pc.on("iceconnectionstatechange")
    async def on_iceconnectionstatechange():
        print("ICE connection state is %s" % pc.iceConnectionState)
        if pc.iceConnectionState == "failed":
            await pc.close()
            pcs.discard(pc)

    # open media source
    if args.play_from:
        player = MediaPlayer(args.play_from)
    else:
        # Live capture: the device name and format are OS-specific.
        options = {"framerate": "30", "video_size": "640x480"}
        if platform.system() == "Darwin":
            player = MediaPlayer("default:none", format="avfoundation", options=options)
        else:
            player = MediaPlayer("/dev/video0", format="v4l2", options=options)

    await pc.setRemoteDescription(offer)
    # Attach our media to whichever transceivers the browser's offer created.
    for t in pc.getTransceivers():
        if t.kind == "audio" and player.audio:
            pc.addTrack(player.audio)
        elif t.kind == "video" and player.video:
            pc.addTrack(player.video)

    answer = await pc.createAnswer()
    await pc.setLocalDescription(answer)

    return web.Response(
        content_type="application/json",
        text=json.dumps(
            {"sdp": pc.localDescription.sdp, "type": pc.localDescription.type}
        ),
    )


# Live peer connections, closed together in on_shutdown().
pcs = set()


async def on_shutdown(app):
    # close peer connections
    coros = [pc.close() for pc in pcs]
    await asyncio.gather(*coros)
    pcs.clear()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="WebRTC webcam demo")
    parser.add_argument("--cert-file", help="SSL certificate file (for HTTPS)")
    parser.add_argument("--key-file", help="SSL key file (for HTTPS)")
    parser.add_argument("--play-from", help="Read the media from a file and sent it."),
    parser.add_argument(
        "--port", type=int, default=8080, help="Port for HTTP server (default: 8080)"
    )
    parser.add_argument("--verbose", "-v", action="count")
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    # HTTPS is enabled only when a certificate is supplied.
    if args.cert_file:
        ssl_context = ssl.SSLContext()
        ssl_context.load_cert_chain(args.cert_file, args.key_file)
    else:
        ssl_context = None

    app = web.Application()
    app.on_shutdown.append(on_shutdown)
    app.router.add_get("/", index)
    app.router.add_get("/client.js", javascript)
    app.router.add_post("/offer", offer)
    web.run_app(app, port=args.port, ssl_context=ssl_context)
[ "jeremy.laine@m4x.org" ]
jeremy.laine@m4x.org
10eb48208a23e05214dce95000fdaa72926fe379
858ccfa59703f5c6b822c0a88d72ac84610f6353
/Day 6/exercise.py
a89cb1cfb0151fde47f367a63432eca3810dd577
[]
no_license
purusottam234/Python-Class
7db8d6084bc2271c00bd0bb88e70768fb86fcc3e
df09421e3a1a110ef592b0e0c971ca824854a4d8
refs/heads/main
2023-05-09T07:35:51.931483
2021-06-05T15:45:56
2021-06-05T15:45:56
364,840,604
0
0
null
null
null
null
UTF-8
Python
false
false
571
py
# Exercise 1: first power of 7 greater than 1000.
product = 7
while product <= 1000:
    product *= 7
print(product)

# Exercise 2: total of the integers from 0 through 1,000,000 using range + for.
total = 0
for number in range(1000001):
    total += number
print(total)

# Exercise 3: f-string inserting number1 (7), number2 (5) and their product,
# e.g. "7 times 5 is 35".
number1 = 7
number2 = 5
mul = number1 * number2
print(f'{number1} times {number2} is {mul}')
[ "purusottamadhikari234@gmail.com" ]
purusottamadhikari234@gmail.com
7c7968948bbb512805492e446d8cc02e2418e385
d257ddf7e6959d0989d76080a8a048e82393657f
/002_TemplateMatching/001_template_match_provided.py
2ee8a6ec9e5ed11e52303834c1d83472c132e0c5
[ "MIT" ]
permissive
remichartier/027_selfDrivingCarND_ObjectDetectionExercises
d210f37b7baf306dd034c09f62e125b263f8270d
ccd853c975d35df5f31e1a445a1a8757b8bd13f5
refs/heads/main
2023-04-17T08:09:55.465143
2021-05-03T07:11:16
2021-05-03T07:11:16
362,013,745
0
0
null
null
null
null
UTF-8
Python
false
false
1,509
py
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg

# Scene image to search in, plus the template cutouts to look for.
image = mpimg.imread('bbox-example-image.jpg')
#image = mpimg.imread('temp-matching-example-2.jpg')
templist = ['cutout1.jpg', 'cutout2.jpg', 'cutout3.jpg',
            'cutout4.jpg', 'cutout5.jpg', 'cutout6.jpg']


# Here is your draw_boxes function from the previous exercise
def draw_boxes(img, bboxes, color=(0, 0, 255), thick=6):
    """Return a copy of img with every ((x1, y1), (x2, y2)) box drawn on it."""
    # Make a copy of the image
    imcopy = np.copy(img)
    # Iterate through the bounding boxes
    for bbox in bboxes:
        # Draw a rectangle given bbox coordinates
        cv2.rectangle(imcopy, bbox[0], bbox[1], color, thick)
    # Return the image copy with boxes drawn
    return imcopy


# Define a function that takes an image and a list of templates as inputs
# then searches the image and returns the a list of bounding boxes
# for matched templates
def find_matches(img, template_list):
    # NOTE(review): exercise stub — the steps below are left for the student
    # to implement; as written this always returns an empty list.
    # Make a copy of the image to draw on
    # Define an empty list to take bbox coords
    bbox_list = []
    # Iterate through template list
    # Read in templates one by one
    # Use cv2.matchTemplate() to search the image
    #     using whichever of the OpenCV search methods you prefer
    # Use cv2.minMaxLoc() to extract the location of the best match
    # Determine bounding box corners for the match
    # Return the list of bounding boxes
    return bbox_list


bboxes = find_matches(image, templist)
result = draw_boxes(image, bboxes)
plt.imshow(result)
[ "remipr.chartier@gmail.com" ]
remipr.chartier@gmail.com
2e3b493d91df12e0a3baa4ceaf9e41d9bfec86ea
e8f99a162207cba82d4e0f969d7bcdb2b9d8b522
/dev_demo/tmp_file_demo2.py
39d04c5233f6ba6a8a18ad7a744ff59847bfbcf3
[]
no_license
TesterCC/Python3Scripts
edb5446278ebf13edb64336001081941ca27d67d
58be67e1ffc74ef50289a885aa4ad05f58e2c383
refs/heads/master
2023-08-30T21:16:38.328045
2023-08-17T11:23:08
2023-08-17T11:23:08
93,401,996
6
3
null
null
null
null
UTF-8
Python
false
false
1,176
py
# -*- coding: utf-8 -*-
# Demo of the tempfile module: temporary files and directories created via
# `with` blocks are closed and deleted automatically when the block exits.

import tempfile

# NamedTemporaryFile has a real name on the filesystem while it is open.
with tempfile.NamedTemporaryFile() as fp:
    fp.write(b'I Love Security, Python and Go!')
    # Rewind so the content just written can be read back.
    fp.seek(0)
    print(fp.read())
    print("temp file name: ", fp.name)

# TemporaryDirectory removes the whole directory tree on exit.
with tempfile.TemporaryDirectory() as tmpdirname:
    print('创建临时目录', tmpdirname)
[ "testerlyx@foxmail.com" ]
testerlyx@foxmail.com
80d9c3ff54ab696e355e64cbd68bbafd5b1c6aeb
08353419541e9f3be586a7e575585a55c98b976b
/src/pythonfinder/__init__.py
25fd2ed74524c8053dee4975d7dac772f70b6875
[ "MIT" ]
permissive
immerrr/pythonfinder
df9a923644c2fb5f91115aa54d2e12b5d50bff99
f072cf19cfebff73229a19e24bfffd378716d742
refs/heads/master
2020-04-01T00:56:27.668668
2018-10-11T09:45:12
2018-10-11T09:45:12
152,719,560
0
0
MIT
2018-10-12T08:35:20
2018-10-12T08:35:20
null
UTF-8
Python
false
false
284
py
from __future__ import print_function, absolute_import

# Package version string; bumped on each release.
__version__ = '1.1.1.dev0'

# Public API of the pythonfinder package.
__all__ = ["Finder", "WindowsFinder", "SystemPath", "InvalidPythonVersion"]

# Re-export the public names from their implementation modules.
from .pythonfinder import Finder
from .models import SystemPath, WindowsFinder
from .exceptions import InvalidPythonVersion
[ "dan@danryan.co" ]
dan@danryan.co
80c4495cf6e71d58bc92bfe8640a7d25193c3d2a
487ce91881032c1de16e35ed8bc187d6034205f7
/codes/CodeJamCrawler/16_0_4_neat/16_0_4_coolbouy_codejam4.py
3d006c88b9ccd44321ca82d0760a06418d9ad546
[]
no_license
DaHuO/Supergraph
9cd26d8c5a081803015d93cf5f2674009e92ef7e
c88059dc66297af577ad2b8afa4e0ac0ad622915
refs/heads/master
2021-06-14T16:07:52.405091
2016-08-21T13:39:13
2016-08-21T13:39:13
49,829,508
2
0
null
2021-03-19T21:55:46
2016-01-17T18:23:00
Python
UTF-8
Python
false
false
302
py
# Code Jam: for each case print "Case #k:" followed by the K values
# i * K^(C-1) + 1 for i = 0..K-1, separated by single spaces.
import sys

case_count = int(input())
for case_index in range(case_count):
    K, C, S = map(int, sys.stdin.readline().split())
    values = [str(i * pow(K, C - 1) + 1) for i in range(K)]
    print(f"Case #{case_index + 1}:", " ".join(values))
[ "[dhuo@tcd.ie]" ]
[dhuo@tcd.ie]
716723e46591c1620beeabe94cce5492e920b866
651a296c8f45b5799781fd78a6b5329effe702a0
/legendre_product_polynomial/polynomial_sort.py
1ec578df1da3c92db966c42cc5e401a2b22fa2e5
[]
no_license
pdhhiep/Computation_using_Python
095d14370fe1a01a192d7e44fcc81a52655f652b
407ed29fddc267950e9860b8bbd1e038f0387c97
refs/heads/master
2021-05-29T12:35:12.630232
2015-06-27T01:05:17
2015-06-27T01:05:17
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,215
py
#!/usr/bin/env python
#
# NOTE(review): Python 2 source (print statements); it will not run under
# Python 3 without conversion.


def polynomial_sort ( o, c, e ):

#*****************************************************************************80
#
## POLYNOMIAL_SORT sorts the information in a polynomial.
#
#  Discussion:
#
#    The coefficients C and exponents E are rearranged so that
#    the elements of E are in ascending order.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Modified:
#
#    27 October 2014
#
#  Author:
#
#    John Burkardt
#
#  Parameters:
#
#    Input, integer O, the "order" of the polynomial.
#
#    Input, real C[O], the coefficients of the scaled polynomial.
#
#    Input, integer E[O], the indices of the exponents of
#    the scaled polynomial.
#
#    Output, real C[O], the coefficients of the sorted polynomial.
#
#    Output, integer E[O], the indices of the exponents of
#    the sorted polynomial.
#
  from i4vec_permute import i4vec_permute
  from i4vec_sort_heap_index_a import i4vec_sort_heap_index_a
  from r8vec_permute import r8vec_permute

  # Compute the permutation that puts E ascending, then apply it to both
  # the exponents and the matching coefficients.
  indx = i4vec_sort_heap_index_a ( o, e )

  e = i4vec_permute ( o, indx, e )
  c = r8vec_permute ( o, indx, c )

  return c, e

def polynomial_sort_test ( ):

#*****************************************************************************80
#
## POLYNOMIAL_SORT_TEST tests POLYNOMIAL_SORT.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Modified:
#
#    27 October 2014
#
#  Author:
#
#    John Burkardt
#
  from polynomial_print import polynomial_print
  import numpy as np

  # A small 3-variable polynomial with deliberately unsorted exponent indices.
  m = 3
  o = 6
  c = np.array ( [ 0.0, 9.0, -5.0, - 13.0, 7.0, 11.0 ], dtype = np.float64 )
  e = np.array ( [ 12, 4, 2, 33, 1, 5 ], dtype = np.int32 )

  print ''
  print 'POLYNOMIAL_SORT_TEST'
  print ' POLYNOMIAL_SORT sorts a polynomial by exponent index.'

  print ''
  title = ' Unsorted polynomial:'
  polynomial_print ( m, o, c, e, title )

  c, e = polynomial_sort ( o, c, e )

  print ''
  title = ' Sorted polynomial:'
  polynomial_print ( m, o, c, e, title )

  print ''
  print 'POLYNOMIAL_SORT_TEST:'
  print ' Normal end of execution.'

  return

if ( __name__ == '__main__' ):
  from timestamp import timestamp
  timestamp ( )
  polynomial_sort_test ( )
  timestamp ( )
[ "siplukabir@gmail.com" ]
siplukabir@gmail.com
acda951565a8ca9395ca49144cb5fab259a066b7
f68afe06e4bbf3d523584852063e767e53441b2b
/Toontown/toontown/hood/BossbotHQAI.py
3b10ecb5ff983612b9311f78f57786a5d65e665e
[]
no_license
DankMickey/Toontown-Offline-Squirting-Flower-Modded-
eb18908e7a35a5f7fc95871814207858b94e2600
384754c6d97950468bb62ddd8961c564097673a9
refs/heads/master
2021-01-19T17:53:36.591832
2017-01-15T02:00:04
2017-01-15T02:00:04
34,639,744
1
1
null
null
null
null
UTF-8
Python
false
false
2,548
py
from toontown.building import DistributedBBElevatorAI
from toontown.building import FADoorCodes
from toontown.building.DistributedBoardingPartyAI import DistributedBoardingPartyAI
from toontown.coghq import DistributedCogKartAI
from toontown.hood import CogHQAI
from toontown.suit import DistributedBossbotBossAI
from toontown.suit import DistributedSuitPlannerAI
from toontown.toonbase import ToontownGlobals


class BossbotHQAI(CogHQAI.CogHQAI):
    """AI-side manager for the Bossbot HQ hood.

    The CogHQAI base handles the lobby elevator and CEO boss; this subclass
    adds the three golf-kart course entrances, an optional boarding party for
    them, and a suit planner for the zone.
    """

    def __init__(self, air):
        CogHQAI.CogHQAI.__init__(
            self, air, ToontownGlobals.BossbotHQ,
            ToontownGlobals.BossbotLobby, FADoorCodes.BB_DISGUISE_INCOMPLETE,
            DistributedBBElevatorAI.DistributedBBElevatorAI,
            DistributedBossbotBossAI.DistributedBossbotBossAI)
        # Distributed objects created by startup(), kept for bookkeeping.
        self.cogKarts = []
        self.courseBoardingParty = None
        self.suitPlanners = []
        self.startup()

    def startup(self):
        """Create the karts, boarding party and suit planner (config-gated)."""
        CogHQAI.CogHQAI.startup(self)
        self.createCogKarts()
        if simbase.config.GetBool('want-boarding-groups', True):
            self.createCourseBoardingParty()
        if simbase.config.GetBool('want-suit-planners', True):
            self.createSuitPlanners()

    def createCogKarts(self):
        """Place one kart per golf course at fixed positions/headings."""
        posList = ((-26.5658, 237.459, 0), (132.197, 227.845, 0),
                   (-28.725, -235.706, 0))
        hprList = ((-159, 0, 0), (172, 0, 0), (-21, 0, 0))
        # Minimum laff requirement per course index.
        mins = ToontownGlobals.FactoryLaffMinimums[3]
        # NOTE(review): Python 2 code (xrange).
        for cogCourse in xrange(len(posList)):
            pos = posList[cogCourse]
            hpr = hprList[cogCourse]
            cogKart = DistributedCogKartAI.DistributedCogKartAI(
                self.air, cogCourse, pos[0], pos[1], pos[2], hpr[0], hpr[1],
                hpr[2], self.air.countryClubMgr, minLaff=mins[cogCourse])
            cogKart.generateWithRequired(self.zoneId)
            self.cogKarts.append(cogKart)

    def createCourseBoardingParty(self):
        """One boarding party (group size 4) shared by all course karts."""
        cogKartIdList = []
        for cogKart in self.cogKarts:
            cogKartIdList.append(cogKart.doId)
        self.courseBoardingParty = DistributedBoardingPartyAI(self.air, cogKartIdList, 4)
        self.courseBoardingParty.generateWithRequired(self.zoneId)

    def createSuitPlanners(self):
        """Spawn the suit planner for this zone and register it with the AI."""
        suitPlanner = DistributedSuitPlannerAI.DistributedSuitPlannerAI(self.air, self.zoneId)
        suitPlanner.generateWithRequired(self.zoneId)
        suitPlanner.d_setZoneId(self.zoneId)
        suitPlanner.initTasks()
        self.suitPlanners.append(suitPlanner)
        self.air.suitPlanners[self.zoneId] = suitPlanner
[ "jareddarty96@gmail.com" ]
jareddarty96@gmail.com
26a8369da37cc8e23ab8a102609a8d5ead3ac030
bcf0e03ebd7e55588dcf48ab5d990534f8d9ab0c
/Hackerrank/Archive 2019/ginortS.py
ad58d6a3130006607742eb1cc5781973998896a6
[]
no_license
nsky80/competitive_programming
731321aaf42d9ae546f1d13bbb05215a1fbcfe45
9b0c0ffccf092d4d4bbf50cac1746f44dd977d57
refs/heads/master
2022-02-06T11:58:44.313635
2022-01-30T09:20:15
2022-01-30T09:20:15
199,516,791
1
2
null
2022-01-30T09:20:16
2019-07-29T19:43:17
Python
UTF-8
Python
false
false
558
py
# Sample Input
#
# Sorting1234
# Sample Output
#
# ginortS1324
import re


def ginort_sort(text):
    """Return *text* re-ordered as: sorted lowercase letters, sorted uppercase
    letters, sorted odd digits, then sorted even digits.

    Refactored out of __main__ so the logic is reusable and testable; the
    original flattened the digit lists with the obscure sum([...], []) trick.
    """
    lowers = sorted(re.findall(r'[a-z]', text))
    uppers = sorted(re.findall(r'[A-Z]', text))
    digits = [int(ch) for ch in re.findall(r'[0-9]', text)]
    odds = sorted(d for d in digits if d % 2 == 1)
    evens = sorted(d for d in digits if d % 2 == 0)
    return "".join(lowers + uppers + [str(d) for d in odds + evens])


if __name__ == "__main__":
    print(ginort_sort(input()))
[ "satishkumary80@gmail.com" ]
satishkumary80@gmail.com
37798f2901a1126fcf537c806d09590b0e0ad4ec
255e19ddc1bcde0d3d4fe70e01cec9bb724979c9
/dockerized-gists/2944123/snippet.py
66b8345386111bdad2c832d22d7126998d38f57a
[ "MIT" ]
permissive
gistable/gistable
26c1e909928ec463026811f69b61619b62f14721
665d39a2bd82543d5196555f0801ef8fd4a3ee48
refs/heads/master
2023-02-17T21:33:55.558398
2023-02-11T18:20:10
2023-02-11T18:20:10
119,861,038
76
19
null
2020-07-26T03:14:55
2018-02-01T16:19:24
Python
UTF-8
Python
false
false
236
py
# http://www.linuxhomenetworking.com/forums/showthread.php/1095-Linux-console-Colors-And-Other-Trick-s


def printWarning(input):
    """Print *input* wrapped in ANSI red (code 31), then reset the colour."""
    print(f"\033[31m{input!s}\033[0m")


def funkyprint(input):
    """Print *input* wrapped in ANSI cyan (code 36), then reset the colour."""
    print(f"\033[36m{input!s}\033[0m")
[ "gistshub@gmail.com" ]
gistshub@gmail.com
1f6b0db4d2ed04dd37abbe540807efd13d8e76bb
cb324391aa08aea41faeff9ae58f9ad81ef5ac30
/ssseg/cfgs/ocrnet/cfgs_ade20k_hrnetv2w18s.py
8d42127b4a14a73294e44d227751ca32c3c505a7
[ "MIT" ]
permissive
suyanzhou626/sssegmentation
841205baf6b1edb0f94c91fe347550886c3aea49
55084161c216e9c8c0a7bb1d154200ab81eb2522
refs/heads/main
2023-08-15T06:35:41.836575
2021-09-14T01:33:50
2021-09-14T01:33:50
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,424
py
'''define the config file for ade20k and hrnetv2w-18-small'''
from .base_cfg import *

# modify dataset config: point train/test splits at the ADE20k dataset
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG['train'].update(
    {
        'type': 'ade20k',
        'rootdir': 'data/ADE20k',
    }
)
DATASET_CFG['test'].update(
    {
        'type': 'ade20k',
        'rootdir': 'data/ADE20k',
    }
)
# modify dataloader config (no changes from the base config)
DATALOADER_CFG = DATALOADER_CFG.copy()
# modify optimizer config: train for 130 epochs
OPTIMIZER_CFG = OPTIMIZER_CFG.copy()
OPTIMIZER_CFG.update(
    {
        'max_epochs': 130
    }
)
# modify losses config (no changes from the base config)
LOSSES_CFG = LOSSES_CFG.copy()
# modify model config: 150 ADE20k classes, HRNetV2-W18-Small backbone
MODEL_CFG = MODEL_CFG.copy()
MODEL_CFG.update(
    {
        'num_classes': 150,
        'backbone': {
            'type': 'hrnetv2_w18_small',
            'series': 'hrnet',
            'pretrained': True,
            'selected_indices': (0, 0),
        },
    }
)
# modify inference config (no changes from the base config)
INFERENCE_CFG = INFERENCE_CFG.copy()
# modify common config: per-experiment backup/log/result paths
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['train'].update(
    {
        'backupdir': 'ocrnet_hrnetv2w18s_ade20k_train',
        'logfilepath': 'ocrnet_hrnetv2w18s_ade20k_train/train.log',
    }
)
COMMON_CFG['test'].update(
    {
        'backupdir': 'ocrnet_hrnetv2w18s_ade20k_test',
        'logfilepath': 'ocrnet_hrnetv2w18s_ade20k_test/test.log',
        'resultsavepath': 'ocrnet_hrnetv2w18s_ade20k_test/ocrnet_hrnetv2w18s_ade20k_results.pkl'
    }
)
[ "1159254961@qq.com" ]
1159254961@qq.com
86d2ce44f553a827461a97282430593362029aea
0049d7959ff872e2ddf6ea3ce83b6c26512425a6
/django_demo_applications/djangoprojectsot/blog_project/blog/migrations/0002_comment.py
c5696599e3d51a93da698aeed82aa74f6c965f53
[]
no_license
srazor09/Django_projects
9806ab25d966af780cdabe652a1792220c7806a8
8d664ba4c9478bd93c8e5bcbcaf594e8ffe6ce93
refs/heads/master
2023-04-18T02:13:15.993393
2021-05-04T20:34:05
2021-05-04T20:34:05
364,379,605
0
0
null
null
null
null
UTF-8
Python
false
false
1,146
py
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-11-05 15:48 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('blog', '0001_initial'), ] operations = [ migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=32)), ('email', models.EmailField(max_length=254)), ('body', models.TextField()), ('created', models.DateTimeField(auto_now_add=True)), ('updated', models.DateTimeField(auto_now=True)), ('active', models.BooleanField(default=True)), ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.Post')), ], options={ 'ordering': ('-created',), }, ), ]
[ "sourabhaws09@gmail.com" ]
sourabhaws09@gmail.com
0dbb943458f2c86fae6f65b0ea378179e0299942
c5200b0cab496328cb5e37f2b3a51d5536ae3458
/CRUD_App/models.py
4f6d8deaa62dbe513d37f0aa7bcfbf58580e7e62
[]
no_license
Touhid7051/Toll-system-Django
b426047719f077cd1534d5a4d12abdc271ba1c6d
d5fbbc1af6e245f3814b95a1a2c5fa44ef5e1260
refs/heads/main
2023-01-25T04:19:02.453007
2020-12-07T11:37:47
2020-12-07T11:37:47
315,890,095
0
0
null
null
null
null
UTF-8
Python
false
false
1,180
py
from django.contrib.auth.models import User
from django.db import models


# Create your models here.
class Student_Admission(models.Model):
    """Admission record: student details, contact info and photo."""
    name = models.CharField(max_length=200)
    father_name = models.CharField(max_length=200)
    mother_name = models.CharField(max_length=200)
    number = models.IntegerField()
    email = models.EmailField(max_length=200)
    student_image = models.ImageField(upload_to="Student_Admission")
    # Audit timestamps: set on insert / on every save respectively.
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.name

    def image_url(self):
        """Return the uploaded photo's URL, or '' when no photo was uploaded."""
        if self.student_image:
            return self.student_image.url
        else:
            return ""


class User_Profile(models.Model):
    """Optional per-user profile linked one-to-one to Django's auth User."""
    user = models.OneToOneField(User, null=True ,on_delete=models.CASCADE, blank=True)
    name = models.CharField(max_length=200, null=True , blank=True)
    phone = models.IntegerField( null=True , blank=True)
    email = models.EmailField(max_length=200, null=True , blank=True)
    date_created = models.DateTimeField(auto_now_add=True ,null=True ,blank=True)

    def __str__(self):
        # NOTE(review): name is nullable, so this can return None and raise a
        # TypeError in places that expect a str — confirm name is always set.
        return self.name
[ "touhidul15-7051@diu.edu.bd" ]
touhidul15-7051@diu.edu.bd
f56064abeee2161d2d22b4585280c087ffce1296
822d3cd484b54f0531fc205520c765a8321c0613
/pyFile/16/rabbitmq/2一对多默认交换/消费者.py
427ca4fbba7d68155fc35c5bf5cb55bbfaaec3a0
[]
no_license
mghxy123/learnPython
31d1cc18deeed5a89864ca0333fe488e0dbf08b4
00740e87d55a4dffd78773deaff8689485df31e8
refs/heads/master
2021-07-21T14:31:02.421788
2020-06-27T11:28:01
2020-06-27T11:28:01
187,751,182
0
0
null
2020-06-07T05:14:05
2019-05-21T02:58:35
Python
UTF-8
Python
false
false
1,236
py
#!/usr/bin/env python # -*- coding: utf-8 -*- # File : 消费者.py # Author: HuXianyong # Date : 2019/7/28 15:07 import pika queue_name = 'hello' params = pika.URLParameters('amqp://hxy:hxy@192.168.18.181:5672/test') connection = pika.BlockingConnection(params) channel = connection.channel() # 队列声明 channel.queue_declare(queue = queue_name) # 声明一个Q,存在就是用,不存在就创建 def call_back(ch,method,properties,body): print(1,body) def call_back1(ch,method,properties,body): print(2,body) with connection: # 每一个消费者使用一个basic_consume channel.basic_consume(queue=queue_name, auto_ack=True, on_message_callback=call_back) ## 这里模拟一生产者对二个消费者的情况,可以是一个信道对应连个consume ## 也可以是开启两个进程两个信道对应两个consume # channel.basic_consume(queue=queue_name, # auto_ack=True, # on_message_callback=call_back1) print(' [*] Waiting for messages. To exit press CTRL+C') channel.start_consuming() # 启动所有消费,直到所有消费都结束,才能退出,它是阻塞的.
[ "mghxy123@163.com" ]
mghxy123@163.com
2ec3264972ba16783b405b5a1c54edbefc7bed13
cdc1705a813eeb17f8f17caff1aeb5b6a6f5e686
/project/scanner/models.py
d55b823671fa19c5bcbf0a2fa42de49e8e185a31
[ "MPL-2.0", "BSD-3-Clause" ]
permissive
mozilla/minion-frontend-old
13791dc7b0c6cf464a89fb44b002a7e84ff49929
a9af49f7e7c130820056f9ca4977d59161e5211a
refs/heads/master
2023-07-03T19:18:50.694581
2013-01-31T00:05:23
2013-01-31T00:05:23
7,925,548
0
1
BSD-3-Clause
2022-01-18T18:55:42
2013-01-30T22:43:10
JavaScript
UTF-8
Python
false
false
548
py
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from django.contrib.auth.models import User from django.db import models class Scan(models.Model): scan_id = models.CharField(max_length=100, primary_key = True) scan_creator = models.ForeignKey(User) scan_date = models.DateTimeField(auto_now_add=True) scan_url = models.URLField() scan_plan = models.CharField(max_length=100)
[ "stefan@arentz.ca" ]
stefan@arentz.ca
11092818351d13c04af03ec8b765666ea7587db3
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/420/usersdata/314/88743/submittedfiles/exe11.py
a8131def3845324b6d19c1007b1e118b03b47fa0
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
135
py
# -*- coding: utf-8 -*- n=8 soma=0 n=int(input('Digite um numero com 8 digitos: ')) while(t==8):
[ "rafael.mota@ufca.edu.br" ]
rafael.mota@ufca.edu.br
76b12f773db2d2363e82999e4bf1956601dd6424
27545601844006324ba9126089389fe2cd6aa742
/Liushui/garbage/analysis.py
08fd81d7d2f0d56e9c7a318d945d9ac07d736bae
[]
no_license
AllenLiuX/Aitai-Bill-Analysis-with-NN
8e6d7aef5b6bd43b620c95b7e13db44a634fcdb9
e69198ed4ce30033481f28fd948dd42780d8c75a
refs/heads/master
2023-03-13T19:52:07.244617
2021-03-01T15:54:38
2021-03-01T15:54:38
307,239,057
1
0
null
null
null
null
UTF-8
Python
false
false
7,694
py
# -- coding:UTF-8 --
import pandas as pd
import numpy as np
import time
import Modules.mongodb as mongo
import Modules.public_module as md


class Analyzer:
    """Analyses a company's bank-statement spreadsheets (one Excel file per
    bank account): balance consistency, Benford's-law conformity, missing
    fields, date coverage and cross-account transfer matching.

    Column names in the spreadsheets are Chinese: 流入金额 (inflow),
    流出金额 (outflow), 交易后余额 (post-transaction balance),
    交易日期 (transaction date), 对方名称/对方账号 (counterparty name/account),
    摘要 (abstract).
    """

    def __init__(self, name):
        self.name = name            # MongoDB database name for this client
        self.company_name = ''      # company whose accounts are analysed
        self.file_paths = []        # one Excel ledger per bank account
        self.dates = []             # [start, end] date range of each ledger
        self.self_accounts = []     # the company's own account numbers
        self.path2account = {}      # ledger path -> account number

    def get_paths(self):
        return self.file_paths

    def get_infos(self):
        """Load form metadata from MongoDB; return False when none exists."""
        forms = mongo.show_datas(self.name, {'type': 'form'}, 'mapping')
        if not forms:
            return False
        for form in forms:
            self.file_paths.append(form['path'])
            self.dates.append(form['dates'])
            self.self_accounts.append(form['account'])
            self.path2account[form['path']] = form['account']
        self.company_name = forms[0]['company_name']
        # Dates arrive as strings; normalise to int for merging/comparison.
        for d in range(len(self.dates)):
            self.dates[d][0] = int(self.dates[d][0])
            self.dates[d][1] = int(self.dates[d][1])
        print(self.file_paths)
        print(self.dates)
        print(self.self_accounts)
        return True

    def balance_check(self, error_tolerance, file_path):
        """Verify each row's balance follows from the previous balance plus
        inflow / minus outflow.

        :param error_tolerance: allowed absolute rounding error on the
            post-transaction balance; recommended to be > 1.
        :param file_path: Excel ledger to check.
        :return: list of transaction dates whose balance is inconsistent.
        """
        cur_df = pd.read_excel(file_path)
        invalid = []
        income = cur_df['流入金额'].values
        out = cur_df['流出金额'].values
        balance = cur_df['交易后余额'].values
        for i in range(1, len(income)):
            if (not np.isnan(income[i])) and income[i] != 0:
                if abs(balance[i-1] + income[i] - balance[i]) > error_tolerance:
                    invalid.append(i)
            elif (not np.isnan(out[i])) and out[i] != 0:
                # BUGFIX: the original wrote abs(balance[i-1] - out[i] != balance[i]),
                # which compares first (precedence) and takes abs() of a bool.
                # Compare the difference to the tolerance, mirroring the
                # inflow branch above.
                if abs(balance[i-1] - out[i] - balance[i]) > error_tolerance:
                    invalid.append(i)
        # Dates of every row with an inconsistent balance.
        invalid_dates = cur_df.loc[invalid]['交易日期'].values.tolist()
        print('ratio of invalid balance: ', len(invalid_dates)/len(income))
        return invalid_dates

    def benford_check(self, file_path):
        """Benford's-law check over all non-NaN inflow/outflow amounts.

        :return: (benford coefficient, number of samples used).
        """
        cur_df = pd.read_excel(file_path)
        income = cur_df['流入金额'].values
        out = cur_df['流出金额'].values
        income2, out2 = [], []
        for i in range(len(income)):
            if not np.isnan(income[i]):
                income2.append(income[i])
            if not np.isnan(out[i]):
                out2.append(out[i])
        # Renamed from `all` (shadowed the builtin).
        amounts = income2 + out2
        res = md.benford(amounts)
        print('benford coefficient: ', res[0])
        print('total samples: ', len(amounts))
        return res[0], len(amounts)

    def info_missing_check(self, file_path):
        """Count rows whose abstract / counterparty-name cell is missing
        (non-string, e.g. NaN).

        :return: [missing abstract count, missing counterparty-name count].
        """
        cur_df = pd.read_excel(file_path)
        abstract = cur_df['摘要'].values
        receiver_name = cur_df['对方名称'].values
        abstract_num = 0
        receiver_num = 0
        for i in range(len(abstract)):
            if not isinstance(abstract[i], str):
                abstract_num += 1
            if not isinstance(receiver_name[i], str):
                receiver_num += 1
        print('缺失的对方名称有:', receiver_num)
        print('缺失的摘要有:', abstract_num)
        return [abstract_num, receiver_num]

    def dates_check(self):
        """Merge the per-ledger date ranges into overall covered intervals."""
        merged_dates = md.merge_dates(self.dates)
        print('the merged dates are:', merged_dates)
        return merged_dates

    def inner_account_check(self):
        """Find counterparty accounts of self-transfers that are not among the
        company's own known accounts."""
        invalid_accounts = []
        for path in self.file_paths:
            cur_df = pd.read_excel(path)
            for index in cur_df.index:
                # Rows whose counterparty is the company itself.
                if cur_df.loc[index, '对方名称'] == self.company_name:
                    cur_account = cur_df.loc[index, '对方账号']
                    if cur_account not in self.self_accounts:
                        invalid_accounts.append(cur_account)
        print('missing accounts:', invalid_accounts)
        return invalid_accounts

    def cross_validation(self):
        """For every inter-company transfer found in one account's ledger,
        look for the matching entry (same date, mirrored in/out amount) in the
        counterparty account's ledger.

        :return: list of transactions with no counterpart entry.
        """
        invalid_accounts = []
        account2df = {}
        # Load every account's ledger once up front.
        for path in self.file_paths:
            account2df[self.path2account[path]] = pd.read_excel(path)

        # Collect all transfers whose counterparty is the company itself.
        account2trans = {}
        for account in self.self_accounts:
            cur_df = account2df[account]
            for index in cur_df.index:
                if cur_df.loc[index, '对方名称'] == self.company_name:
                    cur_account = cur_df.loc[index, '对方账号']
                    if cur_account not in self.self_accounts:
                        invalid_accounts.append(cur_account)
                    account2trans.setdefault(account, []).append(cur_df.loc[index])

        unmatched_trans = []
        for from_acc, trans in account2trans.items():
            for tran in trans:
                tran_date = tran.loc['交易日期']
                tran_in = tran.loc['流入金额']
                tran_out = tran.loc['流出金额']
                out_acc = tran.loc['对方账号']
                if out_acc in account2df:
                    to_df = account2df[out_acc]
                else:
                    print('not existed account: ', out_acc)
                    continue
                matched = False
                # BUGFIX: search the counterparty's ledger (to_df); the
                # original iterated the stale `cur_df` left over from the
                # collection loop above, so matches were looked up in the
                # wrong account.
                for index in to_df.index:
                    if to_df.loc[index, '对方账号'] == from_acc and to_df.loc[index, '交易日期'] == tran_date:
                        # A transfer out of one account shows up as a transfer
                        # into the other, and vice versa.
                        if to_df.loc[index, '流入金额'] == tran_out or to_df.loc[index, '流出金额'] == tran_in:
                            print('Get one matched transaction.', from_acc, out_acc)
                            matched = True
                            break
                if not matched:
                    print('---- not matched!----\n', tran)
                    unmatched_trans.append(tran)
        return unmatched_trans


def run(name):
    """Run every check for client *name*.

    :return: report dict keyed by ledger path plus overall entries, or the
        string 'invalid name' when no metadata exists for *name*.
    """
    analyst = Analyzer(name)
    infostatus = analyst.get_infos()
    if not infostatus:
        return 'invalid name'
    res = {}
    print('------ Reports ------')
    for path in analyst.get_paths():
        cur_info = {}
        print('----- '+path+' ------')
        # NOTE: report key keeps the original spelling ('balence...') since
        # downstream consumers may rely on it.
        cur_info['balence_error_dates'] = analyst.balance_check(0, path)
        cur_info['benford'] = analyst.benford_check(path)
        infomiss = analyst.info_missing_check(path)
        cur_info['abstract_missing'] = infomiss[0]
        cur_info['receiver_missing'] = infomiss[1]
        res[path] = cur_info
    print('----- overall report -----')
    res['dates_coverage'] = analyst.dates_check()
    res['missing_accounts'] = analyst.inner_account_check()
    res['unmatched_trans'] = analyst.cross_validation()
    return res


if __name__ == '__main__':
    start_time = time.time()
    res = run('tongpu')
    print(res)
    print("--- %s seconds ---" % (time.time() - start_time))
[ "13120200491@163.com" ]
13120200491@163.com
12d1d66a6c71abe3a1b39f8ed964b38b52784cc2
88a5d63f9e4843f6a607d8f9f3d95a9972e91253
/rhos_bootstrap/distribution.py
07e498ac7eb99ae00ed76bba86a32a2fe335c31a
[ "Apache-2.0" ]
permissive
strider/rhos-bootstrap
a416964cfb5ddc06dd1922406726fed92a180cba
ef3dd6b6ca102219e991acecc6b8476404deb323
refs/heads/main
2023-03-30T05:36:01.295870
2021-03-31T20:40:49
2021-03-31T20:40:49
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,872
py
# Copyright 2020 Red Hat, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import logging
import os
import subprocess

import yaml

from rhos_bootstrap import constants
from rhos_bootstrap import exceptions
from rhos_bootstrap.utils import repos
from rhos_bootstrap.utils import dnf
from rhos_bootstrap.utils import rhsm

LOG = logging.getLogger(__name__)


class DistributionInfo:
    """Distribution information

    Identifies the host distribution (id, version id, name) — either from the
    constructor arguments or from /etc/os-release — and loads the matching
    release-metadata YAML file from ``constants.RHOS_VERSIONS_DIR``.
    """

    def __init__(
        self,
        distro_id: str = None,
        distro_version_id: str = None,
        distro_name: str = None,
    ):
        """Distribution Information class

        :param distro_id: distribution ID (e.g. ``centos`` or ``rhel``);
            read from /etc/os-release when not provided.
        :param distro_version_id: VERSION_ID value; read from /etc/os-release
            when not provided.
        :param distro_name: NAME value; read from /etc/os-release when not
            provided.
        :raises exceptions.DistroNotSupported: when no metadata YAML exists
            for the detected distribution.
        """
        _id, _version_id, _name = (None, None, None)
        if not distro_id or not distro_version_id or not distro_name:
            # Shell out so bash evaluates /etc/os-release; the three values
            # come back newline-separated on stdout.
            output = subprocess.Popen(
                "source /etc/os-release && "
                'echo -e -n "$ID\n$VERSION_ID\n$NAME"',
                shell=True,
                stdout=subprocess.PIPE,
                stderr=open(os.devnull, "w"),
                executable="/bin/bash",
                universal_newlines=True,
            ).communicate()
            _id, _version_id, _name = output[0].split("\n")
        # Explicit constructor arguments win over detected values.
        self._distro_id = distro_id or _id
        self._distro_version_id = distro_version_id or _version_id
        self._distro_name = distro_name or _name
        # CentOS Stream is detected from the NAME string.
        self._is_stream = "stream" in self._distro_name.lower()
        self._load_data()

    def _load_data(self):
        """Load ``<distro_id>.yaml`` metadata or raise DistroNotSupported."""
        data_path = os.path.join(constants.RHOS_VERSIONS_DIR, f"{self.distro_id}.yaml")
        if not os.path.exists(data_path):
            LOG.error("%s does not exist", data_path)
            raise exceptions.DistroNotSupported(self.distro_id)
        with open(data_path, "r") as data:
            self._distro_data = yaml.safe_load(data.read())

    @property
    def distro_data(self):
        # Raw parsed release-metadata YAML.
        return self._distro_data

    @property
    def distro_id(self):
        return self._distro_id

    @property
    def distro_version_id(self):
        return self._distro_version_id

    @property
    def distro_major_version_id(self):
        return self._distro_version_id.split(".")[0]

    @property
    def distro_minor_version_id(self):
        if len(self._distro_version_id.split(".")) < 2:
            # CentOS Stream doesn't have a minor version
            return ""
        return self._distro_version_id.split(".")[1]

    @property
    def is_stream(self):
        return self._is_stream

    @property
    def distro_name(self):
        return self._distro_name

    @property
    def distros(self):
        return self._distro_data.get("distros", {})

    @property
    def versions(self):
        return self._distro_data.get("versions", {})

    @property
    def distro_normalized_id(self):
        # e.g. "centos8-stream" or "rhel8.4".
        ver = [
            self.distro_id,
            self.distro_major_version_id,
        ]
        if self.distro_minor_version_id:
            # handle period before minor version if exists
            ver.append("." + self.distro_minor_version_id)
        if self.is_stream:
            ver.append("-stream")
        return "".join(ver)

    def __str__(self):
        return self.distro_normalized_id

    def validate_distro(self, version) -> bool:
        """Return True when *version* supports this host distribution.

        On RHEL hosts additionally requires subscription-manager to be
        registered and release-locked to the expected major.minor.

        :raises exceptions.SubscriptionManagerConfigError: when a RHEL host
            is not locked to the expected release.
        """
        if version not in self.versions:
            LOG.warning(
                "%s not in defined in release information",
                version,
            )
            return False
        # make sure distro is in the listed distributions
        distros = self.versions[version].get("distros", [])
        if self.distro_normalized_id not in distros:
            LOG.warning(
                "%s not in %s",
                self.distro_normalized_id,
                distros,
            )
            return False
        # make sure subscription manager is at least registered and base os locked
        if "rhel" in self.distro_id:
            submgr = rhsm.SubscriptionManager.instance()
            submgr.status()
            _, out, _ = submgr.release()
            ver = f"{self.distro_major_version_id}.{self.distro_minor_version_id}"
            # The output will be "Release not set" or "Release: X.Y"
            if "not set" in out or f": {ver}" not in out:
                LOG.warning(
                    "System not currently locked to the correct release. "
                    "Please run subscription-manager release --set=%s",
                    ver,
                )
                raise exceptions.SubscriptionManagerConfigError()
        return True

    def get_version(self, version) -> dict:
        """Return the metadata dict for *version*.

        :raises exceptions.VersionNotSupported: when *version* is unknown.
        """
        if version not in self.versions:
            LOG.error("%s is not available in version list", version)
            raise exceptions.VersionNotSupported(version)
        return self.versions.get(version, {})

    def construct_repo(self, repo_type, version, name):
        """Build the repo object for *repo_type* (rhsm/centos/ceph/delorean).

        :raises exceptions.RepositoryNotSupported: for unknown repo types.
        """
        # RHEL only supports rhsm
        if "rhel" in self.distro_id:
            return repos.RhsmRepo(name)
        if "centos" in repo_type:
            return repos.TripleoCentosRepo(repo_type, name)
        if "ceph" in repo_type:
            return repos.TripleoCephRepo(self.distro_normalized_id, name)
        if "delorean" in repo_type:
            dlrn_dist = f"{self.distro_id}{self.distro_major_version_id}"
            return repos.TripleoDeloreanRepos(dlrn_dist, version, name)
        raise exceptions.RepositoryNotSupported(repo_type)

    def get_repos(self, version, enable_ceph: bool = False) -> list:
        """Return repo objects for *version*; ceph repos only when requested."""
        r = []
        dist = self.distro_normalized_id
        version_data = self.get_version(version)
        if dist not in version_data["repos"]:
            LOG.warning("%s missing from version repos", dist)
        # handle distro specific repos
        for name in version_data["repos"].get(dist, []):
            r.append(self.construct_repo(dist, version, name))
        # handle other software related repos
        for repo in constants.SUPPORTED_REPOS:
            for name in version_data["repos"].get(repo, []):
                if not enable_ceph and "ceph" in name:
                    continue
                # NOTE(review): passes ``dist`` (not ``repo``) as repo_type,
                # so ceph/delorean entries are constructed via the distro
                # branch of construct_repo — confirm this is intended.
                r.append(self.construct_repo(dist, version, name))
        return r

    def get_modules(self, version) -> list:
        """Return DnfModule objects declared for *version*."""
        r = []
        module_data = self.get_version(version).get("modules", {})
        for item in module_data.items():
            r.append(dnf.DnfModule(*item))
        return r
[ "aschultz@redhat.com" ]
aschultz@redhat.com
ffd1407a0f604f8cb4df10becc9e15b94189fb82
19b0bae543c1effc1bca1605aefe41f4903ed401
/python/l1TRun3Ntuplizer_cfi.py
3b9450f6004c19b729f057a7c955c9acc0147cfb
[]
no_license
skkwan/boostedTauRun3Ntuplizer
2bd88b3a943509c1cccc5b93ad2d968e34f33b3a
d7bb8445b6847aaa2a0f17e9c432e91b066f8a08
refs/heads/main
2023-03-14T09:14:22.748535
2021-03-04T21:38:08
2021-03-04T21:38:08
344,615,799
0
0
null
null
null
null
UTF-8
Python
false
false
1,799
py
import FWCore.ParameterSet.Config as cms

# CMSSW configuration fragment for the Run-3 boosted-tau ntuplizer.
# Declares the "Run3Ntuplizer" EDAnalyzer and wires up its input collections:
# Layer-1 calo digis, offline (miniAOD) jets/taus, generator-level objects,
# and the UCT emulator / Stage-2 trigger outputs.
l1NtupleProducer = cms.EDAnalyzer("Run3Ntuplizer",
    # Layer-1 calorimeter trigger digis (shared source for ECAL and HCAL).
    ecalDigis = cms.InputTag('l1tCaloLayer1Digis'),
    hcalDigis = cms.InputTag('l1tCaloLayer1Digis'),
    # Offline reconstructed collections from miniAOD.
    recoJets = cms.InputTag("slimmedJets"),
    recoJetsAK8 = cms.InputTag("slimmedJetsAK8"),
    miniTaus = cms.InputTag("slimmedTaus"),
    genParticles = cms.InputTag("genParticles", "", "HLT"),
    # Minimum pT (GeV) applied to reco objects before ntuplizing.
    recoPtCut = cms.double(10),
    # UCT (calo trigger) emulator outputs.
    UCTRegion = cms.InputTag('uct2016EmulatorDigis'),
    l1UCTCentralJets = cms.InputTag("uct2016EmulatorDigis", "Central"),
    l1UCTForwardJets = cms.InputTag("uct2016EmulatorDigis", "Forward"),
    # Stage-2 trigger tau collections (l1extra and unpacked digis).
    stage2Taus = cms.InputTag("l1extraParticles", "Tau"),
    stage2IsoTaus = cms.InputTag("l1extraParticles", "IsoTau"),
    stage2DigisTaus = cms.InputTag("caloStage2Digis", "Tau"),
    gmtStage2Digis = cms.InputTag("simGmtStage2Digis"),
    genJets = cms.InputTag("slimmedGenJets"),
    # NOTE(review): isData=True while genParticles/genJets are also consumed —
    # confirm which flag real-data workflows should use.
    isData = cms.bool(True),
    folderName = cms.untracked.string("Stage3Regions")
)
#BXVector<l1t::Muon> "simGmtStage2Digis" "" "HLT"
[ "ojalvo@wisc.edu" ]
ojalvo@wisc.edu
48389b6fdbf2094b93689fa804fe01a7d8ab1541
853d4cec42071b76a80be38c58ffe0fbf9b9dc34
/venv/Lib/site-packages/joblib/externals/cloudpickle/compat.py
5c9b64fa44e2b5c465aaace68f869fedec8d04c4
[]
no_license
msainTesting/TwitterAnalysis
5e1646dbf40badf887a86e125ef30a9edaa622a4
b1204346508ba3e3922a52380ead5a8f7079726b
refs/heads/main
2023-08-28T08:29:28.924620
2021-11-04T12:36:30
2021-11-04T12:36:30
424,242,582
0
0
null
null
null
null
UTF-8
Python
false
false
367
py
import sys

# Pickle protocol 5 (PEP 574, out-of-band buffers) shim:
# - Python < 3.8: prefer the ``pickle5`` backport when installed; otherwise
#   fall back to the stdlib pickle and its pure-Python ``_Pickler``.
# - Python >= 3.8: protocol 5 is built in; use the C-accelerated Pickler.
# The ``# noqa: F401`` markers silence "imported but unused" — these names
# are re-exported for the rest of the package.
if sys.version_info < (3, 8):
    try:
        import pickle5 as pickle  # noqa: F401
        from pickle5 import Pickler  # noqa: F401
    except ImportError:
        import pickle  # noqa: F401
        from pickle import _Pickler as Pickler  # noqa: F401
else:
    import pickle  # noqa: F401
    from _pickle import Pickler  # noqa: F401
[ "msaineti@icloud.com" ]
msaineti@icloud.com
589542efedef49898e64d4db77cf1f9299ac88b1
bf99b1b14e9ca1ad40645a7423f23ef32f4a62e6
/AtCoder/abc/159f_2.py
049a6b840d367d62419a04a1c193478f8eb82049
[]
no_license
y-oksaku/Competitive-Programming
3f9c1953956d1d1dfbf46d5a87b56550ff3ab3db
a3ff52f538329bed034d3008e051f30442aaadae
refs/heads/master
2021-06-11T16:14:12.635947
2021-05-04T08:18:35
2021-05-04T08:18:35
188,639,647
0
0
null
null
null
null
UTF-8
Python
false
false
258
py
import numpy as np

# AtCoder ABC 159 F: given A and target S, count over all pairs (L, R) the
# subsequences of A[L..R] that sum to S (mod 998244353).
N, S = map(int, input().split())
A = list(map(int, input().split()))
MOD = 998244353

ans = 0
# dp[v]: number of subsets of the elements seen so far summing to v, where
# each subset is weighted by the count of valid left endpoints L that could
# contain it (i.e. positions at or before its first chosen element).
dp = np.zeros(S + 1, dtype=np.int64)
for a in A:
    # A subset may start at the current position; each new position adds one
    # more possible left endpoint for the empty prefix.
    dp[0] += 1
    # Knapsack step: extend every existing subset with a.  The .copy() is
    # required — the source slice dp[:-a] and destination dp[a:] overlap, and
    # an in-place add over aliased numpy views would reuse updated values.
    dp[a:] += dp[: -a].copy()
    dp %= MOD
    # Subsets hitting exactly S now remain valid for every R >= this index,
    # so accumulating dp[S] once per element also counts the R choices.
    ans += dp[S]
print(ans % MOD)
[ "y.oksaku@stu.kanazawa-u.ac.jp" ]
y.oksaku@stu.kanazawa-u.ac.jp
faecb62da3b6501321d2d5f7f697bfab19b9eac1
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02632/s582472876.py
7e39caf5135fbed12b3d6f7935c28c146a39507d
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
481
py
from sys import stdin
input = stdin.readline  # faster line reads than builtin input()


def nCr(n, r):
    # Binomial coefficient C(n, r) mod M using the precomputed factorial and
    # inverse-factorial tables (globals filled in below).
    return fact[n] * (inv[n - r] % M) * (inv[r] % M)


M = 1000000007
k = int(input())
s = len(input().strip())
t = k + s  # final string length after inserting k characters

# fact[i] = i! mod M; p25[i] = 25^i mod M (powers only needed up to k).
fact = [1]
p25 = [1]
for i in range(1, t + 1):
    fact += [(fact[-1] * i) % M]
    if i <= k:
        p25 += [(p25[-1] * 25) % M]

# inv[i] = (i!)^(-1) mod M: one Fermat inverse at i = t, then fill backwards.
inv = [1] * (t + 1)
inv[t] = pow(fact[t], M - 2, M)
for i in range(t - 1, -1, -1):
    inv[i] = inv[i + 1] * (i + 1) % M

# Answer = sum_{i=0..k} C(t, i) * 25^i  (note nCr(t, t-i) == C(t, i)):
# length-t strings containing the input as a subsequence, counted via the
# canonical leftmost embedding so nothing is double-counted.
res = 0
for i in range(k + 1):
    res += nCr(t, t - i) * p25[i]
    res %= M
print(res)
[ "66529651+Aastha2104@users.noreply.github.com" ]
66529651+Aastha2104@users.noreply.github.com
d10d4cb19fb71334d599eb552493f71b6dfcfe64
1714aec212ce8132239dd94d47f1b5dee6986d2b
/nutrition/handlers/base.py
4ec694f354a11914916a6824695dd95d54f95a12
[ "BSD-3-Clause" ]
permissive
caktus/rapidsms-nutrition
7030c45638f1931ff60f5ad8ea439fe48948bc30
b8cbce5bf0a8d9b0f725bf0ec4d7a8e46e5f5be5
refs/heads/master
2020-06-04T05:57:10.561254
2013-04-03T13:43:17
2013-04-04T08:33:08
8,168,464
2
1
null
2013-04-04T13:54:39
2013-02-12T22:07:20
Python
UTF-8
Python
false
false
3,498
py
from __future__ import unicode_literals

import logging
import re

from django.utils.translation import ugettext_lazy as _


__all__ = ['NutritionHandlerBase']


logger = logging.getLogger(__name__)


class NutritionHandlerBase(object):
    """Shared plumbing for Nutrition keyword message handlers.

    Subclasses set ``keyword``/``form_class`` and implement ``_parse`` and
    ``_process``; ``handle`` drives the common receive/parse/process flow.
    """

    prefix = 'nutrition'  # Common prefix for all Nutrition messages.
    keyword = None
    form_class = None  # Form used to process data.

    _common_messages = {
        # Messages common to most or all Nutrition handlers.
        'form_error': _('Sorry, an error occurred while processing your '
                        'message: {message}'),
        'error': _('Sorry, an unexpected error occurred while processing your '
                   'message. Please contact your administrator if this '
                   'continues to occur.'),
    }
    _messages = {}  # Handler-specific messages.

    @classmethod
    def _colloquial_keyword(cls):
        """If the class has multiple keyword choices, return the first."""
        return cls.keyword.split('|')[0]

    def _get_form(self, data):
        """Instantiate the handler's form with the raw message context."""
        return self.form_class(data, raw_text=self.raw_text,
                               connection=self.connection)

    @classmethod
    def _keyword(cls):
        """Override the KeywordHandler method to also require prefix."""
        args = (cls.prefix, cls.keyword)
        pattern = r'^\s*(?:%s)\s*(?:%s)(?:[\s,;:]+(.+))?$' % args
        return re.compile(pattern, re.IGNORECASE)

    def _parse(self, raw_text):
        """Tokenize message text and return parsed data.

        Raises ValueError if the message cannot be parsed.
        """
        # BUG FIX: was ``raise NotImplemented(...)`` — NotImplemented is a
        # singleton, not an exception class, so raising it was a TypeError.
        raise NotImplementedError('Subclass must define _parse method.')

    def _process(self, parsed):
        """Validate and act upon parsed message data."""
        raise NotImplementedError('Subclass must define _process method.')

    def _respond(self, msg_type, **kwargs):
        """Shortcut to retrieve and format a message.

        Handler-specific ``_messages`` take precedence over
        ``_common_messages``; an unknown *msg_type* raises KeyError.
        """
        data = {
            # Some common data.
            'prefix': self.prefix.upper(),
            'keyword': self._colloquial_keyword().upper(),
        }
        data.update(**kwargs)
        if msg_type in self._messages:
            return self.respond(self._messages[msg_type].format(**data))
        if msg_type in self._common_messages:
            return self.respond(self._common_messages[msg_type].format(**data))
        raise KeyError('Message type {0} not found.'.format(msg_type))

    def handle(self, text):
        """
        Entry point of the handler.

        This method takes care of a few common tasks then calls the
        subclass-specific process method.
        """
        self.raw_text = self.msg.text
        # The reporter will be determined from the message connection.
        self.connection = self.msg.connection
        logger.debug('Received {keyword} message from {connection}.'.format(
            keyword=self._colloquial_keyword(), connection=self.connection))

        # Parse the message into its components.
        try:
            parsed = self._parse(text)
        except ValueError:
            logger.exception('An exception occurred while parsing the message')
            # NOTE(review): 'format_error' is not defined in _common_messages
            # (which has 'form_error'); subclasses must define it in
            # _messages or _respond raises KeyError — confirm intended key.
            self._respond('format_error')
            return
        else:
            data = ', '.join([': '.join((k, v)) for k, v in parsed.items()])
            logger.debug('Parsed {keyword} data: {data}'.format(
                keyword=self._colloquial_keyword(), data=data))
            self._process(parsed)  # Subclasses must process parsed data.

    def help(self):
        self._respond('help')
[ "rebecca@caktusgroup.com" ]
rebecca@caktusgroup.com
8bf454ff2505eacbbc560edcb0a50c187edc4223
077c91b9d5cb1a6a724da47067483c622ce64be6
/load_balancer_fuzzer_mcs/interreplay_51_l_5/replay_config.py
0fe6e986e1ae1294d9acaed7ea131f16807b2ac6
[]
no_license
Spencerx/experiments
0edd16398725f6fd9365ddbb1b773942e4878369
aaa98b0f67b0d0c0c826b8a1565916bf97ae3179
refs/heads/master
2020-04-03T10:11:40.671606
2014-06-11T23:55:11
2014-06-11T23:55:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,393
py
from config.experiment_config_lib import ControllerConfig
from sts.topology import *
from sts.control_flow import Replayer
from sts.simulation_state import SimulationConfig
from sts.input_traces.input_logger import InputLogger

# STS replay configuration (auto-generated during MCS minimization of a
# load-balancer fuzz run): one POX controller running misc.ip_loadbalancer
# on a 3-switch mesh, with multiplexed sockets for determinism.
simulation_config = SimulationConfig(
    controller_configs=[ControllerConfig(start_cmd='./pox.py --verbose --unthreaded-sh misc.ip_loadbalancer --ip=123.123.1.3 --servers=123.123.2.3,123.123.1.3 sts.util.socket_mux.pox_monkeypatcher openflow.discovery openflow.of_01 --address=__address__ --port=__port__', label='c1', address='127.0.0.1', cwd='dart_pox')],
    topology_class=MeshTopology,
    topology_params="num_switches=3",
    patch_panel_class=BufferedPatchPanel,
    multiplex_sockets=True,
    kill_controllers_on_exit=True)

# Replay the recorded event trace and check the "OpenFlow error sent"
# invariant; the bug signature identifies the violation being minimized.
control_flow = Replayer(
    simulation_config,
    "experiments/load_balancer_fuzzer_mcs/interreplay_51_l_5/events.trace",
    input_logger=InputLogger(),
    wait_on_deterministic_values=False,
    allow_unexpected_messages=False,
    delay_flow_mods=False,
    default_dp_permit=False,
    pass_through_whitelisted_messages=False,
    invariant_check_name='check_for_ofp_error',
    bug_signature="ERROR_SENT")
[ "jefflai2@gmail.com" ]
jefflai2@gmail.com
2ed0662fb580440d9985f67a8cf23f795d1d85a2
5a5e0a01efa6ef0961992e53bb4f64840f93150b
/RegressionVisualizer/RegressionVisualizer/settings.py
760cbb64ec65faed0bc8c07e525ddb8b3182923f
[]
no_license
scotteskridge/RegressionApp
ed059e3205ab54061129779404345b55c0dee75c
68932a9c94235a1e8bd6cd71a765b545f2266189
refs/heads/master
2021-01-19T20:48:13.495541
2017-04-25T02:39:49
2017-04-25T02:39:56
88,555,025
0
0
null
null
null
null
UTF-8
Python
false
false
3,330
py
""" Django settings for RegressionVisualizer project. Generated by 'django-admin startproject' using Django 1.10.6. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'ez!hio8_m#71hdjl@#4efwg1(zgy!n7qtmk(ctst)9a$7ae+f=' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', u'localhost'] # Application definition INSTALLED_APPS = [ 'apps.regressions', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'jquery', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'RegressionVisualizer.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'RegressionVisualizer.wsgi.application' # Database # 
https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, "static"), ) STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn")
[ "scott.eskridge@gmail.com" ]
scott.eskridge@gmail.com
ffbc2549ab313f9a12e700c6a5e08d7cd3342fc4
fa380310206f7e0c015be610dd9f74f7ba62e8f9
/day2/part2.py
e358436c06caa6a400dee3ff4084cecc5d3f9b12
[ "MIT" ]
permissive
sharkbound/advent_of_code_2016
71c666ce6f7e7e816dbb6e76795650ecd9f1cb48
e655974b2dea422af4ec1debad296ee6c22d690a
refs/heads/master
2020-12-05T03:12:53.883458
2020-01-13T01:24:25
2020-01-13T01:24:25
231,993,705
0
0
null
null
null
null
UTF-8
Python
false
false
774
py
import numpy as np

from read import read_lines

# Direction deltas as (dx, dy); y grows downward.
move_offsets = {
    'L': (-1, 0),
    'R': (1, 0),
    'U': (0, -1),
    'D': (0, 1),
}

# Diamond-shaped keypad (AoC 2016 day 2 part 2); spaces are not keys.
keys = np.array([
    list('  1  '),
    list(' 234 '),
    list('56789'),
    list(' ABC '),
    list('  D  '),
])


def is_valid(x, y):
    """Return True when (x, y) lies on the 5x5 grid and holds a real key."""
    return 0 <= x < 5 and 0 <= y < 5 and keys[y, x] != ' '


def solve(lines):
    """Walk each instruction line from the '5' key and print the code."""
    code = []
    col, row = 0, 2  # the '5' key
    for line in lines:
        for move in line:
            dx, dy = move_offsets[move]
            # Try the horizontal step first, then the vertical one; a step
            # off the pad (or onto a blank) is simply ignored.
            if is_valid(col + dx, row):
                col += dx
            if is_valid(col, row + dy):
                row += dy
        code.append(keys[row, col])
    print(''.join(code))


def main():
    instructions = read_lines()
    solve(instructions)


if __name__ == '__main__':
    main()
[ "laptopblaster@gmail.com" ]
laptopblaster@gmail.com
7d1fab8330262b4f73ac33d7b013f395a6508bee
3312b5066954cbf96c79ef3e1f3d582b31ebc5ae
/colegend/academy/models.py
54bd2e62cf15f774ccb6431e331552ad3aa95654
[]
no_license
Eraldo/colegend
d3f3c2c37f3bade7a3a1e10d307d49db225fe7f5
2e7b9d27887d7663b8d0d1930c2397c98e9fa1fc
refs/heads/master
2021-01-16T23:32:09.245967
2020-10-07T12:12:14
2020-10-07T12:12:14
21,119,074
4
2
null
null
null
null
UTF-8
Python
false
false
6,199
py
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.db.models import Avg, Q
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.shortcuts import redirect
from wagtail.core.models import Page

from colegend.core.fields import MarkdownField
from colegend.core.models import TimeStampedBase, OwnedBase
from django.utils.translation import ugettext_lazy as _


class BookTag(models.Model):
    """
    A django model representing a book's text-tag.
    """
    name = models.CharField(
        _('name'),
        max_length=255,
        unique=True
    )

    class Meta:
        verbose_name = _('Tag')
        verbose_name_plural = _('Tags')
        ordering = ['name']
        default_related_name = 'tags'

    def __str__(self):
        return self.name


class BookQuerySet(models.QuerySet):
    def search(self, query):
        """Case-insensitive substring match on name, author or content."""
        queryset = self.filter(Q(name__icontains=query) |
                               Q(author__icontains=query) |
                               Q(content__icontains=query))
        return queryset


class Book(TimeStampedBase):
    """A book in the club library; ``rating`` is denormalized from reviews."""
    name = models.CharField(
        _('name'),
        max_length=255,
        unique=True
    )
    author = models.CharField(
        _('author'),
        max_length=255,
    )
    image_url = models.URLField(
        _('image url'),
        max_length=1000,
        blank=True
    )
    url = models.URLField(
        _('url'),
        max_length=1000,
        blank=True
    )
    content = MarkdownField(
        blank=True
    )
    public = models.BooleanField(
        default=False
    )
    # At most one book may be featured at a time; enforced in save().
    featured = models.BooleanField(
        default=False
    )
    tags = models.ManyToManyField(
        to=BookTag,
        blank=True,
    )
    notes = models.TextField(
        verbose_name=_("notes"),
        help_text=_("Staff notes."),
        blank=True
    )
    # Cached average of review ratings; kept fresh by reset_book_rating below.
    rating = models.FloatField(
        _('rating'),
        default=0
    )

    def calculate_rating(self):
        """Average review rating rounded to 2 decimals (0 when unreviewed)."""
        rating = self.book_reviews.aggregate(Avg('rating')).get('rating__avg')
        return round(rating, 2) if rating else 0

    def update_rating(self):
        # Refreshes the cached value only; caller is responsible for save().
        self.rating = self.calculate_rating()

    @property
    def area_ratings(self):
        """Per-area review averages as a dict of area_1..area_7."""
        return self.book_reviews.aggregate(
            area_1=Avg('area_1'),
            area_2=Avg('area_2'),
            area_3=Avg('area_3'),
            area_4=Avg('area_4'),
            area_5=Avg('area_5'),
            area_6=Avg('area_6'),
            area_7=Avg('area_7'),
        )

    objects = BookQuerySet.as_manager()

    class Meta:
        default_related_name = 'books'
        ordering = ['name']

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Making sure only one book can be featured.
        if self.featured:
            try:
                temp = Book.objects.get(featured=True)
                if self != temp:
                    temp.featured = False
                    temp.save()
            except Book.DoesNotExist:
                pass
        return super().save(*args, **kwargs)


class BookReview(OwnedBase, TimeStampedBase):
    """A member's review of a book: overall 1-5 rating plus 7 area scores."""
    book = models.ForeignKey(
        to=Book,
        on_delete=models.CASCADE
    )
    rating = models.PositiveSmallIntegerField(
        _('rating'),
        validators=[MinValueValidator(1), MaxValueValidator(5)]
    )
    area_1 = models.PositiveSmallIntegerField(
        _('area 1'),
        validators=[MaxValueValidator(100)]
    )
    area_2 = models.PositiveSmallIntegerField(
        _('area 2'),
        validators=[MaxValueValidator(100)]
    )
    area_3 = models.PositiveSmallIntegerField(
        _('area 3'),
        validators=[MaxValueValidator(100)]
    )
    area_4 = models.PositiveSmallIntegerField(
        _('area 4'),
        validators=[MaxValueValidator(100)]
    )
    area_5 = models.PositiveSmallIntegerField(
        _('area 5'),
        validators=[MaxValueValidator(100)]
    )
    area_6 = models.PositiveSmallIntegerField(
        _('area 6'),
        validators=[MaxValueValidator(100)]
    )
    area_7 = models.PositiveSmallIntegerField(
        _('area 7'),
        validators=[MaxValueValidator(100)]
    )
    content = MarkdownField()

    class Meta:
        default_related_name = 'book_reviews'
        unique_together = ['owner', 'book']

    def __str__(self):
        return 'Book review'


@receiver(post_save, sender=BookReview)
@receiver(post_delete, sender=BookReview)
def reset_book_rating(sender, instance, *args, **kwargs):
    # Keep the denormalized Book.rating in sync whenever reviews change.
    instance.book.update_rating()
    instance.book.save()


class AcademyPage(Page):
    # Container page: always forwards visitors to its first child section.
    template = 'academy/base.html'

    def serve(self, request, *args, **kwargs):
        return redirect(self.get_first_child().url)

    parent_page_types = ['cms.RootPage']
    subpage_types = ['CoursesPage', 'BookClubPage', 'QuizzesPage',
                     'resources.ResourcesPage']


class CoursesPage(Page):
    template = 'academy/courses.html'

    parent_page_types = ['AcademyPage']
    subpage_types = []

    def get_context(self, request, *args, **kwargs):
        context = super().get_context(request, *args, **kwargs)
        return context

    def __str__(self):
        return self.title


class BookClubPage(Page):
    template = 'academy/book_club.html'

    parent_page_types = ['AcademyPage']
    subpage_types = []

    def get_context(self, request, *args, **kwargs):
        context = super().get_context(request, *args, **kwargs)
        return context

    def __str__(self):
        return self.title


class QuizzesPage(Page):
    template = 'academy/quizzes.html'

    parent_page_types = ['AcademyPage']
    subpage_types = []

    def get_context(self, request, *args, **kwargs):
        context = super().get_context(request, *args, **kwargs)
        return context

    def __str__(self):
        return self.title


# class ResourcesPage(Page):
#     template = 'academy/resources.html'
#
#     parent_page_types = ['AcademyPage']
#     subpage_types = []
#
#     def get_context(self, request, *args, **kwargs):
#         context = super().get_context(request, *args, **kwargs)
#         return context
#
#     def __str__(self):
#         return self.title
[ "eraldo@eraldo.org" ]
eraldo@eraldo.org
306b962664a9d20947605344247f11e3ae9b082a
674f5dde693f1a60e4480e5b66fba8f24a9cb95d
/armulator/armv6/opcodes/concrete/orn_immediate_t1.py
106ef9da7793ef683dc7f799764d54ac9acacd72
[ "MIT" ]
permissive
matan1008/armulator
75211c18ebc9cd9d33a02890e76fc649483c3aad
44f4275ab1cafff3cf7a1b760bff7f139dfffb07
refs/heads/master
2023-08-17T14:40:52.793120
2023-08-08T04:57:02
2023-08-08T04:57:02
91,716,042
29
7
MIT
2023-08-08T04:55:59
2017-05-18T16:37:55
Python
UTF-8
Python
false
false
804
py
from armulator.armv6.bits_ops import substring, bit_at, chain
from armulator.armv6.opcodes.abstract_opcodes.orn_immediate import OrnImmediate
from armulator.armv6.shift import thumb_expand_imm_c


class OrnImmediateT1(OrnImmediate):
    """Decoder for ORN (immediate), Thumb encoding T1."""

    @staticmethod
    def from_bitarray(instr, processor):
        """Decode a 32-bit instruction word into an OrnImmediateT1 opcode.

        The 12-bit modified immediate is assembled as i:imm3:imm8 and
        expanded with ThumbExpandImmWithC using the current carry flag.
        Returns None (after printing) for UNPREDICTABLE register choices.
        """
        imm8 = substring(instr, 7, 0)    # low 8 immediate bits
        rd = substring(instr, 11, 8)     # destination register
        imm3 = substring(instr, 14, 12)  # middle 3 immediate bits
        rn = substring(instr, 19, 16)    # first operand register
        setflags = bit_at(instr, 20)     # S bit: update condition flags
        i = bit_at(instr, 26)            # top immediate bit
        # chain(i, chain(imm3, imm8, 8), 11) builds the 12-bit i:imm3:imm8.
        imm32, carry = thumb_expand_imm_c(chain(i, chain(imm3, imm8, 8), 11),
                                          processor.registers.cpsr.c)
        if rd in (13, 15) or rn == 13:
            # SP/PC as destination or SP as source is UNPREDICTABLE; note the
            # implicit None return on this branch.
            print('unpredictable')
        else:
            return OrnImmediateT1(instr, setflags=setflags, d=rd, n=rn,
                                  imm32=imm32, carry=carry)
[ "matan1008@gmail.com" ]
matan1008@gmail.com
f68283bb9341a87ddbd9066b3c3b1132379fc85c
ec0b8bfe19b03e9c3bb13d9cfa9bd328fb9ca3f1
/res/packages/scripts/scripts/client/gui/Scaleform/daapi/view/lobby/boosters/BoostersPanelComponent.py
0af565b33b7e60018dbab49813b2dd2d499360f8
[]
no_license
webiumsk/WOT-0.9.20.0
de3d7441c5d442f085c47a89fa58a83f1cd783f2
811cb4e1bca271372a1d837a268b6e0e915368bc
refs/heads/master
2021-01-20T22:11:45.505844
2017-08-29T20:11:38
2017-08-29T20:11:38
101,803,045
0
1
null
null
null
null
WINDOWS-1250
Python
false
false
5,913
py
# 2017.08.29 21:46:41 Central Europe (daylight saving time)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/boosters/BoostersPanelComponent.py
from gui.ClientUpdateManager import g_clientUpdateManager
from gui.Scaleform.daapi.view.meta.SlotsPanelMeta import SlotsPanelMeta
from gui.Scaleform.genConsts.BOOSTER_CONSTANTS import BOOSTER_CONSTANTS
from gui.Scaleform.genConsts.TOOLTIPS_CONSTANTS import TOOLTIPS_CONSTANTS
from gui.Scaleform.locale.RES_ICONS import RES_ICONS
from gui.Scaleform.locale.TOOLTIPS import TOOLTIPS
from gui.goodies.goodie_items import MAX_ACTIVE_BOOSTERS_COUNT
from gui.shared.utils.functions import makeTooltip
from gui.shared.utils.requesters.ItemsRequester import REQ_CRITERIA
from helpers import dependency
from skeletons.gui.game_control import IBoostersController
from skeletons.gui.goodies import IGoodiesCache

# Flash-side layout constants for the booster slots row.
_GUI_SLOTS_PROPS = {'slotsCount': MAX_ACTIVE_BOOSTERS_COUNT,
 'slotWidth': 50,
 'paddings': 64,
 'groupPadding': 18,
 'ySlotPosition': 5,
 'offsetSlot': 13,
 'useOnlyLeftBtn': True}
# Sentinel slot ids used in _slotsMap for non-booster slots.
ADD_BOOSTER_ID = 'add'
_ADD_AVAILABLE_BOOSTER_ID = 'addAvailable'
_EMPTY_BOOSTER_ID = 'empty'

class BoostersPanelComponent(SlotsPanelMeta):
    """Lobby panel showing active booster slots plus add/empty placeholders."""
    boosters = dependency.descriptor(IBoostersController)
    goodiesCache = dependency.descriptor(IGoodiesCache)

    def __init__(self):
        super(BoostersPanelComponent, self).__init__()
        self._isPanelInactive = True
        self._wasPopulated = False
        # Maps slot index -> booster id or one of the sentinel slot ids.
        self._slotsMap = {}

    def setSettings(self, isPanelInactive=True):
        # Rebuild only once the Flash view has been populated.
        self._isPanelInactive = isPanelInactive
        if self._wasPopulated:
            self._buildList()

    def getBoosterSlotID(self, idx):
        return self._slotsMap.get(int(idx), None)

    def getSlotTooltipBody(self, slotIdx):
        """Tooltip for a slot: 'open boosters window' for add slots (when the
        panel is active), booster info for occupied slots, '' otherwise."""
        boosterID = self._slotsMap.get(int(slotIdx), None)
        tooltip = ''
        if boosterID in (ADD_BOOSTER_ID, _ADD_AVAILABLE_BOOSTER_ID):
            if not self._isPanelInactive:
                body = TOOLTIPS.BOOSTERSPANEL_OPENBOOSTERSWINDOW_BODY
                tooltip = makeTooltip(None, body)
        else:
            tooltip = TOOLTIPS_CONSTANTS.BOOSTERS_BOOSTER_INFO
        return tooltip

    def _populate(self):
        # Subscribe to goodies updates so the panel refreshes itself.
        super(BoostersPanelComponent, self)._populate()
        g_clientUpdateManager.addCallbacks({'goodies': self.__onUpdateGoodies})
        self.boosters.onBoosterChangeNotify += self.__onUpdateGoodies
        self._buildList()
        self._wasPopulated = True

    def _dispose(self):
        # Drop state and unsubscribe in reverse order of _populate.
        self._isPanelInactive = None
        self._wasPopulated = None
        self._slotsMap = None
        self.boosters.onBoosterChangeNotify -= self.__onUpdateGoodies
        g_clientUpdateManager.removeObjectCallbacks(self)
        super(BoostersPanelComponent, self)._dispose()
        return

    def __getAvailableBoosters(self):
        # Boosters the player owns and could activate right now.
        criteria = REQ_CRITERIA.BOOSTER.IS_READY_TO_ACTIVATE
        return self.goodiesCache.getBoosters(criteria=criteria)

    def _buildList(self):
        """Rebuild the slot VO list: active boosters first (longest time left
        first), then 'add' slots for activatable boosters, then empty slots."""
        result = []
        activeBoosters = self.goodiesCache.getBoosters(criteria=REQ_CRITERIA.BOOSTER.ACTIVE)
        activeBoostersList = sorted(activeBoosters.values(), key=lambda b: b.getUsageLeftTime(), reverse=True)
        availableBoostersCount = len(self.__getAvailableBoosters())
        activeBoostersCount = min(len(activeBoostersList), MAX_ACTIVE_BOOSTERS_COUNT)
        freeSlotsCount = MAX_ACTIVE_BOOSTERS_COUNT - min(activeBoostersCount, MAX_ACTIVE_BOOSTERS_COUNT)
        addBoostersSlotsCount = min(freeSlotsCount, availableBoostersCount)
        self._slotsMap = {}
        for idx in range(0, activeBoostersCount):
            booster = activeBoostersList[idx]
            self._slotsMap[idx] = booster.boosterID
            result.append(self.__makeBoosterVO(idx, booster))

        icon = ''
        if not self._isPanelInactive:
            icon = RES_ICONS.MAPS_ICONS_ARTEFACT_EMPTYORDER
        addAndActiveBoostersCount = activeBoostersCount + addBoostersSlotsCount
        for idx in range(activeBoostersCount, MAX_ACTIVE_BOOSTERS_COUNT):
            self._slotsMap[idx], slotLinkage = self.getEmptySlotParams(idx, addAndActiveBoostersCount)
            result.append(self.__makeEmptyBoosterVO(idx, slotLinkage, icon))

        self.as_setPanelPropsS(_GUI_SLOTS_PROPS)
        self.as_setSlotsS(result)

    def getEmptySlotParams(self, idx, addAndActiveBoostersCount):
        """Return (slot id, Flash linkage) for a non-active slot at *idx*."""
        if idx < addAndActiveBoostersCount and not self._isPanelInactive:
            slotLinkage = BOOSTER_CONSTANTS.SLOT_ADD_UI
            emptyBoosterID = _ADD_AVAILABLE_BOOSTER_ID
        else:
            slotLinkage = BOOSTER_CONSTANTS.SLOT_UI
            emptyBoosterID = ADD_BOOSTER_ID
        return (emptyBoosterID, slotLinkage)

    def __makeBoosterVO(self, idx, booster):
        # VO for an active booster slot consumed by the Flash component.
        return {'boosterId': booster.boosterID,
         'id': str(idx),
         'icon': booster.icon,
         'inCooldown': booster.inCooldown,
         'cooldownPercent': booster.getCooldownAsPercent(),
         'leftTime': booster.getUsageLeftTime(),
         'leftTimeText': booster.getShortLeftTimeStr(),
         'showLeftTime': True,
         'isDischarging': True,
         'isInactive': self._isPanelInactive,
         'isEmpty': False,
         'qualityIconSrc': booster.getQualityIcon(),
         'slotLinkage': BOOSTER_CONSTANTS.SLOT_UI}

    def __makeEmptyBoosterVO(self, idx, slotLinkage, icon):
        # VO for an empty/add slot.
        return {'id': str(idx),
         'isInactive': self._isPanelInactive,
         'isEmpty': True,
         'icon': icon,
         'slotLinkage': slotLinkage,
         'showLeftTime': False}

    def __onUpdateGoodies(self, *args):
        self._buildList()
# okay decompyling c:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\daapi\view\lobby\boosters\BoostersPanelComponent.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.08.29 21:46:41 Central Europe (daylight saving time)
[ "info@webium.sk" ]
info@webium.sk
b906c1f40751b8ff47c98535098a44742864a010
fd625e2ea155455c96261c8656a51be22fe420c8
/Python/euler020.py
c7a2ebd25c8bf108eaf07104c5248867c84a68d6
[ "MIT" ]
permissive
AnuragAnalog/project_euler
9b84a6aa0061ad4582c8d0059c3c1eaddd844fd2
8babbefbd5b7008ad24509f24a9d5f50ba208f45
refs/heads/master
2021-12-12T12:07:29.338791
2021-11-01T04:26:44
2021-11-01T04:26:44
210,749,964
6
16
MIT
2021-11-01T04:26:45
2019-09-25T03:44:37
Python
UTF-8
Python
false
false
519
py
#!/usr/bin/python3

"""
n! means n × (n − 1) × ... × 3 × 2 × 1

For example, 10! = 10 × 9 × ... × 3 × 2 × 1 = 3628800,
and the sum of the digits in the number 10! is 3 + 6 + 2 + 8 + 8 + 0 + 0 = 27.

Find the sum of the digits in the number 100!
"""

def factorial(n: int) -> int:
    """Return n! for any n >= 0.

    BUG FIX: the original recursive version's base case was ``n == 1``, so
    ``factorial(0)`` recursed forever; the iterative form also avoids 100
    needless stack frames and handles 0 and 1 naturally.

    Raises ValueError for negative input.
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result

def euler20() -> int:
    """Return the sum of the decimal digits of 100!."""
    return sum(int(digit) for digit in str(factorial(100)))

total = euler20()
print(total)
[ "anurag.peddi1998@gmail.com" ]
anurag.peddi1998@gmail.com
1a02f71bb63f32d51a0da5cc51f580f610b2c315
f0d713996eb095bcdc701f3fab0a8110b8541cbb
/hPaBJ7KJZ8fZtjJgL_0.py
0c9342e275cdefa6e408278bec1c2b60c1f2d484
[]
no_license
daniel-reich/turbo-robot
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
a7a25c63097674c0a81675eed7e6b763785f1c41
refs/heads/main
2023-03-26T01:55:14.210264
2021-03-23T16:08:01
2021-03-23T16:08:01
350,773,815
0
0
null
null
null
null
UTF-8
Python
false
false
444
py
""" Write a function that takes an integer and returns a string with the given number of `"a"`s in Edabit. ### Examples how_many_times(5) ➞ "Edaaaaabit" how_many_times(0) ➞ "Edbit" how_many_times(12) ➞ "Edaaaaaaaaaaaabit" ### Notes * The string must start with "Ed" and end with "bit". * You'll only be given integers as test input. """ def how_many_times(num): return "Ed{}bit".format("a" * num)
[ "daniel.reich@danielreichs-MacBook-Pro.local" ]
daniel.reich@danielreichs-MacBook-Pro.local
5a28d2544c5264e815a06bdcb09331ba41cfba06
2ba6775c96a2c17de5949f7c0dc476548447f9fd
/flaskPet/management/__init__.py
da0dd1078b1b2c92a47c511d8f7e659ec848d658
[]
no_license
reakain/flaskPet
d6f004058b8c92eb354760d8cad39ac3218a8287
e41f2db382fc18bd0b5f84215df24856c125a237
refs/heads/master
2020-03-24T19:24:47.124669
2019-02-09T22:21:38
2019-02-09T22:21:38
142,924,846
0
0
null
null
null
null
UTF-8
Python
false
false
391
py
# -*- coding: utf-8 -*- """ flaskpet.management ~~~~~~~~~~~~~~~~~~ This module contains models, forms and views relevant for managing FlaskPet :copyright: (c) 2014 by the FlaskPet Team. :license: BSD, see LICENSE for more details. """ import logging # force plugins to be loaded from . import plugins __all__ = ('plugins', ) logger = logging.getLogger(__name__)
[ "reakain@users.noreply.github.com" ]
reakain@users.noreply.github.com
9c3898401073d0b579095b6d8def453376e220b0
85c7be8904ce443eb7666aa338f4a03aec73a8d8
/test/run_test.py
a8de9a716a8134201df89fc2940c6c560f0133a5
[ "MIT" ]
permissive
naz947/nlpaug
6c9fc8223e7933775b628ede220d8119dd9240ac
ee508e88a2dbe66ca7b05eb2491c20ca49e3aef7
refs/heads/master
2020-05-20T13:16:35.618003
2019-05-02T02:50:16
2019-05-02T02:50:16
null
0
0
null
null
null
null
UTF-8
Python
false
false
376
py
import unittest

if __name__ == '__main__':
    # Suites to execute; the char/word augmenter suites are currently
    # switched off.
    test_dirs = [
        # 'test/augmenter/char/',
        # 'test/augmenter/word/',
        'test/augmenter/spectrogram/',
        'test/flow/',
    ]

    runner = unittest.TextTestRunner()
    for test_dir in test_dirs:
        # Discover and run each directory's tests with a fresh loader.
        suite = unittest.TestLoader().discover(test_dir)
        runner.run(suite)
[ "makcedward@gmail.com" ]
makcedward@gmail.com