blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b118f7c657f941a6bddce32f35f2d83780071954
|
d5fcafedcd49a666e960eff3c61cfc1bfe9e081c
|
/setup.py
|
e2c764c5e29695769d00c9b9812696849ab732a7
|
[
"BSD-2-Clause"
] |
permissive
|
IndustriaTech/filebrowser-safe
|
29155ef662604d5b4f6ec8a9a94002d4c0e82858
|
2eda8ae594617a58455bca63f88689f2f0579aad
|
refs/heads/master
| 2021-05-29T07:49:26.957672
| 2014-02-05T09:13:42
| 2014-02-05T09:13:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 649
|
py
|
from setuptools import setup, find_packages
setup(
name="filebrowser_safe",
version="0.3.1",
description="A snapshot of the filebrowser_3 branch of django-filebrowser, "
"packaged as a dependency for the Mezzanine CMS for Django.",
long_description=open("README.rst").read(),
author="Patrick Kranzlmueller, Axel Swoboda (vonautomatisch)",
author_email="werkstaetten@vonautomatisch.at",
maintainer="Stephen McDonald",
maintainer_email="stephen.mc@gmail.com",
url="http://github.com/stephenmcd/filebrowser-safe",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
|
[
"steve@jupo.org"
] |
steve@jupo.org
|
903b3cf6c2069424d4e7db0124097fd3a7ef999a
|
5d6ff6a6dc2174a6362d2d2782470aab7c72b909
|
/quokka/core/tests/test_models.py
|
e09141ef787b87e931b2da032962caee0c496ede
|
[
"MIT"
] |
permissive
|
imgugu/quokka
|
41afb4abe095223ba6f1c53aa5e695213832426e
|
301a1300bbd76b6f53569decdf3b3999ba87543f
|
refs/heads/master
| 2020-05-20T22:23:43.282781
| 2013-11-21T21:06:21
| 2013-11-21T21:06:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,876
|
py
|
# coding: utf-8
from . import BaseTestCase
from ..models import Channel, Config
class TestChannel(BaseTestCase):
def setUp(self):
# Create method was not returning the created object with
# the create() method
self.parent, new = Channel.objects.get_or_create(
title=u'Father',
)
self.channel, new = Channel.objects.get_or_create(
title=u'Monkey Island',
description=u'The coolest pirate history ever',
parent=self.parent,
tags=['tag1', 'tag2'],
)
def tearDown(self):
self.channel.delete()
self.parent.delete()
def test_channel_fields(self):
self.assertEqual(self.channel.title, u'Monkey Island')
self.assertEqual(self.channel.slug, u'monkey-island')
self.assertEqual(self.channel.long_slug, u'father/monkey-island')
self.assertEqual(self.channel.mpath, u',father,monkey-island,')
self.assertEqual(self.channel.description,
u'The coolest pirate history ever')
self.assertEqual(self.channel.tags, ['tag1', 'tag2'])
self.assertEqual(self.channel.parent, self.parent)
self.assertEqual(unicode(self.channel), u'father/monkey-island')
def test_get_ancestors(self):
self.assertEqual(list(self.channel.get_ancestors()), [self.channel,
self.parent])
def test_get_ancestors_slug(self):
self.assertEqual(self.channel.get_ancestors_slugs(),
[u'father/monkey-island', u'father'])
def test_get_children(self):
self.assertEqual(list(self.parent.get_children()), [self.channel])
def test_get_descendants(self):
self.assertEqual(list(self.parent.get_descendants()),
[self.parent, self.channel])
def test_absolute_urls(self):
self.assertEqual(self.channel.get_absolute_url(),
'/father/monkey-island/')
self.assertEqual(self.parent.get_absolute_url(),
'/father/')
def test_get_canonical_url(self):
self.assertEqual(self.channel.get_canonical_url(),
'/father/monkey-island/')
self.assertEqual(self.parent.get_canonical_url(),
'/father/')
class TestConfig(BaseTestCase):
def setUp(self):
# Create method was not returning the created object with
# the create() method
self.config, new = Config.objects.get_or_create(
group=u'test group',
)
def tearDown(self):
self.config.delete()
def test_config_fields(self):
self.assertEqual(self.config.group, u'test group')
self.assertEqual(self.config.content_format, 'html')
self.assertEqual(unicode(self.config), u'test group')
|
[
"ellisonleao@gmail.com"
] |
ellisonleao@gmail.com
|
b180b7ee13e37740ff672f524e6d84abb68a9392
|
4ce2cff60ddbb9a3b6fc2850187c86f866091b13
|
/tfrecords/src/wai/tfrecords/object_detection/utils/json_utils_test.py
|
abd785fffd16944d382209e289dc19bd61f7b9e2
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
8176135/tensorflow
|
18cb8a0432ab2a0ea5bacd03309e647f39cb9dd0
|
2c3b4b1d66a80537f3e277d75ec1d4b43e894bf1
|
refs/heads/master
| 2020-11-26T05:00:56.213093
| 2019-12-19T08:13:44
| 2019-12-19T08:13:44
| 228,970,478
| 0
| 0
| null | 2019-12-19T03:51:38
| 2019-12-19T03:51:37
| null |
UTF-8
|
Python
| false
| false
| 3,594
|
py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for google3.image.understanding.object_detection.utils.json_utils."""
import os
import tensorflow as tf
from wai.tfrecords.object_detection.utils import json_utils
class JsonUtilsTest(tf.test.TestCase):
def testDumpReasonablePrecision(self):
output_path = os.path.join(tf.test.get_temp_dir(), 'test.json')
with tf.gfile.GFile(output_path, 'w') as f:
json_utils.Dump(1.0, f, float_digits=2)
with tf.gfile.GFile(output_path, 'r') as f:
self.assertEqual(f.read(), '1.00')
def testDumpPassExtraParams(self):
output_path = os.path.join(tf.test.get_temp_dir(), 'test.json')
with tf.gfile.GFile(output_path, 'w') as f:
json_utils.Dump([1.0], f, float_digits=2, indent=3)
with tf.gfile.GFile(output_path, 'r') as f:
self.assertEqual(f.read(), '[\n 1.00\n]')
def testDumpZeroPrecision(self):
output_path = os.path.join(tf.test.get_temp_dir(), 'test.json')
with tf.gfile.GFile(output_path, 'w') as f:
json_utils.Dump(1.0, f, float_digits=0, indent=3)
with tf.gfile.GFile(output_path, 'r') as f:
self.assertEqual(f.read(), '1')
def testDumpUnspecifiedPrecision(self):
output_path = os.path.join(tf.test.get_temp_dir(), 'test.json')
with tf.gfile.GFile(output_path, 'w') as f:
json_utils.Dump(1.012345, f)
with tf.gfile.GFile(output_path, 'r') as f:
self.assertEqual(f.read(), '1.012345')
def testDumpsReasonablePrecision(self):
s = json_utils.Dumps(1.0, float_digits=2)
self.assertEqual(s, '1.00')
def testDumpsPassExtraParams(self):
s = json_utils.Dumps([1.0], float_digits=2, indent=3)
self.assertEqual(s, '[\n 1.00\n]')
def testDumpsZeroPrecision(self):
s = json_utils.Dumps(1.0, float_digits=0)
self.assertEqual(s, '1')
def testDumpsUnspecifiedPrecision(self):
s = json_utils.Dumps(1.012345)
self.assertEqual(s, '1.012345')
def testPrettyParams(self):
s = json_utils.Dumps({'v': 1.012345, 'n': 2}, **json_utils.PrettyParams())
self.assertEqual(s, '{\n "n": 2,\n "v": 1.0123\n}')
def testPrettyParamsExtraParamsInside(self):
s = json_utils.Dumps(
{'v': 1.012345,
'n': float('nan')}, **json_utils.PrettyParams(allow_nan=True))
self.assertEqual(s, '{\n "n": NaN,\n "v": 1.0123\n}')
with self.assertRaises(ValueError):
s = json_utils.Dumps(
{'v': 1.012345,
'n': float('nan')}, **json_utils.PrettyParams(allow_nan=False))
def testPrettyParamsExtraParamsOutside(self):
s = json_utils.Dumps(
{'v': 1.012345,
'n': float('nan')}, allow_nan=True, **json_utils.PrettyParams())
self.assertEqual(s, '{\n "n": NaN,\n "v": 1.0123\n}')
with self.assertRaises(ValueError):
s = json_utils.Dumps(
{'v': 1.012345,
'n': float('nan')}, allow_nan=False, **json_utils.PrettyParams())
if __name__ == '__main__':
tf.test.main()
|
[
"coreytsterling@gmail.com"
] |
coreytsterling@gmail.com
|
a547b0c580f6eb57a424adc7697b39ea1301b365
|
98cb4fbb8bd776f187a9baf42d66911ed52211c9
|
/archives/vocalSeparation/network.py
|
598ceef69799222575f8631b585a63b4c1585f5e
|
[
"MIT"
] |
permissive
|
daniel769/Audio-Source-Separation
|
017878dd5302f76948f9db800c50a4a4c43e2c63
|
1693a9736fc08c53935aba7218ad82a271b42525
|
refs/heads/master
| 2022-02-13T16:14:28.279016
| 2018-10-24T04:15:25
| 2018-10-24T04:15:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 484
|
py
|
from module import *
import hyperparams as hp
def network(input_, use_mulaw=hp.use_mulaw):
input_ = conv1d(input_, output_channels=hp.hidden_dim, filter_width=3)
skip_connections = list()
for i in hp.dilation:
skip, res = residual_block(input_, rate=i, scope="res_%d" % i)
input_ = res
skip_connections.append(skip)
skip_output = tf.add_n(skip_connections)
output = skip_connection(skip_output, use_mulaw=use_mulaw)
return output
|
[
"c337134154@gmail.com"
] |
c337134154@gmail.com
|
8c63a54063013f4c1c82f843d7730ceee77f0320
|
64e0208fa243ebbab4855980a3f21be78a4a1025
|
/test/tests/object_new_arguments.py
|
873ff6bf2839456b969c7e3f8e8ae6893704f34f
|
[
"Python-2.0",
"Apache-2.0",
"BSD-2-Clause"
] |
permissive
|
H8ter/pyston
|
581f5fcb59bb8b19399347626639a688e92c80ff
|
4cd23054a81b58a8de32ecf43daab2cb2e67f53f
|
refs/heads/master
| 2021-01-15T13:28:46.252887
| 2015-07-24T21:39:13
| 2015-07-24T21:39:13
| 39,681,136
| 0
| 0
| null | 2015-07-25T10:46:01
| 2015-07-25T10:46:01
| null |
UTF-8
|
Python
| false
| false
| 484
|
py
|
# object.__new__ doesn't complain if __init__ is overridden:
class C1(object):
def __init__(self, a):
pass
class C2(object):
pass
print "Trying C1"
object.__new__(C1, 1)
object.__new__(C1, a=1)
print "Trying C2"
try:
object.__new__(C2, 1)
except TypeError as e:
print "caught TypeError"
# These are some tricky cases, since they can potentially look like arguments
# are being passed, but really they are not.
type.__call__(*[C2])
type.__call__(C2, **{})
|
[
"kmod@dropbox.com"
] |
kmod@dropbox.com
|
f290ff4dd28be8504759cd53e837052886f33c30
|
52b5773617a1b972a905de4d692540d26ff74926
|
/.history/Triangle_20200731215410.py
|
fe4e6be1a02b61ab6337d20bc82d3816c5bb2875
|
[] |
no_license
|
MaryanneNjeri/pythonModules
|
56f54bf098ae58ea069bf33f11ae94fa8eedcabc
|
f4e56b1e4dda2349267af634a46f6b9df6686020
|
refs/heads/master
| 2022-12-16T02:59:19.896129
| 2020-09-11T12:05:22
| 2020-09-11T12:05:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 366
|
py
|
def triangle(A):
A.sort()
for i in range(len(A)-2):
p = A[i]
q = A[i+1]
r = A[i+2]
print('p',p,'q',q,'r',r)
if (p + q) < r:
return 0
elif (q + r) < p:
return 0
elif (r + p) < q:
return 0
return 1
triangle
triangle([10,2,5,1,8,20])
|
[
"mary.jereh@gmail.com"
] |
mary.jereh@gmail.com
|
9d964dff99e224a5b1083c30488dc44ae8cf0580
|
d05e4b5f195d03506360483041c1864895e8f4da
|
/swe/helper.py
|
7ec2819606a2c728509b50d57c6d1d04c0e6db70
|
[] |
no_license
|
swesust/backEnd
|
bf01724429e68c5c1f3d21330c6b3acf617dceff
|
eb4eae0981d6d88071a11f1b973c83916e6d199e
|
refs/heads/master
| 2021-11-21T16:42:51.275389
| 2019-07-03T22:01:14
| 2019-07-03T22:01:14
| 162,592,471
| 6
| 0
| null | 2021-09-08T00:48:02
| 2018-12-20T14:46:35
|
Python
|
UTF-8
|
Python
| false
| false
| 3,507
|
py
|
from swe import models
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.storage import FileSystemStorage as FSS
from io import BytesIO
from PIL import Image as ImageProcess
from time import time
from os import remove, makedirs
from os.path import isdir
class Image():
"""
storing display images of profiles.
root location : data/
"""
def save(loc, bytesdata):
"""
this function will save an image and return the file location
>>helper.Image.save(location, image bytes file)
locations:
`see` : swe.variable #storage folder locations
# here hid = 8 length hash code generated by teacher email id
to show the image on html templates: <img src="/{{ user.imgsrc }}">
image will rename with current millisecond.type
"""
img,ext = Image.process(bytesdata)
# save the image
f = FSS(location = loc)
filename = str(int(time()*1000000))
filename = filename+'.'+ext
# make sure the dir already exist or not
if isdir(f.location) == False:
makedirs(f.location)
# save in storage
img.save(f.location+'/'+filename, format=ext, quality=90)
# return the storage location
return '/'+loc+'/'+filename
def delete(loc):
"""
delete a file from storage
"""
try:
remove(loc)
return True
except Exception as e:
return False
def process(bytesdata):
"""
retrieve the image file from bytes and resize (1000*x)
"""
img = ImageProcess.open(BytesIO(bytesdata))
size = img.size
if size[1] > 1000:
height = int(float(1000*size[1])/size[0])
return img.resize((1000,height)), img.format
return img, img.format
def is_valid_format(filename):
"""
required image format: .JPG, .PNG, .JPEG
"""
# transform the string to lower character
filename = filename.lower()
# get the last dot index
dotindex = filename.rfind('.')
# check whether the file has any dot or extenstion
if dotindex != -1:
# split the file extension
extension = filename[dotindex:len(filename)]
# check valid extensions
if extension == '.jpg':
return True
elif extension == '.png':
return True
elif extension == '.jpeg':
return True
return False
from hashlib import md5
class Token():
"""
Token class is stand for generate token according to a particular user
and check a token validation. This is the backbone of the forget password
functionality of this application.
functions:
`get_token(userid, hashed_password)`
to generate a token with a user ID and user hashed password
`get_userid(token)`
to get a user ID from a token
`is_valid(token)`
to check a token is valid or not
"""
def get_token(userid, hashed_password):
hash_token = md5(hashed_password.encode()).hexdigest()
return hash_token[0:16] + userid + hash_token[16:32]
def get_userid(token):
try:
# remove first 16 char
userid = token[16:len(token)]
# remove last 16 char
return userid[0:-16]
except Exception as e:
return None
def is_valid(token):
try:
h1 = token[0:16]
userid = Token.get_userid(token)
# split last 16 chars
h2 = token[-16:]
encode_password = h1+h2
try:
user = models.AuthUser.objects.get(userid=userid)
hashed_password = md5(user.password.encode()).hexdigest()
if encode_password != hashed_password:
return False
return True
except ObjectDoesNotExist as e:
return False
except Exception as e:
return False
from uuid import uuid4
def new_password_request():
return uuid4().hex
|
[
"avoidcloud@gmail.com"
] |
avoidcloud@gmail.com
|
6f2bc69f6325b24048fbf182011b336ea9ff6e4e
|
cfa35dc2ea93ee0eceb2399a9e6112e987579c09
|
/stonesoup/reader/tests/test_opensky.py
|
82d07f42184777b38386c2e114ca28d738c58229
|
[
"LicenseRef-scancode-proprietary-license",
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"Python-2.0",
"LicenseRef-scancode-secret-labs-2011"
] |
permissive
|
dstl/Stone-Soup
|
227e6a9e6fbdceca14af3f0259f311ec74095597
|
f24090cc919b3b590b84f965a3884ed1293d181d
|
refs/heads/main
| 2023-09-01T14:33:14.626428
| 2023-09-01T11:35:46
| 2023-09-01T11:35:46
| 98,420,803
| 315
| 126
|
MIT
| 2023-09-14T14:55:34
| 2017-07-26T12:34:28
|
Python
|
UTF-8
|
Python
| false
| false
| 1,326
|
py
|
import pytest
from ..opensky import OpenSkyNetworkDetectionReader, OpenSkyNetworkGroundTruthReader
pytestmark = pytest.mark.remote_data
@pytest.mark.parametrize(
'reader_type',
(OpenSkyNetworkDetectionReader, OpenSkyNetworkGroundTruthReader))
@pytest.mark.parametrize(
'bbox',
[None, (-7.57216793459, 49.959999905, 1.68153079591, 58.6350001085)],
ids=['None', 'GB'])
def test_opensky_reader(reader_type, bbox):
reader = reader_type(bbox)
prev_time = None
for n, (time, states) in enumerate(reader, 1):
if prev_time is not None:
assert time > prev_time
prev_time = time
for state in states:
if bbox:
assert bbox[0] < state.state_vector[0] < bbox[2]
assert bbox[1] < state.state_vector[1] < bbox[3]
# When using GroundTruthReader, and ID looks like ICAO24 (ignore those missing ICAO24)
if isinstance(reader_type, OpenSkyNetworkGroundTruthReader) and len(state.id) == 6:
assert all(sub_state.metadata['icao24'] == state.id for sub_state in state)
if isinstance(reader_type, OpenSkyNetworkGroundTruthReader):
assert any(len(path) == n for path in states)
assert all(len(path) <= n for path in states)
if n > 3:
break
|
[
"sdhiscocks@dstl.gov.uk"
] |
sdhiscocks@dstl.gov.uk
|
b6d090f6e0c520b5335e366c013a6a95871dfb02
|
b05685baab270b50918c49c1e25d3aef90f0e83d
|
/UNIV-2016/1-29-16.py
|
f07a9382203ea83de48a71657283bce4130ad3f7
|
[] |
no_license
|
daxaxelrod/notes-from-4th-semester
|
1ec25a0503d608bc50e20dd5d12e612c78b27f25
|
ba2c257271bc00ce8dd7a2a5d5984069ac5ae4d8
|
refs/heads/master
| 2016-08-11T12:37:46.605993
| 2016-02-18T00:26:59
| 2016-02-18T00:26:59
| 51,965,752
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,437
|
py
|
Univ 280C
Venture accelerator 2
Woot fucking woot
the job market millennials face
25% activily engaged
55% not engaged in their work
20% activly disengaged at work
Important priciples from last class
PITCH INCESSEANTLY
80/20 rule
learn from failure
be aware of change and pain
those little victory moments are in lui of compensation
perspective changes are much larger than the changes themselves
Slide on the team
need diverse teams
this was terrifying last year
Phrase loom in terms of change
slide ideaas
number of gaming hours spent on steam
time spent in open world feilds
change is the transition to dynamic worlds. set underlying story line but dont tether the user to that story
charatcerizing the pain is easy
What is loom as far as categorizing goes
market pull
tech push
design driven
idea started
Personal knowledge database
take all class notes
throw it into some seachable DB
Pitch notes
make it a story
keep it less than 60 seconds
course goals
launch Venture
serves as a backup career option
launch in emergant economic ecosystem
upstate ny
midwest
learn frugal innovation
jugaad innovation
"one of the most important things is to have at least 10 minutes of laughter per hour"
|
[
"daxaxelrod@gmail.com"
] |
daxaxelrod@gmail.com
|
199c9d62f0c02ac09f43a77d38a29026977066d8
|
a5e71a333a86476b9cb1bdf6989bb5f47dd5e409
|
/ScrapePlugins/M/KissLoader/ContentLoader.py
|
3b88b158cb8837c710fe9f4fc8942aa693f906c4
|
[] |
no_license
|
GDXN/MangaCMS
|
0e797299f12c48986fda5f2e7de448c2934a62bd
|
56be0e2e9a439151ae5302b3e6ceddc7868d8942
|
refs/heads/master
| 2021-01-18T11:40:51.993195
| 2017-07-22T12:55:32
| 2017-07-22T12:55:32
| 21,105,690
| 6
| 1
| null | 2017-07-22T12:55:33
| 2014-06-22T21:13:19
|
Python
|
UTF-8
|
Python
| false
| false
| 8,123
|
py
|
import logSetup
import runStatus
if __name__ == "__main__":
runStatus.preloadDicts = False
import webFunctions
import settings
import os
import os.path
import nameTools as nt
import time
import sys
import urllib.parse
import html.parser
import zipfile
import traceback
import bs4
import re
import json
import ScrapePlugins.RetreivalBase
from mimetypes import guess_extension
from concurrent.futures import ThreadPoolExecutor
import ScrapePlugins.ScrapeExceptions as ScrapeExceptions
import processDownload
import magic
import execjs
class ContentLoader(ScrapePlugins.RetreivalBase.RetreivalBase):
loggerPath = "Main.Manga.Ki.Cl"
pluginName = "Kiss Manga Content Retreiver"
tableKey = "ki"
dbName = settings.DATABASE_DB_NAME
tableName = "MangaItems"
wg = webFunctions.WebGetRobust(logPath=loggerPath+".Web")
retreivalThreads = 3
itemLimit = 200
def check_recaptcha(self, pgurl, soup=None, markup=None):
if markup:
soup = webFunctions.as_soup(markup)
if not soup:
raise RuntimeError("You have to pass either the raw page markup, or a pre-parsed bs4 soup object!")
capdiv = soup.find("div", class_='g-recaptcha')
if not capdiv:
if markup:
return markup
return soup
raise ScrapeExceptions.LimitedException("Encountered ReCaptcha! Cannot circumvent!")
self.log.warning("Found ReCaptcha div. Need to circumvent.")
sitekey = capdiv['data-sitekey']
# soup.find("")
params = {
'key' : settings.captcha_solvers['2captcha']['api_key'],
'method' : 'userrecaptcha',
'googlekey' : sitekey,
'pageurl' : pgurl,
'json' : 1,
}
# self.wg.getJson("https://2captcha.com/in.php", postData=params)
# # here we post site key to 2captcha to get captcha ID (and we parse it here too)
# captcha_id = s.post("?key={}&method=userrecaptcha&googlekey={}&pageurl={}".format(API_KEY, site_key, url), proxies=proxy).text.split('|')[1]
# # then we parse gresponse from 2captcha response
# recaptcha_answer = s.get("http://2captcha.com/res.php?key={}&action=get&id={}".format(API_KEY, captcha_id), proxies=proxy).text
# print("solving ref captcha...")
# while 'CAPCHA_NOT_READY' in recaptcha_answer:
# sleep(5)
# recaptcha_answer = s.get("http://2captcha.com/res.php?key={}&action=get&id={}".format(API_KEY, captcha_id), proxies=proxy).text
# recaptcha_answer = recaptcha_answer.split('|')[1]
# # we make the payload for the post data here, use something like mitmproxy or fiddler to see what is needed
# payload = {
# 'key': 'value',
# 'gresponse': recaptcha_answer # This is the response from 2captcha, which is needed for the post request to go through.
# }
resolved = {
"reUrl" : "/Manga/Love-Lab-MIYAHARA-Ruri/Vol-010-Ch-001?id=359632",
"g-recaptcha-response" : "03AOP2lf5kLccgf5aAkMmzXR8mN6Kv6s76BoqHIv-raSzGCa98HMPMdx0n04ourhM1mBApnesMRbzr2vFa0264mY83SCkL5slCFcC-i3uWJoHIjVhGh0GN4yyswg5-yZpDg1iK882nPuxEeaxb18pOK790x4Z18ib5UOPGU-NoECVb6LS03S3b4fCjWwRDLNF43WhkHDFd7k-Os7ULCgOZe_7kcF9xbKkovCh2uuK0ytD7rhiKnZUUvl1TimGsSaFkSSrQ1C4cxZchVXrz7kIx0r6Qp2hPr2_PW0CAutCkmr9lt9TS5n0ecdVFhdVQBniSB-NZv9QEpbQ8",
}
# # then send the post request to the url
# response = s.post(url, payload, proxies=proxy)
def getImage(self, imageUrl, referrer):
content, handle = self.wg.getpage(imageUrl, returnMultiple=True, addlHeaders={'Referer': referrer})
if not content or not handle:
raise ValueError("Failed to retreive image from page '%s'!" % referrer)
fileN = urllib.parse.unquote(urllib.parse.urlparse(handle.geturl())[2].split("/")[-1])
fileN = bs4.UnicodeDammit(fileN).unicode_markup
self.log.info("retreived image '%s' with a size of %0.3f K", fileN, len(content)/1000.0)
if not "." in fileN:
info = handle.info()
if 'Content-Type' in info:
tp = info['Content-Type']
if ";" in tp:
tp = tp.split(";")[0]
ext = guess_extension(tp)
if ext == None:
ext = "unknown_ftype"
print(info['Content-Type'], ext)
fileN += "." + ext
else:
fileN += ".jpg"
# Let magic figure out the files for us (it's probably smarter then kissmanga, anyways.)
guessed = magic.from_buffer(content, mime=True)
ext = guess_extension(tp)
if ext:
fileN = fileN + ext
return fileN, content
def getImageUrls(self, baseUrl):
pgctnt, filename, mimetype = self.wg.getItemPhantomJS(baseUrl)
pgctnt = self.check_recaptcha(pgurl=baseUrl, markup=pgctnt)
linkRe = re.compile(r'lstImages\.push\((wrapKA\(".+?"\))\);')
links = linkRe.findall(pgctnt)
pages = []
for item in links:
tgt = self.wg.pjs_driver.execute_script("return %s" % item)
if not tgt.startswith("http"):
raise ScrapeExceptions.LimitedException("URL Decryption failed!")
pages.append(tgt)
self.log.info("Found %s pages", len(pages))
return pages
# Don't download items for 12 hours after relase,
# so that other, (better) sources can potentially host
# the items first.
def checkDelay(self, inTime):
return inTime < (time.time() - 60*60*12)
def getLink(self, link):
sourceUrl = link["sourceUrl"]
print("Link", link)
seriesName = link['seriesName']
try:
self.log.info( "Should retreive url - %s", sourceUrl)
self.updateDbEntry(sourceUrl, dlState=1)
imageUrls = self.getImageUrls(sourceUrl)
if not imageUrls:
self.log.critical("Failure on retreiving content at %s", sourceUrl)
self.log.critical("Page not found - 404")
self.updateDbEntry(sourceUrl, dlState=-1)
return
self.log.info("Downloading = '%s', '%s' ('%s images)", seriesName, link["originName"], len(imageUrls))
dlPath, newDir = self.locateOrCreateDirectoryForSeries(seriesName)
if link["flags"] == None:
link["flags"] = ""
if newDir:
self.updateDbEntry(sourceUrl, flags=" ".join([link["flags"], "haddir"]))
chapterName = nt.makeFilenameSafe(link["originName"])
fqFName = os.path.join(dlPath, chapterName+" [KissManga].zip")
loop = 1
prefix, ext = os.path.splitext(fqFName)
while os.path.exists(fqFName):
fqFName = "%s (%d)%s" % (prefix, loop, ext)
loop += 1
self.log.info("Saving to archive = %s", fqFName)
images = []
imgCnt = 1
for imgUrl in imageUrls:
imageName, imageContent = self.getImage(imgUrl, sourceUrl)
imageName = "{num:03.0f} - {srcName}".format(num=imgCnt, srcName=imageName)
imgCnt += 1
images.append([imageName, imageContent])
if not runStatus.run:
self.log.info( "Breaking due to exit flag being set")
self.updateDbEntry(sourceUrl, dlState=0)
return
self.log.info("Creating archive with %s images", len(images))
if not images:
self.updateDbEntry(sourceUrl, dlState=-1, tags="error-404")
return
#Write all downloaded files to the archive.
arch = zipfile.ZipFile(fqFName, "w")
for imageName, imageContent in images:
arch.writestr(imageName, imageContent)
arch.close()
dedupState = processDownload.processDownload(seriesName, fqFName, deleteDups=True, includePHash=True, rowId=link['dbId'])
self.log.info( "Done")
filePath, fileName = os.path.split(fqFName)
self.updateDbEntry(sourceUrl, dlState=2, downloadPath=filePath, fileName=fileName, tags=dedupState)
return
except SystemExit:
print("SystemExit!")
raise
except Exception:
self.log.critical("Failure on retreiving content at %s", sourceUrl)
self.log.critical("Traceback = %s", traceback.format_exc())
self.updateDbEntry(sourceUrl, dlState=-1)
def setup(self):
'''
poke through cloudflare
'''
if not self.wg.stepThroughCloudFlare("http://kissmanga.com", 'KissManga'):
raise ValueError("Could not access site due to cloudflare protection.")
if __name__ == '__main__':
import utilities.testBase as tb
with tb.testSetup(load=False):
cl = ContentLoader()
# pg = 'http://dynasty-scans.com/chapters/qualia_the_purple_ch16'
# inMarkup = cl.wg.getpage(pg)
# cl.getImageUrls(inMarkup, pg)
cl.do_fetch_content()
# cl.getLink('http://www.webtoons.com/viewer?titleNo=281&episodeNo=3')
# cl.getImageUrls('http://kissmanga.com/Manga/Hanza-Sky/Ch-031-Read-Online?id=225102')
|
[
"something@fake-url.com"
] |
something@fake-url.com
|
1f0508ba72e0fa6b452f48d450270349e204b152
|
7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0
|
/0801-0900/0889-Construct Binary Tree from Preorder and Postorder Traversal/0889-Construct Binary Tree from Preorder and Postorder Traversal.py
|
91552f75a60306b6c33b24b37f46cb02f2423c10
|
[
"MIT"
] |
permissive
|
jiadaizhao/LeetCode
|
be31bd0db50cc6835d9c9eff8e0175747098afc6
|
4ddea0a532fe7c5d053ffbd6870174ec99fc2d60
|
refs/heads/master
| 2021-11-05T04:38:47.252590
| 2021-10-31T09:54:53
| 2021-10-31T09:54:53
| 99,655,604
| 52
| 28
|
MIT
| 2020-10-02T12:47:47
| 2017-08-08T05:57:26
|
C++
|
UTF-8
|
Python
| false
| false
| 1,638
|
py
|
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def constructFromPrePost(self, pre: List[int], post: List[int]) -> TreeNode:
if not pre:
return None
root = TreeNode(pre[0])
if len(pre) == 1:
return root
i = post.index(pre[1])
root.left = self.constructFromPrePost(pre[1 : i + 2], post[0 : i + 1])
root.right = self.constructFromPrePost(pre[i + 2 :], post[i + 1 : -1])
return root
class Solution2:
def constructFromPrePost(self, pre: List[int], post: List[int]) -> TreeNode:
table = {v: i for i, v in enumerate(post)}
def dfs(preStart, postStart, n):
if n == 0:
return None
root = TreeNode(pre[preStart])
if n == 1:
return root
i = table[pre[preStart + 1]]
L = i - postStart + 1
root.left = dfs(preStart + 1, postStart, L)
root.right = dfs(preStart + L + 1, i + 1, n - L - 1)
return root
return dfs(0, 0, len(pre))
class Solution3:
def constructFromPrePost(self, pre: List[int], post: List[int]) -> TreeNode:
St = [TreeNode(pre[0])]
i = 0
for val in pre[1:]:
while St[-1].val == post[i]:
St.pop()
i += 1
node = TreeNode(val)
if St[-1].left:
St[-1].right = node
else:
St[-1].left = node
St.append(node)
return St[0]
|
[
"jiadaizhao@gmail.com"
] |
jiadaizhao@gmail.com
|
8f37d8f65cf54dd283e22acd078d5087397db1d3
|
a1b375c3e98fe059dafc4d74cbcbcb99a0571e44
|
/images/urls.py
|
7b1168a1108824ea4f65b15f1955c15b27075621
|
[
"MIT"
] |
permissive
|
mohsenamoon1160417237/Social_app
|
478a73552ceed8001c167be6caaf550cd58626bd
|
79fa0871f7b83648894941f9010f1d99f1b27ab3
|
refs/heads/master
| 2022-12-09T16:03:53.623506
| 2020-09-21T05:59:22
| 2020-09-21T06:02:03
| 297,242,915
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 480
|
py
|
from django.urls import path
from . import views
urlpatterns = [
path('best_images/' , views.most_liked_images , name='most_liked_images'),
path('' , views.image_post , name='image_post'),
path('delete/<int:image_id>/<slug:image_slug>/' , views.image_delete , name='image_delete'),
path('all/' , views.images , name='images'),
path('<slug:image_slug>/<int:image_id>/' , views.image_detail , name='image_detail'),
path('like/' , views.image_like , name='image_like'),
]
|
[
"dramatic225@gmail.com"
] |
dramatic225@gmail.com
|
3f8a0fafc7d03d42481365c073a4dad659afc6ac
|
49ba5356bdc5df7dd9803b56fe507c5164a90716
|
/greatest-common-divisor-of-strings/test_solution.py
|
2c2c95aefa31c52e1d40ac929287824839313cf5
|
[] |
no_license
|
uxlsl/leetcode_practice
|
d80ad481c9d8ee71cce0f3c66e98446ced149635
|
d8ed762d1005975f0de4f07760c9671195621c88
|
refs/heads/master
| 2021-04-25T18:12:28.136504
| 2020-03-11T07:54:15
| 2020-03-11T07:54:15
| 121,472,384
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 138
|
py
|
from solution import Solution
def test_solution():
s = Solution()
assert s.gcdOfStrings(str1 = "ABCABC", str2 = "ABC") == "ABC"
|
[
"songlin.lin@yunfangdata.com"
] |
songlin.lin@yunfangdata.com
|
f6661f51dac094fd1cd85b3211f221fd56caccff
|
aed888628faf0f52081a8711653489e3982ce0c1
|
/mininet/wifiPlot.py
|
53bf454c111ea4aa97d7b87df0f9606c7ee6ab4d
|
[] |
no_license
|
tapparello/mininet-wifi
|
69d01beea7d9b456a254694f1c38c443f0b32560
|
0feb170337bef16ea1a972685fc4a0eaa1a51eea
|
refs/heads/master
| 2021-01-20T17:27:34.311830
| 2017-05-09T19:43:36
| 2017-05-09T19:43:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,857
|
py
|
"""
author: Ramon Fontes (ramonrf@dca.fee.unicamp.br)
ramonfontes.com
"""
import matplotlib.patches as patches
import matplotlib.pyplot as plt
from mininet.log import debug
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
class plot3d (object):
    """Class-level helpers that draw Mininet-WiFi nodes in a 3D matplotlib scene."""
    ax = None  # shared Axes3D instance; created by instantiateGraph()

    # NOTE: these are classmethods whose first parameter is conventionally
    # named `self` here but actually receives the class object.
    @classmethod
    def instantiateGraph(self, MAX_X, MAX_Y, MAX_Z):
        """Create the interactive 3D axes, sized MAX_X x MAX_Y x MAX_Z meters."""
        plt.ion()
        plt.title("Mininet-WiFi Graph")
        self.ax = plt.subplot(111, projection='3d')
        self.ax.set_xlabel('meters (x)')
        self.ax.set_ylabel('meters (y)')
        self.ax.set_zlabel('meters (z)')
        self.ax.set_xlim([0, MAX_X])
        self.ax.set_ylim([0, MAX_Y])
        self.ax.set_zlim([0, MAX_Z])
        self.ax.grid(True)

    @classmethod
    def instantiateAnnotate(self, node):
        """Attach the node's name as a text label at its current position."""
        x = '%.2f' % float(node.params['position'][0])
        y = '%.2f' % float(node.params['position'][1])
        z = '%.2f' % float(node.params['position'][2])
        node.plttxt = self.ax.text(float(x), float(y), float(z), node.name)

    @classmethod
    def instantiateNode(self, node):
        """Draw the node body as a small (radius 1) black sphere."""
        x = '%.2f' % float(node.params['position'][0])
        y = '%.2f' % float(node.params['position'][1])
        z = '%.2f' % float(node.params['position'][2])
        resolution = 40
        u = np.linspace(0, 2 * np.pi, resolution)
        v = np.linspace(0, np.pi, resolution)
        r = 1
        # Parametric sphere centered on the node position.
        x = r * np.outer(np.cos(u), np.sin(v)) + float(x)
        y = r * np.outer(np.sin(u), np.sin(v)) + float(y)
        z = r * np.outer(np.ones(np.size(u)), np.cos(v)) + float(z)
        node.pltNode = self.ax.plot_surface(x, y, z, alpha=0.2, edgecolor='none', color='black')

    @classmethod
    def graphInstantiateNodes(self, nodes):
        """Draw label, body sphere and range sphere for each node, then flush."""
        for node in nodes:
            self.instantiateAnnotate(node)
            self.instantiateNode(node)
            self.instantiateCircle(node)
        self.plotDraw()

    @classmethod
    def graphPause(self):
        """Yield briefly to the GUI event loop so the figure stays responsive."""
        plt.pause(0.001)

    @classmethod
    def graphUpdate(self, node):
        """Redraw a single node after its position changed."""
        node.pltNode.remove()
        node.pltCircle.remove()
        node.plttxt.remove()
        self.instantiateCircle(node)
        self.instantiateNode(node)
        self.instantiateAnnotate(node)
        self.plotDraw()

    @classmethod
    def plotDraw(self):
        """Flush pending drawing commands."""
        plt.draw()

    @classmethod
    def closePlot(self):
        """Close the figure; best-effort, errors are ignored."""
        try:
            plt.close()
        except Exception:  # BUG FIX: was a bare except, which also caught SystemExit/KeyboardInterrupt
            pass

    @classmethod
    def instantiateCircle(self, node):
        """Draw the node's wireless range as a translucent sphere, colored by type."""
        x = '%.2f' % float(node.params['position'][0])
        y = '%.2f' % float(node.params['position'][1])
        z = '%.2f' % float(node.params['position'][2])
        # Color key: blue = default/AP, green = station, red = vehicle.
        color = 'b'
        if node.type == 'station':
            color = 'g'
        elif node.type == 'vehicle':
            color = 'r'
        resolution = 100
        u = np.linspace(0, 2 * np.pi, resolution)
        v = np.linspace(0, np.pi, resolution)
        r = node.params['range']
        x = r * np.outer(np.cos(u), np.sin(v)) + float(x)
        y = r * np.outer(np.sin(u), np.sin(v)) + float(y)
        z = r * np.outer(np.ones(np.size(u)), np.cos(v)) + float(z)
        node.pltCircle = self.ax.plot_surface(x, y, z, alpha=0.2, edgecolor='none', color=color)
class plot2d (object):
    """Class-level helpers that draw Mininet-WiFi nodes on a 2D plane."""
    ax = None  # shared axes; created by instantiateGraph()

    @classmethod
    def closePlot(self):
        """Close the figure; best-effort, errors are ignored."""
        try:
            plt.close()
        except Exception:  # BUG FIX: was a bare except, which also caught SystemExit/KeyboardInterrupt
            pass

    @classmethod
    def text(self, node):
        """Move the node's text label to its current position."""
        x = '%.2f' % float(node.params['position'][0])
        y = '%.2f' % float(node.params['position'][1])
        if hasattr(node.plttxt, 'xyann'): node.plttxt.xyann = (x, y)  # newer MPL versions (>=1.4)
        else: node.plttxt.xytext = (x, y)

    @classmethod
    def circle(self, node):
        """Re-center the node's range circle on its current position."""
        x = '%.2f' % float(node.params['position'][0])
        y = '%.2f' % float(node.params['position'][1])
        node.pltCircle.center = x, y

    @classmethod
    def graphUpdate(self, node):
        """Move label, marker and range circle to the node's position and redraw."""
        x = '%.2f' % float(node.params['position'][0])
        y = '%.2f' % float(node.params['position'][1])
        if hasattr(node.plttxt, 'xyann'): node.plttxt.xyann = (x, y)  # newer MPL versions (>=1.4)
        else: node.plttxt.xytext = (x, y)

        node.pltNode.set_data(x, y)
        node.pltCircle.center = x, y
        self.plotDraw()

    @classmethod
    def graphPause(self):
        """Yield briefly to the GUI event loop so the figure stays responsive."""
        plt.pause(0.001)

    @classmethod
    def plotDraw(self):
        """Flush pending drawing commands."""
        plt.draw()

    @classmethod
    def plotScatter(self, nodesx, nodesy):
        """Scatter-plot helper used for fixed node sets."""
        return plt.scatter(nodesx, nodesy, color='red', marker='s')

    @classmethod
    def plotLine2d(self, nodesx, nodesy, color='', ls='-', lw=1):
        """Build (but do not add) a Line2D between the given coordinates."""
        return plt.Line2D(nodesx, nodesy, color=color, ls=ls, lw=lw)

    @classmethod
    def plotLineTxt(self, x, y, i):
        """Label an avenue/line with a small green 'Av.<i>' tag."""
        title = 'Av.%s' % i
        plt.text(x, y, title, ha='left', va='bottom', fontsize=8, color='g')

    @classmethod
    def plotLine(self, line):
        """Add a previously built Line2D to the axes."""
        ax = self.ax
        ax.add_line(line)

    @classmethod
    def instantiateGraph(self, MAX_X, MAX_Y):
        """Create the interactive 2D axes, sized MAX_X x MAX_Y meters."""
        plt.ion()
        plt.title("Mininet-WiFi Graph")
        self.ax = plt.subplot(111)
        self.ax.set_xlabel('meters')
        self.ax.set_ylabel('meters')
        self.ax.set_xlim([0, MAX_X])
        self.ax.set_ylim([0, MAX_Y])
        self.ax.grid(True)

    @classmethod
    def instantiateNode(self, node):
        """Draw the node marker, colored by node type (g=station, r=vehicle, b=other)."""
        ax = self.ax

        color = 'b'
        if node.type == 'station':
            color = 'g'
        elif node.type == 'vehicle':
            color = 'r'

        node.pltNode, = ax.plot(1, 1, linestyle='', marker='.', ms=10, mfc=color)

    @classmethod
    def instantiateCircle(self, node):
        """Draw the node's wireless range as a translucent circle."""
        ax = self.ax

        color = 'b'
        if node.type == 'station':
            color = 'g'
        elif node.type == 'vehicle':
            color = 'r'

        node.pltCircle = ax.add_patch(
            patches.Circle((0, 0),
                           node.params['range'], fill=True, alpha=0.1, color=color
                           )
        )

    @classmethod
    def instantiateAnnotate(self, node):
        """Attach an annotation for the node (placed at origin until updated)."""
        node.plttxt = self.ax.annotate(node, xy=(0, 0))

    @classmethod
    def updateCircleRadius(self, node):
        # Reflect a changed transmission range on the drawn circle.
        node.pltCircle.set_radius(node.params['range'])

    @classmethod
    def graphInstantiateNodes(self, node):
        """Create all artists for one node, then draw it in place."""
        self.instantiateAnnotate(node)
        self.instantiateCircle(node)
        self.instantiateNode(node)
        self.graphUpdate(node)

    @classmethod
    def plotGraph(self, wifiNodes=None, srcConn=None, dstConn=None):
        """Plot every node plus a blue link for each srcConn[i] -> dstConn[i] pair.

        BUG FIX: the defaults were shared mutable lists ([]); use None and
        substitute a fresh empty list per call (backward compatible).
        """
        wifiNodes = [] if wifiNodes is None else wifiNodes
        srcConn = [] if srcConn is None else srcConn
        dstConn = [] if dstConn is None else dstConn
        debug('Enabling Graph...\n')

        for node in wifiNodes:
            x = '%.2f' % float(node.params['position'][0])
            y = '%.2f' % float(node.params['position'][1])
            self.graphInstantiateNodes(node)
            node.pltNode.set_data(x, y)
            self.text(node)
            self.circle(node)

        # srcConn and dstConn are parallel lists describing link endpoints.
        for c in range(0, len(srcConn)):
            src_x = '%.2f' % float(srcConn[c].params['position'][0])
            src_y = '%.2f' % float(srcConn[c].params['position'][1])
            dst_x = '%.2f' % float(dstConn[c].params['position'][0])
            dst_y = '%.2f' % float(dstConn[c].params['position'][1])
            line = self.plotLine2d([src_x, dst_x],
                                   [src_y, dst_y], 'b')
            self.plotLine(line)
|
[
"ramonreisfontes@gmail.com"
] |
ramonreisfontes@gmail.com
|
6f82494371dfa89abc75f24106979386cb34c94a
|
41777d4d219ea97b4632f4a8a31ab6c82a60772c
|
/kubernetes_typed/client/models/v1_scale_io_volume_source.py
|
b1eccb02122f904eb1df1f33838af3ec15a3c9d6
|
[
"Apache-2.0"
] |
permissive
|
gordonbondon/kubernetes-typed
|
501d9c998c266386dc7f66f522f71ac3ba624d89
|
82995b008daf551a4fe11660018d9c08c69f9e6e
|
refs/heads/master
| 2023-07-18T12:06:04.208540
| 2021-09-05T19:50:05
| 2021-09-05T19:50:05
| 319,183,135
| 24
| 2
|
Apache-2.0
| 2021-09-05T19:50:06
| 2020-12-07T02:34:12
|
Python
|
UTF-8
|
Python
| false
| false
| 584
|
py
|
# Code generated by `typeddictgen`. DO NOT EDIT.
"""V1ScaleIOVolumeSourceDict generated type."""
from typing import TypedDict
from kubernetes_typed.client import V1LocalObjectReferenceDict
# Keys mirror the Kubernetes V1ScaleIOVolumeSource OpenAPI object.
# total=False: every key is optional, matching the generated client model.
V1ScaleIOVolumeSourceDict = TypedDict(
    "V1ScaleIOVolumeSourceDict",
    {
        "fsType": str,
        "gateway": str,
        "protectionDomain": str,
        "readOnly": bool,
        "secretRef": V1LocalObjectReferenceDict,
        "sslEnabled": bool,
        "storageMode": str,
        "storagePool": str,
        "system": str,
        "volumeName": str,
    },
    total=False,
)
|
[
"noreply@github.com"
] |
gordonbondon.noreply@github.com
|
bd1e0023e976133adf144217da8c40767542ad6a
|
40c4b8e9ac9074869bfb0dc1d3c3f566371f1764
|
/Hangman1/dog_1.py
|
8ef10b06f11b544da283359081ffd9dca0a7cd4a
|
[] |
no_license
|
katuhito/Hangman001
|
870a8827e69cbd9a8b01ffb55f5c499c71861b76
|
710a201c6ad8284e164ea8ad26cd061486c50849
|
refs/heads/master
| 2022-12-06T16:30:24.613288
| 2020-08-22T10:19:27
| 2020-08-22T10:19:27
| 285,448,433
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 296
|
py
|
class Dog:
    """A dog with a name, a breed, and an owner object."""

    def __init__(self, name, breed, owner):
        self.name, self.breed, self.owner = name, breed, owner
class Person:
    """A person identified only by name."""

    def __init__(self, name):
        self.name = name
# Demo: the dog's owner is reachable through attribute chaining.
owner = Person("Mick Jagger")
pet = Dog("Stanley", "Bulldog", owner)
print(pet.owner.name)
|
[
"katuhitohara@gmail.com"
] |
katuhitohara@gmail.com
|
73632a0fc46bcb44ba1b1359191e89d643076b20
|
6d87f66357e4002c461532b39498636d29956097
|
/stack/cinder/cinder/tests/integrated/test_volumes.py
|
0ec851113d15b555d1a9e13b95fbf0ec2994a1dd
|
[
"Apache-2.0"
] |
permissive
|
megaumi/openstack-tbd-scheduler
|
b8588b35c45b8fe9eee59723276047601ce2dfde
|
13928e9fec092e573c4945343a8b60e1fa86c4b3
|
refs/heads/master
| 2021-06-06T05:45:48.026940
| 2020-07-24T08:52:55
| 2020-07-24T08:52:55
| 6,046,752
| 0
| 1
| null | 2020-07-24T08:52:57
| 2012-10-02T15:11:38
|
Python
|
UTF-8
|
Python
| false
| false
| 7,530
|
py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import unittest
from cinder import service
from cinder.openstack.common import log as logging
from cinder.tests.integrated import integrated_helpers
from cinder.tests.integrated.api import client
from cinder.volume import driver
LOG = logging.getLogger(__name__)
class VolumesTest(integrated_helpers._IntegratedTestBase):
    """Integration tests for the volumes API, backed by the logging driver."""

    def setUp(self):
        super(VolumesTest, self).setUp()
        # Start every test from a clean driver action log.
        driver.LoggingVolumeDriver.clear_logs()

    def _start_api_service(self):
        """Start the osapi_volume WSGI service and record its endpoint."""
        self.osapi = service.WSGIService("osapi_volume")
        self.osapi.start()
        self.auth_url = 'http://%s:%s/v1' % (self.osapi.host, self.osapi.port)
        LOG.warn(self.auth_url)

    def _get_flags(self):
        """Flags for the test run: local volumes off, logging driver on."""
        f = super(VolumesTest, self)._get_flags()
        f['use_local_volumes'] = False  # Avoids calling local_path
        f['volume_driver'] = 'cinder.volume.driver.LoggingVolumeDriver'
        return f

    def test_get_volumes_summary(self):
        """Simple check that listing volumes (summary view) works."""
        volumes = self.api.get_volumes(False)
        for volume in volumes:
            LOG.debug("volume: %s" % volume)

    def test_get_volumes(self):
        """Simple check that listing volumes (detail view) works."""
        volumes = self.api.get_volumes()
        for volume in volumes:
            LOG.debug("volume: %s" % volume)

    def _poll_while(self, volume_id, continue_states, max_retries=5):
        """Poll (briefly) while the volume's state is in continue_states.

        Returns the last volume seen, or None once the API reports 404.
        """
        retries = 0
        while True:
            try:
                found_volume = self.api.get_volume(volume_id)
            except client.OpenStackApiNotFoundException:
                found_volume = None
                LOG.debug("Got 404, proceeding")
                break

            LOG.debug("Found %s" % found_volume)

            self.assertEqual(volume_id, found_volume['id'])

            if found_volume['status'] not in continue_states:
                break

            time.sleep(1)
            retries = retries + 1
            if retries > max_retries:
                break
        return found_volume

    def test_create_and_delete_volume(self):
        """Creates and deletes a volume, verifying the driver action log."""
        # Create volume
        created_volume = self.api.post_volume({'volume': {'size': 1}})
        LOG.debug("created_volume: %s" % created_volume)
        self.assertTrue(created_volume['id'])
        created_volume_id = created_volume['id']

        # Check it's there
        found_volume = self.api.get_volume(created_volume_id)
        self.assertEqual(created_volume_id, found_volume['id'])

        # It should also be in the all-volume list
        volumes = self.api.get_volumes()
        volume_names = [volume['id'] for volume in volumes]
        self.assertIn(created_volume_id, volume_names)

        # Wait (briefly) for creation. Delay is due to the 'message queue'
        found_volume = self._poll_while(created_volume_id, ['creating'])

        # It should be available...
        self.assertEqual('available', found_volume['status'])

        # Delete the volume
        self.api.delete_volume(created_volume_id)

        # Wait (briefly) for deletion. Delay is due to the 'message queue'
        found_volume = self._poll_while(created_volume_id, ['deleting'])

        # Should be gone
        self.assertFalse(found_volume)

        LOG.debug("Logs: %s" % driver.LoggingVolumeDriver.all_logs())

        create_actions = driver.LoggingVolumeDriver.logs_like(
            'create_volume',
            id=created_volume_id)
        LOG.debug("Create_Actions: %s" % create_actions)

        # NOTE: assertEquals is a deprecated alias; assertEqual used throughout.
        self.assertEqual(1, len(create_actions))
        create_action = create_actions[0]
        self.assertEqual(create_action['id'], created_volume_id)
        self.assertEqual(create_action['availability_zone'], 'nova')
        self.assertEqual(create_action['size'], 1)

        export_actions = driver.LoggingVolumeDriver.logs_like(
            'create_export',
            id=created_volume_id)
        self.assertEqual(1, len(export_actions))
        export_action = export_actions[0]
        self.assertEqual(export_action['id'], created_volume_id)
        self.assertEqual(export_action['availability_zone'], 'nova')

        delete_actions = driver.LoggingVolumeDriver.logs_like(
            'delete_volume',
            id=created_volume_id)
        self.assertEqual(1, len(delete_actions))
        # BUG FIX: the original read export_actions[0] here, so the delete
        # action's fields were never actually verified.
        delete_action = delete_actions[0]
        self.assertEqual(delete_action['id'], created_volume_id)

    def test_create_volume_with_metadata(self):
        """Creates a volume with metadata and verifies it round-trips."""
        metadata = {'key1': 'value1',
                    'key2': 'value2'}
        created_volume = self.api.post_volume(
            {'volume': {'size': 1,
                        'metadata': metadata}})
        LOG.debug("created_volume: %s" % created_volume)
        self.assertTrue(created_volume['id'])
        created_volume_id = created_volume['id']

        # Check it's there and metadata present
        found_volume = self.api.get_volume(created_volume_id)
        self.assertEqual(created_volume_id, found_volume['id'])
        self.assertEqual(metadata, found_volume['metadata'])

    def test_create_volume_in_availability_zone(self):
        """Creates a volume in a specific availability_zone."""
        availability_zone = 'zone1:host1'
        created_volume = self.api.post_volume(
            {'volume': {'size': 1,
                        'availability_zone': availability_zone}})
        LOG.debug("created_volume: %s" % created_volume)
        self.assertTrue(created_volume['id'])
        created_volume_id = created_volume['id']

        # Check it's there and availability zone present
        found_volume = self.api.get_volume(created_volume_id)
        self.assertEqual(created_volume_id, found_volume['id'])
        self.assertEqual(availability_zone, found_volume['availability_zone'])

    def test_create_and_update_volume(self):
        """Creates a volume, renames it, and verifies the update persisted."""
        created_volume = self.api.post_volume({'volume': {
            'size': 1, 'display_name': 'vol1'}})
        self.assertEqual(created_volume['display_name'], 'vol1')
        created_volume_id = created_volume['id']

        # update volume
        body = {'volume': {'display_name': 'vol-one'}}
        updated_volume = self.api.put_volume(created_volume_id, body)
        self.assertEqual(updated_volume['display_name'], 'vol-one')

        # check for update
        found_volume = self.api.get_volume(created_volume_id)
        self.assertEqual(created_volume_id, found_volume['id'])
        self.assertEqual(found_volume['display_name'], 'vol-one')
# Allow running this test module directly with the stdlib runner.
if __name__ == "__main__":
    unittest.main()
|
[
"umi@apricot.(none)"
] |
umi@apricot.(none)
|
0e9b9d7b4cfe391c63a71319701c9315c52c1588
|
efc690a4c42b1511deb0fe80bf146872c45aed69
|
/conf_site/sponsorship/models.py
|
f1a2c0a6aceed0242319506cff3c3807176f3152
|
[
"MIT"
] |
permissive
|
jasongrout/conf_site
|
34aa1197727fbbbdf8811338764a7451445f1803
|
6b3beb21de8d847cba65dcb6da84464b40739d48
|
refs/heads/master
| 2021-03-31T06:35:59.696561
| 2020-03-17T20:39:57
| 2020-03-17T20:39:57
| 248,086,087
| 0
| 0
|
MIT
| 2020-03-17T22:32:02
| 2020-03-17T22:32:01
| null |
UTF-8
|
Python
| false
| false
| 421
|
py
|
from django.db import models
from wagtail.contrib.settings.models import BaseSetting, register_setting
@register_setting(icon="group")
class SponsorshipSettings(BaseSetting):
    """Per-site sponsorship settings, editable through the Wagtail admin."""
    # max_length=2083 is the classic maximum URL length (old IE limit).
    info_link = models.URLField(
        default=u"https://pydata.org/pdf/sponsor-prospectus.pdf",
        max_length=2083,
        verbose_name=u"Link to sponsor prospectus.",
    )
    class Meta:
        verbose_name = u"sponsor settings"
|
[
"martey@mobolic.com"
] |
martey@mobolic.com
|
2512b083e55f6101e8cf2f1a317a475ba920dc4c
|
32e948f9e7d6bd8771d266d93db0a2653043af0c
|
/retrieve_cci.py
|
2e4c71257d529c9c71e1d2bc844b836f83643c91
|
[
"MIT"
] |
permissive
|
brorfred/density_stats
|
f14c979f6b6214f4546f5d6f14767cb0825c1d02
|
b66673c67083010f29afbac1ce30ab431643697f
|
refs/heads/main
| 2023-05-05T11:26:48.312639
| 2023-04-17T13:39:42
| 2023-04-17T13:39:42
| 506,174,445
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,701
|
py
|
import numpy as np
import xarray as xr
import pandas as pd
import pyresample as pr
from fast_histogram import histogram1d
import resample
# Histogram layout: 100 bin edges spanning chlorophyll concentrations
# from 1e-4 to 1e2, in natural-log space (histograms are taken of log(chl)).
bins = 100
binrange = np.log(10**-4), np.log(10**2)
binlist = np.linspace(*binrange, bins)
def open_dataset(date="2000-01-01"):
    """Return the OC-CCI daily chlorophyll-a field for *date* via OPeNDAP."""
    cci_url = "https://www.oceancolour.org/thredds/dodsC/CCI_ALL-v5.0-DAILY"
    dataset = xr.open_dataset(cci_url)
    return dataset["chlor_a"].sel(time=date)
def setup_grid():
    """Create matrices with latitudes and longitudes for the t-coords."""
    i0t, imt, j0t, jmt = 0, 8640, 0, 4320
    incr = 360.0 / imt
    # Cell centers: half a cell in from the edges; latitudes south-to-north.
    latvec = (90 - np.arange(j0t, jmt) * incr - incr / 2)[::-1]
    lonvec = -180 + np.arange(i0t, imt) * incr + incr / 2
    lons, lats = np.meshgrid(lonvec, latvec)
    grid = pr.geometry.GridDefinition(lons=lons, lats=lats)
    grid.ivec = np.arange(grid.shape[1])
    grid.jvec = np.arange(grid.shape[0])
    grid.iarr, grid.jarr = np.meshgrid(grid.ivec, grid.jvec)
    return grid
def setup_darwin_grid():
    """Create a 1-degree global grid matching the Darwin model layout."""
    lats_1d = np.arange(-79.5, 80.5)
    lons_1d = np.arange(-179.5, 180.5)
    lons, lats = np.meshgrid(lons_1d, lats_1d)
    grid = pr.geometry.GridDefinition(lons=lons, lats=lats)
    grid.ivec = np.arange(grid.shape[1])
    grid.jvec = np.arange(grid.shape[0])
    grid.iarr, grid.jarr = np.meshgrid(grid.ivec, grid.jvec)
    return grid
def fields_to_histograms(date1="2001-01-01", date2="2001-12-31"):
    """Read CCI fields and convert to histograms, separated by month and region.

    Returns an array of shape (12, n_regions, n_bins): counts of
    log-chlorophyll values accumulated over all days in [date1, date2].
    """
    longh = xr.open_dataset("indata/longhurst_darwin.nc")
    reglist = np.unique(longh.regions.data.astype(int))
    histmat = np.zeros((12, len(reglist), len(binlist)))
    griddr = setup_darwin_grid()
    grid = setup_grid()
    for dtm in pd.date_range(date1, date2):
        fld = resample.coarsegrid(grid, open_dataset(date=dtm).data, griddr)
        for npos, reg in enumerate(reglist):
            mask = np.isfinite(fld) & (fld > 0) & (reg == longh.regions.data)
            cnt = histogram1d(np.log(fld[mask]), range=binrange, bins=bins)
            # BUG FIX: pandas months are 1-based, so indexing axis 0 (size 12)
            # with dtm.month shifted every month and raised IndexError for
            # December.  Also accumulate (+=) instead of assigning, so each
            # month's slot holds all of its days rather than only the last one.
            histmat[dtm.month - 1, npos, :] += cnt
        print(dtm)
    return histmat
"""
def longhurst_nc_file():
ds = longhurst.open_dataset()
griddr = setup_darwin_grid()
dsgr = xr.Dataset( coords={"lat":griddr.lats[:,0], "lon":griddr.lons[0,:]})
for key in ds.data_vars:
arr = resample.coarsegrid(longhurst.setup_grid(), ds[key].data, griddr)
dsgr[key] = (("lat","lon"), arr)
dsgr.to_netcdf("indata/longhurst_darwin.nc")
"""
|
[
"brorfred@gmail.com"
] |
brorfred@gmail.com
|
9c4c84794897db611285d44fe40a857bd5997ffb
|
e1303b5f9b13b9b7f0fa7b5af1e7b8b38314d31f
|
/findTreeDiameter.py
|
8945e0de95a004a85b0c3fd800eb3850c42f4d14
|
[] |
no_license
|
middleverse/ads_prac
|
a77b0a162ac34eae3847533f773e28e6bec93fbb
|
5b41defb74b1ae1fb38f244d9ffa070ac07d44c9
|
refs/heads/master
| 2023-03-25T06:54:56.913539
| 2021-03-10T23:06:51
| 2021-03-10T23:06:51
| 307,391,921
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,408
|
py
|
class TreeNode:
    """A binary tree node: a value plus optional left/right children."""

    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right
class TreeDiameter:
    """Computes the diameter (longest path, counted in nodes) of a binary tree.

    The running maximum is kept on the instance and reset on every call to
    find_diameter(), so one instance can safely measure several trees.
    """

    def __init__(self):
        self.treeDiameter = 0  # running maximum path length, in nodes

    def find_diameter(self, root):
        """Return the diameter of the tree rooted at *root* (0 for None)."""
        # BUG FIX: reset the accumulated maximum so repeated calls on the
        # same instance do not report a stale (larger) previous result.
        self.treeDiameter = 0
        self.find_diameter_recursive(root)
        return self.treeDiameter

    def find_diameter_recursive(self, node):
        """Return the depth of *node*'s subtree while updating the diameter."""
        if node is None:
            return 0
        if node.left is None and node.right is None:
            return 1
        left_depth = self.find_diameter_recursive(node.left)
        right_depth = self.find_diameter_recursive(node.right)
        # Longest path passing through this node, counted in nodes.
        current_diameter = left_depth + right_depth + 1
        self.treeDiameter = max(self.treeDiameter, current_diameter)
        return 1 + max(left_depth, right_depth)
def main():
    """Demo: measure a tree's diameter before and after reshaping it."""
    meter = TreeDiameter()
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(3)
    root.left.left = TreeNode(4)
    root.right.left = TreeNode(5)
    root.right.right = TreeNode(6)
    print("Tree Diameter: " + str(meter.find_diameter(root)))
    # Reshape: drop one leaf on the left, grow the right subtree deeper.
    root.left.left = None
    root.right.left.left = TreeNode(7)
    root.right.left.right = TreeNode(8)
    root.right.right.left = TreeNode(9)
    root.right.left.right.left = TreeNode(10)
    root.right.right.left.left = TreeNode(11)
    print("Tree Diameter: " + str(meter.find_diameter(root)))


main()
|
[
"arshi_b@hotmail.com"
] |
arshi_b@hotmail.com
|
cc4d1a777b82c57297ced99c26076184686c1a16
|
de8e0c5c759347917ca7f06b42ca6c82b8f8c95f
|
/baekjoon/11_math-2/9613.py
|
b08bf5a8ddfbf8f859876049db7ce69d8f7dcee3
|
[] |
no_license
|
Greek-and-Roman-God/Apollo
|
aaeb315a9e70c719b3e53e3c4b9b5dde7b517ec0
|
2823cbcc9fc10ecd3f1785732403cb9c288f8ef3
|
refs/heads/main
| 2023-08-23T12:08:05.322733
| 2021-10-02T10:54:13
| 2021-10-02T10:54:13
| 308,242,023
| 1
| 1
| null | 2020-11-26T12:03:44
| 2020-10-29T06:49:26
|
Python
|
UTF-8
|
Python
| false
| false
| 502
|
py
|
# 9613 GCD 합
# https://www.acmicpc.net/problem/9613
def gcd(a, b):
    """Greatest common divisor via Euclid's algorithm; argument order is irrelevant."""
    while b:
        a, b = b, a % b
    return a
# For each test case, print the sum of gcd over all unordered pairs.
num_cases = int(input())
for _ in range(num_cases):
    values = list(map(int, input().split()))
    nums = values[1:]  # values[0] is the count; len(nums) carries the same info
    answer = 0
    for i in range(len(nums)):
        for j in range(i + 1, len(nums)):
            answer += gcd(nums[i], nums[j])
    print(answer)
|
[
"doyeon311@gmail.com"
] |
doyeon311@gmail.com
|
12a0858f92e944f3282765fe64115943dabf630e
|
a838d4bed14d5df5314000b41f8318c4ebe0974e
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_11_01/aio/operations/_express_route_service_providers_operations.py
|
562620df96ed6195c4d70aade703489bdeef9938
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
scbedd/azure-sdk-for-python
|
ee7cbd6a8725ddd4a6edfde5f40a2a589808daea
|
cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a
|
refs/heads/master
| 2023-09-01T08:38:56.188954
| 2021-06-17T22:52:28
| 2021-06-17T22:52:28
| 159,568,218
| 2
| 0
|
MIT
| 2019-08-11T21:16:01
| 2018-11-28T21:34:49
|
Python
|
UTF-8
|
Python
| false
| false
| 5,138
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteServiceProvidersOperations:
    """ExpressRouteServiceProvidersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_11_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        **kwargs
    ) -> AsyncIterable["_models.ExpressRouteServiceProviderListResult"]:
        """Gets all the available express route service providers.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRouteServiceProviderListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_11_01.models.ExpressRouteServiceProviderListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteServiceProviderListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the first request from the templated URL, or a follow-up
            # request to the service-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items); a falsy
            # next_link ends the paging loop.
            deserialized = self._deserialize('ExpressRouteServiceProviderListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; anything other than HTTP 200 maps to an error.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteServiceProviders'}  # type: ignore
|
[
"noreply@github.com"
] |
scbedd.noreply@github.com
|
533602f46e5fd616444778719ca121007c3b3786
|
8e58feae60f66ed52e5e5a04843580591bc6968d
|
/setup.py
|
b295a85ded33f5522c9e1cac762aaf01290d18b2
|
[
"BSD-3-Clause"
] |
permissive
|
rblack42/sphinxcontrib-lpblocks
|
89a50ff64cbf8866ec2663dd999ccca0fad4bfd9
|
a30869b1ee2f4f0f73b35f4a9841609d1fafd487
|
refs/heads/master
| 2022-07-31T23:29:57.534383
| 2020-05-18T20:06:33
| 2020-05-18T20:06:33
| 264,270,050
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,256
|
py
|
'''
Sphinxcontrib-lpblocks
~~~~~~~~~~~~~~~~~~~~~~
Sphinx extension to support Literate Programming.
'''
import io
from setuptools import setup, find_packages
import sphinxcontrib.lpblocks as lp
def readfile(filename):
    """Read *filename* as UTF-8 and return its content split into lines."""
    with io.open(filename, encoding="utf-8") as handle:
        contents = handle.read()
    return contents.split("\n")
# README.rst provides the long description, one list element per line.
readme = readfile("README.rst")

# Package metadata (version, author, URLs, ...) lives on the package
# itself (imported above as `lp`) so it is defined in exactly one place.
setup(
    name='sphinxcontrib-lpblocks',
    version=lp.version,
    url=lp.url,
    download_url=lp.pypi,
    license=lp.license,
    author=lp.author,
    author_email=lp.email,
    description=lp.summary,
    long_description="\n".join(readme),
    long_description_content_type='text/x-rst',
    zip_safe=False,
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Documentation',
        'Topic :: Utilities',
    ],
    platforms='any',
    packages=find_packages(),
    include_package_data=True,
    install_requires=['sphinx', 'sphinx-rtd-theme'],
    namespace_packages=['sphinxcontrib'],
)
|
[
"roie.black@gmail.com"
] |
roie.black@gmail.com
|
e53dd06c2a2df6fe95473cf29333718731785fe7
|
be5f4d79910e4a93201664270916dcea51d3b9ee
|
/rovers/fastdownward/experiments/issue420/issue420-v1.py
|
b5d93b01dad7fc570cabc56ea38aee5819c50c0a
|
[
"MIT",
"GPL-1.0-or-later",
"GPL-3.0-or-later"
] |
permissive
|
mehrdadzakershahrak/Online-Explanation-Generation
|
17c3ab727c2a4a60381402ff44e95c0d5fd0e283
|
e41ad9b5a390abdaf271562a56105c191e33b74d
|
refs/heads/master
| 2022-12-09T15:49:45.709080
| 2019-12-04T10:23:23
| 2019-12-04T10:23:23
| 184,834,004
| 0
| 0
|
MIT
| 2022-12-08T17:42:50
| 2019-05-04T00:04:59
|
Python
|
UTF-8
|
Python
| false
| false
| 718
|
py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from downward.suites import suite_optimal_with_ipc11
import common_setup
REVS = ["issue420-base", "issue420-v1"]
CONFIGS = {
"blind": ["--search", "astar(blind())"],
"lmcut": ["--search", "astar(lmcut())"],
}
TEST_RUN = False
if TEST_RUN:
SUITE = "gripper:prob01.pddl"
PRIORITY = None # "None" means local experiment
else:
SUITE = suite_optimal_with_ipc11()
PRIORITY = 0 # number means maia experiment
exp = common_setup.MyExperiment(
grid_priority=PRIORITY,
revisions=REVS,
configs=CONFIGS,
suite=SUITE,
)
exp.add_comparison_table_step(
attributes=common_setup.MyExperiment.DEFAULT_TABLE_ATTRIBUTES
)
exp()
|
[
"zaker.mehrdad@gmail.com"
] |
zaker.mehrdad@gmail.com
|
258b646c702736c7a992f113a68f318a0df6c2a5
|
e0980f704a573894350e285f66f4cf390837238e
|
/.history/menus/models_20201030152822.py
|
c70dffeb2c5e9ce219e941ae4b37043c1fd1e98f
|
[] |
no_license
|
rucpata/WagtailWebsite
|
28008474ec779d12ef43bceb61827168274a8b61
|
5aa44f51592f49c9a708fc5515ad877c6a29dfd9
|
refs/heads/main
| 2023-02-09T15:30:02.133415
| 2021-01-05T14:55:45
| 2021-01-05T14:55:45
| 303,961,094
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,640
|
py
|
from django.db import models
from django_extensions.db.fields import AutoSlugField
from modelcluster.models import ClusterableModel
from modelcluster.fields import ParentalKey
from wagtail.core.models import Orderable
from wagtail.admin.edit_handlers import FieldPanel, PageChooserPanel, InlinePanel
class MenuItem(Orderable):
    """A single navigation entry: either an internal page or a raw URL."""
    link_title = models.CharField(blank=True, max_length=50)
    link_url = models.CharField(max_length=500, blank=True)
    link_page = models.ForeignKey(
        'wagtailcore.Page',
        null=True,
        blank=True,
        related_name='+',
        on_delete=models.CASCADE,
    )
    open_in_new_tab = models.BooleanField(
        default=False,
        blank=True,
    )

    panels = [
        FieldPanel('link_title'),
        FieldPanel('link_url'),
        PageChooserPanel('link_page'),
        FieldPanel('open_in_new_tab'),
    ]

    page = ParentalKey('Menu', related_name='menu_items')

    @property
    def link(self):
        # Prefer an internal page over a raw URL; '#' is a safe fallback.
        if self.link_page:
            return self.link_page
        elif self.link_url:
            return self.link_url
        return '#'

    @property
    def title(self):
        # BUG FIX: the original line was corrupted by a stray pasted import
        # ("return self.link_page.from django.utils.translation import ..."),
        # a syntax error.  Reconstructed intent: fall back to the linked
        # page's own title when no explicit link_title was given.
        if self.link_page and not self.link_title:
            return self.link_page.title
        elif self.link_title:
            return self.link_title
        return 'Missing Title'
class Menu(ClusterableModel):
    """A named, orderable collection of MenuItem entries."""
    title = models.CharField(max_length=100)
    # Slug is generated from the title but remains editable in the admin.
    slug = AutoSlugField(
        populate_from='title',
        editable=True,
    )
    panels = [
        FieldPanel('title'),
        FieldPanel('slug'),
        InlinePanel('menu_items', label='Menu Item'),
    ]
    def __str__(self):
        return self.title
|
[
"rucinska.patrycja@gmail.com"
] |
rucinska.patrycja@gmail.com
|
3e9956e9dcc4a1c4b5873fb6cc00798a7a1b8a6e
|
2eb386991d9975f0f8440d90de26e950304ac42f
|
/TGHACK2020/mystic/mystic.py
|
890845cecbe1fd91e46764c04ab3175c590b43f6
|
[] |
no_license
|
Quintec/CTFs2020
|
2816a66e8a486537c31e5ac25253840bc3a8ffe9
|
bdaa327c9f0b0ee16ff95bafcaf65f0df8acd8b9
|
refs/heads/master
| 2022-12-19T21:39:14.129702
| 2020-10-01T16:49:06
| 2020-10-01T16:49:06
| 281,812,929
| 1
| 0
| null | 2020-10-01T16:49:08
| 2020-07-23T00:37:44
| null |
UTF-8
|
Python
| false
| false
| 182
|
py
|
# XOR-decode mystic.png with key 42 and write the result to mystic.dat.
with open("mystic.png", 'rb') as handle:
    raw = handle.read()
print(raw)
print(type(raw))
decoded = bytes(byte ^ 42 for byte in raw)
with open("mystic.dat", 'wb') as handle:
    handle.write(decoded)
|
[
"zhongbrothers@gmail.com"
] |
zhongbrothers@gmail.com
|
571dce4b35f1680fb3c9ae0aa7327fffecc8a440
|
eac985db4df6ef7429ee6798956aad85087dc444
|
/rcsfootprints/settings.py
|
5809049ca8dfede2566f444d0a91519b4eaebd56
|
[] |
no_license
|
scottcoughlin2014/rcsfootprints
|
10e7caafda5062ecece0cf1eba978a853e35240e
|
06317fd97cd1feda745428df4c65662941caa755
|
refs/heads/master
| 2022-11-28T15:50:57.004215
| 2020-08-03T20:34:06
| 2020-08-03T20:34:06
| 284,801,649
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,713
|
py
|
"""
Django settings for rcsfootprints project.
Generated by 'django-admin startproject' using Django 3.0.9.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# Lazily provision Django's SECRET_KEY: reuse an already-defined key, else
# read it from secret.txt, else generate one and persist it for next run.
try:
    SECRET_KEY
except NameError:
    SECRET_FILE = os.path.join(BASE_DIR, 'secret.txt')
    try:
        # Context manager closes the file handle (the original leaked it).
        with open(SECRET_FILE) as f:
            SECRET_KEY = f.read().strip()
    except IOError:
        try:
            import random
            # SystemRandom draws from the OS CSPRNG, so this is suitable for a key.
            SECRET_KEY = ''.join([random.SystemRandom().choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
            with open(SECRET_FILE, 'w') as secret:
                secret.write(SECRET_KEY)
        except IOError:
            # BUG FIX: the original built this Exception without `raise`, so
            # startup continued with SECRET_KEY undefined and failed later
            # with a confusing NameError. Fail loudly here instead.
            raise Exception('Please create a %s file with random characters to generate your secret key!' % SECRET_FILE)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# rcsfootprints apps
'issue.apps.IssueConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'rcsfootprints.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'rcsfootprints.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
|
[
"scottcoughlin2014@u.northwestern.edu"
] |
scottcoughlin2014@u.northwestern.edu
|
1cc919de041c45de9d95e37af262c9a2f2d6e5fe
|
c2f85286d1e21fb803c35f6d996abc850b993e53
|
/mystorage/models.py
|
3b1da397f6d612372399cd856fd46f538d5bc4d8
|
[] |
no_license
|
devdw98/likelion_drf
|
dfeec1bf5ee153918807f99040c8c33240c4344c
|
6d0171961bc93f4edd7998b7351034e0a936079d
|
refs/heads/master
| 2020-07-29T20:38:29.041098
| 2019-10-27T07:22:53
| 2019-10-27T07:22:53
| 209,951,594
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 984
|
py
|
from django.db import models
from django.conf import settings
class Essay(models.Model):
    # CASCADE: deleting the referenced user also deletes their essays.
    author = models.ForeignKey(settings.AUTH_USER_MODEL, default = 1, on_delete=models.CASCADE)
    title = models.CharField(max_length = 30)
    body = models.TextField()
class Album(models.Model): # ImageField requires Pillow (pip install Pillow)
    # CASCADE: deleting the referenced user also deletes their albums.
    author = models.ForeignKey(settings.AUTH_USER_MODEL, default = 1, on_delete=models.CASCADE)
    image = models.ImageField(upload_to="images")
    desc = models.CharField(max_length = 100)
class Files(models.Model):
    # CASCADE: deleting the referenced user also deletes their files.
    author = models.ForeignKey(settings.AUTH_USER_MODEL, default = 1, on_delete=models.CASCADE)
    myfile = models.FileField(blank = False, null = False, upload_to="files")
    desc = models.CharField(max_length = 100)
|
[
"devdw98@gmail.com"
] |
devdw98@gmail.com
|
00b51cd8339ee6726731147e53432b24dc540662
|
a4146004694aa8745f751fd49152b01718e43cdd
|
/Algorithm/test/IM/D03/미로탈출 로봇중간 단계2.py
|
6085d18524e9fd1a78f28e0e385c58c842be2f6e
|
[] |
no_license
|
hoyoung2176/TIL
|
1f4e2110e1e21d03855889044a7df280ad9788fc
|
e2bca069d36455fdf8f142fa9a06fb1a39f3c99f
|
refs/heads/master
| 2021-06-25T13:34:01.463989
| 2019-07-31T05:05:45
| 2019-07-31T05:05:45
| 163,781,026
| 0
| 0
| null | 2021-06-10T21:19:53
| 2019-01-02T01:39:56
|
Python
|
UTF-8
|
Python
| false
| false
| 872
|
py
|
# Maze-escape robot: walk from (1, 1) following a rotation over the four
# given direction codes, mark free cells (0) as visited (9) and count them;
# stop when stepping onto an already-visited cell.
import sys
sys.stdin = open("미로탈출 로봇중간 단계.txt")
N = int(input())
# Pad the N x N maze with a wall (1) border so no bounds checks are needed.
arr = [[1]*(N*2) for _ in range(N+2)]
for i in range(1,N+1):
    arr[i]= [1] + (list(map(int, input()))) + [1]
Darr = list(map(int,input().split()))  # the four direction codes to rotate through
Dno = 0  # index into Darr: which direction code is currently active
dr = [0, 1, 0, -1, 0]  # row deltas, indexed by direction code (down 1, left 2, up 3, right 4)
dc = [0, -1, 0, 1]     # col deltas
# NOTE(review): dc has 4 entries while dr has 5 -- a direction code of 4 would
# raise IndexError on dc. Confirm the intended delta table against the
# original problem statement before relying on all four directions.
r,c = 1, 1  # current position
cnt = 0
while True:
    # step in the current direction
    r = r + dr[Darr[Dno]]
    c = c + dc[Darr[Dno]]
    if arr[r][c] == 0:  # free cell
        # mark as visited and count it
        arr[r][c] = 9
        cnt += 1
    elif arr[r][c] == 1:  # wall
        # step back and rotate to the next direction code
        r = r - dr[Darr[Dno]]
        c = c - dc[Darr[Dno]]
        # BUG FIX: the original `Dno += (Dno+1) % 4` accumulated instead of
        # rotating (0 -> 1 -> 3 -> stuck at 3), contradicting its own
        # "reset when it exceeds 4" comment. Rotate cyclically instead.
        Dno = (Dno + 1) % 4
    else:
        break  # already-visited cell: escape the loop
print(cnt)
|
[
"hoyoung2176@gmail.com"
] |
hoyoung2176@gmail.com
|
4c119e4d899ded17fc322cbed2aae93f00f17728
|
28b4c4676cb016e35cc7b1048097628caa113114
|
/sso/relay.py
|
99757032575f381faeec89c1b1124012853f5fa4
|
[
"MIT"
] |
permissive
|
shiroyuki/voila-id
|
829127ef258c829f33d6f4079486829e3325f208
|
da86daef255d23b73ad6f2ba67fba02201407f09
|
refs/heads/master
| 2016-09-06T16:56:45.724263
| 2014-08-25T05:50:43
| 2014-08-25T05:50:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 757
|
py
|
from sso.common import Controller
from sso.security import AccessMode
class Relay(Controller):
    """Post-login relay: routes an authenticated session to the right page.

    Unauthenticated requests bounce to the login page; master accounts go to
    the admin profile list; users without a recorded referer go to their own
    profile. Referer-based forwarding is not implemented yet.
    """
    def get(self):
        routing_map = self.component('routing_map')
        # Not authenticated yet -> send to the login page.
        if not self.session.get('auth'):
            return self.redirect(routing_map.resolve('authentication'))
        auth = self.session.get('auth')
        # Master accounts land on the admin profile listing.
        if auth['access_mode'] == AccessMode.MASTER:
            return self.redirect(routing_map.resolve('admin.profile.list'))
        # No referer recorded: show the user their own profile.
        if not self.session.get('referer'):
            return self.redirect(routing_map.resolve('user.profile', key = auth['username']))
        referer = self.session.get('referer')
        # BUG FIX: the original `raise NotImplemented('To be implemented')`
        # raised TypeError (the NotImplemented singleton is not callable);
        # NotImplementedError is the intended exception. The trailing
        # redirect after it was unreachable and has been removed.
        raise NotImplementedError('To be implemented')
|
[
"jnopporn@shiroyuki.com"
] |
jnopporn@shiroyuki.com
|
afe11d1033c0af9c586eb0ac088d3f083c15409b
|
44d62a165f943ca752795be97b5921767c034f29
|
/iblrig/frame2TTL.py
|
e20a493f101e1a7f882816f9d47bac791410a1f8
|
[
"MIT"
] |
permissive
|
magically-solutions-india/iblrig
|
1d2c52d9966f3b3c9f681fde37a9926e35b62542
|
0d428591bd91d22feed5254f36a5dc9e104d4ff5
|
refs/heads/master
| 2020-12-20T03:41:08.808129
| 2020-01-22T10:07:55
| 2020-01-22T10:07:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,300
|
py
|
import logging
import struct
import numpy as np
import serial
import iblrig.alyx
import iblrig.params
log = logging.getLogger('iblrig')
class Frame2TTL(object):
    """Driver for a Frame2TTL screen-luminance sensor on a serial port.

    Wraps the device's single-letter command protocol: 'V' reads one sample,
    'S' toggles USB streaming, 'C' enters threshold-configuration mode and
    'T' writes the light/dark thresholds. Sensor readings are inversely
    related to brightness (lower value = brighter; see suggest_thresholds).
    """
    def __init__(self, serial_port):
        # Opens the port immediately; connect() flips self.connected.
        self.serial_port = serial_port
        self.connected = False
        self.ser = self.connect(serial_port)
        # Device power-on defaults for the detection thresholds.
        self.light_threshold = 40
        self.dark_threshold = 80
        self.streaming = False
        # Filled in by measure_black()/measure_white()/calc_recomend_thresholds().
        self.measured_black = None
        self.measured_white = None
        self.recomend_dark = None
        self.recomend_light = None
    def connect(self, serial_port) -> serial.Serial:
        """Open *serial_port* at 115200 baud (1 s timeout) and return it."""
        ser = serial.Serial(port=serial_port, baudrate=115200, timeout=1)
        self.connected = True
        return ser
    def close(self) -> None:
        """Close connection to serial port"""
        self.ser.close()
        self.connected = False
    def start_stream(self) -> None:
        """Enable streaming to USB (stream rate 100Hz)."""
        self.ser.write(struct.pack('cB', b'S', 1))
        self.streaming = True
    def stop_stream(self) -> None:
        """Disable streaming to USB"""
        self.ser.write(struct.pack('cB', b'S', 0))
        self.streaming = False
    def read_value(self) -> int:
        """Read one value from sensor (current), as a little-endian uint32."""
        self.ser.write(b'V')
        response = self.ser.read(4)
        # print(np.frombuffer(response, dtype=np.uint32))
        response = int.from_bytes(response, byteorder='little')
        return response
    def measure_photons(self, num_samples: int = 250) -> dict:
        """Measure <num_samples> values from the sensor and return basic stats.

        Returns a dict with mean/max/min/std/SEM of the samples plus the
        sample count, all as floats. Samples are taken ~1 ms apart.
        """
        import time
        sample_sum = []
        for i in range(num_samples):
            sample_sum.append(self.read_value())
            time.sleep(0.001)
        out = {
            'mean_value': float(np.array(sample_sum).mean()),
            'max_value': float(np.array(sample_sum).max()),
            'min_value': float(np.array(sample_sum).min()),
            'std_value': float(np.array(sample_sum).std()),
            'sem_value': float(np.array(sample_sum).std() / np.sqrt(num_samples)),
            'nsamples': float(num_samples)
        }
        return out
    def set_thresholds(self, dark=None, light=None) -> None:
        """Set light, dark, or both thresholds for the device.

        None means "keep the current cached value". Raises ConnectionError if
        the device does not acknowledge configuration mode with byte 218.
        """
        if dark is None:
            dark = self.dark_threshold
        if light is None:
            light = self.light_threshold
        self.ser.write(b'C')
        response = self.ser.read(1)
        if response[0] != 218:
            # 218 is the device's ack byte for entering config mode.
            raise(ConnectionError)
        # Device wants light threshold before dark
        self.ser.write(struct.pack('<BHH', ord('T'), int(light), int(dark)))
        if light != self.light_threshold:
            log.info(f"Light threshold set to {light}")
        if dark != self.dark_threshold:
            log.info(f"Dark threshold set to {dark}")
        if light == 40 and dark == 80:
            log.info(f"Resetted to default values: light={light} - dark={dark}")
        self.dark_threshold = dark
        self.light_threshold = light
    def measure_white(self):
        """Sample 1000 readings of a white screen and cache/return the stats."""
        log.info("Measuring white...")
        self.measured_white = self.measure_photons(1000)
        return self.measured_white
    def measure_black(self):
        """Sample 1000 readings of a black screen and cache/return the stats."""
        log.info("Measuring black...")
        self.measured_black = self.measure_photons(1000)
        return self.measured_black
    def calc_recomend_thresholds(self):
        """Derive recommended thresholds from cached black/white measurements.

        Returns (dark, light) on success, or -1 when measurements are missing
        or too close together for reliable frame detection.
        """
        if (self.measured_black is None) or (self.measured_white is None):
            log.error("No mesures exist")
            return -1
        # Lower reading = brighter, so white's max is the light threshold.
        self.recomend_light = self.measured_white.get('max_value')
        if self.measured_black['min_value'] - self.recomend_light > 40:
            self.recomend_dark = self.recomend_light + 40
        else:
            # Place the dark threshold a third of the way into the gap.
            self.recomend_dark = round(self.recomend_light + (
                (self.measured_black['min_value'] - self.recomend_light) / 3))
        if self.recomend_dark - self.recomend_light < 5:
            log.error('Cannot recommend thresholds:'),
            log.error('Black and White measurements may be too close for accurate frame detection')
            log.error(f'Light = {self.recomend_light}, Dark = {self.recomend_dark}')
            return -1
        else:
            log.info('Recommended thresholds:')
            log.info(f'Light ={self.recomend_light}, Dark = {self.recomend_dark}.')
            print('Done')
            return self.recomend_dark, self.recomend_light
    def set_recommendations(self):
        """Push the previously calculated recommended thresholds to the device."""
        log.info(f'Sending thresholds to device...')
        self.set_thresholds(dark=self.recomend_dark, light=self.recomend_light)
    def suggest_thresholds(self) -> None:
        """Interactive calibration: measure white then black (10k samples each),
        compute recommended thresholds and write them to the device."""
        input("Set pixels under Frame2TTL to white (rgb 255,255,255) and press enter >")
        print(" ")
        print("Measuring white...")
        white_data = self.measure_photons(10000)
        input("Set pixels under Frame2TTL to black (rgb 0,0,0) and press enter >")
        print(" ")
        print("Measuring black...")
        dark_data = self.measure_photons(10000)
        print(" ")
        light_max = white_data.get('max_value')
        dark_min = dark_data.get('min_value')
        print(f"Max sensor reading for white (lower is brighter) = {light_max}.")
        print(f"Min sensor reading for black = {dark_min}.")
        recomend_light = light_max
        if dark_min - recomend_light > 40:
            recomend_dark = recomend_light + 40
        else:
            # Same gap heuristic as calc_recomend_thresholds().
            recomend_dark = round(
                recomend_light + ((dark_min - recomend_light) / 3))
        if recomend_dark - recomend_light < 5:
            print('Error: Cannot recommend thresholds:',
                  'light and dark measurements may be too close for accurate frame detection')
        else:
            log.info(f"Recommended thresholds: Light = {recomend_light}, Dark = {recomend_dark}.")
            log.info(f"Sending thresholds to device...")
            self.recomend_dark = recomend_dark
            self.recomend_light = recomend_light
            self.set_thresholds(light=recomend_light, dark=recomend_dark)
            print('Done')
def get_and_set_thresholds():
    """Load F2TTL calibration from the params file and push it to the device.

    Returns 0 on success. Raises KeyError naming the missing parameter when
    the device has never been calibrated.
    """
    params = iblrig.params.load_params_file()
    for k in params:
        if 'F2TTL' in k and params[k] is None:
            log.error(f"Missing parameter {k}, please calibrate the device.")
            # BUG FIX: the original `raise(KeyError)` carried no key and was
            # followed by an unreachable `return -1`; raise with the
            # offending parameter name instead.
            raise KeyError(k)
    dev = Frame2TTL(params['COM_F2TTL'])
    dev.set_thresholds(dark=params['F2TTL_DARK_THRESH'], light=params['F2TTL_LIGHT_THRESH'])
    log.info("Frame2TTL: Thresholds set.")
    return 0
if __name__ == "__main__":
    # Ad-hoc hardware smoke test: exercises single reads, the stats helper,
    # threshold writes (defaults, both, light-only, dark-only) and the
    # interactive calibration flow against a sensor on COM3.
    com_port = 'COM3'
    f = Frame2TTL(com_port)
    print(f.read_value())
    print(f.measure_photons())
    f.set_thresholds()
    f.set_thresholds(light=41, dark=81)
    f.set_thresholds(light=41)
    f.set_thresholds(dark=81)
    f.suggest_thresholds()
    print('.')
|
[
"nbonacchi@gmail.com"
] |
nbonacchi@gmail.com
|
323314a44358a166ba601398ae2f8308055495fa
|
cfc46fd56c16ac9c010bcf0c1eb50da3047d1b2b
|
/toscametrics/general/text_entropy.py
|
d41f0bfbd5d377be29b2831b990d4c15a8b08cbf
|
[
"Apache-2.0"
] |
permissive
|
radon-h2020/radon-tosca-metrics
|
d93ef5b3dc53c7863ba98a985919237fe6c4aadf
|
d0a10e10f2d897299a04f69290f09d5589bc039f
|
refs/heads/master
| 2021-08-24T13:53:43.207745
| 2021-07-06T08:44:00
| 2021-07-06T08:44:00
| 242,997,596
| 3
| 0
|
Apache-2.0
| 2021-03-29T13:47:46
| 2020-02-25T12:45:05
|
Python
|
UTF-8
|
Python
| false
| false
| 811
|
py
|
import re
from math import log2
from toscametrics import utils
from toscametrics.blueprint_metric import BlueprintMetric
def splitter(x):
    """Normalize *x* to a string, collapse each whitespace run to a single
    space, and split on spaces (preserving empty leading/trailing tokens)."""
    collapsed = re.sub(r'\s+', ' ', str(x))
    return collapsed.split(' ')
class TextEntropy(BlueprintMetric):
    """ This class measures the blueprint's Shannon entropy for keywords frequencies """

    def count(self):
        """Return the Shannon entropy (bits) of the blueprint's key/value
        tokens, rounded to two decimals (0.0 for an empty blueprint)."""
        words_list = utils.all_keys(self.blueprint)
        words_list.extend(utils.all_values(self.blueprint))
        words_list = [item for sublist in map(splitter, words_list) for item in sublist]
        # PERF FIX: the original called words_list.count(w) once per unique
        # word, which is O(n * unique) and quadratic on large blueprints.
        # Build the frequency table in a single pass instead.
        freq = {}
        for w in words_list:
            freq[w] = freq.get(w, 0) + 1
        total = len(words_list)
        entropy = 0
        for occurrences in freq.values():
            p = occurrences / total
            entropy -= p * log2(p)
        return round(entropy, 2)
|
[
"stefano.dallapalma0@gmail.com"
] |
stefano.dallapalma0@gmail.com
|
c5fa70baf76c52c1e7edd22a04caa0f0124d5192
|
2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
|
/python/python_10971.py
|
1e427429e1f1fb08ee4d28ae4224b1b47e60d17e
|
[] |
no_license
|
AK-1121/code_extraction
|
cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
|
5297a4a3aab3bb37efa24a89636935da04a1f8b6
|
refs/heads/master
| 2020-05-23T08:04:11.789141
| 2015-10-22T19:19:40
| 2015-10-22T19:19:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
# sampling random floats on a range in numpy
# NOTE(review): assumes `np` (numpy) is already imported in the calling
# context -- this snippet has no import of its own.
np.random.uniform(5,10) # A single value
np.random.uniform(5,10,[2,3]) # A 2x3 array
|
[
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] |
ubuntu@ip-172-31-7-228.us-west-2.compute.internal
|
2450814533832c1abc008339e64b4cf98eedf527
|
0206ac23a29673ee52c367b103dfe59e7733cdc1
|
/src/fao_un/fao_explore_text_file.py
|
8f720005daa4892cb920451ae2460557678d06ed
|
[] |
no_license
|
guziy/RPN
|
2304a93f9ced626ae5fc8abfcc079e33159ae56a
|
71b94f4c73d4100345d29a6fbfa9fa108d8027b5
|
refs/heads/master
| 2021-11-27T07:18:22.705921
| 2021-11-27T00:54:03
| 2021-11-27T00:54:03
| 2,078,454
| 4
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
__author__ = 'huziy'
def main():
    # Inspect a 30-arcsecond slope-class ASCII grid: print the first 6 lines
    # (the ESRI ASCII grid header), then per-row min/max/count statistics for
    # rows 3001-3999.
    path = "/skynet3_rech1/huziy/Global_terrain_slopes_30s/GloSlopesCl1_30as.asc"
    with open(path) as f:
        for i, line in enumerate(f):
            if i < 6:
                print(line)
            if 3000 < i < 4000:
                nums = [int(s.strip()) for s in line.split()]
                # NOTE(review): 255 appears to be the grid's no-data value -- confirm.
                nums = [n for n in nums if n != 255]
                if len(nums):
                    print(min(nums), max(nums), len(nums))
if __name__ == "__main__":
    main()
|
[
"guziy.sasha@gmail.com"
] |
guziy.sasha@gmail.com
|
e61562e0b288dce372a49eb34d6462236ab75a15
|
3a117858e61c87b703c694d8e1b6da61e6074851
|
/src/.history/Test/HiwinRT605_Strategy_testsocket_20190626140818.py
|
ae6dddd6e9834397236b8fe1d7181f7c4ff0b95c
|
[
"MIT"
] |
permissive
|
SamKaiYang/ROS_Socket
|
4daa2c66181a76038e20161a44f1362084b6bd58
|
aa131496617cec0a9c32141565faa668db738eb9
|
refs/heads/master
| 2020-05-25T22:02:02.468922
| 2019-07-18T09:02:35
| 2019-07-18T09:02:35
| 188,009,003
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,551
|
py
|
#!/usr/bin/env python3
# license removed for brevity
#策略 機械手臂 四點來回跑
import threading
import rospy
import os
import numpy as np
from std_msgs.msg import String
from ROS_Socket.srv import *
from ROS_Socket.msg import *
import math
import enum
import Hiwin_RT605_Arm_Command as ArmTask
##----Arm state-----------
# Module-level state shared between the ROS service callbacks below and the
# strategy loop (Mission_Trigger).
Arm_state_flag = 0       # last state reported by the arm (see Arm_status)
Strategy_flag = 0        # True when the arm is ready for the next command
Sent_data_flag = True    # socket side finished sending data
##----Arm status enum
class Arm_status(enum.IntEnum):
    # States the arm controller reports over the arm_state service.
    Idle = 0
    Isbusy = 1
    Error = 2
    shutdown = 6
##-----------server feedback arm state----------
def Arm_state(req):
global CurrentMissionType,Strategy_flag,Arm_state_flag
Arm_state_flag = int('%s'%req.Arm_state)
if Arm_state_flag == Arm_status.Isbusy: #表示手臂忙碌
Strategy_flag = False
return(1)
if Arm_state_flag == Arm_status.Idle: #表示手臂準備
Strategy_flag = True
return(0)
if Arm_state_flag == Arm_status.shutdown: #表示程式中斷
Strategy_flag = 6
return(6)
##-----------server feedback Sent_flag----------
def Sent_flag(req):
    """ROS service callback: latch whether the socket layer has finished
    sending data, then acknowledge with 1."""
    global Sent_data_flag
    Sent_data_flag = int(str(req.sent_flag))
    return 1
def arm_state_server():
    # Register the two ROS services the socket side uses to report arm state
    # and data-sent acknowledgements. Spinning is left to the main block.
    #rospy.init_node(NAME)
    s = rospy.Service('arm_state',arm_state, Arm_state) ##server arm state
    a = rospy.Service('sent_flag',sent_flag,Sent_flag)
    #rospy.spin() ## spin one
##-----------switch define------------##
class switch(object):
    """C-style switch/case with fall-through, used as `for case in switch(x):`.

    Iterating yields the match() predicate exactly once. case(v, ...) returns
    True when x matches (arming fall-through for subsequent cases); case()
    with no arguments is the default arm.
    """
    def __init__(self, value):
        self.value = value
        self.fall = False
    def __iter__(self):
        """Return the match method once, then stop"""
        yield self.match
        # BUG FIX: the original `raise StopIteration` here is converted to a
        # RuntimeError by PEP 479 on Python 3.7+, crashing every
        # `for case in switch(...)` loop at the end of its first iteration.
        # Falling off the end of the generator terminates it cleanly.
    def match(self, *args):
        """Indicate whether or not to enter a case suite"""
        if self.fall or not args:
            return True
        elif self.value in args: # changed for v1.5, see below
            self.fall = True
            return True
        else:
            return False
##------------class-------
class point():
    """Mutable 6-DOF pose record: position (x, y, z) plus orientation
    (pitch, roll, yaw)."""
    def __init__(self, x, y, z, pitch, roll, yaw):
        # Bind each coordinate to the attribute of the same name.
        for name, value in zip(('x', 'y', 'z', 'pitch', 'roll', 'yaw'),
                               (x, y, z, pitch, roll, yaw)):
            setattr(self, name, value)
pos = point(0,36.8,11.35,-90,0,0)  # current target pose, mutated by Mission_Trigger
##-------------------------strategy---------------------
action = 0  # index of the next waypoint (0-4), advanced by Mission_Trigger
def Mission_Trigger(ItemNo):
    # Waypoint state machine: when the arm is idle, command it to waypoint
    # `ItemNo` (four corners plus a center point) and advance the global
    # `action` to the next waypoint; any other ItemNo triggers shutdown.
    global action,Arm_state_flag,Sent_data_flag
    if Arm_state_flag == Arm_status.Idle:
        # Sent_data_flag = False
        for case in switch(ItemNo): # send the selected arm motion command to the socket side
            if case(0):
                pos.x = 10
                pos.y = 36.8
                pos.z = 11.35
                pos.pitch = -90
                pos.roll = 0
                pos.yaw = 0
                action = 1
                print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
                #ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                ArmTask.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
                ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                break
            if case(1):
                pos.x = 10
                pos.y = 42
                pos.z = 11.35
                pos.pitch = -90
                pos.roll = 0
                pos.yaw = 0
                action = 2
                print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
                #ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                ArmTask.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
                ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                break
            if case(2):
                pos.x = -10
                pos.y = 42
                pos.z = 11.35
                pos.pitch = -90
                pos.roll = 0
                pos.yaw = 0
                action = 3
                print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
                #ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                ArmTask.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
                ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                break
            if case(3):
                pos.x = -10
                pos.y = 36.8
                pos.z = 11.35
                pos.pitch = -90
                pos.roll = 0
                pos.yaw = 0
                action = 4
                print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
                #ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                ArmTask.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
                ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                break
            if case(4):
                pos.x = 0
                pos.y = 36.8
                pos.z = 11.35
                pos.pitch = -90
                pos.roll = 0
                pos.yaw = 0
                action = 5
                print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
                #ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                ArmTask.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
                ArmTask.strategy_client_Arm_Mode(2,1,0,10,2)#action,ra,grip,vel,both
                break
            if case(): # default, could also just omit condition or 'if True'
                rospy.on_shutdown(myhook)
                ArmTask.rospy.on_shutdown(myhook)
    # Legend for the strategy_client_Arm_Mode(...) arguments:
    #action: ptp line
    #ra : abs rel
    #grip : gripper
    #vel : speed
    #both : Ctrl_Mode
##-------------strategy end ------------
def myhook():
    """Shutdown hook: announce that the strategy node is going down."""
    print("shutdown time!")
if __name__ == '__main__':
    argv = rospy.myargv()
    rospy.init_node('strategy', anonymous=True)
    GetInfoFlag = True #Test no data
    arm_state_server()
    start_input=int(input('開始策略請按1,離開請按3 : ')) # read the operator's start command
    # NOTE(review): this hard-coded override ignores the operator's answer and
    # always starts the strategy -- confirm whether it is leftover debug code.
    start_input = 1
    if start_input==1:
        # BUG FIX: the original `threading.Timer(5, Mission_Trigger(action))`
        # called Mission_Trigger immediately and handed its return value
        # (None) to the timer, so nothing ran after the 5 s delay. Pass the
        # callable and its argument separately instead.
        timer = threading.Timer(5, Mission_Trigger, args=(action,))
        timer.start()
        ##while 1:
            #Mission_Trigger(action)
    if start_input == 3:
        pass
    ArmTask.rospy.spin()
    rospy.spin()
|
[
"tt00621212@gmail.com"
] |
tt00621212@gmail.com
|
c2c096ac99f6a8ef635bfe244d6f1b363cafbb99
|
25bb4e760769cc483a20f27b6312698891dce034
|
/algorithms/dynamic programming/fibonacci-modified-English.py
|
43e913d6860797d42644fb453e498a9e8ba33f17
|
[] |
no_license
|
rangaeeeee/codes-hackerrank
|
e13d22adff1ef74974e34251d9bfac6cfd36f2b0
|
ce7fdf7f336c10164fd2f779d4ed3713849d7c2b
|
refs/heads/master
| 2021-01-19T17:07:28.451983
| 2017-09-01T18:05:33
| 2017-09-01T18:05:33
| 101,049,197
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 291
|
py
|
# Read input from STDIN. Print output to STDOUT.
# Fibonacci-modified: t(i) = t(i-2) + t(i-1)^2, seeded with t1 and t2;
# print the n-th term.
t1, t2, n = map(int, input().split())
terms = {1: t1, 2: t2}
for i in range(3, n + 1):
    terms[i] = terms[i - 2] + terms[i - 1] ** 2
print(terms[n])
|
[
"rrangarajan.85@gmail.com"
] |
rrangarajan.85@gmail.com
|
812f0756e1b6b47085b05cdfe5bb31706fea3a48
|
d91a61442d0ba0547301f54e7f63039ab5346198
|
/for/exe_5_soma_impares.py
|
e24ac2e7564b8fc8ec7eb560600909bf26b37fd9
|
[] |
no_license
|
pedroceciliocn/programa-o-1
|
a41bf89475939ad1791320c8fa4845744921ec19
|
15f8a3dbc13f4cbbc15f2b626ccef47fe145cc3f
|
refs/heads/main
| 2023-07-12T20:06:20.632304
| 2021-08-17T22:05:57
| 2021-08-17T22:05:57
| 377,301,280
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 377
|
py
|
"""
Somar os inteiros ímpares entre dois valores
inteiros informados pelo usuário.
"""
n_1 = int(input("De o primeiro valor do intervalo: "))
n_2 = int(input("De o segundo valor do intervalo: "))
s = 0
if n_1 > n_2:
n_1, n_2 = n_2, n_1
for i in range(n_1, n_2 + 1):
if i % 2 != 0:
s += i
print(f"+ {s}")
print("------")
print(f"s = {s}")
|
[
"pedroceciliocn@gmail.com"
] |
pedroceciliocn@gmail.com
|
fb3de3e1e992cf59a0db77e1e77140ba3f9e8072
|
64dfafd940d77129b38865ccac869aadffb868ff
|
/natural_satmass_plots.py
|
0a3c6e64ef4ce8f400172a6e12b45c5a341759a2
|
[] |
no_license
|
RaymondSimons/foggie_local
|
242d4ef5fa814d44747b2bc38825433ade73de9f
|
f10bc56ef90b274d08fc81c8d02ddd9b653dfd19
|
refs/heads/master
| 2021-07-07T18:03:27.996681
| 2020-07-30T16:34:40
| 2020-07-30T16:34:40
| 147,853,943
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,178
|
py
|
# Plot per-satellite stellar/dark-matter/gas mass histories and star-formation
# rates for the four "natural" FOGGIE simulation variants, saving one mass
# figure and one SFR figure per satellite.
# NOTE(review): this script assumes an interactive pylab-style namespace --
# `plt`, `np`, `arange`, `array` and `where` are used without imports, and
# `print sat_n` below is Python-2 syntax; it will not run as-is on Python 3.
import glob
from glob import glob
from astropy.io import fits
from scipy import interpolate
plt.rcParams['text.usetex'] = True
plt.ioff()
plt.close('all')
# Lookup table mapping data-dump number (row 0) to cosmic time in Gyr (row 2).
DD_to_t = np.load('/Users/rsimons/Dropbox/rcs_foggie/outputs/DD_time.npy')[()]
#DDs = arange(44, 800)
DDs = arange(44, 800)
sat_ns = arange(0, 12)
# Per-simulation plot styling (5 entries kept from when 'forced' was included).
clrs = ['blue', 'navy', 'darkblue', 'royalblue', 'red']
ls = ['-', '-', ':', '--','-.']
lw = [2,2,2,2,2]
alp = [0.8, 0.8, 0.8, 0.8, 0.8]
#for s, sim in enumerate(array(['forced', 'natural(v1)', 'natural(v2)', 'natural(v3)', 'natural(v4)'])):
for sat_n in sat_ns:
    print sat_n
    fig, axes = plt.subplots(2,2, figsize = (10, 10))
    fig2, ax2 = plt.subplots(1,1, figsize = (12, 6))
    for s, sim in enumerate(array(['natural(v1)', 'natural(v2)', 'natural(v3)', 'natural(v4)'])):
        # Time series accumulated over all data dumps for this simulation.
        ts = []
        ms = []
        mg = []
        sf = []
        dm = []
        R_90 = []
        #if sim == 'forced': DDs_use = arange(400, 1000)
        #elif sim == 'natural(v1)': DDs_use = arange(44, 800)
        #else: DDs_use = DDs
        DDs_use = DDs
        for DD in DDs_use:
            # Map the simulation label to its on-disk directory / file prefix
            # and the radial-profile index to read masses at.
            if sim == 'forced': simname = 'nref11n_nref10f'
            if sim == 'natural(v1)':
                simname = 'natural'
                dirname = 'natural'
                ind_use = 50
            if sim == 'natural(v2)':
                dirname = 'natural_v2'
                simname = 'nref11n_v2_selfshield_z15'
                ind_use = 0
            if sim == 'natural(v3)':
                dirname = 'natural_v3'
                simname = 'nref11n_v3_selfshield_z15'
                ind_use = 0
            if sim == 'natural(v4)':
                dirname = 'natural_v4'
                simname = 'nref11n_v4_selfshield_z15'
                ind_use = 0
            #fl = glob('/Users/rsimons/Dropbox/rcs_foggie/cenmass/%s_DD%.4i_mass.fits'%(simname, DD))
            fl_name = '/Users/rsimons/Dropbox/rcs_foggie/satmass/%s/%s_DD%.4i_mass_sat%.2i.fits'%(dirname, simname, DD, sat_n)
            fl = glob(fl_name)
            if len(fl) > 0:
                # Some dumps are missing or corrupt; skip them silently.
                try:
                    a = fits.open(fl[0])
                    gd = where(DD_to_t[0] == DD)
                    t = DD_to_t[2][gd][0]
                    frac_mass = a['STARS_MASS'].data[:]/a['STARS_MASS'].data[-1]
                    ts.append(t)
                    ms.append(a['STARS_MASS'].data[ind_use])
                    mg.append(a['GAS_TOT'].data[ind_use])
                    # Young-star mass over 2e7 yr window -> SFR in Msun/yr.
                    sf.append(a['STARS_YOUNGMASS'].data[ind_use]/2.e7)
                    dm.append(a['DARK_MATTER'].data[ind_use])
                    #f = interpolate.interp1d(frac_mass, a['DISTANCE'].data)
                    #R_90.append(f(0.9))
                except:
                    pass
        axes[0,0].plot(ts, ms, label = sim, color = clrs[s], linestyle = ls[s], linewidth = lw[s], alpha = alp[s])
        axes[0,1].plot(ts, dm, label = sim, color = clrs[s], linestyle = ls[s], linewidth = lw[s], alpha = alp[s])
        axes[1,0].plot(ts, mg, label = sim, color = clrs[s], linestyle = ls[s], linewidth = lw[s], alpha = alp[s])
        #axes[1,1].plot(ts, R_90, label = sim, color = clrs[s], linestyle = ls[s], linewidth = lw[s], alpha = alp[s])
        ax2.plot(ts, sf, label = sim, color = clrs[s], linestyle = ls[s], linewidth = 1, alpha = alp[s])
    axes[0,0].legend(loc = 2)
    ax2.legend(loc = 2)
    fs = 12
    axes[0,0].set_ylabel('M$_*$ (M$_{\odot}$)', fontsize = fs)
    axes[0,1].set_ylabel('M$_{DM}$ (M$_{\odot}$)', fontsize = fs)
    axes[1,0].set_ylabel('M$_{g}$ (M$_{\odot}$)', fontsize = fs)
    axes[1,1].set_ylabel(r'r$_{*,90}$ (kpc)', fontsize = fs)
    ax2.set_ylabel('star formation rate (M$_{\odot}$ yr$^{-1}$)', fontsize = fs)
    for ax in axes.ravel(): ax.set_xlim(1, 5.5)
    ax2.set_xlim(1, 5.5)
    axes[1,1].axis('off')
    for ax in axes.ravel(): ax.set_xlabel('time (Gyr)', fontsize = fs)
    ax2.set_xlabel('time (Gyr)', fontsize = fs)
    fig.savefig('/Users/rsimons/Dropbox/rcs_foggie/figures/butterfly_sats/%i_mass.png'%sat_n, dpi = 300)
    fig2.savefig('/Users/rsimons/Dropbox/rcs_foggie/figures/butterfly_sats/%i_SFR.png'%sat_n, dpi = 300)
    plt.close('all')
|
[
"rsimons@jhu.edu"
] |
rsimons@jhu.edu
|
bd2be70e948d6957ceca507b2d58feba34736b39
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/network/azure-mgmt-network/generated_samples/hub_route_table_delete.py
|
6e2f314f2351a185a63bd8d45de74648f9983c8e
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,554
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-network
# USAGE
python hub_route_table_delete.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    # AutoRest-generated sample: delete hub route table "hubRouteTable1" from
    # virtual hub "virtualHub1" in resource group "rg1".
    client = NetworkManagementClient(
        credential=DefaultAzureCredential(),  # reads AZURE_* environment variables
        subscription_id="subid",
    )
    client.hub_route_tables.begin_delete(
        resource_group_name="rg1",
        virtual_hub_name="virtualHub1",
        route_table_name="hubRouteTable1",
    ).result()  # block until the long-running delete operation completes
# x-ms-original-file: specification/network/resource-manager/Microsoft.Network/stable/2023-04-01/examples/HubRouteTableDelete.json
if __name__ == "__main__":
    main()
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
70fe8e8b64018de3046f650a5b1487419d30a178
|
af259acdd0acd341370c9d5386c444da6a7a28a6
|
/Deep-Learning-in-Python/04-Fine-tuning-keras-models/02-Changing-optimization-parameters.py
|
de78e0ef25b6108c14e0a39d883bc0b1c62905bf
|
[] |
no_license
|
pace-noge/datacamp
|
fcd544f6478040660f7149b1a37bfd957eef9747
|
eeffb8af233e7304c0f122a48e6b4f78ee7c650e
|
refs/heads/master
| 2020-07-04T12:41:29.635167
| 2019-09-17T10:11:39
| 2019-09-17T10:11:39
| 202,289,041
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,013
|
py
|
"""
Changing optimization parameters
It's time to get your hands dirty with optimization. You'll now try optimizing a model at a very low learning rate, a very high learning rate, and a "just right" learning rate. You'll want to look at the results after running this exercise, remembering that a low value for the loss function is good.
For these exercises, we've pre-loaded the predictors and target values from your previous classification models (predicting who would survive on the Titanic). You'll want the optimization to start from scratch every time you change the learning rate, to give a fair comparison of how each learning rate did in your results. So we have created a function get_new_model() that creates an unoptimized model to optimize.
INSTRUCTION
-----------
Import SGD from keras.optimizers.
Create a list of learning rates to try optimizing with called lr_to_test. The learning rates in it should be .000001, 0.01, and 1.
Using a for loop to iterate over lr_to_test:
Use the get_new_model() function to build a new, unoptimized model.
Create an optimizer called my_optimizer using the SGD() constructor with keyword argument lr=lr.
Compile your model. Set the optimizer parameter to be the SGD object you created above, and because this is a classification problem, use 'categorical_crossentropy' for the loss parameter.
Fit your model using the predictors and target.
"""
# Import the SGD optimizer
from keras.optimizers import SGD
# Create list of learning rates: lr_to_test
lr_to_test = [.000001, 0.01, 1]
# Loop over learning rates
for lr in lr_to_test:
print('\n\nTesting model with learning rate: %f\n'%lr )
# Build new model to test, unaffected by previous models
model = get_new_model()
# Create SGD optimizer with specified learning rate: my_optimizer
my_optimizer = SGD(lr=lr)
# Compile the model
model.compile(optimizer=my_optimizer, loss='categorical_crossentropy')
# Fit the model
model.fit(predictors, target)
|
[
"noreply@github.com"
] |
pace-noge.noreply@github.com
|
5264e111233945faf45b9aefd6d120ef0d823acf
|
8a5ab3d33e3b653c4c64305d81a85f6a4582d7ac
|
/PySide/QtXml/QDomDocumentFragment.py
|
05e5e380013867b6c5bff79ce5d82cf2508b4f84
|
[
"Apache-2.0"
] |
permissive
|
sonictk/python-skeletons
|
be09526bf490856bb644fed6bf4e801194089f0d
|
49bc3fa51aacbc2c7f0c7ab86dfb61eefe02781d
|
refs/heads/master
| 2020-04-06T04:38:01.918589
| 2016-06-09T20:37:43
| 2016-06-09T20:37:43
| 56,334,503
| 0
| 0
| null | 2016-04-15T16:30:42
| 2016-04-15T16:30:42
| null |
UTF-8
|
Python
| false
| false
| 801
|
py
|
# encoding: utf-8
# module PySide.QtXml
# from /corp.blizzard.net/BFD/Deploy/Packages/Published/ThirdParty/Qt4.8.4/2015-05-15.163857/prebuilt/linux_x64_gcc41_python2.7_ucs4/PySide/QtXml.so
# by generator 1.138
# no doc
# no imports
from QDomNode import QDomNode
class QDomDocumentFragment(QDomNode):
    # Auto-generated PySide binding stub: the real implementations live in
    # the compiled QtXml extension; these bodies exist only so IDEs and
    # introspection tools can see the API surface.
    # no doc

    def nodeType(self, *args, **kwargs): # real signature unknown
        pass

    def __copy__(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass

    def __nonzero__(self): # real signature unknown; restored from __doc__
        """ x.__nonzero__() <==> x != 0 """
        pass

    # Instances are created by the C++ layer, not via Python __new__.
    __new__ = None
|
[
"yliangsiew@blizzard.com"
] |
yliangsiew@blizzard.com
|
cba3267fc1a634b25b0ca18b959b737bd7ad0cde
|
9f1db0cce0aebc6c14c7e3c4adfa424bbc20c968
|
/project/lit/migrations/0008_auto_20151112_0935.py
|
e03309c46b32757228973dba1d3bbb8e0d673561
|
[] |
no_license
|
JoshAddington/hawc
|
6b8f9efb0dec2aeb972a66a636dfc7493502003b
|
28cb8a2690ba934134c4dd4660d51e573cd6b895
|
refs/heads/master
| 2020-12-25T17:16:05.746517
| 2016-06-10T18:03:32
| 2016-06-10T18:03:32
| 44,217,584
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 555
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import utils.models
class Migration(migrations.Migration):
    # Auto-generated Django migration: alters the help_text/blank options on
    # Reference.full_text_url. Do not hand-edit once applied to a database.

    dependencies = [
        ('lit', '0007_auto_20151103_0925'),
    ]

    operations = [
        migrations.AlterField(
            model_name='reference',
            name='full_text_url',
            field=utils.models.CustomURLField(help_text=b'Link to full-text publication (may require increased access privileges, only reviewers and team-members)', blank=True),
        ),
    ]
|
[
"shapiromatron@gmail.com"
] |
shapiromatron@gmail.com
|
69a804c507365deb078ecc69332d2ffa90e580e1
|
fac4c2fa64e6a22d0a80eec7b65c93d7a6236b7f
|
/original-modules/text-to-text-transfer-transformer-master/t5/models/gin/objectives/span.gin
|
2da19b4e5ebab684ee90690c65fafe40978d8775
|
[
"Apache-2.0"
] |
permissive
|
zouning68/nlp-transfer-learning
|
ec2b9e91f4b3bb9d77bf88dd78282f6ff5aaa4fd
|
e9b1544b55905ceb2235471f036abc1d7c4160db
|
refs/heads/master
| 2023-04-13T18:25:22.206475
| 2020-01-15T02:36:19
| 2020-01-15T02:36:19
| 228,514,893
| 2
| 0
|
Apache-2.0
| 2023-03-24T23:36:35
| 2019-12-17T02:21:15
|
Python
|
UTF-8
|
Python
| false
| false
| 1,801
|
gin
|
# -*-Python-*-
# Random pattern of noise and non-noise spans.
include 'objectives/denoise.gin'
preprocessors.unsupervised.preprocessors = [
@preprocessors.select_random_chunk,
@preprocessors.reduce_concat_tokens,
@preprocessors.split_tokens,
@preprocessors.denoise,
]
inputs_length = 512
noise_density = 0.15
preprocessors.denoise.noise_density = %noise_density
mean_noise_span_length = 3.0
preprocessors.denoise.inputs_fn = @preprocessors.noise_span_to_unique_sentinel
preprocessors.denoise.targets_fn = @preprocessors.nonnoise_span_to_unique_sentinel
preprocessors.denoise.noise_mask_fn = @preprocessors.random_spans_noise_mask
preprocessors.random_spans_noise_mask.mean_noise_span_length = %mean_noise_span_length
# Based on this combination of noise_mask_fn, inputs_fn, and targets_fn, we
# compute the exact split length and targets length so that the resulting
# training examples fit perfectly without any padding.
#
# These settings still work (but leave some padding) if the inputs_fn or
# targets_fn is switched to drop instead of replacing spans by sentinels.
# Compute the split length based on the other hyperparameters
preprocessors.split_tokens.max_tokens_per_segment = @preprocessors.random_spans_tokens_length()
targets_length = @preprocessors.random_spans_targets_length()
preprocessors.random_spans_helper.inputs_length = %inputs_length
preprocessors.random_spans_helper.noise_density = %noise_density
preprocessors.random_spans_helper.mean_noise_span_length = %mean_noise_span_length
preprocessors.random_spans_helper.extra_tokens_per_span_inputs = 1
preprocessors.random_spans_helper.extra_tokens_per_span_targets = 1
utils.run.sequence_length = {"inputs": %inputs_length, "targets": %targets_length}
sentencepiece_vocabulary.SentencePieceVocabulary.extra_ids = 100
|
[
"noreply@github.com"
] |
zouning68.noreply@github.com
|
530054b5f5bfcf224797ff7cb03ce8898da54858
|
6c5a2649bd7dce505e108d5ceb862d9a3912ca8b
|
/score_of_data.py
|
dc556bef64979e96be25a36395d5a45b68c9bbf1
|
[] |
no_license
|
maidao/machine_learning_Intermediate
|
680295ba8b9282312cd499151e57ef03e1a55283
|
7cb5a05b4bdd30f0555faa3705d7ab20793d61b5
|
refs/heads/master
| 2020-06-18T07:33:47.152520
| 2019-07-17T10:03:00
| 2019-07-17T10:03:00
| 196,215,591
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 335
|
py
|
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_absolute_error
def score_dataset(X_train, X_valid, y_train, y_valid):
    """Fit a small random forest on the training split and return the mean
    absolute error of its predictions on the validation split."""
    # Fixed seed and a small forest keep scoring fast and reproducible.
    regressor = RandomForestRegressor(n_estimators=10, random_state=0)
    regressor.fit(X_train, y_train)
    predictions = regressor.predict(X_valid)
    return mean_absolute_error(y_valid, predictions)
|
[
"md.maidao@gmail.com"
] |
md.maidao@gmail.com
|
2497cfe49693743af6f390ab5ee75b6bd96ebf9e
|
48894a44b01b055c9d88eb93f4472fa1da72441b
|
/run.py
|
ffab80d383c6be37ca0c1c76c9b9eb84500f98b7
|
[] |
no_license
|
Biking0/ambari_monitor
|
4f349846f5be9042854427979e844934db3881c0
|
137cd8d7b051253a616c6b106722cac539616610
|
refs/heads/master
| 2020-08-10T22:39:44.331360
| 2020-03-19T05:41:22
| 2020-03-19T05:41:22
| 214,435,780
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,088
|
py
|
#!/usr/bin/env python
# -*-coding:utf-8 -*-
#********************************************************************************
# ** 文件名称:run.py
# ** 功能描述:ambari 监控项目启动所有监控程序
# ** 输 入 表:
# ** 输 出 表:
# ** 创 建 者:hyn
# ** 创建日期:20191020
# ** 修改日志:
# ** 修改日期:
# *******************************************************************************
# ** 程序调用格式:nohup python run.py >> nohup.out &
# *******************************************************************************
import os
import time
import config
import service_monitor
import solr_monitor
# Entry point: run all monitors in an endless polling loop.
if __name__=='__main__':
    while True:
        # 1. Monitor the various Ambari services.
        service_monitor_object = service_monitor.ServiceMonitor()
        service_monitor_object.request_data()
        # 2. Monitor Solr.
        solr_monitor_object = solr_monitor.SolrMonitor()
        solr_monitor_object.request_data()
        # 3. Monitor Kafka consumption (not implemented here).
        # 4. Monitor Kafka logs (already covered by existing monitoring).
        # Sleep between polling rounds; interval comes from config.sleep_time.
        print('sleep 900s')
        time.sleep(config.sleep_time)
        #time.sleep(3)
|
[
"1342831695@qq.com"
] |
1342831695@qq.com
|
45a941542c6eb500c823b5d445f76412a2149b8a
|
1208ac3718420c4a118ab6b777d99980b85f952a
|
/112.py
|
aabd7878a6fe754ceb9cd8e4a711f5e799a5cd90
|
[] |
no_license
|
deimelperez/150_Py_challenges
|
6ab9aea77c9c117b682790bfe36fb5e280cb8afc
|
b58f55312e7abf30cb7cb6d68b249bb5dcd3c862
|
refs/heads/master
| 2023-03-13T02:30:15.095467
| 2021-03-04T19:02:11
| 2021-03-04T19:02:11
| 344,579,979
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 231
|
py
|
import csv  # kept from original; unused by this script's manual CSV write

# Append one book record (title, author, date) to '111 Books.csv'.
title = input('Enter title: ')
author = input('Enter author: ')
date = input('Enter date: ')

# Fix: the original referenced `tile` (typo for `title`), which raised a
# NameError before anything was written to the file.
newRecord = title + ',' + author + ',' + date + '\n'

# `with` guarantees the file is closed even if the write fails.
with open('111 Books.csv', 'a') as file:
    file.write(newRecord)
|
[
"deimelperez@gmail.com"
] |
deimelperez@gmail.com
|
69a692f69279f5075583c5453a69f1f6df3bec6c
|
079989df1765293d3536b98f841630246bb77a85
|
/information/models.py
|
69782d9228cdbcd29e08eba464dd17a09dce24be
|
[] |
no_license
|
R-Mielamud/ClasssixSite
|
fb74cf71acc55fd32b7be0b4365ff3b2099d5d7c
|
0d3bc58b4e13ddc29f0a7f2025a6c813d828433f
|
refs/heads/master
| 2020-12-27T08:10:52.040513
| 2020-07-09T19:19:15
| 2020-07-09T19:19:15
| 237,826,755
| 1
| 0
| null | 2020-02-10T13:18:41
| 2020-02-02T19:47:34
|
HTML
|
UTF-8
|
Python
| false
| false
| 723
|
py
|
from django.db.models import *
from diary.models import Subject
class TeacherData(Model):
    # Links a teacher's display name to the Subject they teach.
    full_name = CharField(max_length=100, default="")
    # Deleting a Subject cascades to its teacher-data rows.
    subject = ForeignKey(Subject, on_delete=CASCADE, related_name="teacher_data_sets")

    def __str__(self):
        # e.g. "Jane Doe | Mathematics"
        return "{} | {}".format(self.full_name, self.subject)

    class Meta:
        verbose_name_plural = "Teacher data sets"
class ScheduleSubject(Model):
    # One timetable slot: the `day_index`-th lesson on weekday `day_number`.
    # (Both default to 1 — presumably 1-based numbering; confirm with callers.)
    day_number = IntegerField(default=1)
    day_index = IntegerField(default=1)
    subject = CharField(default="", max_length=200)
    # Room/cabinet label, stored as text (default "31").
    cabinet = CharField(default="31", max_length=10)

    def __str__(self):
        return "{} {} {} {}".format(self.day_number, self.day_index, self.subject, self.cabinet)
|
[
"2m.roman2@gmail.com"
] |
2m.roman2@gmail.com
|
3810a9e948c34313e813a1fe7c302f115e7b368c
|
0a28bcde2499e6a41e16d88ed62cd2e80a5b464d
|
/hb_quant/huobi/service/account/get_account_balance_by_subuid.py
|
29532076b68ed85bd1db6979439b5886c05dab41
|
[
"MIT"
] |
permissive
|
wenli135/Binance-volatility-trading-bot
|
2cfe66007294b13a89b16d1622d50ce1615f1d66
|
75a03ad61df0e95492128fb6f1f419d4dc256ab3
|
refs/heads/main
| 2023-06-13T06:40:43.855256
| 2021-07-01T02:03:25
| 2021-07-01T02:03:25
| 373,853,320
| 0
| 0
|
MIT
| 2021-06-04T13:38:26
| 2021-06-04T13:38:26
| null |
UTF-8
|
Python
| false
| false
| 696
|
py
|
from huobi.connection.restapi_sync_client import RestApiSyncClient
from huobi.constant.system import HttpMethod
from huobi.model.account import *
class GetAccountBalanceBySubUidService:
    """Huobi REST call: fetch account balances for a given sub-user.

    `params` must contain the key "sub-uid"; its value is interpolated into
    the request path /v1/account/accounts/{sub-uid}.
    """

    def __init__(self, params):
        self.params = params

    def request(self, **kwargs):
        sub_uid = self.params["sub-uid"]

        def get_channel():
            # Endpoint path for this specific sub-user.
            path = "/v1/account/accounts/{}"
            return path.format(sub_uid)

        def parse(dict_data):
            # Response envelope {"data": [...]} -> list of AccountBalance.
            data_list = dict_data.get("data", [])
            return AccountBalance.json_parse_list(data_list)

        # Signed GET request; kwargs configure the synchronous REST client.
        return RestApiSyncClient(**kwargs).request_process(HttpMethod.GET_SIGN, get_channel(), self.params, parse)
|
[
"wenli@quchaogu.com"
] |
wenli@quchaogu.com
|
29cff6d8c3117c518a9ab74b682916b64efc225e
|
9b0bdebe81e558d3851609687e4ccd70ad026c7f
|
/数据结构/栈/04.合法的出栈顺序.py
|
1f404b5a1970e989acf41c5634f48a5b42087105
|
[] |
no_license
|
lizenghui1121/DS_algorithms
|
645cdad007ccbbfa82cc5ca9e3fc7f543644ab21
|
9690efcfe70663670691de02962fb534161bfc8d
|
refs/heads/master
| 2022-12-13T22:45:23.108838
| 2020-09-07T13:40:17
| 2020-09-07T13:40:17
| 275,062,257
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 980
|
py
|
"""
1到n的数字序列,入栈,每个数字入栈后,即可出栈,也可在栈中停留,求该数字序列的出栈序列是否合法。
@Author: Li Zenghui
@Date: 2020-03-21 21:19
"""
from queue_and_stack import MyStack
def check_is_valid_order(arr):
    # Push 1..len(arr) onto a stack in order; after each push, greedily pop
    # while the stack top equals the next expected output value arr[0].
    # The sequence is a valid pop order iff the stack ends up empty.
    # NOTE: destructively consumes `arr` via pop(0).
    s = MyStack()
    for i in range(1, len(arr)+1):
        s.push(i)
        while not s.is_empty() and arr[0] == s.top():
            s.pop()
            arr.pop(0)
    if not s.is_empty():
        return False
    return True
# Use a plain Python list directly as the stack.
def check_is_valid_order_2(arr):
    """Return True iff `arr` is a valid pop order of pushing 1..len(arr).

    Improvements over the original: tracks the next expected element with an
    index instead of `arr.pop(0)` — which cost O(n) per removal (O(n^2)
    total) and destructively consumed the caller's list. This version runs
    in O(n) and leaves `arr` unmodified.
    """
    stack = []
    expected = 0  # index into arr of the next value that must be popped
    for value in range(1, len(arr) + 1):
        stack.append(value)
        # Greedily pop while the stack top matches the next expected value.
        while stack and expected < len(arr) and arr[expected] == stack[-1]:
            stack.pop()
            expected += 1
    # Valid iff every pushed value was matched and popped.
    return not stack
# Demo: "3 2 5 4 1" is a feasible pop order for pushes 1..5; "3 1 2 4 5" is not.
if __name__ == '__main__':
    print(check_is_valid_order([3, 2, 5, 4, 1]))
    print(check_is_valid_order([3, 1, 2, 4, 5]))
    print(check_is_valid_order_2([3, 2, 5, 4, 1]))
    print(check_is_valid_order_2([3, 1, 2, 4, 5]))
|
[
"954267393@qq.com"
] |
954267393@qq.com
|
11a6f509dac11d1628a4239ad6e3882798927964
|
1839a3881de40db86a2c1e50086ccd6562ed221e
|
/UnetVgg11/network/textnet.py
|
cf1ac08f05461b500ccb74444b31ded0a846858b
|
[] |
no_license
|
weijiawu/Yibao-cup_competition
|
491c3e63ee1398c97339e95d5299f2aa2ea8af32
|
84369b038d8c3b3b2e9bfeae824518e49586612e
|
refs/heads/master
| 2020-05-07T12:09:22.987473
| 2019-04-10T06:41:16
| 2019-04-10T06:41:16
| 180,490,552
| 9
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,052
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import torch
import torch.nn as nn
import torchvision.models
import torch.nn.functional as F
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
class UnetVgg11(nn.Module):
    """U-Net style segmentation network with a pretrained VGG-11 encoder.

    Encoder conv layers are picked out by fixed index from torchvision's
    vgg11().features; the decoder upsamples with transposed convolutions
    and concatenates skip connections from the encoder.
    """

    def __init__(self, n_classes=3, num_filters=64, v=1):
        # n_classes: channels of the final 1x1 conv (one per class)
        # num_filters: base channel width of the decoder
        # v: version tag, used only for logging
        super(UnetVgg11, self).__init__()
        print('UnetVgg11 version={}'.format(v))
        print('base num_filters={}'.format(num_filters))
        self.pool = nn.MaxPool2d(2, 2)
        # ImageNet-pretrained VGG-11 feature extractor; the indices below
        # are the positions of its conv layers in the Sequential.
        self.encoder = torchvision.models.vgg11(pretrained=True).features
        self.relu = self.encoder[1]  # shared ReLU module reused everywhere
        self.conv1 = self.encoder[0]
        self.conv2 = self.encoder[3]
        self.conv3s = self.encoder[6]
        self.conv3 = self.encoder[8]
        self.conv4s = self.encoder[11]
        self.conv4 = self.encoder[13]
        self.conv5s = self.encoder[16]
        self.conv5 = self.encoder[18]
        # Bottleneck + decoder; decoder inputs are doubled in channels where
        # a skip connection is concatenated in forward().
        self.center = DecoderBlock_5(num_filters * 8, num_filters * 16, num_filters * 8)
        self.dec5 = DecoderBlock(num_filters * 16, num_filters * 8, num_filters * 8)
        self.dec4 = DecoderBlock(num_filters * 16, num_filters * 8, num_filters * 4)
        self.dec3 = DecoderBlock(num_filters * 8, num_filters * 4, num_filters * 2)
        self.dec2 = DecoderBlock(num_filters * 4, num_filters * 2, num_filters)
        self.dec1 = ConvRelu(num_filters * 2, num_filters)
        self.final = nn.Conv2d(num_filters, n_classes, kernel_size=1)

    def forward(self, x):
        # Encoder path: VGG-11 convs with 2x2 max-pooling between stages.
        conv1 = self.relu(self.conv1(x))
        conv2 = self.relu(self.conv2(self.pool(conv1)))
        conv3s = self.relu(self.conv3s(self.pool(conv2)))
        conv3 = self.relu(self.conv3(conv3s))
        conv4s = self.relu(self.conv4s(self.pool(conv3)))
        conv4 = self.relu(self.conv4(conv4s))
        conv5s = self.relu(self.conv5s(self.pool(conv4)))
        conv5 = self.relu(self.conv5(conv5s))
        center = self.center(self.pool(conv5))
        # Decoder path: upsample, concatenating the matching encoder map.
        # print 'dec5.in_channels', self.dec5.in_channels
        # print center.size(), conv5.size()
        dec5 = self.dec5(torch.cat([center, conv5], 1))
        dec4 = self.dec4(torch.cat([dec5, conv4], 1))
        dec3 = self.dec3(torch.cat([dec4, conv3], 1))
        dec2 = self.dec2(torch.cat([dec3, conv2], 1))
        dec1 = self.dec1(torch.cat([dec2, conv1], 1))
        return self.final(dec1)
class ConvRelu(nn.Module):
    """3x3 same-padding convolution followed by an in-place ReLU."""

    def __init__(self, in_channels, out_channels):
        super(ConvRelu, self).__init__()
        layers = [
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
        ]
        self.block = nn.Sequential(*layers)

    def forward(self, x):
        return self.block(x)
class DoubleConvRelu(nn.Module):
    """Two stacked ConvRelu units (3x3 conv + ReLU, applied twice)."""

    def __init__(self, in_channels, out_channels):
        super(DoubleConvRelu, self).__init__()
        first = ConvRelu(in_channels, out_channels)
        second = ConvRelu(out_channels, out_channels)
        self.block = nn.Sequential(first, second)

    def forward(self, x):
        return self.block(x)
class DecoderBlock_5(nn.Module):
    # Bottleneck decoder variant: like DecoderBlock below, but upsamples
    # with a kernel_size=4 transposed convolution (DecoderBlock uses 3).
    def __init__(self, in_channels, middle_channels, out_channels, is_deconv=True):
        super(DecoderBlock_5, self).__init__()
        self.in_channels = in_channels
        if is_deconv:
            # Learnable 2x upsampling via transposed convolution.
            self.block = nn.Sequential(
                ConvRelu(in_channels, middle_channels),
                nn.ConvTranspose2d(middle_channels, out_channels, kernel_size=4, stride=2,
                                   padding=1, output_padding=1),
                nn.ReLU(inplace=True)
                # nn.Upsample(scale_factor=2)
            )
        else:
            # Non-learnable bilinear upsampling followed by two convs.
            self.block = nn.Sequential(
                nn.Upsample(scale_factor=2, mode='bilinear'),
                ConvRelu(in_channels, middle_channels),
                ConvRelu(middle_channels, out_channels),
            )

    def forward(self, x):
        return self.block(x)
class DecoderBlock(nn.Module):
    # Decoder stage: conv+ReLU, then 2x upsampling (transposed conv when
    # is_deconv is True, otherwise bilinear upsample plus an extra conv).
    def __init__(self, in_channels, middle_channels, out_channels, is_deconv=True):
        super(DecoderBlock, self).__init__()
        self.in_channels = in_channels
        if is_deconv:
            # Learnable 2x upsampling via transposed convolution.
            self.block = nn.Sequential(
                ConvRelu(in_channels, middle_channels),
                nn.ConvTranspose2d(middle_channels, out_channels, kernel_size=3, stride=2,
                                   padding=1, output_padding=1),
                nn.ReLU(inplace=True)
                # nn.Upsample(scale_factor=2)
            )
        else:
            # Non-learnable bilinear upsampling followed by two convs.
            self.block = nn.Sequential(
                nn.Upsample(scale_factor=2, mode='bilinear'),
                ConvRelu(in_channels, middle_channels),
                ConvRelu(middle_channels, out_channels),
            )

    def forward(self, x):
        return self.block(x)
# Smoke test: push a random 400x400 RGB batch through the network on GPU.
if __name__ == '__main__':
    import torch
    input = torch.randn((1, 3, 400, 400))
    print(input.dtype)
    net = torch.nn.DataParallel(UnetVgg11(n_classes=3)).cuda()
    print(net(input.cuda()).shape)
    # Flattened view of the first output channel.
    print(net(input.cuda())[:,0].view(-1).shape)
    pass
|
[
"wwj123@zju.edu.cn"
] |
wwj123@zju.edu.cn
|
26306fc13a2759dc8d39618d990a765d293540d4
|
49aa2eaeda287fdbbab6f83734473633f200b501
|
/dataPlatform/go_platform/api/LibScootersOwners.py
|
7172161c8125935f57716365ef901d7d05f770c6
|
[] |
no_license
|
petercheng168/My-account-automation
|
9d41255bb18421a887a308345db94e973467bae5
|
31c29cc9a9b3f5a4b2c2b721a33df04c52b9d80a
|
refs/heads/master
| 2023-03-13T10:14:00.896812
| 2020-09-11T06:26:44
| 2020-09-11T06:26:44
| 341,831,073
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,604
|
py
|
import os
import sys
__dirname = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(__dirname, '../api'))
from _init_ import _init_
class LibScootersOwners(object):
    """Robot Framework keyword wrapper for the /scooters/owners endpoint."""

    def __init__(self):
        self.init = _init_()

    def scooters_owners_get(self, request_data_type=None, owner_ids= None, scooter_ids=None, scooter_plates=None,
                            scooter_vins=None, scooter_guids=None, scooter_vin_for_transfer=None,
                            scooter_plate_for_transfer=None, profile_id_for_transfer=None, account=None, offset=None, limit=None):
        """ get scooter's owner

        All filter arguments are optional and passed through unchanged in
        the request body; `account` selects the auth header to attach, and
        `offset`/`limit` page the results.

        Examples:
        | ${resp} = | Scooters Owners Get | data |
        """
        # Attach authorization headers for the requesting account.
        self.init.authHeader(account)
        # Request envelope expected by the backend: op_code + get_data.
        data = {
            "op_code": "get",
            "get_data": {
                "request_data_type": request_data_type,
                "owner_ids": owner_ids,
                "scooter_ids": scooter_ids,
                "scooter_plates": scooter_plates,
                "scooter_vins": scooter_vins,
                "scooter_guids": scooter_guids,
                "scooter_vin_for_transfer": scooter_vin_for_transfer,
                "scooter_plate_for_transfer": scooter_plate_for_transfer,
                "profile_id_for_transfer": profile_id_for_transfer,
                "pagination_criteria": {
                    "offset": offset,
                    "limit": limit
                }
            }
        }
        resp = self.init.request('post', "/scooters/owners", json=data)
        return resp
|
[
"peter.yf.cheng@gogoro.com"
] |
peter.yf.cheng@gogoro.com
|
b85b77389b8994f59d4cb318c5d635f56232ca7d
|
e1e8e29d202b663be6d424a41aaf7c42f90c8ab3
|
/ex40c1.py
|
72888476074cfb401f36728067a2464107d84161
|
[] |
no_license
|
Amo95/Learning-coding-with-Python3
|
414f60d1b7da2fa41a4527034420c6ab34c6acb7
|
c4527a670b01c55031eb1ad65403a032e545baac
|
refs/heads/master
| 2020-12-15T19:20:12.266274
| 2020-01-21T18:12:58
| 2020-01-21T18:12:58
| 235,226,827
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 292
|
py
|
class MyStuff(object):
    """Tiny OOP demo: stores a phrase on the instance and prints messages."""

    def __init__(self):
        # Announce the stored phrase as soon as the object is built.
        phrase = "I love coding in python"
        self.tangerine = phrase
        print(phrase)

    def apple(self):
        # Print a fixed message.
        print("Coding OOP is amazing")
def main():
    # Constructing MyStuff prints from __init__; then call the demo method.
    demo = MyStuff()
    demo.apple()


if __name__ == '__main__':
    main()
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
dea6745e243b7a67b860dc3d86998254c22d6dd9
|
112e792b80f0ba5f0989a297156d1554e18034d9
|
/testapp/migrations/0003_person__etag.py
|
77350671fec980d95092bece88612d0351d2bf1b
|
[] |
no_license
|
VNG-Realisatie/vng-api-common
|
ba4537c230f47f0b0ba305eccc289eef09be56f2
|
609c931b3f8b640aa6dff6d02cfb799745f25eb5
|
refs/heads/master
| 2023-06-26T22:52:50.419762
| 2023-06-20T11:20:53
| 2023-06-20T11:20:53
| 136,349,326
| 4
| 14
| null | 2023-09-01T14:26:58
| 2018-06-06T15:31:27
|
Python
|
UTF-8
|
Python
| false
| false
| 629
|
py
|
# Generated by Django 2.1.8 on 2019-09-05 06:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the hidden `_etag` cache column
    # to Person. Do not hand-edit once applied to a database.

    dependencies = [("testapp", "0002_auto_20190620_0849")]

    operations = [
        migrations.AddField(
            model_name="person",
            name="_etag",
            field=models.CharField(
                default="",
                editable=False,
                help_text="MD5 hash of the resource representation in its current version.",
                max_length=32,
                verbose_name="etag value",
            ),
            # Default "" is only used to backfill existing rows.
            preserve_default=False,
        )
    ]
|
[
"sergei@maykinmedia.nl"
] |
sergei@maykinmedia.nl
|
b43075304e3f3743babbc05752088d35d38d269f
|
8370083dbbbd32740ad1862637809396dc7984e2
|
/paresh60/a12.py
|
673c08cc381941391743fa151ddb4784c6997b26
|
[] |
no_license
|
parshuramsail/PYTHON_LEARN
|
a919b14aab823e0f5e769d8936ddbfb357133db2
|
8c76720bf73f13cf96930e6d4d5128e6ba9aa535
|
refs/heads/main
| 2023-07-14T16:25:26.240555
| 2021-08-29T17:10:19
| 2021-08-29T17:10:19
| 401,095,644
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 258
|
py
|
# Method over-loading in python using default argument.
class A:
    """Demonstrates method "overloading" in Python via a default argument."""

    def first(self, f=None):
        # Pick the message parts based on whether an argument was supplied.
        parts = ('Method', f) if f is not None else ('method without argument',)
        print(*parts)
# Demo calls: no-argument vs. one-argument dispatch.
a=A()
a.first()
# first() returns None, so this line also prints "None".
print(a.first('with argument'))
|
[
"64275709+parshuramsail@users.noreply.github.com"
] |
64275709+parshuramsail@users.noreply.github.com
|
29e71287fe48a5b6f7722a2cac26373b8fdbe652
|
d6833270e21fc14d8dd9d6624f8906ed7fe3ae86
|
/SoarUtils.py
|
3944a872bd43b057c2eda986c0618db191e20328
|
[
"MIT"
] |
permissive
|
ChienDavid/pysoarlib
|
0f9c91424f7900d43b39e401f91274b0fdf21540
|
3c722d163acf3dd35c5be914bf200e23369e169a
|
refs/heads/master
| 2022-10-24T22:37:24.839494
| 2020-06-13T15:10:32
| 2020-06-13T15:10:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,783
|
py
|
import Python_sml_ClientInterface as sml
from .SoarWME import SoarWME
class SoarUtils:
    """ A Class containing static utility methods for dealing with Soar and working memory """
    # NOTE(review): these methods are written to be called as
    # SoarUtils.method(...) but lack @staticmethod decorators; calling them
    # on an instance would misbind the first parameter.

    def update_wm_from_tree(root_id, root_name, input_dict, wme_table):
        """
        Recursively update WMEs that have a sub-tree structure rooted at the given identifier.

        We scan through the `input_dict`, which represents the input value getters (or further
        sub-trees) of the sub-tree root, either adding terminal WMEs as usual or further recursing.

        :param root_id: The sml identifier of the root of the sub-tree
        :param root_name: The attribute which is the root of this sub-tree
        :param input_dict: A dict mapping attributes to getter functions
        :param wme_table: A table to lookup and store wme's and identifiers
        :return: None
        """
        assert isinstance(input_dict, dict), "Should only recurse on dicts!"

        for attribute in input_dict.keys():
            input_val = input_dict[attribute]
            # Dotted path uniquely identifies this node in wme_table.
            child_name = root_name + "." + attribute

            if not callable(input_val):
                # Non-callable value => nested dict: create/reuse an
                # identifier WME for this child and recurse into it.
                if child_name not in wme_table:
                    wme_table[child_name] = root_id.CreateIdWME(attribute)
                child_id = wme_table[child_name]
                SoarUtils.update_wm_from_tree(child_id, child_name, input_val, wme_table)
                continue

            # Callable value => leaf: evaluate the getter and mirror the
            # result into a (cached) SoarWME.
            value = input_val()
            if child_name not in wme_table:
                wme_table[child_name] = SoarWME(att=attribute, val=value)
            wme = wme_table[child_name]
            wme.set_value(value)
            wme.update_wm(root_id)

    def remove_tree_from_wm(wme_table):
        """
        Given a wme_table filled by SoarUtils.update_wm_from_tree, removes all wmes from working memory

        Intermediate nodes are sml.Identifiers, which are removed from the table
        Leaves are SoarWME's which are kept in the table but .remove_from_wm() is called on them
        """
        items_to_remove = set()
        for path, wme in wme_table.items():
            if isinstance(wme, sml.Identifier):
                items_to_remove.add(path)
            else:
                wme.remove_from_wm()
        # Delete identifier entries after iterating to avoid mutating the
        # dict while looping over it.
        for path in items_to_remove:
            del wme_table[path]

    def extract_wm_graph(root_id, max_depth=1000000, id_map=None):
        """
        Given a soar identifier (root_id), crawls over the children and builds a graph rep for them

        Return dictionary:
            d['__id__'] = root_id
            d['__sym__'] = root_id as a string
            d['attr'] = constant  # for string, double, or int value
            d['attr'] = dict  # for identifier
            d['attr'] = [ val1, val2, ... ]  # for multi-valued attributes

        This will handle loops, where the same dict will be reused for each reference to an identifier

        Example:
        Given an identifier <obj> with the following wm structure:
        (<obj> ^id 5 ^volume 23.3 ^predicates <preds>)
        (<preds> ^predicate red ^predicate cube ^predicate block)

        Will return the following dictionary:
        {
            '__id': (sml Identifier for <obj>)
            '__sym__': 'O32'
            'id' : 5 (int),
            'volume': 23.3 (float),
            'predicates': {
                '__id__': (sml Identifier for <preds>)
                '__sym__': 'P53'
                'predicate': [ 'red', 'cube', 'block' ]
            }
        }

        :param root_id: The sml identifier of the root of the sub-graph
        :param max_depth: The maximum depth to extract
        :param id_map: A dictionary from identifiers to their corresponding dictionaries
        :return a dict containing a recurisve enumeration of all children reachable from the given root_id
        """
        if id_map is None:
            id_map = dict()

        root_id_str = root_id.GetValueAsString()
        if root_id_str in id_map:
            # Already visited (cycle or shared structure): reuse its dict.
            return id_map[root_id_str]

        child_wmes = dict()
        child_wmes['__id__'] = root_id
        child_wmes['__sym__'] = root_id_str
        # Register before recursing so cyclic references terminate.
        id_map[root_id_str] = child_wmes

        if max_depth == 0:
            return child_wmes

        for index in range(root_id.GetNumberChildren()):
            wme = root_id.GetChild(index)
            attr = wme.GetAttribute()
            # Convert the wme's value according to its sml type.
            if wme.IsIdentifier():
                wme_val = SoarUtils.extract_wm_graph(wme.ConvertToIdentifier(), max_depth-1, id_map)
            elif wme.GetValueType() == "int":
                wme_val = wme.ConvertToIntElement().GetValue()
            elif wme.GetValueType() == "double":
                wme_val = wme.ConvertToFloatElement().GetValue()
            else:
                wme_val = wme.GetValueAsString()
            if attr in child_wmes:
                # Multi-valued attribute: promote the entry to a list.
                cur_val = child_wmes[attr]
                if isinstance(cur_val, list):
                    cur_val.append(wme_val)
                else:
                    child_wmes[attr] = [ cur_val, wme_val ]
            else:
                child_wmes[attr] = wme_val

        return child_wmes

    def wm_graph_to_str(wm_graph):
        """
        Given a wm_graph produced by extract_wm_graph, returns a nicely formatted string representation of it

        :param wm_graph: A dictionary representing a wm graph produced by extract_wm_graph
        """
        return SoarUtils._wm_value_to_str(wm_graph, "", set())

    def _wm_value_to_str(val, indent, ignore_ids):
        """
        recursive helper function which returns a string representation of any given value type
        (str, int, float, list, dict)

        :param wm_graph: A dictionary representing a wm graph produced by extract_wm_graph
        :param indent: a string of spaces to indent the current level
        :param ignore_ids: A set of Identifiers to not print
        """
        if isinstance(val, str):
            return val
        if isinstance(val, int):
            return str(val)
        if isinstance(val, float):
            return str(val)
        if isinstance(val, list):
            return "[ " + ", ".join(SoarUtils._wm_value_to_str(i, indent, ignore_ids) for i in val) + " ]"
        if not isinstance(val, dict):
            return ""
        id_str = val['__sym__']
        if id_str in ignore_ids:
            # Already printed elsewhere: emit a reference only.
            return "<" + id_str + ">"
        ignore_ids.add(id_str)
        if len(val) == 1:
            # Only the bookkeeping keys => no attributes worth expanding.
            return "<" + id_str + ">"
        s = "<" + id_str + "> {\n"
        for a, v in val.items():
            if a == '__sym__' or a == '__id__':
                continue
            s += indent + "  " + a + ": " + SoarUtils._wm_value_to_str(v, indent + "  ", ignore_ids) + "\n"
        s += indent + "}"
        return s
|
[
"mininger@umich.edu"
] |
mininger@umich.edu
|
8f8df82e8f10eae8a382750c473b56390d9cf50e
|
52243c4a05a296e7c042663b5942faa47eb66aee
|
/common/plot_points_shp.py
|
2f0e6e7945bf659016fd5b03ffaaa47015baf92c
|
[
"MIT"
] |
permissive
|
joaoppadua/Pesquisas
|
fbe0311b59340c041732d6d1f7f4862fa6c53198
|
808d8b0ef9e432e05a4f284ce18778ed8b3acd96
|
refs/heads/master
| 2023-07-16T02:50:30.846205
| 2021-09-03T13:34:54
| 2021-09-03T13:34:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 663
|
py
|
import matplotlib.pyplot as plt, geopandas as gpd
from sklearn.preprocessing import MinMaxScaler
def plot_points_shape_file(
    shape_file, list_values, list_longitude, list_latitude, title
):
    """Scatter-plot points over a shapefile outline, scaling marker sizes
    from `list_values` into the range [50, 1000]."""
    # Rescale values into usable marker sizes (sklearn wants a column vector).
    sizes = MinMaxScaler(feature_range=(50, 1000)).fit_transform(
        [[value] for value in list_values]
    )
    boundaries = gpd.read_file(shape_file)
    fig, ax = plt.subplots(figsize=(20, 10))
    ax.axis("off")
    # One scatter call per point so each marker gets its own size.
    for i in range(len(list_values)):
        plt.scatter(x=[list_longitude[i]], y=[list_latitude[i]], s=sizes[i])
    plt.title(title)
    plt.tight_layout()
    boundaries.plot(facecolor="none", ax=ax, legend=True, linewidth=1, edgecolor="black")
|
[
"danilopcarlotti@gmail.com"
] |
danilopcarlotti@gmail.com
|
44ae1b353c4c95c6209f6de8cde15112550f0a3b
|
f966c891c666db846d86406cb9c08a530902d032
|
/algorithms/implementation/divisible_sum_pairs.py
|
94ca67a38d2a2b8e609d8ed2769e0956df8b0853
|
[] |
no_license
|
rickharris-dev/hacker-rank
|
36620a16894571e324422c83bd553440cf5bbeb1
|
2ad0fe4b496198bec1b900d2e396a0704bd0c6d4
|
refs/heads/master
| 2020-12-25T14:33:20.118325
| 2016-09-06T01:10:43
| 2016-09-06T01:10:43
| 67,264,242
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 275
|
py
|
#!/usr/bin/python
# HackerRank "Divisible Sum Pairs" (Python 2): count index pairs (i, j),
# i < j, whose element sum is divisible by k. O(n^2) pairwise scan.
n,k = raw_input().strip().split(' ')
n,k = [int(n),int(k)]
a = map(int,raw_input().strip().split(' '))
count = 0
i = 0
for i in range(0,n):
    j = i + 1
    # Compare element i against every later element j.
    while j < n:
        if (a[i] + a[j]) % k == 0:
            count += 1
        j += 1
print count
|
[
"rickharris724@gmail.com"
] |
rickharris724@gmail.com
|
85404e686fe6601424f46561d0938c179b5a1cb7
|
dddbfd8eb6dff0bd3449bac87ee76b5c3e0bdfb1
|
/icehouse-patches/neutron/dvr-patch/neutron/services/vpn/plugin.py
|
74218e2ab6b6e6fecfba5e5f3a40c5d4d3e0f0af
|
[
"Apache-2.0"
] |
permissive
|
joey5678/tricircle
|
40897fed8fe9d6772e8878b4f06ba1a829636488
|
e211f7efef129bbfb038cc05232ea1de33f82a97
|
refs/heads/master
| 2021-01-17T21:04:32.945469
| 2014-11-17T09:46:29
| 2014-11-17T10:10:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,525
|
py
|
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Swaminathan Vasudevan, Hewlett-Packard
from neutron.db.vpn import vpn_db
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants
from neutron.services import service_base
LOG = logging.getLogger(__name__)
class VPNPlugin(vpn_db.VPNPluginDb):
    """Implementation of the VPN Service Plugin.

    This class manages the workflow of VPNaaS request/response.
    Most DB related works are implemented in class
    vpn_db.VPNPluginDb.
    """
    # Neutron extension aliases this plugin advertises support for.
    supported_extension_aliases = ["vpnaas", "service-type"]
class VPNDriverPlugin(VPNPlugin, vpn_db.VPNPluginRpcDbMixin):
    """VpnPlugin which supports VPN Service Drivers.

    Each CRUD operation persists through the DB layer first, then
    forwards the change to the selected service driver.
    """
    #TODO(nati) handle ikepolicy and ipsecpolicy update usecase
    def __init__(self):
        super(VPNDriverPlugin, self).__init__()
        # Load the service driver from neutron.conf.
        drivers, default_provider = service_base.load_drivers(
            constants.VPN, self)
        LOG.info(_("VPN plugin using service driver: %s"), default_provider)
        self.ipsec_driver = drivers[default_provider]

    def _get_driver_for_vpnservice(self, vpnservice):
        # Always returns the default-provider driver loaded in __init__.
        return self.ipsec_driver

    def _get_driver_for_ipsec_site_connection(self, context,
                                              ipsec_site_connection):
        #TODO(nati) get vpnservice when we support service type framework
        vpnservice = None
        return self._get_driver_for_vpnservice(vpnservice)

    def create_ipsec_site_connection(self, context, ipsec_site_connection):
        # Persist in the DB first, then notify the driver of the new row.
        ipsec_site_connection = super(
            VPNDriverPlugin, self).create_ipsec_site_connection(
                context, ipsec_site_connection)
        driver = self._get_driver_for_ipsec_site_connection(
            context, ipsec_site_connection)
        driver.create_ipsec_site_connection(context, ipsec_site_connection)
        return ipsec_site_connection

    def delete_ipsec_site_connection(self, context, ipsec_conn_id):
        # Fetch the row before deleting so the driver sees the old state.
        ipsec_site_connection = self.get_ipsec_site_connection(
            context, ipsec_conn_id)
        super(VPNDriverPlugin, self).delete_ipsec_site_connection(
            context, ipsec_conn_id)
        driver = self._get_driver_for_ipsec_site_connection(
            context, ipsec_site_connection)
        driver.delete_ipsec_site_connection(context, ipsec_site_connection)

    def update_ipsec_site_connection(
            self, context,
            ipsec_conn_id, ipsec_site_connection):
        # The driver receives both old and new state for diffing.
        old_ipsec_site_connection = self.get_ipsec_site_connection(
            context, ipsec_conn_id)
        ipsec_site_connection = super(
            VPNDriverPlugin, self).update_ipsec_site_connection(
                context,
                ipsec_conn_id,
                ipsec_site_connection)
        driver = self._get_driver_for_ipsec_site_connection(
            context, ipsec_site_connection)
        driver.update_ipsec_site_connection(
            context, old_ipsec_site_connection, ipsec_site_connection)
        return ipsec_site_connection

    def update_vpnservice(self, context, vpnservice_id, vpnservice):
        # As above: DB update first, then driver callback with old + new.
        old_vpn_service = self.get_vpnservice(context, vpnservice_id)
        new_vpn_service = super(
            VPNDriverPlugin, self).update_vpnservice(context, vpnservice_id,
                                                     vpnservice)
        driver = self._get_driver_for_vpnservice(old_vpn_service)
        driver.update_vpnservice(context, old_vpn_service, new_vpn_service)
        return new_vpn_service

    def delete_vpnservice(self, context, vpnservice_id):
        vpnservice = self._get_vpnservice(context, vpnservice_id)
        super(VPNDriverPlugin, self).delete_vpnservice(context, vpnservice_id)
        driver = self._get_driver_for_vpnservice(vpnservice)
        driver.delete_vpnservice(context, vpnservice)
|
[
"joehuang@huawei.com"
] |
joehuang@huawei.com
|
881028d34f8ae09d7ee3fa9e047a5a4c55c4a893
|
487ce91881032c1de16e35ed8bc187d6034205f7
|
/codes/CodeJamCrawler/CJ/16_0_3_54N_c.py
|
a900e884b099ed456e2e4f9878c7443257e54913
|
[] |
no_license
|
DaHuO/Supergraph
|
9cd26d8c5a081803015d93cf5f2674009e92ef7e
|
c88059dc66297af577ad2b8afa4e0ac0ad622915
|
refs/heads/master
| 2021-06-14T16:07:52.405091
| 2016-08-21T13:39:13
| 2016-08-21T13:39:13
| 49,829,508
| 2
| 0
| null | 2021-03-19T21:55:46
| 2016-01-17T18:23:00
|
Python
|
UTF-8
|
Python
| false
| false
| 1,146
|
py
|
#!/usr/bin/python
luts = []
def initlut():
for i in range(2,11):
l = []
for k in range(0,32):
l.append(i**k)
luts.append(l)
# stackoverflow
def is_prime(n):
if n == 2 or n == 3: return True, 0
# never happens in our case?
if n < 2: return False, 1
if n%2 == 0: return False, 2
if n < 9: return True, 0
if n%3 == 0: return False, 3
r = int(n**0.5)
f = 5
while f <= r:
# print '\t',f
if n%f == 0: return False, f
if n%(f+2) == 0: return False, f+2
f +=6
return True, 0
def is_prime_nt(n):
x, y = is_prime(n)
return y
def yield_coin(N):
hbit = 2**(N-1)
for i in range(0,2**(N-2)):
s = "{0:b}".format(hbit + i*2 + 1)
bits = [int(x) for x in reversed(s)]
nums = []
for lut in luts:
nums.append(sum([y for x,y in zip(bits,lut) if x==1]))
divisor = []
for x in nums:
y = is_prime_nt(x)
divisor.append(y)
if y == 0:
break
if all(divisor):
yield s, nums, divisor
initlut()
N = 16
J = 50
print "Case #1:"
case = 0
for s, nums, divisor in yield_coin(N):
print s, " ".join([str(x) for x in divisor])
case += 1
#print case, s, nums, divisor
if case == J:
break
|
[
"[dhuo@tcd.ie]"
] |
[dhuo@tcd.ie]
|
a73d86d3a4f951a81943d8e5269cbe679da0b96d
|
7727187a009e4b9c46c2fe06609372ec8814cd23
|
/freemix_akara/__init__.py
|
91c164e4bb4284d6a3acfe41a1ff6a0104cecf65
|
[] |
no_license
|
govtmirror/freemix-akara
|
ebf204554f4effc0543e60083698f2ea012413b8
|
1d10c3f02afbd4268852e2c52afdf77809176bdd
|
refs/heads/master
| 2021-01-12T07:47:08.183429
| 2014-06-05T18:53:56
| 2014-06-05T18:53:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 787
|
py
|
import os
import sys
target_version = "5.0.0"
def build_version():
distance ="0"
try:
from subprocess import Popen, PIPE
prev_tag,distance,revision = Popen(["git", "describe", "--match", "[0-9]*", "--long"],
cwd=os.path.dirname(__file__),
stdout=PIPE
).communicate()[0].strip().split("-")
if distance == "0":
return prev_tag
elif prev_tag == target_version:
return "%s.post%s"%(target_version, distance)
except Exception as e:
print e
return "%s.dev%s"%(target_version, distance)
try:
from .version import __version__
except ImportError:
__version__=build_version()
|
[
"dfeeney@gmail.com"
] |
dfeeney@gmail.com
|
543f98e378822d026624672325a7e5e70e01bdbb
|
0103046cd77e9f86ccde477736de36bba766ceb6
|
/src/sentry/projectoptions/manager.py
|
e0fea86bd8bc6dd142d90dbc9d5bf9ee8979bf40
|
[
"BUSL-1.1",
"Apache-2.0"
] |
permissive
|
kaozdl/sentry
|
ad41ada649a20300e9f2fe69050200cfbf738a63
|
63d698f5294f64a8c206b4c741e2a11be1f9a9be
|
refs/heads/master
| 2021-06-21T18:24:21.713064
| 2021-03-04T19:45:20
| 2021-03-04T19:45:20
| 198,681,569
| 0
| 0
|
BSD-3-Clause
| 2019-07-24T17:32:29
| 2019-07-24T17:32:28
| null |
UTF-8
|
Python
| false
| false
| 3,109
|
py
|
import uuid
import bisect
from datetime import datetime
from pytz import utc
class WellKnownProjectOption:
def __init__(self, key, default=None, epoch_defaults=None):
self.key = key
self.default = default
self.epoch_defaults = epoch_defaults
self._epoch_default_list = sorted(epoch_defaults or ())
def get_default(self, project=None, epoch=None):
if self.epoch_defaults:
if epoch is None:
if project is None:
epoch = 1
else:
epoch = project.get_option("sentry:option-epoch") or 1
idx = bisect.bisect(self._epoch_default_list, epoch)
if idx > 0:
return self.epoch_defaults[self._epoch_default_list[idx - 1]]
return self.default
class ProjectOptionsManager:
"""Project options used to be implemented in a relatively ad-hoc manner
in the past. The project manager still uses the functionality of the
project model and just dispatches to it.
Options can be used without declaring defaults, but if defaults are
declared they are returned without having to define a default at the
time of the option lookup.
"""
def __init__(self):
self.registry = {}
def lookup_well_known_key(self, key):
return self.registry.get(key)
def freeze_option_epoch(self, project, force=False):
# The options are frozen in a receiver hook for project saves.
# See `sentry.receivers.core.freeze_option_epoch_for_project`
if force or project.get_option("sentry:option-epoch") is None:
from .defaults import LATEST_EPOCH
project.update_option("sentry:option-epoch", LATEST_EPOCH)
def set(self, project, key, value):
from sentry.models import ProjectOption
self.update_rev_for_option(project)
return ProjectOption.objects.set_value(project, key, value)
def isset(self, project, key):
return project.get_option(project, key, Ellipsis) is not Ellipsis
def get(self, project, key, default=None, validate=None):
from sentry.models import ProjectOption
return ProjectOption.objects.get_value(project, key, default, validate=validate)
def delete(self, project, key):
from sentry.models import ProjectOption
self.update_rev_for_option(project)
return ProjectOption.objects.unset_value(project, key)
def update_rev_for_option(self, project):
from sentry.models import ProjectOption
ProjectOption.objects.set_value(project, "sentry:relay-rev", uuid.uuid4().hex)
ProjectOption.objects.set_value(
project, "sentry:relay-rev-lastchange", datetime.utcnow().replace(tzinfo=utc)
)
def register(self, key, default=None, epoch_defaults=None):
self.registry[key] = WellKnownProjectOption(
key=key, default=default, epoch_defaults=epoch_defaults
)
def all(self):
"""
Return an iterator for all keys in the registry.
"""
return self.registry.values()
|
[
"noreply@github.com"
] |
kaozdl.noreply@github.com
|
ead7b1a0755191bb9b690a567988c0afb102bd9f
|
ce76b3ef70b885d7c354b6ddb8447d111548e0f1
|
/man/call_last_world_with_next_woman/want_last_case/come_new_hand/few_person.py
|
61c2026eb36fe40fd1c4e1cefa0bcdb43a2f6891
|
[] |
no_license
|
JingkaiTang/github-play
|
9bdca4115eee94a7b5e4ae9d3d6052514729ff21
|
51b550425a91a97480714fe9bc63cb5112f6f729
|
refs/heads/master
| 2021-01-20T20:18:21.249162
| 2016-08-19T07:20:12
| 2016-08-19T07:20:12
| 60,834,519
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 189
|
py
|
#! /usr/bin/env python
def group(str_arg):
thing(str_arg)
print('feel_company_by_week')
def thing(str_arg):
print(str_arg)
if __name__ == '__main__':
group('same_part')
|
[
"jingkaitang@gmail.com"
] |
jingkaitang@gmail.com
|
07abeb853173fc2e69c1a3dea3176bc3904f1b83
|
cdb186ad49bba1406c81f634b936e73f8cb04009
|
/ARC/002/a2.py
|
b88ac58257608a246949e7fe5a812d771b81e63d
|
[] |
no_license
|
ToshikiShimizu/AtCoder
|
9e46f5581f2c1f5149ce1394d61d652cda6256a3
|
41fe6408c20c59bbf1b5d7ee9db2e132f48ad1ac
|
refs/heads/master
| 2023-07-26T22:45:51.965088
| 2023-07-10T14:11:35
| 2023-07-10T14:11:35
| 148,154,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 153
|
py
|
#coding:utf-8
y = int(input())
if (y%400==0):
print ("YES")
elif (y%100==0):
print ("NO")
elif (y%4==0):
print ("YES")
else:
print ("NO")
|
[
"tamreff3290@gmail.com"
] |
tamreff3290@gmail.com
|
ed52f1f1bcf6a8340ae7a9012b1186ea964513df
|
6710c52d04e17facbc9fb35a7df313f7a2a7bd53
|
/0258. Add Digits.py
|
9fcb8f9b0f7b36558df111738752ea389b781eb3
|
[] |
no_license
|
pwang867/LeetCode-Solutions-Python
|
535088fbe747a453360457728cc22cf336020bd2
|
188befbfb7080ba1053ee1f7187b177b64cf42d2
|
refs/heads/master
| 2022-11-13T16:20:28.211707
| 2020-06-28T06:01:14
| 2020-06-28T06:01:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 839
|
py
|
"""
explanation, mathematical proof:
N=(a[0] * 1 + a[1] * 10 + ...a[n] * 10 ^n),and a[0]...a[n] are all between [0,9]
we set M = a[0] + a[1] + ..a[n]
and another truth is that:
1 % 9 = 1
10 % 9 = 1
100 % 9 = 1
so N % 9 = a[0] + a[1] + ..a[n]
means N % 9 = M
so N = M (% 9)
"""
# digit root, time/space O(1)
class Solution(object):
def addDigits(self, num):
"""
:type num: int
:rtype: int
"""
if num == 0:
return 0
return (num-1)%9 + 1 # period is 9
"""
Given a non-negative integer num, repeatedly add all its digits until the result has only one digit.
Example:
Input: 38
Output: 2
Explanation: The process is like: 3 + 8 = 11, 1 + 1 = 2.
Since 2 has only one digit, return it.
Follow up:
Could you do it without any loop/recursion in O(1) runtime?
"""
|
[
"wzhou007@ucr.edu"
] |
wzhou007@ucr.edu
|
a9617016af138a8744cd4ab32de42682c82f62a9
|
6f44214567f1fe9c5534ea3d55800e9aaabaa390
|
/evergreen_requests.py
|
73c97ef056f55049281e3fcfd2f921c5cf23debe
|
[
"BSD-2-Clause"
] |
permissive
|
saghul/evergreen-requests
|
c3505c1f669834b997a3b15907cd25a1d88b08f1
|
4df9e7564492d82d9850399ad6b0c83e59e30423
|
refs/heads/master
| 2016-09-06T05:53:26.345690
| 2013-05-14T21:36:22
| 2013-05-14T21:36:22
| 10,063,878
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,433
|
py
|
from functools import partial
from operator import methodcaller
from evergreen import futures, patcher
# Monkey-patch.
requests = patcher.import_patched('requests')
__version__ = '0.0.1'
__all__ = ['map', 'imap', 'get', 'options', 'head', 'post', 'put', 'patch', 'delete', 'request', '__version__']
# Export same items as vanilla requests
__requests_imports__ = ['utils', 'session', 'Session', 'codes', 'RequestException', 'Timeout', 'URLRequired', 'TooManyRedirects', 'HTTPError', 'ConnectionError']
patcher.slurp_properties(requests, globals(), srckeys=__requests_imports__)
__all__.extend(__requests_imports__)
del requests, patcher, __requests_imports__
class AsyncRequest(object):
""" Asynchronous request.
Accept same parameters as ``Session.request`` and some additional:
:param session: Session which will do the request, else one is created automatically.
:param callback: Callback called on response. Same as passing ``hooks={'response': callback}``.
"""
def __init__(self, method, url, **kwargs):
self.method = method
self.url = url
self.session = kwargs.pop('session', None)
if self.session is None:
self.session = Session()
callback = kwargs.pop('callback', None)
if callback:
kwargs['hooks'] = {'response': callback}
self.kwargs = kwargs # Arguments for ``Session.request``
self.response = None
def send(self, **kwargs):
"""
Prepares request based on parameter passed to constructor and optional ``kwargs```.
Then sends request and saves response to :attr:`response`
:returns: ``Response``
"""
merged_kwargs = {}
merged_kwargs.update(self.kwargs)
merged_kwargs.update(kwargs)
self.response = self.session.request(self.method, self.url, **merged_kwargs)
return self.response
# Shortcuts for creating AsyncRequest with appropriate HTTP method
get = partial(AsyncRequest, 'GET')
options = partial(AsyncRequest, 'OPTIONS')
head = partial(AsyncRequest, 'HEAD')
post = partial(AsyncRequest, 'POST')
put = partial(AsyncRequest, 'PUT')
patch = partial(AsyncRequest, 'PATCH')
delete = partial(AsyncRequest, 'DELETE')
def request(method, url, **kwargs):
return AsyncRequest(method, url, **kwargs)
def map(reqs, concurrency=10):
"""Concurrently converts a list of Requests to Responses. Results are yielded
in order even if requests are performed concurrently.
:param reqs: a collection of AsyncRequest objects.
:param concurrency: Specifies the number of requests to make at a time. Defaults to 10.
"""
def result_iterator():
with futures.TaskPoolExecutor(concurrency) as e:
fs = [e.submit(r.send) for r in reqs]
for f in fs:
yield f.get()
return result_iterator()
def imap(reqs, concurrency=10):
"""Concurrently converts a list of Requests to Responses. Results are yielded
in arbitrary order, as soon as requests complete.
:param reqs: a collection of AsyncRequest objects.
:param concurrency: Specifies the number of requests to make at a time. Defaults to 10.
"""
def result_iterator():
with futures.TaskPoolExecutor(concurrency) as e:
fs = [e.submit(r.send) for r in reqs]
for f in futures.as_completed(fs):
yield f.get()
return result_iterator()
|
[
"saghul@gmail.com"
] |
saghul@gmail.com
|
d2004d8e68741301c502305b263147f20710d51e
|
ef821468b081ef2a0b81bf08596a2c81e1c1ef1a
|
/PythonWebBasics_Django/Django_Web_Basics/Django_Web_Basics_2/migrations/0007_auto_20210130_1511.py
|
c45bf61e0b3924a636cba22def0c3c357974ec4f
|
[] |
no_license
|
Ivaylo-Atanasov93/The-Learning-Process
|
71db22cd79f6d961b9852f140f4285ef7820dd80
|
354844e2c686335345f6a54b3af86b78541ed3f3
|
refs/heads/master
| 2023-03-30T20:59:34.304207
| 2021-03-29T15:23:05
| 2021-03-29T15:23:05
| 294,181,544
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 448
|
py
|
# Generated by Django 3.1.5 on 2021-01-30 15:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Django_Web_Basics_2', '0006_auto_20210130_1456'),
]
operations = [
migrations.AlterField(
model_name='game',
name='level_of_difficulty',
field=models.IntegerField(choices=[(0, 'Easy'), (1, 'Medium'), (2, 'Hard')]),
),
]
|
[
"ivailo.atanasov93@gmail.com"
] |
ivailo.atanasov93@gmail.com
|
cf3fde1e92f30234f03259580d338b0fc8a49ef0
|
94a4388cee6dfeaa1674fba20423e8a3f8f6dd42
|
/backend/game_mdoe_18873/urls.py
|
d4b0c3f0622dafe5bd8ba458e4c2793e11cd2878
|
[] |
no_license
|
crowdbotics-apps/game-mdoe-18873
|
0dc5c1e1827f382d5a84847697d0b1b05191066d
|
8bcbe6c9b116fa1294b8104018c9cd36b1536c13
|
refs/heads/master
| 2022-11-15T17:29:36.126851
| 2020-07-13T22:23:28
| 2020-07-13T22:23:28
| 279,423,144
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,058
|
py
|
"""game_mdoe_18873 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
path("home/", include("home.urls")),
path("api/v1/", include("course.api.v1.urls")),
path("course/", include("course.urls")),
]
admin.site.site_header = "game mdoe"
admin.site.site_title = "game mdoe Admin Portal"
admin.site.index_title = "game mdoe Admin"
# swagger
schema_view = get_schema_view(
openapi.Info(
title="game mdoe API",
default_version="v1",
description="API documentation for game mdoe App",
),
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
579f9cdd764f516d3acae14fe2bbe0ae5e6f94b5
|
165d2e9a6d451b3a614a28629cca256f849b73ab
|
/notebooks/_solutions/case2_biodiversity_analysis21.py
|
087bcee2f2924c310bc97c9115dab1564054c9e1
|
[
"BSD-3-Clause"
] |
permissive
|
DemeulemeesterT/DS-python-data-analysis
|
5b2439382571769dcc9b61fb3408a51552364c08
|
b4dd68b9c912c5d5c52c607aa117f5054449c73d
|
refs/heads/master
| 2023-05-07T16:51:31.426493
| 2021-05-31T07:24:38
| 2021-05-31T07:24:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 514
|
py
|
species_per_plot = survey_data.reset_index().pivot_table(index="name",
columns="verbatimLocality",
values="occurrenceID",
aggfunc='count')
# alternative ways to calculate this
#species_per_plot = survey_data.groupby(['name', 'plot_id']).size().unstack(level=-1)
#species_per_plot = pd.crosstab(survey_data['name'], survey_data['plot_id'])
|
[
"jorisvandenbossche@gmail.com"
] |
jorisvandenbossche@gmail.com
|
1401b708e738bdf68f663d12f6a128f484f08f08
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-waf/huaweicloudsdkwaf/v1/model/statistics_timeline_item.py
|
71106a07b0316521f9190c69730cb31447e4a86e
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 4,489
|
py
|
# coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class StatisticsTimelineItem:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'key': 'str',
'timeline': 'list[TimeLineItem]'
}
attribute_map = {
'key': 'key',
'timeline': 'timeline'
}
def __init__(self, key=None, timeline=None):
"""StatisticsTimelineItem
The model defined in huaweicloud sdk
:param key: 键值,包括请求总量(ACCESS)、Bot攻击防护(CRAWLER)、攻击总量(ATTACK)、Web基础防护(WEB_ATTACK)、精准防护(PRECISE)、CC攻击防护(CC)
:type key: str
:param timeline: 对应键值的时间线统计数据
:type timeline: list[:class:`huaweicloudsdkwaf.v1.TimeLineItem`]
"""
self._key = None
self._timeline = None
self.discriminator = None
if key is not None:
self.key = key
if timeline is not None:
self.timeline = timeline
@property
def key(self):
"""Gets the key of this StatisticsTimelineItem.
键值,包括请求总量(ACCESS)、Bot攻击防护(CRAWLER)、攻击总量(ATTACK)、Web基础防护(WEB_ATTACK)、精准防护(PRECISE)、CC攻击防护(CC)
:return: The key of this StatisticsTimelineItem.
:rtype: str
"""
return self._key
@key.setter
def key(self, key):
"""Sets the key of this StatisticsTimelineItem.
键值,包括请求总量(ACCESS)、Bot攻击防护(CRAWLER)、攻击总量(ATTACK)、Web基础防护(WEB_ATTACK)、精准防护(PRECISE)、CC攻击防护(CC)
:param key: The key of this StatisticsTimelineItem.
:type key: str
"""
self._key = key
@property
def timeline(self):
"""Gets the timeline of this StatisticsTimelineItem.
对应键值的时间线统计数据
:return: The timeline of this StatisticsTimelineItem.
:rtype: list[:class:`huaweicloudsdkwaf.v1.TimeLineItem`]
"""
return self._timeline
@timeline.setter
def timeline(self, timeline):
"""Sets the timeline of this StatisticsTimelineItem.
对应键值的时间线统计数据
:param timeline: The timeline of this StatisticsTimelineItem.
:type timeline: list[:class:`huaweicloudsdkwaf.v1.TimeLineItem`]
"""
self._timeline = timeline
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, StatisticsTimelineItem):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
bc34681fd530d87f8de958eb30d88c342abbeffe
|
daee54824cb107f9b5749e3c12e7f09f544bac0e
|
/modules/readers/resources/python/rawVolumeRDRViewFrame.py
|
f809c2e9c0e1da779a57aa6d0deafd0a9ea6ba7c
|
[] |
no_license
|
JoonVan/devide
|
8fa556d2b42c5ad70c3595303253f2a171de0312
|
586225d68b079e2a96007bd33784113b3a19a538
|
refs/heads/master
| 2020-12-26T06:25:01.744966
| 2017-01-22T19:47:50
| 2017-01-22T19:47:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,028
|
py
|
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# generated by wxGlade 0.6.3 on Sat Feb 09 13:43:04 2008
import wx
# begin wxGlade: extracode
# end wxGlade
class rawVolumeRDRViewFrame(wx.Frame):
def __init__(self, *args, **kwds):
# begin wxGlade: rawVolumeRDRViewFrame.__init__
kwds["style"] = wx.CAPTION|wx.MINIMIZE_BOX|wx.MAXIMIZE_BOX|wx.SYSTEM_MENU|wx.RESIZE_BORDER
wx.Frame.__init__(self, *args, **kwds)
self.viewFramePanel = wx.Panel(self, -1)
self.label_1_copy_2 = wx.StaticText(self.viewFramePanel, -1, "Filename")
self.filenameText = wx.TextCtrl(self.viewFramePanel, -1, "")
self.browseButtonId = wx.NewId()
self.button_1_copy = wx.Button(self.viewFramePanel, self.browseButtonId, "Browse")
self.label_3_copy_1 = wx.StaticText(self.viewFramePanel, -1, "Data type")
self.dataTypeChoice = wx.Choice(self.viewFramePanel, -1, choices=["Dummy 1", "Dummy 2", "Dummy 3", "Dummy 4", "Dummy 5"])
self.endiannessRadioBox = wx.RadioBox(self.viewFramePanel, -1, "Endianness", choices=["Little (LSB at lowest address)", "Big (MSB at lowest address)"], majorDimension=2, style=wx.RA_SPECIFY_COLS)
self.label_2 = wx.StaticText(self.viewFramePanel, -1, "Header size (bytes)")
self.headerSizeText = wx.TextCtrl(self.viewFramePanel, -1, "0")
self.label_4 = wx.StaticText(self.viewFramePanel, -1, "Extent (x0, x1, y0, y1, z0, z1)")
self.extentText = wx.TextCtrl(self.viewFramePanel, -1, "")
self.label_5 = wx.StaticText(self.viewFramePanel, -1, "Spacing (Sx, Sy, Sz)")
self.spacingText = wx.TextCtrl(self.viewFramePanel, -1, "")
self.__set_properties()
self.__do_layout()
# end wxGlade
def __set_properties(self):
# begin wxGlade: rawVolumeRDRViewFrame.__set_properties
self.SetTitle("Raw Volume Reader")
self.dataTypeChoice.SetSelection(0)
self.endiannessRadioBox.SetSelection(0)
# end wxGlade
def __do_layout(self):
# begin wxGlade: rawVolumeRDRViewFrame.__do_layout
sizer_1 = wx.BoxSizer(wx.VERTICAL)
sizer_5 = wx.BoxSizer(wx.VERTICAL)
grid_sizer_1 = wx.FlexGridSizer(3, 2, 4, 4)
sizer_2 = wx.BoxSizer(wx.VERTICAL)
sizer_7 = wx.BoxSizer(wx.HORIZONTAL)
sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
sizer_6 = wx.BoxSizer(wx.HORIZONTAL)
sizer_6.Add(self.label_1_copy_2, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 2)
sizer_6.Add(self.filenameText, 1, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_6.Add(self.button_1_copy, 0, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_2.Add(sizer_6, 0, wx.BOTTOM|wx.EXPAND, 4)
sizer_3.Add(self.label_3_copy_1, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 2)
sizer_3.Add(self.dataTypeChoice, 1, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_2.Add(sizer_3, 0, wx.BOTTOM|wx.EXPAND, 4)
sizer_7.Add(self.endiannessRadioBox, 1, wx.EXPAND, 0)
sizer_2.Add(sizer_7, 0, wx.BOTTOM|wx.EXPAND, 4)
sizer_5.Add(sizer_2, 1, wx.ALL|wx.EXPAND, 7)
grid_sizer_1.Add(self.label_2, 0, wx.ALIGN_CENTER_VERTICAL, 0)
grid_sizer_1.Add(self.headerSizeText, 0, wx.EXPAND, 0)
grid_sizer_1.Add(self.label_4, 0, wx.ALIGN_CENTER_VERTICAL, 0)
grid_sizer_1.Add(self.extentText, 0, wx.EXPAND, 0)
grid_sizer_1.Add(self.label_5, 0, wx.ALIGN_CENTER_VERTICAL, 0)
grid_sizer_1.Add(self.spacingText, 0, wx.EXPAND, 0)
grid_sizer_1.AddGrowableCol(1)
sizer_5.Add(grid_sizer_1, 0, wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 7)
self.viewFramePanel.SetSizer(sizer_5)
sizer_1.Add(self.viewFramePanel, 1, wx.EXPAND, 0)
self.SetSizer(sizer_1)
sizer_1.Fit(self)
self.Layout()
# end wxGlade
# end of class rawVolumeRDRViewFrame
if __name__ == "__main__":
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
frame_1 = rawVolumeRDRViewFrame(None, -1, "")
app.SetTopWindow(frame_1)
frame_1.Show()
app.MainLoop()
|
[
"cpbotha@users.noreply.github.com"
] |
cpbotha@users.noreply.github.com
|
fc7f96130086adfc9ad4a25f653c89d83852ef51
|
bf99b1b14e9ca1ad40645a7423f23ef32f4a62e6
|
/AtCoder/EducationalDP/n_2.py
|
cbed84f2ac5e605e38eb513f8cf7a89401c910ac
|
[] |
no_license
|
y-oksaku/Competitive-Programming
|
3f9c1953956d1d1dfbf46d5a87b56550ff3ab3db
|
a3ff52f538329bed034d3008e051f30442aaadae
|
refs/heads/master
| 2021-06-11T16:14:12.635947
| 2021-05-04T08:18:35
| 2021-05-04T08:18:35
| 188,639,647
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 447
|
py
|
import numpy as np
N = int(input())
A = np.array(input().split(), np.int64)
Acum = np.zeros((N, N + 1), dtype=np.int64)
for i in range(N):
Acum[i, i + 1:] = A[i:].cumsum()
dp = np.zeros((N, N + 1), dtype=np.int64)
for length in range(2, N + 1):
for left in range(N - length + 1):
right = left + length
dp[left, right] = (dp[left, left + 1: right] + dp[left + 1: right, right]).min() + Acum[left, right]
print(dp[0, N])
|
[
"y.oksaku@stu.kanazawa-u.ac.jp"
] |
y.oksaku@stu.kanazawa-u.ac.jp
|
39011567a8e74d0c3d1a0fa36bbb225839194414
|
349dadbf45b7c12a3fe41c5e0421c0488b679919
|
/transformers/tests/test_trainer_seq2seq.py
|
dc4a11260d4c0fe61480ce2e944d48835dcfbbd3
|
[
"BSD-3-Clause",
"CC0-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"Apache-2.0"
] |
permissive
|
salesforce/CodeRL
|
c772e408bac690527759f416ea22add4c97e5bec
|
51db4ff983d5376e62b9e7eba150316a651c80d9
|
refs/heads/main
| 2023-08-18T18:38:02.740995
| 2022-11-18T16:14:28
| 2022-11-18T16:14:28
| 508,912,853
| 412
| 52
|
BSD-3-Clause
| 2023-08-31T07:51:27
| 2022-06-30T02:54:36
|
Python
|
UTF-8
|
Python
| false
| false
| 4,859
|
py
|
# coding=utf-8
# Copyright 2020 the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from transformers import BertTokenizer, EncoderDecoderModel, Seq2SeqTrainer, Seq2SeqTrainingArguments
from transformers.file_utils import is_datasets_available
from transformers.testing_utils import TestCasePlus, require_torch, slow
if is_datasets_available():
import datasets
class Seq2seqTrainerTester(TestCasePlus):
@slow
@require_torch
def test_finetune_bert2bert(self):
bert2bert = EncoderDecoderModel.from_encoder_decoder_pretrained("prajjwal1/bert-tiny", "prajjwal1/bert-tiny")
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
bert2bert.config.vocab_size = bert2bert.config.encoder.vocab_size
bert2bert.config.eos_token_id = tokenizer.sep_token_id
bert2bert.config.decoder_start_token_id = tokenizer.cls_token_id
bert2bert.config.max_length = 128
train_dataset = datasets.load_dataset("cnn_dailymail", "3.0.0", split="train[:1%]")
val_dataset = datasets.load_dataset("cnn_dailymail", "3.0.0", split="validation[:1%]")
train_dataset = train_dataset.select(range(32))
val_dataset = val_dataset.select(range(16))
batch_size = 4
def _map_to_encoder_decoder_inputs(batch):
# Tokenizer will automatically set [BOS] <text> [EOS]
inputs = tokenizer(batch["article"], padding="max_length", truncation=True, max_length=512)
outputs = tokenizer(batch["highlights"], padding="max_length", truncation=True, max_length=128)
batch["input_ids"] = inputs.input_ids
batch["attention_mask"] = inputs.attention_mask
batch["decoder_input_ids"] = outputs.input_ids
batch["labels"] = outputs.input_ids.copy()
batch["labels"] = [
[-100 if token == tokenizer.pad_token_id else token for token in labels] for labels in batch["labels"]
]
batch["decoder_attention_mask"] = outputs.attention_mask
assert all([len(x) == 512 for x in inputs.input_ids])
assert all([len(x) == 128 for x in outputs.input_ids])
return batch
def _compute_metrics(pred):
labels_ids = pred.label_ids
pred_ids = pred.predictions
# all unnecessary tokens are removed
pred_str = tokenizer.batch_decode(pred_ids, skip_special_tokens=True)
label_str = tokenizer.batch_decode(labels_ids, skip_special_tokens=True)
accuracy = sum([int(pred_str[i] == label_str[i]) for i in range(len(pred_str))]) / len(pred_str)
return {"accuracy": accuracy}
# map train dataset
train_dataset = train_dataset.map(
_map_to_encoder_decoder_inputs,
batched=True,
batch_size=batch_size,
remove_columns=["article", "highlights"],
)
train_dataset.set_format(
type="torch",
columns=["input_ids", "attention_mask", "decoder_input_ids", "decoder_attention_mask", "labels"],
)
# same for validation dataset
val_dataset = val_dataset.map(
_map_to_encoder_decoder_inputs,
batched=True,
batch_size=batch_size,
remove_columns=["article", "highlights"],
)
val_dataset.set_format(
type="torch",
columns=["input_ids", "attention_mask", "decoder_input_ids", "decoder_attention_mask", "labels"],
)
output_dir = self.get_auto_remove_tmp_dir()
training_args = Seq2SeqTrainingArguments(
output_dir=output_dir,
per_device_train_batch_size=batch_size,
per_device_eval_batch_size=batch_size,
predict_with_generate=True,
evaluation_strategy="steps",
do_train=True,
do_eval=True,
warmup_steps=0,
eval_steps=2,
logging_steps=2,
)
# instantiate trainer
trainer = Seq2SeqTrainer(
model=bert2bert,
args=training_args,
compute_metrics=_compute_metrics,
train_dataset=train_dataset,
eval_dataset=val_dataset,
tokenizer=tokenizer,
)
# start training
trainer.train()
|
[
"hungle@salesforce.com"
] |
hungle@salesforce.com
|
b0914bfb223699b5b1e55591b0080efe663329aa
|
f94c2337607ef06856fcff8acd18e60059894e21
|
/src/program/consumers.py
|
9d141e87613bd88c6f795b85e8bcb5a01bc8c4e4
|
[] |
no_license
|
coral/bornhack-website
|
d82446486db8523d0e059e82851ae80498b0bf2d
|
260a37af3c1cb4cd33dfc8cb80f6bd966b794c3d
|
refs/heads/master
| 2021-01-16T00:09:15.237955
| 2017-08-10T18:55:13
| 2017-08-10T18:55:13
| 99,955,304
| 0
| 0
| null | 2017-08-10T18:51:34
| 2017-08-10T18:51:34
| null |
UTF-8
|
Python
| false
| false
| 2,043
|
py
|
from channels.generic.websockets import JsonWebsocketConsumer
from camps.models import Camp
from .models import EventInstance, Favorite
class ScheduleConsumer(JsonWebsocketConsumer):
    """WebSocket consumer serving the camp schedule and favorite toggles."""

    # Populate message.user / message.http_session from the HTTP handshake.
    http_user = True

    def connection_groups(self, **kwargs):
        # One broadcast group shared by every connected schedule client.
        return ['schedule_users']

    def connect(self, message, **kwargs):
        """On connect, send the full schedule for the camp stored in the session."""
        camp_slug = message.http_session['campslug']
        try:
            camp = Camp.objects.get(slug=camp_slug)
            # Human-readable descriptors for each day of the camp.
            days = list(map(
                lambda day:
                { 'repr': day.lower.strftime('%A %Y-%m-%d')
                , 'iso': day.lower.strftime('%Y-%m-%d')
                , 'day_name': day.lower.strftime('%A')
                },
                camp.get_days('camp')
            ))

            event_instances_query_set = EventInstance.objects.filter(event__camp=camp)
            # Serialize per-user so each client sees its own favorite status.
            event_instances = list(map(lambda x: x.to_json(user=message.user), event_instances_query_set))
            self.send({
                "accept": True,
                "event_instances": event_instances,
                "days": days,
                "action": "init"
            })
        except Camp.DoesNotExist:
            # Unknown camp slug: accept the connection but send no init payload.
            pass

    def raw_receive(self, message, **kwargs):
        """Handle 'favorite'/'unfavorite' actions sent by the client."""
        content = self.decode_json(message['text'])
        action = content.get('action')
        data = {}

        if action == 'favorite':
            event_instance_id = content.get('event_instance_id')
            event_instance = EventInstance.objects.get(id=event_instance_id)
            Favorite.objects.create(
                user=message.user,
                event_instance=event_instance
            )

        if action == 'unfavorite':
            event_instance_id = content.get('event_instance_id')
            event_instance = EventInstance.objects.get(id=event_instance_id)
            # NOTE(review): raises if no matching Favorite exists — confirm the
            # client never sends 'unfavorite' for a non-favorited instance.
            favorite = Favorite.objects.get(event_instance=event_instance, user=message.user)
            favorite.delete()

        # Always acknowledge with a (possibly empty) JSON payload.
        self.send(data)

    def disconnect(self, message, **kwargs):
        # No per-connection state to clean up.
        pass
|
[
"valberg@orn.li"
] |
valberg@orn.li
|
2673f7a679c32787464288aeaaf3f84c4970adcd
|
1852cdc422fe605a379ab24368157a1b82e8f66f
|
/037_trunc_primes.py
|
2367585319fcdf307dc6473976ef2f6828dcd21d
|
[] |
no_license
|
JesseAldridge/Euler
|
e96bd7995fd8da60ce4db3c1daa20a719778b4a2
|
ee23c562cfcf3187f8768264249a41470c9d6355
|
refs/heads/master
| 2021-01-16T19:35:53.064280
| 2014-07-06T01:20:26
| 2014-07-06T01:20:26
| 2,194,822
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 710
|
py
|
def gen_primes():
    """Yield prime numbers indefinitely via an incremental sieve.

    Based on http://stackoverflow.com/questions/567222/simple-prime-generator-in-python
    Maintains a map from upcoming composite numbers to the primes that
    divide them; a candidate absent from the map is prime.
    """
    composites = {}
    candidate = 2
    while True:
        witnesses = composites.pop(candidate, None)
        if witnesses is None:
            # Not marked composite: it's prime. Its square is the first
            # composite it is responsible for marking.
            yield candidate
            composites[candidate * candidate] = [candidate]
        else:
            # Push each witnessing prime forward to its next multiple.
            for prime in witnesses:
                composites.setdefault(prime + candidate, []).append(prime)
        candidate += 1
# Find the eleven truncatable primes.
# Project Euler 37: primes (> 7) that stay prime while digits are removed
# one at a time from the left AND from the right.
primes = set()  # all primes generated so far; any truncation of the current
                # prime is smaller, so it is already in this set if prime
count = 0       # truncatable primes found
sum_ = 0        # their running total
for prime in gen_primes():
    primes.add(prime)
    if prime <= 7: continue  # 2, 3, 5 and 7 are excluded by the problem
    sprime = str(prime)
    for i in range(1, len(sprime)):
        if int(sprime[i:]) not in primes: break   # left truncation not prime
        if int(sprime[:-i]) not in primes: break  # right truncation not prime
    else:
        # Loop completed without break: every truncation was prime.
        count += 1
        sum_ += prime
    if count >= 11:
        break
print 'sum:', sum_  # NOTE: Python 2 print statement
|
[
"JesseAldridge@gmail.com"
] |
JesseAldridge@gmail.com
|
89f8dbce692f523fe4b4add92ab763aebb764dbb
|
bdc47ebbe3e125a48d3cfe762061f4b1070465c4
|
/mysite/fcuser/migrations/0001_initial.py
|
49fb63275f1f537773b58852b3526f7fe6aadb84
|
[] |
no_license
|
swpheus/Django_prj
|
3de3d2b110e51087cdf89b95fc2b6ea3f63acf00
|
10b9af058f0b76c424e24ecd1996a49646948547
|
refs/heads/master
| 2020-07-21T17:17:14.019550
| 2019-09-21T03:00:36
| 2019-09-21T03:00:36
| 206,929,161
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 791
|
py
|
# Generated by Django 2.2.4 on 2019-08-30 12:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the ``sw`` table backing the Fcuser model."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Fcuser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # verbose_name strings are Korean: "username", "password",
                # "registration time" (kept verbatim — they are runtime strings).
                ('username', models.CharField(max_length=64, verbose_name='사용자이름')),
                ('password', models.CharField(max_length=64, verbose_name='비밀번호')),
                ('registered_dttm', models.DateTimeField(auto_now_add=True, verbose_name='등록시간')),
            ],
            options={
                'db_table': 'sw',
            },
        ),
    ]
|
[
"swpheus1@naver.com"
] |
swpheus1@naver.com
|
5c04bc43c47e68240dcf58ac436ae995004210b7
|
0a33cc0ebb67c51cc38750f0f04c3e6c088e3b1a
|
/tests/components/media_source/test_init.py
|
491b1972cb680ef2f8d91bc7ee511cd93d8951ad
|
[
"Apache-2.0"
] |
permissive
|
robert-alfaro/home-assistant
|
e9bb08ad22a167ed226fb3de8f5b36acfc393548
|
4a53121b58b77a318f08c64ad2c5372a16b800e0
|
refs/heads/dev
| 2023-02-28T06:46:23.217246
| 2022-04-26T17:30:08
| 2022-04-26T17:30:08
| 115,894,662
| 4
| 0
|
Apache-2.0
| 2023-02-22T06:21:08
| 2018-01-01T02:00:35
|
Python
|
UTF-8
|
Python
| false
| false
| 7,917
|
py
|
"""Test Media Source initialization."""
from unittest.mock import Mock, patch
import pytest
import yarl
from homeassistant.components import media_source
from homeassistant.components.media_player import MEDIA_CLASS_DIRECTORY, BrowseError
from homeassistant.components.media_source import const, models
from homeassistant.setup import async_setup_component
async def test_is_media_source_id():
    """Test media source validation."""
    valid_ids = (
        media_source.URI_SCHEME,
        f"{media_source.URI_SCHEME}domain",
        f"{media_source.URI_SCHEME}domain/identifier",
    )
    for candidate in valid_ids:
        assert media_source.is_media_source_id(candidate)
    # A plain string without the URI scheme is rejected.
    assert not media_source.is_media_source_id("test")
async def test_generate_media_source_id():
    """Test identifier generation."""
    # Every (domain, identifier) combination, including empty/None parts,
    # must produce an identifier that round-trips through the validator.
    cases = (
        (None, None),
        (None, ""),
        ("", ""),
        ("domain", None),
        ("domain", ""),
        ("domain", "identifier"),
    )
    for domain, identifier in cases:
        generated = media_source.generate_media_source_id(domain, identifier)
        assert media_source.is_media_source_id(generated)
async def test_async_browse_media(hass):
    """Test browse media."""
    assert await async_setup_component(hass, media_source.DOMAIN, {})
    await hass.async_block_till_done()

    # Test non-media ignored (/media has test.mp3 and not_media.txt)
    media = await media_source.async_browse_media(hass, "")
    assert isinstance(media, media_source.models.BrowseMediaSource)
    assert media.title == "media"
    assert len(media.children) == 2

    # Test content filter
    media = await media_source.async_browse_media(
        hass,
        "",
        content_filter=lambda item: item.media_content_type.startswith("video/"),
    )
    assert isinstance(media, media_source.models.BrowseMediaSource)
    assert media.title == "media"
    assert len(media.children) == 1, media.children
    # BUGFIX: this previously *assigned* the title instead of asserting it,
    # so the check was a no-op.
    assert media.children[0].title == "Epic Sax Guy 10 Hours"
    assert media.not_shown == 1

    # Test content filter adds to original not_shown
    orig_browse = models.MediaSourceItem.async_browse

    async def not_shown_browse(self):
        """Patch browsed item to set not_shown base value."""
        item = await orig_browse(self)
        item.not_shown = 10
        return item

    with patch(
        "homeassistant.components.media_source.models.MediaSourceItem.async_browse",
        not_shown_browse,
    ):
        media = await media_source.async_browse_media(
            hass,
            "",
            content_filter=lambda item: item.media_content_type.startswith("video/"),
        )
        assert isinstance(media, media_source.models.BrowseMediaSource)
        assert media.title == "media"
        assert len(media.children) == 1, media.children
        # BUGFIX: same assignment-instead-of-assertion typo as above.
        assert media.children[0].title == "Epic Sax Guy 10 Hours"
        assert media.not_shown == 11

    # Test invalid media content
    with pytest.raises(BrowseError):
        await media_source.async_browse_media(hass, "invalid")

    # Test base URI returns all domains
    media = await media_source.async_browse_media(hass, const.URI_SCHEME)
    assert isinstance(media, media_source.models.BrowseMediaSource)
    assert len(media.children) == 1
    assert media.children[0].title == "Local Media"
async def test_async_resolve_media(hass):
    """Test resolving a media-source URI to a playable URL and MIME type."""
    assert await async_setup_component(hass, media_source.DOMAIN, {})
    await hass.async_block_till_done()

    media = await media_source.async_resolve_media(
        hass,
        media_source.generate_media_source_id(media_source.DOMAIN, "local/test.mp3"),
    )
    assert isinstance(media, media_source.models.PlayMedia)
    assert media.url == "/media/local/test.mp3"
    assert media.mime_type == "audio/mpeg"
async def test_async_unresolve_media(hass):
    """Test that unresolvable identifiers raise Unresolvable."""
    assert await async_setup_component(hass, media_source.DOMAIN, {})
    await hass.async_block_till_done()

    # Test no media content
    with pytest.raises(media_source.Unresolvable):
        await media_source.async_resolve_media(hass, "")

    # Test invalid media content
    with pytest.raises(media_source.Unresolvable):
        await media_source.async_resolve_media(hass, "invalid")

    # Test invalid media source
    with pytest.raises(media_source.Unresolvable):
        await media_source.async_resolve_media(hass, "media-source://media_source2")
async def test_websocket_browse_media(hass, hass_ws_client):
    """Test browse media websocket."""
    assert await async_setup_component(hass, media_source.DOMAIN, {})
    await hass.async_block_till_done()

    client = await hass_ws_client(hass)

    # Canned browse result returned by the patched backend.
    media = media_source.models.BrowseMediaSource(
        domain=media_source.DOMAIN,
        identifier="/media",
        title="Local Media",
        media_class=MEDIA_CLASS_DIRECTORY,
        media_content_type="listing",
        can_play=False,
        can_expand=True,
    )

    # Success path: the websocket response mirrors the browse result.
    with patch(
        "homeassistant.components.media_source.async_browse_media",
        return_value=media,
    ):
        await client.send_json(
            {
                "id": 1,
                "type": "media_source/browse_media",
            }
        )
        msg = await client.receive_json()

    assert msg["success"]
    assert msg["id"] == 1
    assert media.as_dict() == msg["result"]

    # Error path: a BrowseError surfaces as a browse_media_failed response.
    with patch(
        "homeassistant.components.media_source.async_browse_media",
        side_effect=BrowseError("test"),
    ):
        await client.send_json(
            {
                "id": 2,
                "type": "media_source/browse_media",
                "media_content_id": "invalid",
            }
        )
        msg = await client.receive_json()

    assert not msg["success"]
    assert msg["error"]["code"] == "browse_media_failed"
    assert msg["error"]["message"] == "test"
@pytest.mark.parametrize("filename", ["test.mp3", "Epic Sax Guy 10 Hours.mp4"])
async def test_websocket_resolve_media(hass, hass_ws_client, filename):
    """Test resolve media websocket."""
    assert await async_setup_component(hass, media_source.DOMAIN, {})
    await hass.async_block_till_done()

    client = await hass_ws_client(hass)

    # BUGFIX: interpolate the parametrized filename — previously the
    # f-strings contained a literal placeholder and the parameter was
    # unused, so both parametrized runs tested the same thing.
    media = media_source.models.PlayMedia(
        f"/media/local/{filename}",
        "audio/mpeg",
    )

    with patch(
        "homeassistant.components.media_source.async_resolve_media",
        return_value=media,
    ):
        await client.send_json(
            {
                "id": 1,
                "type": "media_source/resolve_media",
                "media_content_id": f"{const.URI_SCHEME}{media_source.DOMAIN}/local/{filename}",
            }
        )
        msg = await client.receive_json()

    assert msg["success"]
    assert msg["id"] == 1
    assert msg["result"]["mime_type"] == media.mime_type

    # Validate url is relative and signed.
    assert msg["result"]["url"][0] == "/"
    parsed = yarl.URL(msg["result"]["url"])
    assert parsed.path == getattr(media, "url")
    assert "authSig" in parsed.query

    # Error path: an Unresolvable surfaces as resolve_media_failed.
    with patch(
        "homeassistant.components.media_source.async_resolve_media",
        side_effect=media_source.Unresolvable("test"),
    ):
        await client.send_json(
            {
                "id": 2,
                "type": "media_source/resolve_media",
                "media_content_id": "invalid",
            }
        )
        msg = await client.receive_json()

    assert not msg["success"]
    assert msg["error"]["code"] == "resolve_media_failed"
    assert msg["error"]["message"] == "test"
async def test_browse_resolve_without_setup():
    """Test browse and resolve work without being setup."""
    # Both entry points must raise cleanly (not crash) when the component
    # was never set up; a bare Mock with empty .data stands in for hass.
    with pytest.raises(BrowseError):
        await media_source.async_browse_media(Mock(data={}), None)

    with pytest.raises(media_source.Unresolvable):
        await media_source.async_resolve_media(Mock(data={}), None)
|
[
"noreply@github.com"
] |
robert-alfaro.noreply@github.com
|
c994e9588ddb9863412a93b03416f881104cb03e
|
7e4e2acb0cdd4dba7d23e15ce56cdc3e4842f601
|
/openstack/telemetry/v2/_proxy.py
|
f9265372a1af74b7c92e798ebfaed693919c7dcc
|
[
"Apache-2.0"
] |
permissive
|
david-guyon/python-openstacksdk
|
99b2a52af9acf75b3df76438569477d2a46c1679
|
4a7c643b1b09904ac5ab8863a06a11e493a3d235
|
refs/heads/master
| 2021-05-28T23:06:46.337935
| 2015-02-26T23:12:53
| 2015-02-26T23:12:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,801
|
py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.telemetry.v2 import alarm
from openstack.telemetry.v2 import alarm_change
from openstack.telemetry.v2 import capability
from openstack.telemetry.v2 import meter
from openstack.telemetry.v2 import resource
from openstack.telemetry.v2 import sample
from openstack.telemetry.v2 import statistics
class Proxy(object):
    """Telemetry v2 API proxy: thin CRUD wrappers around the resource types."""

    def __init__(self, session):
        # Authenticated transport shared by every resource operation.
        self.session = session

    # -- Alarms ----------------------------------------------------------

    def create_alarm(self, **attrs):
        """Create a new alarm from keyword attributes."""
        return alarm.Alarm(attrs).create(self.session)

    def delete_alarm(self, **attrs):
        """Delete the alarm described by the given attributes."""
        alarm.Alarm(attrs).delete(self.session)

    def find_alarm(self, name_or_id):
        """Find a single alarm by name or ID."""
        return alarm.Alarm.find(self.session, name_or_id)

    def get_alarm(self, **attrs):
        """Fetch a single alarm."""
        return alarm.Alarm(attrs).get(self.session)

    def list_alarms(self):
        """List all alarms."""
        return alarm.Alarm.list(self.session)

    def update_alarm(self, **attrs):
        """Update an existing alarm."""
        return alarm.Alarm(attrs).update(self.session)

    # -- Alarm changes ---------------------------------------------------

    def find_alarm_change(self, name_or_id):
        """Find a single alarm change by name or ID."""
        return alarm_change.AlarmChange.find(self.session, name_or_id)

    def list_alarm_changes(self):
        """List all alarm changes."""
        return alarm_change.AlarmChange.list(self.session)

    # -- Capabilities ------------------------------------------------------

    def find_capability(self, name_or_id):
        """Find a single capability by name or ID."""
        return capability.Capability.find(self.session, name_or_id)

    def list_capabilitys(self):
        """List all capabilities (method name kept for compatibility)."""
        return capability.Capability.list(self.session)

    # -- Meters ------------------------------------------------------------

    def find_meter(self, name_or_id):
        """Find a single meter by name or ID."""
        return meter.Meter.find(self.session, name_or_id)

    def list_meters(self):
        """List all meters."""
        return meter.Meter.list(self.session)

    # -- Resources ---------------------------------------------------------

    def find_resource(self, name_or_id):
        """Find a single resource by name or ID."""
        return resource.Resource.find(self.session, name_or_id)

    def get_resource(self, **attrs):
        """Fetch a single resource."""
        return resource.Resource(attrs).get(self.session)

    def list_resources(self):
        """List all resources."""
        return resource.Resource.list(self.session)

    # -- Samples -------------------------------------------------------------

    def create_sample(self, **attrs):
        """Create a new sample from keyword attributes."""
        return sample.Sample(attrs).create(self.session)

    def find_sample(self, name_or_id):
        """Find a single sample by name or ID."""
        return sample.Sample.find(self.session, name_or_id)

    def list_samples(self):
        """List all samples."""
        return sample.Sample.list(self.session)

    # -- Statistics ---------------------------------------------------------

    def find_statistics(self, name_or_id):
        """Find statistics by name or ID."""
        return statistics.Statistics.find(self.session, name_or_id)

    def list_statistics(self):
        """List all statistics."""
        return statistics.Statistics.list(self.session)
|
[
"terrylhowe@gmail.com"
] |
terrylhowe@gmail.com
|
40fd3a424ce366164e63986b518fd63f14d16468
|
2a0efe30198c948982a85ea74c7e47ef9ecb4823
|
/pers/cyj/day11/shoot/person.py
|
124be128ac066503bc23040dc39c6c6912db87cf
|
[] |
no_license
|
cyjhunnyboy/PythonTutorialProj
|
00151ed364807c6df54b1b0300cb622086128b74
|
c3a02e6fa2dd09bf5d7b47fd78a4d3a31356a39d
|
refs/heads/master
| 2023-02-23T18:35:45.481252
| 2021-01-25T11:13:45
| 2021-01-25T11:13:45
| 298,922,161
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 219
|
py
|
class Person(object):
    """A person carrying a gun: can fire it and refill its bullet box."""

    def __init__(self, gun):
        # The carried gun; expected to expose shoot() and a bulletBox.
        self.gun = gun

    def fire(self):
        """Fire the carried gun once."""
        self.gun.shoot()

    def fillBullet(self, count):
        """Load ``count`` bullets into the gun's bullet box."""
        self.gun.bulletBox.bulletCount = count
|
[
"cyjhunnyboy@sina.com"
] |
cyjhunnyboy@sina.com
|
ca3aa4adc107683032e77e9d1df22364acfe304a
|
0e4dc82a94563dacb0c25d0d43fbcbe3def21f72
|
/259-3Sum-Smaller/Python/Solution01.py
|
d722b1e3406c547a4b5602fe95da262f1617fbce
|
[
"CC-BY-3.0",
"MIT"
] |
permissive
|
Eroica-cpp/LeetCode
|
3ce3b05b3098e8097c1090e2116b813efaadd2a3
|
07276bd11558f3d0e32bec768b09e886de145f9e
|
refs/heads/master
| 2021-06-20T05:41:30.506250
| 2017-03-16T05:17:39
| 2017-03-16T05:17:39
| 35,126,816
| 7
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,478
|
py
|
#!/usr/bin/python
"""
==============================================================================
Author: Tao Li (taoli@ucsd.edu)
Date: Aug 29, 2015
Question: 259-3Sum-Smaller
Link: https://leetcode.com/problems/3sum-smaller/
==============================================================================
Given an array of n integers nums and a target, find the number of index
triplets i, j, k with 0 <= i < j < k < n that satisfy the condition
nums[i] + nums[j] + nums[k] < target.
For example, given nums = [-2, 0, 1, 3], and target = 2.
Return 2. Because there are two triplets which sums are less than 2:
[-2, 0, 1]
[-2, 0, 3]
Follow up:
Could you solve it in O(n2) runtime?
==============================================================================
Method: sort first
Time Complexity: O(n^2)
Space Complexity: O(1)
==============================================================================
"""
class Solution(object):
    def threeSumSmaller(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: int

        Count index triplets (i, j, k) with i < j < k and
        nums[i] + nums[j] + nums[k] < target.

        Sorts ``nums`` in place (matching the original behavior), then for
        each fixed smallest element runs two pointers inward.
        O(n^2) time, O(1) extra space.
        """
        nums.sort()
        size = len(nums)
        counter = 0
        # BUGFIX: ``range`` instead of Python-2-only ``xrange`` so the code
        # runs on Python 3 as well (identical semantics on Python 2).
        for i in range(size - 2):
            begin, end = i + 1, size - 1
            while begin < end:
                if nums[i] + nums[begin] + nums[end] < target:
                    # nums is sorted, so (i, begin, j) also satisfies the
                    # condition for every j in (begin, end]; count the whole
                    # run at once and advance begin.
                    counter += end - begin
                    begin += 1
                else:
                    end -= 1
        return counter
|
[
"eroicacmcs@gmail.com"
] |
eroicacmcs@gmail.com
|
d8a209f8fec660b58abf72cb3b33b420aa69cf12
|
665d9bad46e68f779512640e582d2522867b0dba
|
/Linked List Problems/21. Merge Two Sorted Lists.py
|
48c4ac08963a9b30f419087ad340bcb3731dfc20
|
[] |
no_license
|
RahatIbnRafiq/leetcodeProblems
|
6fd1e9726b14b7ad3571e5a4af5665b72f7aee0a
|
2d35590d189938e0705a21be110e75e0b209ea96
|
refs/heads/master
| 2021-11-25T11:58:20.498911
| 2021-11-25T00:43:04
| 2021-11-25T00:43:04
| 72,865,562
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 824
|
py
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def mergeTwoLists(self, l1, l2):
        """Merge two sorted linked lists and return the merged head.

        Splices the existing nodes together; ties go to the first list,
        keeping the merge stable.
        """
        # Dummy heads: d1/d2 track the unconsumed remainder of each input,
        # result anchors the merged output.
        d1, d2, result = ListNode(0), ListNode(0), ListNode(0)
        d1.next, d2.next = l1, l2
        tail = result
        # Repeatedly move the smaller front node onto the merged tail.
        while d1.next and d2.next:
            if d1.next.val <= d2.next.val:
                tail.next = d1.next
                d1.next = d1.next.next
            else:
                tail.next = d2.next
                d2.next = d2.next.next
            tail = tail.next
        # Attach whichever list still has nodes, detaching it from its dummy.
        if d1.next is not None:
            tail.next = d1.next
            d1.next = None
        elif d2.next is not None:
            tail.next = d2.next
            d2.next = None
        return result.next
|
[
"rahat.rafiq@colorado.edu"
] |
rahat.rafiq@colorado.edu
|
77d1b6b14ec4e15eceb3b3cb3df095166f40518e
|
88bbf27deb0b2a1b96985c0a94ff0b7a3d012820
|
/Feeds/migrations/0001_initial.py
|
4b9aaa549847b201a89bcbc92820989702ecc36c
|
[] |
no_license
|
Code-Community99/Hiq-django
|
e8efb7d63bd4fc0bc8e2af193fdec9aaab0975b0
|
af62622648ad88f6e8d94e86a8dc5d6660e3bbe2
|
refs/heads/master
| 2022-12-14T01:12:45.218318
| 2020-05-18T23:29:35
| 2020-05-18T23:29:35
| 233,811,384
| 2
| 1
| null | 2022-12-08T03:34:53
| 2020-01-14T10:02:55
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 783
|
py
|
# Generated by Django 3.0.2 on 2020-01-31 11:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: creates the ``feeds`` table for the feeds_list model."""

    initial = True

    dependencies = [
        ('signup', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='feeds_list',
            fields=[
                ('Fid', models.AutoField(primary_key=True, serialize=False)),
                ('feed', models.CharField(max_length=1255)),
                ('post_time', models.DateTimeField(auto_now_add=True)),
                # Author of the feed entry; rows are removed with the user.
                ('uid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='signup.signup_user')),
            ],
            options={
                'db_table': 'feeds',
            },
        ),
    ]
|
[
"duncansantiago18@gmail.com"
] |
duncansantiago18@gmail.com
|
64bec78da4f9613a704e778e9861c439a2f6da10
|
f281d0d6431c1b45c6e5ebfff5856c374af4b130
|
/DAY001~099/DAY84-BOJ7579-앱/ykim.py
|
57d20480ba6f75c21a3574aa7376c4bf95479268
|
[] |
no_license
|
tachyon83/code-rhino
|
ec802dc91dce20980fac401b26165a487494adb4
|
b1af000f5798cd12ecdab36aeb9c7a36f91c1101
|
refs/heads/master
| 2022-08-13T09:10:16.369287
| 2022-07-30T11:27:34
| 2022-07-30T11:27:34
| 292,142,812
| 5
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 799
|
py
|
import sys
# BOJ 7579 "App": deactivate apps to free at least M bytes of memory at
# minimum cost — a 0/1 knapsack over freed memory with cost as the value.
N,M=map(int,sys.stdin.readline().split())
# apps
m=list(map(int,sys.stdin.readline().split()))# memory freed per app
c=list(map(int,sys.stdin.readline().split()))# deactivation cost per app
# result[j] = minimum cost to free j bytes (states >= M collapse into M).
# 10001 acts as "unreachable" — presumably above any attainable total cost
# for this problem's constraints (TODO confirm).
result=[10001]*(M+1)
result[0]=0
# M = memory to secure
for i in range(N):
    rm=m[i]# memory freed when this app is removed
    # Iterate j downward so each app is counted at most once (0/1 knapsack).
    for j in range(M,-1,-1):
        if result[j]!=10001:# state j is reachable
            if j+rm>=M:# meets or exceeds the target memory
                if result[M]>result[j]+c[i]:# cheaper than the current best
                    result[M]=result[j]+c[i]
            else:# still below the target memory
                if result[j+rm]>result[j]+c[i]:# cheaper path: update the state
                    result[j+rm]=result[j]+c[i]
print(result[M])
|
[
"noreply@github.com"
] |
tachyon83.noreply@github.com
|
6a77925e19d91eaed67900e2e238817e5b1b7ae6
|
2e70b3ce93762c5b66fba57f8b9cba37aacf0702
|
/new/event/migrations/0068_auto_20190604_0558.py
|
2b9ad4a849e0cdbfb1119ab88bfba5b068febf1e
|
[] |
no_license
|
mahidul-islam/jamah
|
02be511fe119e8934ec7d5aa1eaa8e2b24fad246
|
c8ddf9a8094d33e8b1d6cb834eab3d9f18b1a9ea
|
refs/heads/master
| 2022-05-13T15:11:38.609550
| 2019-06-08T04:52:09
| 2019-06-08T04:52:09
| 184,331,276
| 2
| 0
| null | 2022-04-22T21:27:18
| 2019-04-30T21:04:06
|
Python
|
UTF-8
|
Python
| false
| false
| 536
|
py
|
# Generated by Django 2.2.1 on 2019-06-04 05:58
from django.db import migrations
class Migration(migrations.Migration):
    """Rename two eventmember fields for clarity (schema-only rename)."""

    dependencies = [
        ('event', '0067_auto_20190602_1815'),
    ]

    operations = [
        migrations.RenameField(
            model_name='eventmember',
            old_name='account',
            new_name='accountant_account',
        ),
        migrations.RenameField(
            model_name='eventmember',
            old_name='total_sent',
            new_name='total_sent_money',
        ),
    ]
|
[
"mizihan84@gmail.com"
] |
mizihan84@gmail.com
|
8abf0d510ba72fc3a8052dcc5dd840e749d5fe57
|
84bf0086bfe7af894bbfba353f1884ae9f31c335
|
/1123. Lowest Common Ancestor of Deepest Leaves/Python 3/solution.py
|
3ab86fe8912541cd5a9e096944789f1d46549a38
|
[] |
no_license
|
HarrrrryLi/LeetCode
|
7625d9ec3d7854f72e7aeeb0292960af06a78d05
|
fe5c6936627c2459731ddda6f67422c217b3cc91
|
refs/heads/master
| 2020-04-12T23:57:12.387253
| 2020-01-16T02:22:26
| 2020-01-16T02:22:26
| 162,834,492
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,129
|
py
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def lcaDeepestLeaves(self, root: TreeNode) -> TreeNode:
        """Return the lowest common ancestor of the tree's deepest leaves.

        Iterative DFS records, for every leaf, the set of (node, depth)
        pairs along its root-to-leaf path. The LCA of the deepest leaves is
        then the deepest node shared by all deepest-leaf paths.
        NOTE(review): storing a full path set per leaf costs O(n^2) memory
        in the worst case; a single post-order pass would be O(n).
        """
        stack = collections.deque()
        path = set()
        path.add((root, 0))
        stack.append((root, 0, path))
        # depth -> list of path sets, one per leaf at that depth
        leaves = collections.defaultdict(list)
        while stack:
            node, depth, path = stack.pop()
            if not node.left and not node.right:
                # Leaf reached: remember the whole path to it.
                leaves[depth].append(path)
                continue
            if node.left:
                # Copy the path so sibling subtrees don't share state.
                temp = set(path)
                temp.add((node.left, depth + 1))
                stack.append((node.left, depth + 1, temp))
            if node.right:
                temp = set(path)
                temp.add((node.right, depth + 1))
                stack.append((node.right, depth + 1, temp))
        max_depth = max(leaves)
        # Intersect all deepest-leaf paths; common ancestors survive.
        candidates = leaves[max_depth][0]
        for path in leaves[max_depth]:
            candidates &= path
        # The deepest surviving common node is the answer.
        return max(candidates, key=lambda x: x[1])[0]
|
[
"lfrharry@gmail.com"
] |
lfrharry@gmail.com
|
4eebe69f6eb17ac0db185dfa84f1ab896d702cd5
|
781e2692049e87a4256320c76e82a19be257a05d
|
/all_data/exercism_data/python/bob/3a4115ceaf044682aee09918ac6e23d1.py
|
b78aaaad71268364f91be3bfbc607b615372c346
|
[] |
no_license
|
itsolutionscorp/AutoStyle-Clustering
|
54bde86fe6dbad35b568b38cfcb14c5ffaab51b0
|
be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
|
refs/heads/master
| 2020-12-11T07:27:19.291038
| 2016-03-16T03:18:00
| 2016-03-16T03:18:42
| 59,454,921
| 4
| 0
| null | 2016-05-23T05:40:56
| 2016-05-23T05:40:56
| null |
UTF-8
|
Python
| false
| false
| 236
|
py
|
def hey(arg):
    """Reply to a remark the way lackadaisical Bob would.

    Silence gets a complaint, shouting gets pushback, questions get a
    noncommittal yes, and anything else gets a shrug.
    """
    remark = arg.strip()
    if not remark:
        return 'Fine. Be that way!'
    if remark.isupper():
        return "Whoa, chill out!"
    if remark.endswith('?'):
        return "Sure."
    return "Whatever."
|
[
"rrc@berkeley.edu"
] |
rrc@berkeley.edu
|
99fc264e361c08fda18f058be8e53b400b61379e
|
6ecf40c771874f31fa19f9534677c95c731352e9
|
/DRTransmitter.py
|
d60163c5eddb41d056a1880c6cf332dfa9d71c5b
|
[] |
no_license
|
fstakem/OptNetFilt
|
1c86e82629352ee8ee9eb270aa3be01c6202696d
|
c862f3d1dd09ad9cdd146871761cb1f814e82de9
|
refs/heads/master
| 2016-09-11T03:06:06.466206
| 2014-06-07T02:03:04
| 2014-06-07T02:03:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,700
|
py
|
# .------------------------------------------------------------------------------.
# | |
# | D E A D R E C K O N I N G T R A N S M I T T E R |
# | |
# '------------------------------------------------------------------------------'
from copy import *
from Vector import Vector
from Sample import Sample
from PredictionSample import PredictionSample
from Packet import Packet
class DRTransmitter(object):
    """Dead-reckoning transmitter.

    Walks a list of PredictionSamples and decides which would be
    transmitted: a packet is emitted when the dead-reckoned estimate
    drifts at least ``distanceThreshold`` from the true position, or when
    ``heartbeatRate`` time units have elapsed since the last transmission.
    """

    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #                      P U B L I C     F U N C T I O N S
    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def __init__(self, heartbeatRate):
        """heartbeatRate: maximum time between transmissions.

        Units match Sample.time — presumably milliseconds (TODO confirm).
        """
        # Data
        self.inputData = []
        self.transmittedPackets = []

        # Algorithm parameters
        self.distanceThreshold = 0.01
        self.heartbeatRate = 500
        # Only a positive int overrides the default; anything else is ignored.
        if isinstance( heartbeatRate, int ) and heartbeatRate > 0:
            self.heartbeatRate = heartbeatRate

    def getTransmittedPackets(self, distanceThreshold, data):
        """Run the algorithm over ``data`` and return the packets to send.

        Invalid arguments are silently ignored and the previous values kept.
        NOTE(review): an int distanceThreshold fails the float isinstance
        check and is dropped — confirm that is intended.
        """
        if isinstance( data, list ):
            self.inputData = data
        if isinstance( distanceThreshold, float ) and distanceThreshold > 0:
            self.distanceThreshold = distanceThreshold
        self.executeAlgorithm()

        return self.transmittedPackets

    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #                      P R I V A T E     F U N C T I O N S
    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def executeAlgorithm(self):
        """Replay inputData, emitting packets per the heartbeat/drift rules."""
        self.transmittedPackets = []
        sequenceNumber = 1

        # Start algorithm before loop: the first sample is always sent.
        self.transmittedPackets.append( self.createPacket(self.inputData[0], sequenceNumber) )
        sequenceNumber += 1
        lastTransmittedSample = self.inputData[0]

        for predictionSample in self.inputData:
            # Position the receiver would extrapolate at this sample's time.
            estimatedPosition = self.calculateEstPosition(lastTransmittedSample, \
                                                          predictionSample.sample.time)
            distance = predictionSample.sample.position.distance(estimatedPosition)
            if predictionSample.sample.time >= \
               ( lastTransmittedSample.sample.time + self.heartbeatRate ):
                # Heartbeat expired: transmit regardless of drift.
                self.transmittedPackets.append( self.createPacket(predictionSample, sequenceNumber) )
                sequenceNumber += 1
                lastTransmittedSample = predictionSample
            elif distance >= self.distanceThreshold:
                # Drift exceeded the threshold: transmit a correction.
                self.transmittedPackets.append( self.createPacket(predictionSample, sequenceNumber) )
                sequenceNumber += 1
                lastTransmittedSample = predictionSample

    def calculateEstPosition(self, lastTransmittedSample, currentTime):
        """Linearly extrapolate the last transmitted position to currentTime."""
        deltaTime = currentTime - lastTransmittedSample.sample.time
        deltaTimeVector = Vector(deltaTime, deltaTime, deltaTime)
        deltaPosition = lastTransmittedSample.velocity * deltaTimeVector
        estimatedPosition = lastTransmittedSample.sample.position + deltaPosition

        return estimatedPosition

    def createPacket(self, predictionSample, sequenceNumber):
        """Wrap a (copied) sample in a Packet stamped with its sequence number."""
        packet = Packet()
        # Shallow copy so later mutation of the source sample does not
        # alter already-queued packets.
        packet.predictionSample = copy( predictionSample )
        packet.sequenceNumber = sequenceNumber
        packet.timeTransmitted = predictionSample.sample.time

        return packet
|
[
"fstakem@gmail.com"
] |
fstakem@gmail.com
|
de83a23717434f077f88e113fd01c3bab8a6fea2
|
4a41223e8c8ab33d83c6f213692c6097bb96540d
|
/eelbrain/_experiment/parc.py
|
9fc979de7443ed56f4acb4c9f91f8447365c11b2
|
[
"BSD-3-Clause"
] |
permissive
|
rbaehr/Eelbrain
|
33ceeee24533581ab3e7569c31e0f6a6c6dfcda1
|
6301dc256e351fdbb58bbe13ab48fde7bfcf192a
|
refs/heads/master
| 2021-07-05T19:19:20.573231
| 2017-10-03T04:35:23
| 2017-10-03T04:35:23
| 104,907,464
| 0
| 0
| null | 2017-09-26T16:03:20
| 2017-09-26T16:03:20
| null |
UTF-8
|
Python
| false
| false
| 2,196
|
py
|
import re
# Parcellation 'kind' keys used in as_dict() payloads and PARC_CLASSES.
COMBINATION_PARC = 'combination'
FS_PARC = 'subject_parc'  # Parcellation that comes with every MRI-subject
FSA_PARC = 'fsaverage_parc'  # Parcellation that comes with fsaverage
SEEDED_PARC = 'seeded'
# BUGFIX: raw string so \w and \d are regex escapes, not (invalid) Python
# string escapes — a plain string raises DeprecationWarning on modern Python.
SEEDED_PARC_RE = re.compile(r'(\w+)-(\d+)$')
class Parcellation(object):
    """Base class describing one brain parcellation."""

    # Whether the parcellation must be generated (True in make-able subclasses).
    make = False
    # Whether labels live on fsaverage and are morphed to each subject.
    morph_from_fsaverage = False

    def __init__(self, name, views=None):
        self.name = name
        self.views = views

    def as_dict(self):
        # Subclasses return their defining parameters as a dict;
        # the base class has nothing to serialize.
        return NotImplemented
class CombinationParcellation(Parcellation):
    "Recombine labels from an existing parcellation"
    make = True

    def __init__(self, name, base, labels, views=None):
        Parcellation.__init__(self, name, views)
        # Name of the parcellation whose labels are recombined.
        self.base = base
        # New label -> combination of base labels.
        self.labels = labels

    def as_dict(self):
        return {'kind': COMBINATION_PARC, 'base': self.base,
                'labels': self.labels}
class EelbrainParcellation(Parcellation):
    "Parcellation that has special make rule"
    make = True

    def __init__(self, name, morph_from_fsaverage, views=None):
        Parcellation.__init__(self, name, views)
        self.morph_from_fsaverage = morph_from_fsaverage

    def as_dict(self):
        # Note: 'eelbrain_parc' is not registered in PARC_CLASSES below.
        return {'kind': 'eelbrain_parc'}
class FreeSurferParcellation(Parcellation):
    "Parcellation that comes with FreeSurfer"

    def as_dict(self):
        return {'kind': FS_PARC}
class FSAverageParcellation(Parcellation):
    "Parcellation that comes with FSAverage"
    # Labels exist on fsaverage only, so they must be morphed per subject.
    morph_from_fsaverage = True

    def as_dict(self):
        return {'kind': FSA_PARC}
class SeededParcellation(Parcellation):
    "Parcellation that is grown from seed vertices"
    make = True

    def __init__(self, name, seeds, mask=None, surface='white', views=None):
        Parcellation.__init__(self, name, views)
        # Seed vertices the labels are grown from.
        self.seeds = seeds
        # Optional mask restricting growth.
        self.mask = mask
        # Surface on which distances are measured (default 'white').
        self.surface = surface

    def as_dict(self):
        return {'kind': SEEDED_PARC, 'seeds': self.seeds,
                'surface': self.surface, 'mask': self.mask}
# Map parcellation 'kind' keys to their classes.  EelbrainParcellation is
# intentionally absent (its 'eelbrain_parc' kind has no entry here).
PARC_CLASSES = {
    COMBINATION_PARC: CombinationParcellation,
    FS_PARC: FreeSurferParcellation,
    FSA_PARC: FSAverageParcellation,
    SEEDED_PARC: SeededParcellation,
}
|
[
"christianmbrodbeck@gmail.com"
] |
christianmbrodbeck@gmail.com
|
69e27cea29749aeb55b8285ea5472f561685d74c
|
fd9ce5afe2d1160e04071e0645eb823e90ed66bb
|
/tredis/__init__.py
|
f38a49bae5e190831d83b61c48569bf1f710f587
|
[] |
no_license
|
haoxuu/tredis
|
b5efb1728ba94e7799547288623241706c9bd9f9
|
506c9a8e3d63b4a654e022f5d8127c8de4c82faf
|
refs/heads/master
| 2021-01-18T16:04:36.292451
| 2017-04-13T12:58:48
| 2017-04-13T12:58:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 234
|
py
|
"""
TRedis
======
An asynchronous Redis client for Tornado
"""
from tredis.client import Client, RedisClient
from tredis.exceptions import *
from tredis.strings import BITOP_AND, BITOP_OR, BITOP_XOR, BITOP_NOT
__version__ = '0.7.2'
|
[
"gavinr@aweber.com"
] |
gavinr@aweber.com
|
066775eea5a9fc755660f1f1b211a6359a9d39b9
|
eb82022c0cfc7c8747661cff9624ad2099fa1c3f
|
/accounting_report_xls/wizard/requisition_report.py
|
b8a86af9ecb7b11cf815777cb5903909435929ab
|
[] |
no_license
|
dadysuarsa/Odoo
|
8d026a066c390cc8f72805d2672212e61260c1cb
|
c9becd0c192fa239520ad3e1a11d81f70832eddf
|
refs/heads/master
| 2023-03-11T06:02:06.011575
| 2021-02-26T02:17:37
| 2021-02-26T02:17:37
| 276,346,540
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,509
|
py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
#
# Please note that these reports are not multi-currency !!!
#
from odoo import api, fields, models, tools
class RequisitionReport(models.Model):
    """Read-only reporting model backed by the ``requisition_report`` SQL view.

    Joins purchase-requisition lines to purchase orders and stock moves so
    requested (PR), ordered (PO) and received (STPB) quantities can be
    compared side by side.
    """

    _name = "requisition.report"
    _description = "Requisition Reports"
    _auto = False  # no table is created; the SQL view below backs the model
    _order = 'pr_date desc'

    pr_date = fields.Datetime('Requisition Date', readonly=True, help="Date on which this document has been created")
    product_id = fields.Many2one('product.product', 'Product', readonly=True)
    requisition_id = fields.Many2one('po.request', 'Requisition', readonly=True)
    purchase_id = fields.Many2one('purchase.order', 'Purchase', readonly=True)
    partner_id = fields.Many2one('res.partner', 'Vendor', readonly=True)
    uom_id = fields.Many2one('product.uom', 'Unit of Measure', required=True)
    pr_qty = fields.Float('PR Qty', readonly=True)
    po_qty = fields.Float('PO Qty', readonly=True)
    sm_qty = fields.Float('STPB Qty', readonly=True)

    @api.model_cr
    def init(self):
        """(Re)create the backing SQL view on module install/upgrade."""
        tools.drop_view_if_exists(self._cr, 'requisition_report')
        # NOTE(review): pol2 joins on product_id alone (not on the order),
        # which can match purchase lines from unrelated orders, and the
        # ``pol`` join is never referenced — verify the view's intent.
        self._cr.execute("""
            create view requisition_report as (
                select
                    min(prl.id) as id,
                    prl.order_id as requisition_id,
                    pr.date_trans as pr_date,
                    pr.divisi as divisi,
                    prl.product_id as product_id,
                    prl.product_qty as pr_qty,
                    po.id as purchase_id,
                    pol2.product_qty as po_qty,
                    pol2.price_unit as po_price,
                    sm.product_uom_qty as sm_qty
                from po_request_line prl
                    left join po_request pr on (pr.id = prl.order_id)
                    left join purchase_order po on (po.po_request_id = prl.id)
                    left join purchase_order_line pol on (pol.order_id = po.id)
                    left join purchase_order_line pol2 on (pol2.product_id = prl.product_id)
                    left join stock_move sm on (sm.purchase_line_id = pol2.id)
                group by
                    prl.order_id,
                    pr.date_trans,
                    pr.divisi,
                    prl.product_id,
                    prl.product_qty,
                    po.id,
                    pol2.product_qty,
                    sm.product_uom_qty,
                    pol2.price_unit
            )
        """)
|
[
"dads02_zetti@yahoo.com"
] |
dads02_zetti@yahoo.com
|
2571969d76af0f76eb9cf4d6518aa2ee3cb716ed
|
4c207b2dd10db5598ccf5d04ccbf5b272bb1a3ae
|
/app/services/healthcheck.py
|
d01f5b6cb3836e30f9e6bcdb524a2b45be854c81
|
[
"MIT"
] |
permissive
|
sina-e/bot
|
44ede51f498bae10bae59c705dbe42e2d78921cc
|
5d892a24724b3cd9b50928f1f5753e8b38d537c4
|
refs/heads/master
| 2022-03-22T12:32:30.890950
| 2019-11-10T09:53:10
| 2019-11-10T09:53:10
| 244,046,558
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,458
|
py
|
from aiogram import Dispatcher
from aiogram.utils.executor import Executor
from aiohttp_healthcheck import HealthCheck
from loguru import logger
from app import config
health = HealthCheck()
def setup(executor: Executor):
    """Register the healthcheck startup hook (webhook mode only)."""
    executor.on_startup(on_startup, webhook=True, polling=False)
async def on_startup(dispatcher: Dispatcher):
    """Wire the /healthcheck endpoint and its probes into the aiohttp app."""
    from app.utils.executor import runner

    logger.info("Setup healthcheck")
    # Probes run (and are reported) in registration order.
    health.add_check(check_redis)
    health.add_check(check_postgres)
    health.add_check(check_webhook)
    runner.web_app.router.add_get("/healthcheck", health)
async def check_redis():
    """Healthcheck probe: verify Redis connectivity and report its version."""
    from app.misc import storage

    try:
        redis = await storage.redis()
        info = await redis.info()
    except Exception as error:
        # Any connection/command failure marks the probe unhealthy.
        return False, str(error)
    else:
        return True, f"Redis {info['server']['redis_version']}"
async def check_postgres():
    """Healthcheck probe: verify Postgres connectivity via `select version()`."""
    from app.models.db import db

    try:
        version = await db.scalar("select version();")
    except Exception as error:
        # Any connection/query failure marks the probe unhealthy.
        return False, str(error)
    else:
        return True, version
async def check_webhook():
    """Healthcheck probe: verify the bot's webhook matches the configured URL."""
    from app.misc import bot

    webhook = await bot.get_webhook_info()
    if webhook.url and webhook.url == config.WEBHOOK_URL:
        return True, f"Webhook configured. Pending updates count {webhook.pending_update_count}"
    else:
        # loguru deferred formatting: {webhook} is substituted from the kwarg.
        logger.error("Configured wrong webhook URL {webhook}", webhook=webhook.url)
        return False, "Configured invalid webhook URL"
|
[
"jroot.junior@gmail.com"
] |
jroot.junior@gmail.com
|
64aaae03c3ab90f3f0225cbf42dc6b88da545cbb
|
8bdce915174678a90a6be811ea91b50930b9d26a
|
/coding/Algorithm_exercise/Leetcode/0051-N-Queens.py
|
feda3efdc7bdf33d817beb02ebc76b87d12dbe3e
|
[] |
no_license
|
CharlesBird/Resources
|
daefffef8fb3735e656cd0a3bf400d5e2ff85cc0
|
517ac7b7992a686fa5370b6fda8b62663735853c
|
refs/heads/master
| 2022-12-15T02:54:56.530940
| 2020-02-29T14:33:43
| 2020-02-29T14:33:43
| 109,668,108
| 1
| 1
| null | 2022-12-08T05:04:25
| 2017-11-06T08:34:30
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,247
|
py
|
"""
The n-queens puzzle is the problem of placing n queens on an n×n chessboard such that no two queens attack each other.
Given an integer n, return all distinct solutions to the n-queens puzzle.
Each solution contains a distinct board configuration of the n-queens' placement, where 'Q' and '.' both indicate a queen and an empty space respectively.
Example:
Input: 4
Output: [
[".Q..", // Solution 1
"...Q",
"Q...",
"..Q."],
["..Q.", // Solution 2
"Q...",
"...Q",
".Q.."]
]
Explanation: There exist two distinct solutions to the 4-queens puzzle as shown above.
"""
class Solution:
    def solveNQueens(self, n: int) -> 'List[List[str]]':
        """Return every distinct n-queens placement as a list of board rows.

        Each board is a list of n strings of length n where 'Q' marks the
        queen in that row and '.' marks an empty square.
        """
        boards = []
        columns = [-1] * n  # columns[r] = column of the queen placed on row r

        def safe(row):
            # The queen on `row` must share no column or diagonal with any
            # queen already placed on an earlier row.
            return all(
                columns[r] != columns[row]
                and abs(columns[r] - columns[row]) != row - r
                for r in range(row)
            )

        def place(row, rows_so_far):
            if row == n:
                boards.append(rows_so_far)
                return
            for col in range(n):
                columns[row] = col
                if safe(row):
                    place(row + 1, rows_so_far + ["." * col + "Q" + "." * (n - col - 1)])

        place(0, [])
        return boards
|
[
"1016784928@qq.com"
] |
1016784928@qq.com
|
02d805ec2da1c542813f48e2d36487ea8c3282fd
|
54a18823b019112c53fc62766da10cbe5f879615
|
/pkg/codegen/internal/test/testdata/output-funcs/py_tests/funcWithConstInput.py
|
d3ba8492d878603b3f22f3f1d551e00dbb14a2fe
|
[
"Apache-2.0"
] |
permissive
|
TrendingTechnology/pulumi
|
01904487b2560244c3588004b562571f85cf2e51
|
1a4f36e97b15e6a25d92a8ebbabd5238c6c5be54
|
refs/heads/master
| 2023-07-27T19:38:17.162679
| 2021-09-16T04:12:49
| 2021-09-16T04:12:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 832
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by . ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'func_with_const_input',
]
def func_with_const_input(plain_input: Optional[str] = None,
                          opts: Optional[pulumi.InvokeOptions] = None):
    """
    Codegen demo with const inputs
    """
    __args__ = dict()
    __args__['plainInput'] = plain_input
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # NOTE(review): the invoke result is bound but never returned, so callers
    # always receive None. This file is generated -- confirm against the
    # codegen template before changing.
    __ret__ = pulumi.runtime.invoke('madeup-package:codegentest:funcWithConstInput', __args__, opts=opts).value
|
[
"noreply@github.com"
] |
TrendingTechnology.noreply@github.com
|
be9142c193081f1a1035d6f272c22cc11c1b0e9d
|
2bc8f66fd34ba1b93de82c67954a10f8b300b07e
|
/general_backbone/layers/global_context.py
|
0c80d60097c862eb96f2e9085a77dbbde1fa354a
|
[] |
no_license
|
DoDucNhan/general_backbone
|
7dabffed5a74e622ba23bf275358ca2d09faddc1
|
686c92ab811221d594816207d86a0b97c9b4bc73
|
refs/heads/main
| 2023-08-31T14:59:23.873555
| 2021-10-23T06:34:14
| 2021-10-23T06:34:14
| 420,419,141
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,449
|
py
|
# Copyright (c) general_backbone. All rights reserved.
""" Global Context Attention Block
Paper: `GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond`
- https://arxiv.org/abs/1904.11492
Official code consulted as reference: https://github.com/xvjiarui/GCNet
"""
from torch import nn as nn
import torch.nn.functional as F
from .create_act import create_act_layer, get_act_layer
from .helpers import make_divisible
from .mlp import ConvMlp
from .norm import LayerNorm2d
class GlobalContext(nn.Module):
    """Global Context (GC) attention block.

    Pools a single per-image context vector -- either attention-weighted
    (1x1 conv + softmax over the H*W positions) or plain global average
    pooling -- then fuses it back into the feature map by channel-wise
    gating (multiplicative) and/or addition, each through its own ConvMlp
    bottleneck.
    """

    def __init__(self, channels, use_attn=True, fuse_add=False, fuse_scale=True, init_last_zero=False,
                 rd_ratio=1./8, rd_channels=None, rd_divisor=1, act_layer=nn.ReLU, gate_layer='sigmoid'):
        super(GlobalContext, self).__init__()
        act_layer = get_act_layer(act_layer)

        # 1x1 conv producing one spatial attention map; None -> average pool.
        self.conv_attn = nn.Conv2d(channels, 1, kernel_size=1, bias=True) if use_attn else None

        if rd_channels is None:
            # Bottleneck width: channels * rd_ratio, rounded to rd_divisor.
            rd_channels = make_divisible(channels * rd_ratio, rd_divisor, round_limit=0.)
        if fuse_add:
            self.mlp_add = ConvMlp(channels, rd_channels, act_layer=act_layer, norm_layer=LayerNorm2d)
        else:
            self.mlp_add = None
        if fuse_scale:
            self.mlp_scale = ConvMlp(channels, rd_channels, act_layer=act_layer, norm_layer=LayerNorm2d)
        else:
            self.mlp_scale = None

        self.gate = create_act_layer(gate_layer)
        # NOTE(review): init_last_zero is stored but reset_parameters() zeroes
        # mlp_add.fc2 unconditionally -- confirm the flag's intended effect.
        self.init_last_zero = init_last_zero
        self.reset_parameters()

    def reset_parameters(self):
        if self.conv_attn is not None:
            nn.init.kaiming_normal_(self.conv_attn.weight, mode='fan_in', nonlinearity='relu')
        if self.mlp_add is not None:
            # Zero-init the last layer so additive fusion starts as identity.
            nn.init.zeros_(self.mlp_add.fc2.weight)

    def forward(self, x):
        B, C, H, W = x.shape

        if self.conv_attn is not None:
            # Attention pooling: softmax over all spatial positions, then a
            # weighted sum of features -> one context vector per image.
            attn = self.conv_attn(x).reshape(B, 1, H * W)  # (B, 1, H * W)
            attn = F.softmax(attn, dim=-1).unsqueeze(3)  # (B, 1, H * W, 1)
            context = x.reshape(B, C, H * W).unsqueeze(1) @ attn
            context = context.view(B, C, 1, 1)
        else:
            # Fallback: plain global average pooling.
            context = x.mean(dim=(2, 3), keepdim=True)

        if self.mlp_scale is not None:
            # Multiplicative fusion: channel-wise gate from the context.
            mlp_x = self.mlp_scale(context)
            x = x * self.gate(mlp_x)
        if self.mlp_add is not None:
            # Additive fusion: broadcast the transformed context over H, W.
            mlp_x = self.mlp_add(context)
            x = x + mlp_x

        return x
|
[
"phamdinhkhanh.tkt53.neu@gmail.com"
] |
phamdinhkhanh.tkt53.neu@gmail.com
|
162e921c3a9621f4f8e877078a58211b9278b0a5
|
f6bba50fccc6fb0dae2f046193434cfb4b9d32d5
|
/121/A.py
|
9289baf98b8a4ddbe3c1fee1117da72c3e424709
|
[] |
no_license
|
seven320/AtCoder
|
4c26723d20004fe46ce118b882faabc05066841c
|
45e301e330e817f1ace4be4088d3babe18588170
|
refs/heads/master
| 2021-11-22T22:57:32.290504
| 2021-10-24T09:15:12
| 2021-10-24T09:15:12
| 162,827,473
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 177
|
py
|
# encoding:utf-8
import copy
import numpy as np
import random
# White cells left after painting h full rows and H full columns black:
# inclusion-exclusion gives H*W - h*W - H*w + h*w == (H - h) * (W - w).
H, W = map(int, input().split())
h, w = map(int, input().split())
print((H - h) * (W - w))
|
[
"yosyuaomenw@yahoo.co.jp"
] |
yosyuaomenw@yahoo.co.jp
|
d39063fe831b550fc02fc674df6797f6774dc2af
|
2b4af8810511b5f1ed47fdf5662753b9b4af76b8
|
/custom/enikshay/private_sector_datamigration/migrations/0003_auto_20170513_1805.py
|
fbfb08937806346ac17af7cf5bbc2706612319a5
|
[] |
no_license
|
DeckOfPandas/commcare-wddcp
|
55bde89197ec5bc4a4b53d327ec6a811aec0d752
|
810d2e09d3890e3d0d70178745da5924c1db767b
|
refs/heads/dimagi
| 2020-12-02T19:19:53.992796
| 2017-06-30T15:18:16
| 2017-07-05T12:23:26
| 96,325,707
| 1
| 0
| null | 2017-07-05T14:02:49
| 2017-07-05T14:02:49
| null |
UTF-8
|
Python
| false
| false
| 477
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-13 18:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: redefine Adherence.episodeId as a
    # plain CharField capped at 8 characters.

    dependencies = [
        ('private_sector_datamigration', '0002_auto_20170512_1919'),
    ]

    operations = [
        migrations.AlterField(
            model_name='adherence',
            name='episodeId',
            field=models.CharField(max_length=8),
        ),
    ]
|
[
"npellegrino@dimagi.com"
] |
npellegrino@dimagi.com
|
a8900822d7304576a7b117aea8738776f84b8cfc
|
ae7c6bf729fc4527cfbb7b29bd4dee77b01584c4
|
/chicago/zip_codes.py
|
13725efc7dab3c1d18c9c420fa3d06d984a02a04
|
[] |
no_license
|
ghing/python-chicago
|
61fd2639160bf1092d7bcfbc1101dc8428764042
|
4fefb221bca83d55e6c6eb7bae4b90149acf7c57
|
refs/heads/master
| 2021-01-13T04:33:02.289631
| 2017-01-20T22:13:53
| 2017-01-20T22:13:53
| 79,591,829
| 0
| 0
| null | 2017-01-20T19:44:17
| 2017-01-20T19:44:17
| null |
UTF-8
|
Python
| false
| false
| 998
|
py
|
import os.path
import six
from .base import Model, Collection, DATA_DIRECTORY
# Bundled GeoJSON file containing the Chicago ZIP code boundaries.
ZIP_CODE_GEOJSON_PATH = os.path.join(DATA_DIRECTORY, 'chicago_zip_codes.geojson')
class ZipCode(Model):
    """A single Chicago ZIP code record (one field: the ZIP string)."""

    fields = [
        'zip',
    ]

    def __str__(self):
        return self.zip

    def __repr__(self):
        return "ZipCode(zip='{z.zip}')".format(z=self)
class ZipCodeCollection(Collection):
    """Collection of ZipCode models with a secondary lookup index by ZIP."""

    model = ZipCode

    def __init__(self):
        # Index: ZIP string -> ZipCode instance; populated via add_item().
        self._by_zip = {}
        super(ZipCodeCollection, self).__init__()

    def add_item(self, item):
        # Keep the base collection and the ZIP index in sync.
        super(ZipCodeCollection, self).add_item(item)
        self._by_zip[item.zip] = item

    def get_by_zip(self, zip_code):
        """Return the ZipCode for `zip_code` (int or str); raises KeyError."""
        return self._by_zip[six.text_type(zip_code)]

    def default_sort(self):
        """Sort items by ZIP string ascending; returns self for chaining."""
        self._items = sorted(self._items, key=lambda p: p.zip)
        return self

    def is_chicago(self, zip_code):
        """Return True when `zip_code` (int or str) is a Chicago ZIP."""
        return six.text_type(zip_code) in self._by_zip
# Module-level singleton: every Chicago ZIP code, loaded from the bundled GeoJSON.
ZIP_CODES = ZipCodeCollection().from_geojson(ZIP_CODE_GEOJSON_PATH)
|
[
"geoffhing@gmail.com"
] |
geoffhing@gmail.com
|
3192468b3d6d4a4e049545f1a74508b86d451062
|
aa0270b351402e421631ebc8b51e528448302fab
|
/sdk/paloaltonetworks/azure-mgmt-paloaltonetworksngfw/generated_samples/fqdn_list_local_rulestack_create_or_update_maximum_set_gen.py
|
755d0412200bb3a98af329c57e98ac8474ced4c4
|
[
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
fangchen0601/azure-sdk-for-python
|
d04a22109d0ff8ff209c82e4154b7169b6cb2e53
|
c2e11d6682e368b2f062e714490d2de42e1fed36
|
refs/heads/master
| 2023-05-11T16:53:26.317418
| 2023-05-04T20:02:16
| 2023-05-04T20:02:16
| 300,440,803
| 0
| 0
|
MIT
| 2020-10-16T18:45:29
| 2020-10-01T22:27:56
| null |
UTF-8
|
Python
| false
| false
| 2,048
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.paloaltonetworks import PaloAltoNetworksNgfwMgmtClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-paloaltonetworks
# USAGE
python fqdn_list_local_rulestack_create_or_update_maximum_set_gen.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    # Generated sample: credentials come from the AZURE_CLIENT_ID /
    # AZURE_TENANT_ID / AZURE_CLIENT_SECRET environment variables
    # (see the module docstring).
    client = PaloAltoNetworksNgfwMgmtClient(
        credential=DefaultAzureCredential(),
        subscription_id="2bf4a339-294d-4c25-b0b2-ef649e9f5c27",
    )

    # Long-running operation: begin_create_or_update returns a poller;
    # .result() blocks until the FQDN list is provisioned.
    response = client.fqdn_list_local_rulestack.begin_create_or_update(
        resource_group_name="rgopenapi",
        local_rulestack_name="lrs1",
        name="armid1",
        resource={
            "properties": {
                "auditComment": "string",
                "description": "string",
                "etag": "aaaaaaaaaaaaaaaaaa",
                "fqdnList": ["string1", "string2"],
                "provisioningState": "Accepted",
            }
        },
    ).result()
    print(response)
# x-ms-original-file: specification/paloaltonetworks/resource-manager/PaloAltoNetworks.Cloudngfw/preview/2022-08-29-preview/examples/FqdnListLocalRulestack_CreateOrUpdate_MaximumSet_Gen.json
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
fangchen0601.noreply@github.com
|
ffc913ac274984b687fba1e23716572ced7bbd7b
|
fc861233f2ae31f3cdf312e5ca8d68029e811f69
|
/arrays/SuperWashingMachine_HARD.py
|
d07ff21678abb6b5f9300ede6cf7a6089d3f2b78
|
[] |
no_license
|
QuentinDuval/PythonExperiments
|
20fc54d98ff6e4131975809c32cf8844ff2a8ecb
|
3ffcfee5cedf421d5de6d0dec4ba53b0eecbbff8
|
refs/heads/master
| 2021-07-26T21:24:18.324350
| 2020-05-02T08:11:58
| 2020-05-02T08:11:58
| 163,210,028
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,749
|
py
|
"""
https://leetcode.com/problems/super-washing-machines/
You have n super washing machines on a line. Initially, each washing machine has some dresses or is empty.
For each move, you could choose any m (1 ≤ m ≤ n) washing machines, and pass one dress of each washing machine
to one of its adjacent washing machines at the same time .
Given an integer array representing the number of dresses in each washing machine from left to right on the line,
you should find the minimum number of moves to make all the washing machines have the same number of dresses.
If it is not possible to do it, return -1.
"""
from typing import List
class Solution:
    def findMinMoves(self, machines: List[int]) -> int:
        """Minimum number of rounds to equalise all washing machines.

        Per round, any subset of machines may each pass one dress to an
        adjacent machine. Returns -1 when the total is not divisible by
        the number of machines.

        The answer is the max of two lower bounds, both achievable:
          * a machine holding `target + s` dresses needs at least s rounds,
            since it sheds at most one dress per round;
          * the net surplus that must cross any boundary (a prefix sum of
            per-machine surpluses) flows at most one dress per round.
        """
        total = sum(machines)
        count = len(machines)
        target, remainder = divmod(total, count)
        if remainder:
            return -1

        # Lower bound 1: largest single-machine surplus (never negative).
        answer = max(max(load - target, 0) for load in machines)

        # Lower bound 2: largest absolute net flow across any boundary.
        flow = 0
        for load in machines:
            flow += load - target
            answer = max(answer, abs(flow))

        return answer
|
[
"senmenty@gmail.com"
] |
senmenty@gmail.com
|
ffcaeac98202cabcc7de9239e976408589ad2a24
|
b7bf93fc21ba8eec04d6f24e79689dd7bb989770
|
/test/functional/p2p_node_network_limited.py
|
84d7213b4cad926cabd2f4fcbe678d02664e31d6
|
[
"MIT"
] |
permissive
|
onuratakan/bitracoin
|
cd3ab809af1ce9e245b2fecf59e1421e1e427f90
|
1d73e0b6352d29066706ac4033dc7b4711246883
|
refs/heads/main
| 2023-04-09T03:10:58.075426
| 2021-04-16T22:06:06
| 2021-04-16T22:06:06
| 358,728,118
| 0
| 0
|
MIT
| 2021-04-16T22:06:07
| 2021-04-16T21:54:02
| null |
UTF-8
|
Python
| false
| false
| 5,030
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests NODE_NETWORK_LIMITED.
Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correctly
and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
from test_framework.messages import CInv, msg_getdata, msg_verack, NODE_BLOOM, NODE_NETWORK_LIMITED, NODE_WITNESS
from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, disconnect_nodes, connect_nodes_bi, sync_blocks, wait_until
class P2PIgnoreInv(P2PInterface):
    """P2P stub that drops inv announcements and records the service bits
    from the first address message it receives."""

    firstAddrnServices = 0

    def on_inv(self, message):
        # Deliberately ignore invs; the test issues its own getdata requests.
        pass

    def on_addr(self, message):
        # Remember the advertised services of the first relayed address.
        self.firstAddrnServices = message.addrs[0].nServices

    def wait_for_addr(self, timeout=5):
        def received_addr():
            return self.last_message.get("addr")
        wait_until(received_addr, timeout=timeout, lock=mininode_lock)

    def send_getdata_for_block(self, blockhash):
        request = msg_getdata()
        request.inv.append(CInv(2, int(blockhash, 16)))
        self.send_message(request)
class NodeNetworkLimitedTest(BitcoinTestFramework):
    """Functional test for a pruned node's NODE_NETWORK_LIMITED behaviour.

    Node 0 is pruned (-prune=550), node 1 is a normal full node, and node 2
    starts unsynced to exercise IBD against a limited peer.
    """

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 3
        # -addrmantest lets the pruned node relay its own local address.
        self.extra_args = [['-prune=550', '-addrmantest'], [], []]

    def disconnect_all(self):
        # Tear down every pairwise connection between the three nodes.
        disconnect_nodes(self.nodes[0], 1)
        disconnect_nodes(self.nodes[1], 0)
        disconnect_nodes(self.nodes[2], 1)
        disconnect_nodes(self.nodes[2], 0)
        disconnect_nodes(self.nodes[0], 2)
        disconnect_nodes(self.nodes[1], 2)

    def setup_network(self):
        # Start the nodes unconnected; run_test wires the topology itself.
        self.add_nodes(self.num_nodes, self.extra_args)
        self.start_nodes()

    def run_test(self):
        node = self.nodes[0].add_p2p_connection(P2PIgnoreInv())

        expected_services = NODE_BLOOM | NODE_WITNESS | NODE_NETWORK_LIMITED

        self.log.info("Check that node has signalled expected services.")
        assert_equal(node.nServices, expected_services)

        self.log.info("Check that the localservices is as expected.")
        assert_equal(int(self.nodes[0].getnetworkinfo()['localservices'], 16), expected_services)

        self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
        connect_nodes_bi(self.nodes, 0, 1)
        blocks = self.nodes[1].generatetoaddress(292, self.nodes[1].get_deterministic_priv_key().address)
        sync_blocks([self.nodes[0], self.nodes[1]])

        self.log.info("Make sure we can max retrieve block at tip-288.")
        node.send_getdata_for_block(blocks[1])  # last block in valid range
        node.wait_for_block(int(blocks[1], 16), timeout=3)

        self.log.info("Requesting block at height 2 (tip-289) must fail (ignored).")
        node.send_getdata_for_block(blocks[0])  # first block outside of the 288+2 limit
        node.wait_for_disconnect(5)

        self.log.info("Check local address relay, do a fresh connection.")
        self.nodes[0].disconnect_p2ps()
        node1 = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
        node1.send_message(msg_verack())

        node1.wait_for_addr()
        #must relay address with NODE_NETWORK_LIMITED
        assert_equal(node1.firstAddrnServices, 1036)

        self.nodes[0].disconnect_p2ps()
        node1.wait_for_disconnect()

        # connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
        # because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible
        connect_nodes_bi(self.nodes, 0, 2)
        try:
            # Expected to time out: a limited peer cannot serve deep history.
            sync_blocks([self.nodes[0], self.nodes[2]], timeout=5)
        except:
            pass
        # node2 must remain at height 0
        assert_equal(self.nodes[2].getblockheader(self.nodes[2].getbestblockhash())['height'], 0)

        # now connect also to node 1 (non pruned)
        connect_nodes_bi(self.nodes, 1, 2)

        # sync must be possible
        sync_blocks(self.nodes)

        # disconnect all peers
        self.disconnect_all()

        # mine 10 blocks on node 0 (pruned node)
        self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)

        # connect node1 (non pruned) with node0 (pruned) and check if the can sync
        connect_nodes_bi(self.nodes, 0, 1)

        # sync must be possible, node 1 is no longer in IBD and should therefore connect to node 0 (NODE_NETWORK_LIMITED)
        sync_blocks([self.nodes[0], self.nodes[1]])
if __name__ == '__main__':
NodeNetworkLimitedTest().main()
|
[
"binosaurcoin@gmail.com"
] |
binosaurcoin@gmail.com
|
84f0f8092f81b651d958dbf77cda36576cc605bd
|
605c10db2f950a506af60d57a2074f97ebcf89ab
|
/code/PROJECT/data_collection/tools/tool_clip_text.py
|
8e05a495532ed234d8027f39ebae50d4b03c5f0e
|
[] |
no_license
|
MulongXie/Research-ReverselyGeneratingWebCode
|
928f90d6b4f80ebff40a9a3a48f8b564277a0987
|
2c1598a765166f30786b0e6a22c485358ca2e98d
|
refs/heads/master
| 2020-05-17T18:14:02.241209
| 2020-04-10T00:19:16
| 2020-04-10T00:19:16
| 183,857,077
| 0
| 3
| null | 2020-02-03T04:31:34
| 2019-04-28T04:51:24
|
Python
|
UTF-8
|
Python
| false
| false
| 2,437
|
py
|
import pytesseract as pyt
import cv2
import numpy as np
import glob
from os.path import join as pjoin
# Input: cleaned page screenshots.  Output: clipped word images.
# NOTE(review): hard-coded Windows paths -- adjust per machine.
ROOT_CLEAN_IMG = 'E:\Mulong\Datasets\dataset_webpage\page10000\ip_img_clean'
ROOT_OUTPUT = "E:/Mulong/Datasets/dataset_webpage/elements/text"
def clipping(img, corners, output_root=ROOT_OUTPUT, pad=False, show_clip=False, write_clip=True):
    """Cut each rectangular region out of `img` and write/show the clips.

    :param img: source image (array as loaded by cv2)
    :param corners: iterable of ((col_min, row_min), (col_max, row_max)) boxes
    :param output_root: directory receiving '<index>.png' files
    :param pad: surround each clip with a white border (~10% per side)
    :param show_clip: display each clip in a cv2 window (blocks on keypress)
    :param write_clip: write each clip to disk
    """
    def padding():
        # Build a white canvas ~10% taller and wider and paste the current
        # clip centred on it; reads `clip` from the enclosing loop iteration.
        height = np.shape(clip)[0]
        width = np.shape(clip)[1]
        pad_height = int(height / 10)
        pad_wid = int(width / 10)
        pad_img = np.full(((height + pad_height), (width + pad_wid), 3), 255, dtype=np.uint8)
        pad_img[int(pad_height / 2):(int(pad_height / 2) + height), int(pad_wid / 2):(int(pad_wid / 2) + width)] = clip
        return pad_img

    for i, corner in enumerate(corners):
        (top_left, bottom_right) = corner
        (col_min, row_min) = top_left
        (col_max, row_max) = bottom_right
        clip = img[row_min:row_max, col_min:col_max]
        if pad:
            clip = padding()
        if write_clip:
            cv2.imwrite(pjoin(output_root, str(i) + '.png'), clip)
        if show_clip:
            cv2.imshow('clip', clip)
            cv2.waitKey(0)
def text_detection(img_clean, show=False):
    """Run Tesseract OCR over `img_clean` and collect word bounding boxes.

    :param img_clean: page image (array as loaded by cv2)
    :param show: draw and display the detected boxes for inspection
    :return: list of ((col_min, row_min), (col_max, row_max)) corners for
             words passing the size filters, or None when OCR fails.
    """
    try:
        # TSV output, one row per detected element; the header row is first.
        data = pyt.image_to_data(img_clean).split('\n')
        broad = img_clean.copy()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C etc. still propagate;
        # callers treat None as "no text found".
        return None

    corners_word = []
    for line in data[1:]:
        d = line.split()
        # Skip blank rows (trailing newline) and rows whose last field is
        # '-1' -- those carry no recognised text.
        if not d or d[-1] == '-1':
            continue
        # Drop dash-only tokens and implausible word sizes:
        # height must be in (5, 40) px and width in (5, 100) px.
        if d[-1] != '-' and d[-1] != '—' and 5 < int(d[-3]) < 40 and 5 < int(d[-4]) < 100:
            t_l = (int(d[-6]), int(d[-5]))
            b_r = (int(d[-6]) + int(d[-4]), int(d[-5]) + int(d[-3]))
            corners_word.append((t_l, b_r))
            cv2.rectangle(broad, t_l, b_r, (0, 0, 255), 1)
    if show:
        cv2.imshow('a', broad)
        cv2.waitKey()
    return corners_word
def read_img():
    """Iterate the cleaned page screenshots and clip detected words from each.

    Only pages whose numeric filename falls within [start_index, end_index]
    are processed; paths are sorted numerically by that filename.
    """
    img_paths = glob.glob(pjoin(ROOT_CLEAN_IMG, '*.png'))
    # Filenames are '<number>.png'; sort by the number, not lexically.
    # NOTE(review): splitting on '\\' assumes Windows-style path separators.
    img_paths.sort(key=lambda x: int(x.split('\\')[-1][:-4]))

    # Resume window: skip everything before start_index, stop after end_index.
    start_index = 5647
    end_index = 20000
    for img_path in img_paths:
        index = img_path.split('\\')[-1][:-4]
        if int(index) < start_index:
            continue
        if int(index) > end_index:
            break
        print(img_path)
        img = cv2.imread(img_path)
        corners_word = text_detection(img)
        if corners_word is not None:
            clipping(img, corners_word)


# Kick off the batch immediately when the script runs.
read_img()
|
[
"dsh15325@163.com"
] |
dsh15325@163.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.