blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
123d610b04b8cb8328ed1b6faa51e01b3cf9a9f5 | f62fd455e593a7ad203a5c268e23129473d968b6 | /python-cloudkittyclient-1.0.0/cloudkittyclient/common/base.py | 6212817e7747c35c8debdac13765dfa3e5a4de3b | [
"Apache-2.0"
] | permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 5,032 | py | # Copyright 2012 OpenStack Foundation
# Copyright 2015 Objectif Libre
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
import copy
from six.moves.urllib import parse
from cloudkittyclient.apiclient import base
from cloudkittyclient import exc
from cloudkittyclient.i18n import _
def getid(obj):
    """Return ``obj.id`` when present, otherwise *obj* itself.

    Abstracts the common pattern of allowing either a resource object or
    its bare ID (UUID) as a parameter when dealing with relationships.
    """
    return getattr(obj, 'id', obj)
class Manager(object):
    """Managers interact with a particular type of API.

    It works with samples, meters, alarms, etc. and provide CRUD operations
    for them.
    """

    resource_class = None

    def __init__(self, api):
        self.api = api

    @property
    def client(self):
        """Compatible with latest oslo-incubator.apiclient code."""
        return self.api

    def _create(self, url, body):
        """POST *body* to *url* and wrap any returned document."""
        data = self.api.post(url, json=body).json()
        if data:
            return self.resource_class(self, data)

    def _list(self, url, response_key=None, obj_class=None, body=None,
              expect_single=False):
        """GET *url* and return a list of resource objects.

        When *response_key* is given, the payload is looked up under that
        key; a missing key yields an empty list.  *expect_single* treats
        the payload as one item rather than a sequence.
        """
        resp = self.api.get(url)
        if not resp.content:
            raise exc.HTTPNotFound
        body = resp.json()
        if obj_class is None:
            obj_class = self.resource_class
        if response_key is not None:
            if response_key not in body:
                return []
            data = body[response_key]
        else:
            data = body
        items = [data] if expect_single else data
        # Skip falsy entries so only real documents become resources.
        return [obj_class(self, entry, loaded=True) for entry in items if entry]

    def _update(self, url, item, response_key=None):
        """PUT *item*'s changed fields to *url*; no-op when nothing changed."""
        changes = item.dirty_fields
        if not changes:
            return item
        data = self.api.put(url, json=changes).json()
        # PUT requests may not return a item
        if data:
            return self.resource_class(self, data)

    def _delete(self, url):
        self.api.delete(url)
class CrudManager(base.CrudManager):
    """A CrudManager that automatically gets its base URL."""
    base_url = None

    def build_url(self, base_url=None, **kwargs):
        # Fall back to the class-level base_url when none is supplied.
        base_url = base_url or self.base_url
        return super(CrudManager, self).build_url(base_url, **kwargs)

    def get(self, **kwargs):
        """Fetch a single item identified by the filtered kwargs."""
        kwargs = self._filter_kwargs(kwargs)
        return self._get(
            self.build_url(**kwargs))

    def create(self, **kwargs):
        """POST the filtered kwargs as a new item."""
        kwargs = self._filter_kwargs(kwargs)
        return self._post(
            self.build_url(**kwargs), kwargs)

    def update(self, **kwargs):
        """PUT the filtered kwargs to the item's URL."""
        kwargs = self._filter_kwargs(kwargs)
        params = kwargs.copy()
        return self._put(
            self.build_url(**kwargs), params)

    def findall(self, base_url=None, **kwargs):
        """Find multiple items with attributes matching ``**kwargs``.

        :param base_url: if provided, the generated URL will be appended to it
        :raises exc.HTTPNotFound: when no item matches the given filters
        """
        kwargs = self._filter_kwargs(kwargs)
        # Filters are passed both as path components (build_url) and as a
        # query string.
        rl = self._list(
            '%(base_url)s%(query)s' % {
                'base_url': self.build_url(base_url=base_url, **kwargs),
                'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
            },
            self.collection_key)
        num = len(rl)
        if num == 0:
            msg = _("No %(name)s matching %(args)s.") % {
                'name': self.resource_class.__name__,
                'args': kwargs
            }
            raise exc.HTTPNotFound(msg)
        return rl
class Resource(base.Resource):
    """A resource represents a particular instance of an object.

    Resource might be tenant, user, etc.
    This is pretty much just a bag for attributes.

    :param manager: Manager object
    :param info: dictionary representing resource attributes
    :param loaded: prevent lazy-loading if set to True
    """

    key = None

    def to_dict(self):
        """Return a deep copy of the raw attribute dictionary."""
        return copy.deepcopy(self._info)

    @property
    def dirty_fields(self):
        """The server-known attributes, overlaid with local modifications."""
        snapshot = self.to_dict()
        for field, original in self._info.items():
            current = self.__dict__[field]
            if current != original:
                snapshot[field] = current
        return snapshot

    def update(self):
        """Push locally changed fields back through the manager."""
        try:
            return self.manager.update(**self.dirty_fields)
        except AttributeError:
            raise exc.NotUpdatableError(self)
| [
"gongwayne@hotmail.com"
] | gongwayne@hotmail.com |
c07e6dc436df4a4558ff006c558c77a4e4786279 | 3cd9fc36f4abba93bffb11dc43f145db6c6f5408 | /azure-iot-device/azure/iot/device/common/models/x509.py | 8f5fdb82b63978aca852862ac2b91a43558562a9 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-iot-sdk-python | 457eb035e772268559ee8fa3310c210c84e52aa6 | 5d343d5904aaa98c6a88101e0dc40263acff4db2 | refs/heads/main | 2023-09-01T05:19:57.710222 | 2023-08-28T16:52:26 | 2023-08-28T16:52:26 | 70,936,068 | 441 | 438 | MIT | 2023-08-28T16:52:28 | 2016-10-14T18:17:15 | Python | UTF-8 | Python | false | false | 1,463 | py | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""This module represents a certificate that is responsible for providing client provided x509 certificates
that will eventually establish the authenticity of devices to IoTHub and Provisioning Services.
"""
class X509(object):
    """
    Holds references to the certificate, key, and optional pass-phrase used
    to authenticate a TLS connection using x509 certificates.
    """

    def __init__(self, cert_file, key_file, pass_phrase=None):
        """
        Initializer for X509 Certificate

        :param cert_file: The file path to contents of the certificate (or
            certificate chain) used to authenticate the device.
        :param key_file: The file path to the key associated with the
            certificate.
        :param pass_phrase: (optional) The pass_phrase used to encode the
            key file.
        """
        self._cert_file = cert_file
        self._key_file = key_file
        self._pass_phrase = pass_phrase

    @property
    def certificate_file(self):
        """The path of the certificate (or certificate chain) file."""
        return self._cert_file

    @property
    def key_file(self):
        """The path of the private key file."""
        return self._key_file

    @property
    def pass_phrase(self):
        """The pass-phrase protecting the key file, if any."""
        return self._pass_phrase
| [
"noreply@github.com"
] | Azure.noreply@github.com |
3f9790b1a071e752f9653f18146064f75ac9a59f | 86d31b4e897555d67a7aed7302717c56c94bd538 | /0x08-python-more_classes/9-rectangle.py | 499f3b50d9e00af6c7990fd7618c17de0fc66ec3 | [] | no_license | jalondono/holbertonschool-higher_level_programming | bcefda6ea75d26cb44726fc74c396b1a1c22664d | a347138e3a214aa497c8a12dca702374dcd65f0d | refs/heads/master | 2020-07-22T22:53:32.929374 | 2020-02-13T20:10:08 | 2020-02-13T20:10:08 | 207,357,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,796 | py | #!/usr/bin/python3
class Rectangle:
    """Rectangle defined by a non-negative integer width and height.

    Class attributes:
        number_of_instances: running count of live Rectangle objects.
        print_symbol: symbol used to draw the rectangle in __str__.
    """

    number_of_instances = 0
    print_symbol = '#'

    def __init__(self, width=0, height=0):
        """Initialize the rectangle and bump the instance counter.

        Args:
            width (int): horizontal size, must be >= 0.
            height (int): vertical size, must be >= 0.
        """
        self.width = width
        self.height = height
        Rectangle.number_of_instances += 1

    @property
    def width(self):
        """int: horizontal size of the rectangle."""
        return self.__width

    @width.setter
    def width(self, value):
        if not isinstance(value, int):
            raise TypeError('width must be an integer')
        if value < 0:
            raise ValueError('width must be >= 0')
        self.__width = value

    @property
    def height(self):
        """int: vertical size of the rectangle."""
        return self.__height

    @height.setter
    def height(self, value):
        if not isinstance(value, int):
            raise TypeError('height must be an integer')
        if value < 0:
            raise ValueError('height must be >= 0')
        self.__height = value

    def area(self):
        """Return the area of the rectangle (width * height)."""
        return self.__height * self.__width

    def perimeter(self):
        """Return the perimeter, or 0 when either dimension is 0."""
        if self.__height == 0 or self.__width == 0:
            return 0
        return 2 * (self.__height + self.__width)

    def __str__(self):
        """Return the rectangle drawn with print_symbol.

        Bug fix: the previous version printed the drawing as a side
        effect and returned '' -- str(rect) must return the drawing
        instead of printing it.  The visible output of print(rect) is
        unchanged.
        """
        if self.__width == 0 or self.__height == 0:
            return ''
        row = str(self.print_symbol) * self.__width
        return '\n'.join(row for _ in range(self.__height))

    def __repr__(self):
        """Return a string from which an equal rectangle can be rebuilt."""
        return "{}({}, {})".format(type(self).__name__,
                                   self.__width, self.__height)

    def __del__(self):
        """Print a farewell message and decrement the instance counter."""
        print("Bye rectangle...")
        Rectangle.number_of_instances -= 1

    @staticmethod
    def bigger_or_equal(rect_1, rect_2):
        """Return the rectangle with the larger area (rect_1 on ties).

        Raises:
            TypeError: if either argument is not a Rectangle.
        """
        if not isinstance(rect_1, Rectangle):
            raise TypeError("rect_1 must be an instance of Rectangle")
        if not isinstance(rect_2, Rectangle):
            raise TypeError("rect_2 must be an instance of Rectangle")
        if rect_1.area() >= rect_2.area():
            return rect_1
        return rect_2

    @classmethod
    def square(cls, size=0):
        """Return a new size-by-size square.

        Bug fix: instantiate via cls (not a hard-coded Rectangle) so
        subclasses get instances of themselves.
        """
        return cls(size, size)
| [
"juanlondono151776@hotmail.com"
] | juanlondono151776@hotmail.com |
ac8be09ebd049d3bd03c35dfc7fc5a611fa2221a | 54bd004dd18f23b46fd75288823977a93d6c7c9d | /Python_basics/if_else_break.py | 21716c863210d80f7d34277f758adc8226939154 | [] | no_license | Gagangithub1988/Python | 13f914a200f6f4750c1b7da1467ca7e3f48814d0 | 8c9ba1902ac45841fd3145d49b08547420f15f2d | refs/heads/master | 2022-11-03T22:12:51.799829 | 2020-06-20T06:46:45 | 2020-06-20T06:46:45 | 273,642,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | numbers = [386, 462, 47, 418, 907, 344, 236, 375, 823, 566, 597, 978, 328, 615, 953, 345,399, 162, 758, 219, 918, 237, 412, 566, 826, 248, 866, 950, 626, 949, 687, 217, 815, 67, 104, 58, 512, 24, 892, 894, 767, 553, 81, 379, 843, 831, 445, 742, 717, 958,743, 527]
for i in numbers:
if i==237:
print(i)
break;
elif i%2==0:
print(i) | [
"noreply@github.com"
] | Gagangithub1988.noreply@github.com |
ecd12e83553b2c3605eb45cc536613b3ad79f4e8 | a450d455fc1da6f3a89eebb562cc2fb28784b129 | /games/views.py | 7edd5b3cae8a794ad0217bb7a542db339aa24716 | [
"MIT"
] | permissive | hawkthorne/bearweb | 6c62e0143ab6a19bee6cf340dfec81664f201dcb | 1533acd9c7610d9ea01e8413853cca70843b9d63 | refs/heads/master | 2021-05-28T05:54:55.533462 | 2014-03-16T23:12:01 | 2014-03-16T23:12:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,156 | py | from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.core.urlresolvers import reverse
from django.utils.cache import patch_response_headers
from django.template.defaultfilters import slugify
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.template.loader import render_to_string
from django.views.decorators.http import require_safe
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, FormView, UpdateView
from django.conf import settings
from braces.views import LoginRequiredMixin
from .models import Game, Release, CrashReport
from .forms import LoveForm, GameForm, UpdateGameForm
from games import bundle
from games import tasks
def get_game(request, uuid):
    """Return the Game with *uuid*, raising 404 unless the requester owns it.

    Non-owners get a 404 (rather than 403) so game UUIDs are not leaked.
    """
    game = get_object_or_404(Game, uuid=uuid)
    if game.owner != request.user:
        raise Http404
    return game
class UUIDMixin(object):
    """Make detail views resolve objects by the ``uuid`` URL kwarg/field."""
    slug_field = 'uuid'
    slug_url_kwarg = 'uuid'
class IdenticonDetail(UUIDMixin, DetailView):
    """Serve a game's identicon as a long-cached PNG."""
    model = Game
    context_object_name = 'game'

    def render_to_response(self, context, **kwargs):
        response = HttpResponse(content_type="image/png")
        image = context['game'].identicon(56)  # 56px rendering
        image.save(response, 'PNG')
        # Cache for a year (31536000 s); presumably identicons are
        # deterministic per game -- verify before changing.
        patch_response_headers(response, cache_timeout=31536000)
        return response
class GameDetail(UUIDMixin, LoginRequiredMixin, DetailView):
    """Owner-only dashboard page for a single game."""
    model = Game
    context_object_name = 'game'

    def get_context_data(self, **kwargs):
        context = super(GameDetail, self).get_context_data(**kwargs)
        # Hide other users' games behind a 404 rather than a 403.
        if context['game'].owner != self.request.user:
            raise Http404
        game = context['game']
        # Keen analytics project id / read key exposed to the template.
        context['KEEN_PROJECT_ID'] = settings.KEEN_PROJECT_ID
        context['KEEN_READ_KEY'] = settings.KEEN_READ_KEY
        context['releases'] = game.release_set.order_by('-created')[:10]
        context['crash_reports'] = \
            game.crashreport_set.order_by('-created')[:5]
        return context
class ReportList(LoginRequiredMixin, ListView):
    """All crash reports for one of the requesting user's games."""
    model = CrashReport
    context_object_name = 'crash_reports'

    def get_queryset(self):
        # get_game 404s unless the requester owns the game.
        self.game = get_game(self.request, self.kwargs['uuid'])
        return self.game.crashreport_set.order_by('-created')

    def get_context_data(self, **kwargs):
        context = super(ReportList, self).get_context_data(**kwargs)
        context['game'] = self.game
        return context
class ReleaseList(LoginRequiredMixin, ListView):
    """All releases for one of the requesting user's games, newest first."""
    model = Release
    context_object_name = 'releases'

    def get_queryset(self):
        # get_game 404s unless the requester owns the game.
        self.game = get_game(self.request, self.kwargs['uuid'])
        return Release.objects.filter(game=self.game).order_by('-created')

    def get_context_data(self, **kwargs):
        context = super(ReleaseList, self).get_context_data(**kwargs)
        context['game'] = self.game
        # Template flag: show the LÖVE version column on this page.
        context['show_love_version'] = True
        return context
class ReleaseCreate(LoginRequiredMixin, FormView):
    """Upload form that creates a new release from a .love file."""
    template_name = 'games/release_form.html'
    form_class = LoveForm

    def get_success_url(self):
        return reverse('games:releases', kwargs={'uuid': self.kwargs['uuid']})

    def get_context_data(self, **kwargs):
        context = super(ReleaseCreate, self).get_context_data(**kwargs)
        # 404s unless the requester owns the game.
        context['game'] = get_game(self.request, self.kwargs['uuid'])
        return context

    def form_valid(self, form):
        game = get_game(self.request, self.kwargs['uuid'])
        f = form.cleaned_data['lovefile']
        version = form.cleaned_data['version']
        # Validation failures return a rendered HTML partial with a 400
        # status instead of redisplaying the form -- presumably consumed
        # by client-side JS; confirm before changing.
        if not game.valid_version(version):
            errors = {
                'invalid_version': version,
                'game': game,
            }
            partial = render_to_string('games/upload_errors.html', errors)
            return HttpResponseBadRequest(partial)
        # A .love archive must contain a main.lua to be runnable.
        if not bundle.check_for_main(f):
            errors = {'invalid_file': True}
            partial = render_to_string('games/upload_errors.html', errors)
            return HttpResponseBadRequest(partial)
        # Fall back to LÖVE 0.8.0 when the version can't be detected.
        love_version = bundle.detect_version(f) or "0.8.0"
        # Get the latest release for the game, and increment the version
        release = game.release_set.create(version=version,
                                          love_version=love_version)
        # FIXME: Abstract this away
        f.name = "{}-original-{}.love".format(game.slug, version)
        release.add_asset(f, tag='uploaded')
        # Per-platform packaging happens asynchronously.
        tasks.lovepackage.delay(release.pk)
        return super(ReleaseCreate, self).form_valid(form)
class GameCreate(LoginRequiredMixin, CreateView):
    """Form view that creates a new game owned by the requesting user."""
    model = Game
    form_class = GameForm

    def form_valid(self, form):
        form.instance.owner = self.request.user
        form.instance.slug = slugify(form.instance.name)
        val = super(GameCreate, self).form_valid(form)
        # Kick off async (re)generation of the game's .icns icon.
        tasks.update_icns.delay(form.instance.pk)
        return val
class GameUpdate(LoginRequiredMixin, UpdateView):
    """Owner-only edit form for an existing game."""
    model = Game
    template_name_suffix = '_update_form'
    context_object_name = 'game'
    form_class = UpdateGameForm

    def get_success_url(self):
        return reverse('games:edit', kwargs={'uuid': self.kwargs['uuid']})

    def get_object(self, queryset=None):
        # 404s unless the requester owns the game.
        return get_game(self.request, self.kwargs['uuid'])

    def form_valid(self, form):
        # Keep the slug in sync with a possibly renamed game.
        form.instance.slug = slugify(form.instance.name)
        val = super(GameUpdate, self).form_valid(form)
        # Kick off async (re)generation of the game's .icns icon.
        tasks.update_icns.delay(form.instance.pk)
        return val
@require_safe
def download(request, uuid, platform):
    """Redirect to the latest release's download URL for *platform*.

    Responds with 404 when the game is missing or private, the platform
    is unknown, no release exists yet, or the release lacks an asset for
    that platform.
    """
    game = get_object_or_404(Game, uuid=uuid)
    if not game.public:
        raise Http404

    url_attrs = {
        'windows': 'windows_url',
        'osx': 'osx_url',
        'love': 'love_url',
    }
    if platform not in url_attrs:
        raise Http404

    try:
        release = game.latest_release()
    except IndexError:
        raise Http404

    url = getattr(release, url_attrs[platform])()
    if not url:
        raise Http404
    return redirect(url)
| [
"kyle@kyleconroy.com"
] | kyle@kyleconroy.com |
335250ce21813f53a167005fc2ebda740610ebd1 | d1fb76c0fdb08dc998a01e8eeca8bd8806db82a0 | /onionstudio/manual.py | b0e6ba28292f0d1c24853ba9d0a8445831e24cc1 | [
"MIT"
] | permissive | jarret/onionstudio | 822613dec0f72ef5db717ffdc8761dccb912de43 | 5ebf0a75cf1e7960822c96a987668be5ed82aa41 | refs/heads/master | 2020-12-03T08:45:44.808961 | 2020-07-18T16:49:57 | 2020-07-18T16:49:57 | 231,258,649 | 11 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py | # Copyright (c) 2020 Jarret Dyrbye
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php
import sys
from onionstudio.pixel import Pixel
class ManualToPixels:
def __init__(self, pixels_string):
self.pixels_string = pixels_string
def parse_pixels(self):
string_tokens = self.pixels_string.split("_")
if len(string_tokens) == 0:
return None, "no pixels given"
if len(string_tokens) % 3 != 0:
return None, 'could not parse "%s" as pixels' % self.pixels_string
pixels = []
for i in range(0, len(string_tokens), 3):
pixel_tokens = string_tokens[i:i+3]
try:
x = int(pixel_tokens[0])
y = int(pixel_tokens[1])
rgb = pixel_tokens[2]
pixels.append(Pixel(x, y, rgb))
except:
return None, "could not interpret %s as pixel" % pixel_tokens
return pixels, None
| [
"jarret.dyrbye@gmail.com"
] | jarret.dyrbye@gmail.com |
4855f4a63ce71ad5e80edbc3a6a41419c24b5520 | 13ea58f72fa96e2455609fb452b5f3b98e94f846 | /examples/diffusion/poisson_field_dependent_material.py | 4f2225b3311fd8d5dd012e8c40b8f44a9b27ba83 | [
"BSD-3-Clause"
] | permissive | vondrejc/sfepy | 4284ee47979b89d9e504b72b91689a9ce0c3a5ec | 8e427af699c4b2858eb096510057abb3ae7e28e8 | refs/heads/master | 2021-01-24T00:09:18.722674 | 2014-08-20T12:37:03 | 2014-08-20T14:25:56 | 12,810,199 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,370 | py | r"""
Laplace equation with a field-dependent material parameter.
Find :math:`T(t)` for :math:`t \in [0, t_{\rm final}]` such that:
.. math::
\int_{\Omega} c(T) \nabla s \cdot \nabla T
= 0
\;, \quad \forall s \;.
where :math:`c(T)` is the :math:`T` dependent diffusion coefficient.
Each iteration calculates :math:`T` and adjusts :math:`c(T)`.
"""
from sfepy import data_dir
from sfepy.base.base import output
filename_mesh = data_dir + '/meshes/3d/cylinder.mesh'
t0 = 0.0  # initial time of the quasistatic stepping
t1 = 0.1  # final time
n_step = 11  # number of time steps (takes precedence over dt in the solver)
def get_conductivity(ts, coors, problem, equations=None, mode=None, **kwargs):
    """
    Calculates the conductivity as 2 + 10 * (T + 2) and returns it.

    (The +2 shift keeps the coefficient positive for the boundary values
    T in [-2, 2] set by the EBCs.)  This relation results in larger T
    gradients where T is small.
    """
    if mode == 'qp':
        # T-field values in quadrature points coordinates given by integral i
        # - they are the same as in `coors` argument.
        T_values = problem.evaluate('ev_volume_integrate.i.Omega(T)',
                                    mode='qp', verbose=False)
        val = 2 + 10 * (T_values + 2)
        output('conductivity: min:', val.min(), 'max:', val.max())
        # Flatten (n_el, n_qp) into the (n_qp_total, 1, 1) shape expected
        # for a scalar material coefficient.
        val.shape = (val.shape[0] * val.shape[1], 1, 1)
        return {'val' : val}
# Material coefficient evaluated per quadrature point by get_conductivity.
materials = {
    'coef' : 'get_conductivity',
}
# Scalar P1 temperature field on the whole domain.
fields = {
    'temperature' : ('real', 1, 'Omega', 1),
}
variables = {
    'T' : ('unknown field', 'temperature', 0),
    's' : ('test field', 'temperature', 'T'),
}
# Whole domain plus the two end facets of the cylinder mesh.
regions = {
    'Omega' : 'all',
    'Gamma_Left' : ('vertices in (x < 0.00001)', 'facet'),
    'Gamma_Right' : ('vertices in (x > 0.099999)', 'facet'),
}
# Fixed temperatures at the two ends.
ebcs = {
    'T1' : ('Gamma_Left', {'T.0' : 2.0}),
    'T2' : ('Gamma_Right', {'T.0' : -2.0}),
}
functions = {
    'get_conductivity' : (get_conductivity,),
}
ics = {
    'ic' : ('Omega', {'T.0' : 0.0}),
}
integrals = {
    'i' : 1,
}
# Weak form of the Laplace equation with the T-dependent coefficient.
equations = {
    'Temperature' : """dw_laplace.i.Omega( coef.val, s, T ) = 0"""
}
solvers = {
    'ls' : ('ls.scipy_direct', {}),
    'newton' : ('nls.newton', {
        'i_max' : 1,
        'eps_a' : 1e-10,
        'eps_r' : 1.0,
        'problem' : 'nonlinear'
    }),
    'ts' : ('ts.simple', {
        't0' : t0,
        't1' : t1,
        'dt' : None,
        'n_step' : n_step, # has precedence over dt!
        'quasistatic' : True,
    }),
}
options = {
    'nls' : 'newton',
    'ls' : 'ls',
    'ts' : 'ts',
    'save_steps' : -1,
}
| [
"cimrman3@ntc.zcu.cz"
] | cimrman3@ntc.zcu.cz |
5f746eab98d95e889f73892fc9d177b490c27480 | 5a017fc861db92e3a2919f260d54f1301afbb3e5 | /MIDI Remote Scripts/Akai_Force_MPC/scene_list.py | e178c94e8d3079a95f83763cd4dc3dedfafe3dce | [] | no_license | kera67/livepy_diff_ten | 8d8d0f3b76048f1fe5d4c0fbc02549dc922c7d5b | 12a0af9e9c57d0721af5036ce23af549df2c95f0 | refs/heads/master | 2023-07-14T18:26:33.591915 | 2020-11-19T07:50:28 | 2020-11-19T07:50:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,597 | py | from __future__ import absolute_import, print_function, unicode_literals
from itertools import izip_longest
from ableton.v2.base import listens
from ableton.v2.control_surface import Component
from .scene import MPCSceneComponent
class SceneListComponent(Component):
    """Maintains a window of scene components that tracks the session ring.

    Each child MPCSceneComponent is bound to the Live scene at
    (ring scene offset + its own index); bindings are refreshed whenever
    the ring moves or Live's scene list changes.
    """

    def __init__(self, session_ring = None, num_scenes = 0, *a, **k):
        super(SceneListComponent, self).__init__(*a, **k)
        assert session_ring is not None
        self._session_ring = session_ring
        # React to the ring being scrolled.
        self.__on_offsets_changed.subject = session_ring
        self._scenes = [ MPCSceneComponent(parent=self, session_ring=session_ring) for _ in xrange(num_scenes) ]
        # React to scenes being added/removed in the Live set.
        self.__on_scene_list_changed.subject = self.song
        self._reassign_scenes()

    def set_scene_launch_buttons(self, buttons):
        # izip_longest pads with None, so surplus scenes get their button
        # detached (set_launch_button(None)).
        for scene, button in izip_longest(self._scenes, buttons or []):
            scene.set_launch_button(button)

    def set_scene_color_controls(self, controls):
        for scene, control in izip_longest(self._scenes, controls or []):
            scene.scene_color_control.set_control_element(control)

    @listens(u'offset')
    def __on_offsets_changed(self, *a):
        if self.is_enabled():
            self._reassign_scenes()

    @listens(u'scenes')
    def __on_scene_list_changed(self):
        self._reassign_scenes()

    def _reassign_scenes(self):
        """Point each child component at its scene, or None past the end."""
        scenes = self.song.scenes
        for index, scene in enumerate(self._scenes):
            scene_index = self._session_ring.scene_offset + index
            scene.set_scene(scenes[scene_index] if len(scenes) > scene_index else None)
"aumhaa@gmail.com"
] | aumhaa@gmail.com |
1e40ba7fc6198751838b104388ecf529c831ea09 | 52ec517d8a990120b0e807e0c8f76056672c4ee1 | /python/hashing/internationalPhoneBookDynamicArray.py | 465c1d7268ceca51aa3b7302a0ee1413cb5baa86 | [] | no_license | macoto35/Data_Structures_Fundamentals | aaae053d270b6cd5472d755354b4b15d336c3c06 | f4cb82380f3b807889365514439b1a83afa07035 | refs/heads/master | 2020-03-20T20:41:15.437091 | 2019-08-20T07:10:27 | 2019-08-20T07:10:27 | 137,700,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,850 | py | import re
class InternationalPhoneBookDynamicArray:
def __init__(self):
self.arr = [None] * 1
self.size = 0
self.maxSize = 1
class Pair:
def __init__(self, number, name):
self.number = number
self.name = name
def _binarySearch(self, number, st, ed):
if st == ed:
if self._getInt(self.arr[st].number) < number:
return st + 1
else:
return st
mid = st + (ed - st) // 2
val = self._getInt(self.arr[mid].number)
if val == number:
return mid
elif val < number:
return self._binarySearch(number, mid + 1, ed)
else:
return self._binarySearch(number, st, mid - 1)
def _getInt(self, number):
return re.sub('\D', '', number)
def setName(self, number, name):
# resize
if self.size == self.maxSize:
newMaxSize = self.maxSize * 2
newArr = [None] * newMaxSize
for i in range(self.maxSize):
newArr[i] = self.arr[i]
self.arr = newArr
self.maxSize = newMaxSize
# append
pair = self.Pair(number, name)
if self.size == 0:
self.arr[self.size] = pair
else:
idx = self._binarySearch(self._getInt(number), 0, self.size - 1)
for i in range(self.size - 1, idx - 1):
self.arr[i + 1] = self.arr[i]
self.arr[idx] = pair
self.size += 1
def getName(self, number):
idx = self._binarySearch(self._getInt(number), 0, self.size - 1)
val = self.arr[idx]
if val is not None and self.arr[idx].number == number:
return self.arr[idx].name
else:
return None
| [
"sohee.um@mercer.com"
] | sohee.um@mercer.com |
072672d8fa7bf5988279befb5a3ba45eff6aafeb | 6a0a634265957e9dcd26bc80e3304e107fb004d0 | /venvflask/lib/python3.7/site-packages/Crypto/Cipher/AES.pyi | 159c4af2e9ea3f88a1f25e4b80981d6dce07c073 | [] | no_license | ogutiann/PythonEthereumSmartContracts | 8bd81aa14eab567d41b5dad74b67aba92a405ebd | d870e9fd1c7f68b8493db4c2b2af224f966d8e51 | refs/heads/master | 2023-01-04T14:23:12.396898 | 2020-10-29T12:12:46 | 2020-10-29T12:12:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,336 | pyi | from typing import Union, Tuple, Optional, Dict
from Crypto.Cipher._mode_ecb import EcbMode
from Crypto.Cipher._mode_cbc import CbcMode
from Crypto.Cipher._mode_cfb import CfbMode
from Crypto.Cipher._mode_ofb import OfbMode
from Crypto.Cipher._mode_ctr import CtrMode
from Crypto.Cipher._mode_openpgp import OpenPgpMode
from Crypto.Cipher._mode_ccm import CcmMode
from Crypto.Cipher._mode_eax import EaxMode
from Crypto.Cipher._mode_gcm import GcmMode
from Crypto.Cipher._mode_siv import SivMode
from Crypto.Cipher._mode_ocb import OcbMode
AESMode = int  # Mode constants are plain ints (mirroring the C extension).

# Cipher-mode constants accepted by new()'s ``mode`` argument; each maps
# to one of the *Mode classes in the return union below.
MODE_ECB: AESMode
MODE_CBC: AESMode
MODE_CFB: AESMode
MODE_OFB: AESMode
MODE_CTR: AESMode
MODE_OPENPGP: AESMode
MODE_CCM: AESMode
MODE_EAX: AESMode
MODE_GCM: AESMode
MODE_SIV: AESMode
MODE_OCB: AESMode

Buffer = Union[bytes, bytearray, memoryview]  # Any contiguous byte buffer.

# Keyword arguments beyond ``key``/``mode`` only apply to particular
# modes (e.g. iv/IV for CBC-style modes, nonce/mac_len for AEAD modes,
# counter/initial_value for CTR).
def new(key: Buffer,
        mode: AESMode,
        iv: Buffer = ...,
        IV: Buffer = ...,
        nonce: Buffer = ...,
        segment_size: int = ...,
        mac_len: int = ...,
        assoc_len: int = ...,
        initial_value: Union[int, Buffer] = ...,
        counter: Dict = ...,
        use_aesni: bool = ...) -> \
        Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode,
              OpenPgpMode, CcmMode, EaxMode, GcmMode,
              SivMode, OcbMode]: ...

block_size: int  # Cipher block size in bytes.
key_size: Tuple[int, int, int]  # The three accepted key lengths in bytes.
| [
"sijoythomas@pop-os.localdomain"
] | sijoythomas@pop-os.localdomain |
48603d6812cdc33953ca8edc661c65f26addb1f2 | dd3bbd4e7aaee7a8a5f26b927ce28ac472c855a5 | /eggs/Products.PluginRegistry-1.3b1-py2.7.egg/Products/PluginRegistry/exportimport.py | 0cf253c067eca3dc9ba6c34b21277a858a5aaa2e | [] | no_license | nacho22martin/tesis | ea0a822f8bdbdef6f13f41276ecd4d6e85427ca5 | e137eb6225cc5e724bee74a892567796166134ac | refs/heads/master | 2020-12-24T13:20:58.334839 | 2013-11-09T12:42:41 | 2013-11-09T12:42:41 | 14,261,570 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,876 | py | ##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors
# Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this
# distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
""" GenericSetup export / import support for PluginRegistry.
$Id: exportimport.py 110543 2010-04-06 03:23:52Z tseaver $
"""
from StringIO import StringIO
from Persistence import PersistentMapping
from zope.interface import implements
from Products.GenericSetup.interfaces import IFilesystemExporter
from Products.GenericSetup.interfaces import IFilesystemImporter
from Products.GenericSetup.content import FauxDAVRequest
from Products.GenericSetup.content import FauxDAVResponse
from Products.GenericSetup.utils import ExportConfiguratorBase
from Products.GenericSetup.utils import ImportConfiguratorBase
from Products.GenericSetup.utils import _getDottedName
from Products.GenericSetup.utils import _resolveDottedName
from Products.GenericSetup.utils import CONVERTER
from Products.GenericSetup.utils import DEFAULT
from Products.GenericSetup.utils import KEY
from Products.PageTemplates.PageTemplateFile import PageTemplateFile
from interfaces import IPluginRegistry
def _providedBy(obj, iface):
    """Return True when *obj* provides the Zope interface *iface*."""
    return iface.providedBy(obj)
_FILENAME = 'pluginregistry.xml'
def _getRegistry(site):
    """Return the single IPluginRegistry contained in *site*.

    Raises ValueError when the site contains zero or several registries.
    """
    registries = [x for x in site.objectValues()
                    if _providedBy(x, IPluginRegistry)]
    if len(registries) < 1:
        raise ValueError, 'No plugin registries'
    if len(registries) > 1:
        raise ValueError, 'Too many plugin registries'
    return registries[0]
def exportPluginRegistry(context):
    """ Export plugin registry as an XML file.

    o Designed for use as a GenericSetup export step.
    """
    registry = _getRegistry(context.getSite())
    # Acquisition-wrap the exporter in the registry so its page template
    # can render.
    pre = PluginRegistryExporter(registry).__of__(registry)
    xml = pre.generateXML()
    context.writeDataFile(_FILENAME, xml, 'text/xml')
    return 'Plugin registry exported.'
def _updatePluginRegistry(registry, xml, should_purge, encoding=None):
    """Parse *xml* and write its plugin-type info and active-plugin
    assignments into *registry*, optionally purging existing state first.
    """
    if should_purge:
        registry._plugin_types = []
        registry._plugin_type_info = PersistentMapping()
        registry._plugins = PersistentMapping()

    # When PAS import is used in an extension profile, the plugin
    # registry will have been deleted (content import deletes by
    # default) but should_purge will be false; need to initialize
    # _plugins since PluginRegistry's constructor doesn't
    if registry._plugins is None:
        registry._plugins = PersistentMapping()

    pir = PluginRegistryImporter(registry, encoding)
    reg_info = pir.parseXML(xml)

    for info in reg_info['plugin_types']:
        iface = _resolveDottedName(info['interface'])
        # Avoid duplicate plugin types
        if iface not in registry._plugin_types:
            registry._plugin_types.append(iface)
        # Type info and the ordered tuple of active plugin IDs are always
        # overwritten with the imported values.
        registry._plugin_type_info[iface] = {'id': info['id'],
                                             'title': info['title'],
                                             'description': info['description'],
                                             }
        registry._plugins[iface] = tuple([x['id'] for x in info['plugins']])
def importPluginRegistry(context):
    """ Import plugin registry from an XML file.

    o Designed for use as a GenericSetup import step.
    """
    registry = _getRegistry(context.getSite())
    encoding = context.getEncoding()

    xml = context.readDataFile(_FILENAME)
    if xml is None:
        # Bug fix: the skip message previously said 'Site properties',
        # a copy/paste leftover from another import handler.
        return 'Plugin registry: Nothing to import.'

    _updatePluginRegistry(registry, xml, context.shouldPurge(), encoding)

    return 'Plugin registry imported.'
class PluginRegistryExporter(ExportConfiguratorBase):
    """GenericSetup configurator that renders a plugin registry to XML."""

    def __init__(self, context, encoding=None):
        ExportConfiguratorBase.__init__(self, None, encoding)
        self.context = context

    def _getExportTemplate(self):
        # Page template that serializes listPluginTypes() to XML.
        return PageTemplateFile('xml/pirExport.xml', globals())

    def listPluginTypes(self):
        """Yield one info mapping per plugin type, with the interface
        replaced by its dotted name and the active plugin IDs added.
        """
        for info in self.context.listPluginTypeInfo():
            iface = info['interface']
            info['interface'] = _getDottedName(iface)
            info['plugins'] = self.context.listPluginIds(iface)
            yield info
class PluginRegistryImporter(ImportConfiguratorBase):
    """GenericSetup configurator that parses registry XML into mappings."""

    def __init__(self, context, encoding=None):
        ImportConfiguratorBase.__init__(self, None, encoding)
        self.context = context

    def _getImportMapping(self):
        # Describes how XML elements/attributes map onto the dictionary
        # returned by parseXML() (consumed by _updatePluginRegistry).
        return {
            'plugin-registry':
                {'plugin-type': {KEY: 'plugin_types', DEFAULT: ()},
                },
            'plugin-type':
                {'id': {KEY: 'id'},
                 'interface': {KEY: 'interface'},
                 'title': {KEY: 'title'},
                 'description': {KEY: 'description'},
                 'plugin': {KEY: 'plugins', DEFAULT: ()}
                },
            'plugin':
                {'id': {KEY: 'id'},
                },
            }
class PluginRegistryFileExportImportAdapter(object):
    """ Designed for use when exporting / importing PR's within a container.
    """
    implements(IFilesystemExporter, IFilesystemImporter)
    def __init__(self, context):
        self.context = context
    def export(self, export_context, subdir, root=False):
        """ See IFilesystemExporter.
        """
        context = self.context
        # __of__ wraps the exporter in the registry's acquisition context.
        pre = PluginRegistryExporter(context).__of__(context)
        xml = pre.generateXML()
        export_context.writeDataFile(_FILENAME,
                                     xml,
                                     'text/xml',
                                     subdir,
                                     )
    def listExportableItems(self):
        """ See IFilesystemExporter.
        """
        # The registry has no exportable sub-items of its own.
        return ()
    def import_(self, import_context, subdir, root=False):
        """ See IFilesystemImporter.
        """
        data = import_context.readDataFile(_FILENAME, subdir)
        if data is None:
            import_context.note('SGAIFA',
                                'no pluginregistry.xml in %s' % subdir)
        else:
            # NOTE(review): 'request'/'response' are constructed but never
            # used -- looks like dead code from a DAV-based import path.
            request = FauxDAVRequest(BODY=data, BODYFILE=StringIO(data))
            response = FauxDAVResponse()
            _updatePluginRegistry(self.context,
                                  data,
                                  import_context.shouldPurge(),
                                  import_context.getEncoding(),
                                  )
| [
"ignacio@plone.(none)"
] | ignacio@plone.(none) |
9f0dd34ffaa2659b3d64db3c510cf958661ce188 | 6086817f6614063ac3c4ce148a3737820cca0d44 | /tests/test_python_library.py | fdcef75a8f7f509ce4f5bb381c2a2b150c4a679b | [
"Apache-2.0"
] | permissive | Tryweirder/synthtool | 0a42a5e5e60bbe6362e4e3c489a2a29b52d9b9eb | 082e1ca0863b13ada8594fe91845380765da5b70 | refs/heads/master | 2023-04-03T03:51:38.260575 | 2021-04-13T00:00:05 | 2021-04-13T00:00:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,828 | py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from pathlib import Path
import pytest
from synthtool import gcp
from synthtool.sources import templates
# Path to synthtool's bundled python_library template directory.
PYTHON_LIBRARY = Path(__file__).parent.parent / "synthtool/gcp/templates/python_library"
# Each case maps CommonTemplates kwargs to snippets that must appear in the
# rendered noxfile; the empty-kwargs case exercises the defaults.
@pytest.mark.parametrize(
    ["template_kwargs", "expected_text"],
    [
        ({}, ["import nox", 'session.install("-e", ".", "-c", constraints_path)']),
        (
            {"unit_test_local_dependencies": ["../testutils", "../unitutils"]},
            [
                'session.install("-e", "../testutils", "-c", constraints_path)',
                'session.install("-e", "../unitutils", "-c", constraints_path)',
            ],
        ),
        (
            {"system_test_local_dependencies": ["../testutils", "../sysutils"]},
            [
                'session.install("-e", "../testutils", "-c", constraints_path)',
                'session.install("-e", "../sysutils", "-c", constraints_path)',
            ],
        ),
        (
            {"unit_test_extras": ["abc", "def"]},
            ['session.install("-e", ".[abc,def]", "-c", constraints_path)'],
        ),
        (
            {"system_test_extras": ["abc", "def"]},
            ['session.install("-e", ".[abc,def]", "-c", constraints_path)'],
        ),
        (
            {"unit_test_extras_by_python": {"3.8": ["abc", "def"]}},
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = ""',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
        (
            {"system_test_extras_by_python": {"3.8": ["abc", "def"]}},
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = ""',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
        (
            {
                "unit_test_extras": ["tuv", "wxyz"],
                "unit_test_extras_by_python": {"3.8": ["abc", "def"]},
            },
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = "[tuv,wxyz]"',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
        (
            {
                "system_test_extras": ["tuv", "wxyz"],
                "system_test_extras_by_python": {"3.8": ["abc", "def"]},
            },
            [
                'if session.python == "3.8":\n    extras = "[abc,def]"',
                'else:\n    extras = "[tuv,wxyz]"',
                'session.install("-e", f".{extras}", "-c", constraints_path)',
            ],
        ),
    ],
)
def test_library_noxfile(template_kwargs, expected_text):
    """Render noxfile.py.j2 with the given kwargs, check the result is
    valid Python, and assert every expected snippet is present."""
    t = templates.Templates(PYTHON_LIBRARY)
    result = t.render("noxfile.py.j2", **template_kwargs,).read_text()
    # Validate Python syntax.
    result_code = compile(result, "noxfile.py", "exec")
    assert result_code is not None
    for expected in expected_text:
        assert expected in result
def test_python_library():
    """Smoke-test that py_library() materializes the expected Kokoro files."""
    os.chdir(Path(__file__).parent / "fixtures/python_library")
    template_dir = Path(__file__).parent.parent / "synthtool/gcp/templates"
    common = gcp.CommonTemplates(template_path=template_dir)
    templated_files = common.py_library()
    assert os.path.exists(templated_files / ".kokoro/docs/docs-presubmit.cfg")
    assert os.path.exists(templated_files / ".kokoro/docker/docs/fetch_gpg_keys.sh")
def test_split_system_tests():
    """With split_system_tests=True, system tests get their own Kokoro
    job config and are disabled in the main presubmit."""
    os.chdir(Path(__file__).parent / "fixtures/python_library")
    template_dir = Path(__file__).parent.parent / "synthtool/gcp/templates"
    common = gcp.CommonTemplates(template_path=template_dir)
    templated_files = common.py_library(split_system_tests=True)
    with open(templated_files / ".kokoro/presubmit/presubmit.cfg", "r") as f:
        contents = f.read()
        # The main presubmit must turn system tests off.
        assert "RUN_SYSTEM_TESTS" in contents
        assert "false" in contents
    assert os.path.exists(templated_files / ".kokoro/presubmit/system-3.8.cfg")
    with open(templated_files / ".kokoro/presubmit/system-3.8.cfg", "r") as f:
        contents = f.read()
        assert "system-3.8" in contents
| [
"noreply@github.com"
] | Tryweirder.noreply@github.com |
271210fe7b439920554148561f2c82e2d4a01235 | 3bde5908bc285abb545be0f8dc7fe698fed908ba | /Dag1/live_koding/variabler.py | 90b1202e01df2749bb8d0c9f2e246068706bd688 | [] | no_license | kodeskolen/inspiria_h21 | cdac18e9ea45845b2f5c0d820f17f7c68e2cb222 | bd7070add6675754962632f58b01bcce8b847fdc | refs/heads/main | 2023-07-13T01:56:01.118767 | 2021-08-20T07:00:51 | 2021-08-20T07:00:51 | 371,284,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 18 07:29:44 2021
@author: Marie
"""
# Greet a hard-coded user, then demonstrate the modulo (%) operator.
navn = "Marie"
print(f"Hei {navn}")
x, y = 5, 2
resultat = x % y  # remainder of 5 divided by 2 -> 1
print(resultat)
"roald.marie@gmail.com"
] | roald.marie@gmail.com |
5395949eb11a74f63a417d29d597799633103517 | 91f30c829664ff409177e83776c9f4e2e98d9fc4 | /manage.py | 90058a6e64433bcd765b52dfcbb015079c97e687 | [] | no_license | TotalityHacks/madras | 3ac92dc6caf989efcb02590f6474ab333d1f93fa | 2395a703eed1a87cca3cdd6c0fb9162b69e8df27 | refs/heads/master | 2021-08-17T15:29:41.055074 | 2018-07-18T23:05:29 | 2018-07-18T23:05:29 | 105,232,414 | 4 | 5 | null | 2021-03-31T18:58:56 | 2017-09-29T05:13:41 | Python | UTF-8 | Python | false | false | 249 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django management entry point: point Django at this
    # project's settings module, then hand the CLI arguments
    # (runserver, migrate, ...) to Django's command dispatcher.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madras.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| [
"jreinstra@gmail.com"
] | jreinstra@gmail.com |
14da2992936439bfeab57f9548a5e32508e2378d | ea1ece02e9ff9981d7a28ef1944d2dbf06110e93 | /16_find_compact/combine_several_post.py | 438816000c33195429fa8b486386a0cab0d369e3 | [] | no_license | Spritea/Hotkey | 377bf5f820b076bb21cb21ce5655e10a805b9a82 | 14df02daabc26195f8f3969e27c68bc62791f4c3 | refs/heads/master | 2022-08-27T21:11:14.597257 | 2020-05-20T15:48:07 | 2020-05-20T15:48:07 | 178,776,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,884 | py | import cv2 as cv
import numpy as np
from pathlib import Path
import natsort
from PIL import Image
from tqdm import tqdm
#包括边边也拼起来
def combine_one(imgs_list, img_path, imgwidth, imgheight):
    """Stitch equally-sized tiles back into one (imgwidth x imgheight) image.

    Tiles in ``imgs_list`` are row-major RGB arrays. Tiles in the last
    column/row are cropped on their left/top so the mosaic ends exactly at
    the target size (the tiler overlapped the final tiles with their
    neighbours). The result is saved as ``img_path`` under the module-level
    ``out_path_prefix`` directory.
    """
    im = Image.fromarray(imgs_list[0])
    width, height = im.size
    # Leftover pixels that do not fill a whole tile in each direction.
    row_res = imgheight % height
    col_res = imgwidth % width
    # Number of tile rows/columns: ceil(image extent / tile extent).
    img_row = imgheight // height if row_res == 0 else imgheight // height + 1
    # every row in big image contains img_col images
    img_col = imgwidth // width if col_res == 0 else imgwidth // width + 1
    blank = Image.new("RGB", (imgwidth, imgheight))
    for k in range(img_row):
        for j in range(img_col):
            p = Image.fromarray(imgs_list[j + k * img_col])
            if j + 1 == img_col and k + 1 < img_row and col_res > 0:
                # Last column: keep only the rightmost col_res pixels.
                p = p.crop((width - col_res, 0, width, height))
            elif j + 1 < img_col and k + 1 == img_row and row_res > 0:
                # Last row: keep only the bottom row_res pixels.
                p = p.crop((0, height - row_res, width, height))
            elif j + 1 == img_col and k + 1 == img_row and col_res > 0 and row_res > 0:
                # Bottom-right corner tile: crop in both directions.
                p = p.crop((width - col_res, height - row_res, width, height))
            blank.paste(p, (width * j, height * k))
    if not Path(out_path_prefix).is_dir():
        print("Out path is empty!!")
        exit(0)
    # NOTE: Windows-style separator kept to match the hard-coded paths in
    # this script; os.path.join would be more portable.
    out_path = out_path_prefix + "\\" + img_path
    blank.save(out_path)
# Driver: for each Potsdam tile id, glob its prediction patches, load them
# as RGB arrays, and stitch them to the size of the matching ground-truth
# image via combine_one. Paths are hard-coded for a Windows machine.
# Postdam train18
# id_list=['2_12','2_13','2_14','3_12','3_13','3_14','4_12','4_13','4_14','4_15',
# '5_12','5_13','5_14','5_15','6_12','6_13','6_14','6_15','7_12','7_13']
# Postdam train24-benchmark
id_list=['2_13','2_14','3_13','3_14','4_13','4_14','4_15',
'5_13','5_14','5_15','6_13','6_14','6_15','7_13']
IMG_Path = Path("E:\code\hotkey\\17_post_proc\Postdam\\from-pytorch-train\\train24-val14\mv3_1_true_2_res50_data15\pred")
Large_Path = Path("E:\code\hotkey\\17_post_proc\Postdam\\from-pytorch-train\\train24-val14\\val_gt_full")
# Natural sort keeps patch order consistent with the tiler's output order.
Large_File = natsort.natsorted(list(Large_Path.glob("*.tif")), alg=natsort.PATH)
Large_Str = []
for j in Large_File:
    Large_Str.append(str(j))
for k in tqdm(range(len(id_list))):
    glob_target='*potsdam_'+id_list[k]+'_*.png'
    IMG_File = natsort.natsorted(list(IMG_Path.glob(glob_target)), alg=natsort.PATH)
    IMG_Str = []
    for i in IMG_File:
        IMG_Str.append(str(i))
    pic_small=[]
    for j in range(0,len(IMG_Str)):
        # OpenCV loads BGR; convert to RGB before handing to PIL.
        pic_small.append(cv.cvtColor(cv.imread(IMG_Str[j], cv.IMREAD_COLOR), cv.COLOR_BGR2RGB))
    # Target size comes from the matching ground-truth image.
    large_img=cv.imread(Large_Str[k])
    height,width,_=large_img.shape
    # Global read by combine_one as the output directory.
    out_path_prefix = "E:\code\hotkey\\17_post_proc\Postdam\\from-pytorch-train\\train24-val14\mv3_1_true_2_res50_data15\ceshi"
    out_name='potsdam_'+id_list[k]+'_pred.png'
    combine_one(pic_small,out_name,width,height)
| [
"461158649@qq.com"
] | 461158649@qq.com |
4e7326d8f0782d2450e71723a4e875d33608d65f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/429/usersdata/314/103216/submittedfiles/jogoDaVelha_BIB.py | b06d282cfbb729c7fadf5f046a39ffe6c78fd471 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | # -*- coding: utf-8 -*-
# Minha bib
def solicitaSimboloDoHumano():
    """Prompt the human player until they answer 'O' or 'X'; return it."""
    prompt = 'Qual simbolo deseja ultilizar no jogo? '
    escolha = input(prompt)
    while escolha not in ('O', 'X'):
        escolha = input(prompt)
    return escolha
def mostraTabuleiro(matriz):
    """Print the 3x3 board, one row per line, cells joined by ' | '."""
    for idx in range(3):
        linha = matriz[idx]
        print(' | '.join([linha[0], linha[1], linha[2]]))
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
6bfd57cf8429b4aba8742d9762d3b57922129d71 | c7979f4f6435fe8d0d07fff7a430da55e3592aed | /ABC015/D.py | 3bfd86c65ff0d9c6196a1bb441bc21f026524029 | [] | no_license | banboooo044/AtCoder | cee87d40bb98abafde19017f4f4e2f984544b9f8 | 7541d521cf0da848ecb5eb10ffea7d75a44cbbb6 | refs/heads/master | 2020-04-14T11:35:24.977457 | 2019-09-17T03:20:27 | 2019-09-17T03:20:27 | 163,818,272 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | #高橋くんの苦悩
#動的計画法
# dp[i][width] := (maxValue,useNum)
if __name__ == "__main__":
INF = float('inf')
W = int(input())
N,K = map(int,input().split(" "))
dp = [[0]*(W+1) for _ in range(K+1)]
A = [0] * N
B = [0] * N
for i in range(N):
a , b= map(int,input().split(" "))
A[i] = a
B[i] = b
past_max = 0
for i in range(N,0,-1):
for useNum in range(min([N-i+1,K]),0,-1):
for width in range(W+1):
if width - A[i-1] >= 0 and useNum <= K:
dp[useNum][width] = max([dp[useNum-1][width - A[i-1]] + B[i-1],dp[useNum][width]])
past_max = max([past_max,dp[useNum][width]])
print(past_max)
| [
"touhoucrisis7@gmail.com"
] | touhoucrisis7@gmail.com |
515b90f6751473f9ac482175b57299289a9c2859 | 2b3365bf01a910436edaf1448f182549ffd44525 | /_12_wikipedia.py | 6ca92cc80335bc89912f23a7a4b58cc5477fcc0a | [] | no_license | wlgud0402/croller | 2f1cb93f969d87890d2fb35992833ce3070c95dc | 56339880081042a25b8057caaca67f64a1f20df5 | refs/heads/master | 2021-05-20T23:29:57.503376 | 2020-04-29T06:41:06 | 2020-04-29T06:41:06 | 252,453,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | #위키백과 수집과 출력이 가능한 프로그램
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
# Every /wiki/ link seen so far; prevents revisiting pages.
pages = set()
def getLinks(pageUrl):
    """Recursively crawl English Wikipedia starting at pageUrl: print each
    page's title, first paragraph and edit link, then follow every unseen
    internal /wiki/ link."""
    global pages
    html = urlopen('http://en.wikipedia.org' + pageUrl)
    bs = BeautifulSoup(html, 'html.parser')
    try:
        print(bs.h1.get_text())  # get_text() returns the text without tags
        print(bs.find(id = 'mw-content-text').findAll('p')[0])
        print(bs.find(id = 'ca-edit').find('span').find('a').attrs['href'])
    except AttributeError:
        # Some pages lack one of the elements above; skip them gracefully.
        print('This page is missing something! No worries though!')
    for link in bs.findAll('a', href = re.compile('^(/wiki/)')):
        if 'href' in link.attrs:
            if link.attrs['href'] not in pages:
                newPage = link.attrs['href']
                print('-----------\n' + newPage)
                pages.add(newPage)
                # NOTE(review): unbounded recursion -- long link chains
                # will hit Python's recursion limit.
                getLinks(newPage)
# Start crawling from the Wikipedia main page.
getLinks('')
"wlgudrlgus@naver.com"
] | wlgudrlgus@naver.com |
786752a2164cf9e296256aef2ffb9e724b2159c2 | 6ead0d3997aa3470fc6f49c6ccc0ac8f808ae5d7 | /problems/python/tests/test_topKFrequent.py | ff3276cafa50cdee80f65c10f1b42103e8ab649e | [] | no_license | ikedaosushi/leetcode | d405455bfffda3057259da78783901feb56d9f76 | d378f2dc5f0b2df1f00208e304979ac0f53ab385 | refs/heads/master | 2021-06-24T04:31:56.586685 | 2020-12-08T13:51:18 | 2020-12-08T13:51:18 | 178,659,078 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | import pytest
from topKFrequent import Solution
# Each case: (input numbers, k, expected k most frequent elements).
@pytest.mark.parametrize("nums, k, expected", [
    ([1, 1, 1, 2, 2, 3], 2, [1, 2]),
    ([1], 1, [1]),
])
def test_topKFrequent(nums, k, expected):
    """Solution.topKFrequent should return the k most frequent elements."""
    actual = Solution().topKFrequent(nums, k)
    assert actual == expected
| [
"yutaro.ikeda@kaizenplatform.com"
] | yutaro.ikeda@kaizenplatform.com |
a4ef4b4d7cd9ed4952e319fab4b7a38c79db702c | c7cbbd4b1c1e281cef5f4a0c4e3d4a97cee2241e | /froide/document/models.py | 5a8eb1e966fd65554b6679d2fd3c8159cf5c00d1 | [
"MIT"
] | permissive | manonthemat/froide | 078cf78a6eb35226512c0bdfa2ac9043bcc81ad9 | 698c49935eaf2e922f3c9f6a46af0fd545ccbbbb | refs/heads/master | 2020-08-14T08:19:36.215473 | 2019-10-14T19:43:16 | 2019-10-14T19:43:16 | 215,129,869 | 0 | 0 | MIT | 2019-10-14T19:35:49 | 2019-10-14T19:35:49 | null | UTF-8 | Python | false | false | 2,203 | py | from django.db import models
from filingcabinet.models import (
AbstractDocument,
AbstractDocumentCollection,
get_page_image_filename
)
class Document(AbstractDocument):
    """A filingcabinet document, optionally linked to the FOI request,
    attachment, public body and team it originated from."""
    # Attachment this document was generated from, if any.
    original = models.ForeignKey(
        'foirequest.FoiAttachment', null=True, blank=True,
        on_delete=models.SET_NULL, related_name='original_document'
    )
    foirequest = models.ForeignKey(
        'foirequest.FoiRequest', null=True, blank=True,
        on_delete=models.SET_NULL
    )
    publicbody = models.ForeignKey(
        'publicbody.PublicBody', null=True, blank=True,
        on_delete=models.SET_NULL
    )
    # Team with edit access to this document.
    team = models.ForeignKey(
        'team.Team', null=True, blank=True,
        on_delete=models.SET_NULL
    )
    def is_public(self):
        """Return whether the document is publicly visible."""
        return self.public
    def get_serializer_class(self, detail=False):
        """Return the DRF serializer class (detail variant on request)."""
        # Imported lazily to avoid circular imports at module load time.
        from .api_views import DocumentSerializer, DocumentDetailSerializer
        if detail:
            return DocumentDetailSerializer
        return DocumentSerializer
    def get_crossdomain_auth(self, filename=None):
        """Build a cross-domain media auth helper for the given file
        (defaults to the document's own file)."""
        from .auth import DocumentCrossDomainMediaAuth
        if filename is None:
            filename = self.get_document_filename()
        return DocumentCrossDomainMediaAuth({
            'object': self,
            'filename': filename
        })
    def get_authorized_file_url(self, filename=None):
        """Return a direct URL for public documents, otherwise a
        pre-authorized cross-domain media URL."""
        if self.public:
            return self.get_file_url(filename=filename)
        return self.get_crossdomain_auth(filename=filename).get_full_media_url(
            authorized=True
        )
    def get_page_template(self):
        # URL template with page/size placeholders for page images.
        return self.get_authorized_file_url(filename=get_page_image_filename())
    def get_cover_image(self):
        # Small rendering of page 1, used as the cover thumbnail.
        return self.get_authorized_file_url(filename=get_page_image_filename(
            page=1, size='small'
        ))
class DocumentCollection(AbstractDocumentCollection):
    """A filingcabinet document collection, optionally owned by a team."""
    team = models.ForeignKey(
        'team.Team', null=True, blank=True,
        on_delete=models.SET_NULL
    )
    def is_public(self):
        """Return whether the collection is publicly visible."""
        return self.public
    def get_serializer_class(self):
        # Imported lazily to avoid circular imports at module load time.
        from .api_views import DocumentCollectionSerializer
        return DocumentCollectionSerializer
| [
"mail@stefanwehrmeyer.com"
] | mail@stefanwehrmeyer.com |
ce3dcdb6d1d8778e7ef14a5fadca90b5f9c643d5 | 27e890f900bd4bfb2e66f4eab85bc381cf4d5d3f | /plugins/cliconf/dellos9.py | ea36ef5694e8fa93800086746101687f43395068 | [] | no_license | coll-test/notstdlib.moveitallout | eb33a560070bbded5032385d0aea2f3cf60e690b | 0987f099b783c6cf977db9233e1c3d9efcbcb3c7 | refs/heads/master | 2020-12-19T22:28:33.369557 | 2020-01-23T18:51:26 | 2020-01-23T18:51:26 | 235,865,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,293 | py | #
# (c) 2017 Red Hat Inc.
#
# (c) 2017 Dell EMC.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
---
cliconf: dellos9
short_description: Use dellos9 cliconf to run command on Dell OS9 platform
description:
- This dellos9 plugin provides low level abstraction apis for
sending and receiving CLI commands from Dell OS9 network devices.
'''
import re
import json
from itertools import chain
from ansible.errors import AnsibleConnectionFailure
from ansible_collections.notstdlib.moveitallout.plugins.module_utils._text import to_bytes, to_text
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.common._collections_compat import Mapping
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode
class Cliconf(CliconfBase):
    """Cliconf implementation for Dell EMC OS9 devices.

    Provides the low-level get/edit/run primitives used by higher-level
    Ansible network modules.
    """
    def get_device_info(self):
        """Return OS name, version, model and hostname parsed from
        'show version' / 'show running-config' output."""
        device_info = {}
        device_info['network_os'] = 'dellos9'
        reply = self.get('show version')
        data = to_text(reply, errors='surrogate_or_strict').strip()
        match = re.search(r'Software Version (\S+)', data)
        if match:
            device_info['network_os_version'] = match.group(1)
        match = re.search(r'System Type (\S+)', data, re.M)
        if match:
            device_info['network_os_model'] = match.group(1)
        reply = self.get('show running-config | grep hostname')
        data = to_text(reply, errors='surrogate_or_strict').strip()
        match = re.search(r'^hostname (.+)', data, re.M)
        if match:
            device_info['network_os_hostname'] = match.group(1)
        return device_info
    @enable_mode
    def get_config(self, source='running', format='text', flags=None):
        """Fetch the requested configuration from the device.

        Bug fix: the 'running' branch had been commented out, leaving the
        dangling ``else`` attached to the validity check, so *every* valid
        source (including 'running') returned the startup config. Restored
        so source='running' issues 'show running-config all'.
        """
        if source not in ('running', 'startup'):
            return self.invalid_params("fetching configuration from %s is not supported" % source)
        if source == 'running':
            cmd = 'show running-config all'
        else:
            cmd = 'show startup-config'
        return self.send_command(cmd)
    @enable_mode
    def edit_config(self, command):
        """Apply configuration lines inside a configure/end session."""
        for cmd in chain(['configure terminal'], to_list(command), ['end']):
            self.send_command(cmd)
    def get(self, command, prompt=None, answer=None, sendonly=False, newline=True, check_all=False):
        """Run a single command on the device and return its output."""
        return self.send_command(command=command, prompt=prompt, answer=answer, sendonly=sendonly, newline=newline, check_all=check_all)
    def get_capabilities(self):
        """Return the plugin capabilities as a JSON string."""
        result = super(Cliconf, self).get_capabilities()
        return json.dumps(result)
    def run_commands(self, commands=None, check_rc=True):
        """Execute a list of commands (strings or dicts with a 'command'
        key) and return their outputs.

        When check_rc is False, connection failures are captured and their
        error text is appended to the results instead of raising.
        """
        if commands is None:
            raise ValueError("'commands' value is required")
        responses = list()
        for cmd in to_list(commands):
            if not isinstance(cmd, Mapping):
                cmd = {'command': cmd}
            output = cmd.pop('output', None)
            if output:
                raise ValueError("'output' value %s is not supported for run_commands" % output)
            try:
                out = self.send_command(**cmd)
            except AnsibleConnectionFailure as e:
                if check_rc:
                    raise
                out = getattr(e, 'err', to_text(e))
            responses.append(out)
        return responses
    def set_cli_prompt_context(self):
        """
        Make sure we are in the operational cli mode
        :return: None
        """
        if self._connection.connected:
            self._update_cli_prompt_context(config_context=')#')
| [
"wk@sydorenko.org.ua"
] | wk@sydorenko.org.ua |
82324002112c69c793b3a471952ab9a99b8d73c8 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/timmahrt_pyAcoustics/pyAcoustics-master/pyacoustics/speech_rate/dictionary_estimate.py | 3f9a3d8036bf21e8df109bb47e85b80b53cafc9f | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 3,916 | py | '''
Created on Jan 28, 2015
@author: tmahrt
'''
import os
from os.path import join
from pyacoustics.utilities import utils
from pysle import isletool
def percentInside(startTime, endTime, cmprStartTime, cmprEndTime):
    """Return the fraction of [startTime, endTime] that lies inside
    [cmprStartTime, cmprEndTime]; 0 when the intervals do not overlap."""
    overlaps = (float(startTime) <= float(cmprEndTime)
                and float(endTime) >= float(cmprStartTime))
    if not overlaps:
        return 0
    # Portions of [startTime, endTime] sticking out on either side,
    # clamped at zero when the comparison interval extends past it.
    uncoveredLeft = max(cmprStartTime - startTime, 0)
    uncoveredRight = max(endTime - cmprEndTime, 0)
    return 1 - (uncoveredLeft + uncoveredRight) / (endTime - startTime)
def manualPhoneCount(tgInfoPath, isleFN, outputPath, skipList=None):
    """For each textgrid-info file, look up every interval label in the
    ISLE dictionary and write per-interval "syllables,phones" counts to a
    same-named file under outputPath. Labels in skipList count as 0,0.
    Files already present in outputPath are skipped (resumable).
    """
    if skipList is None:
        skipList = []
    utils.makeDir(outputPath)
    isleDict = isletool.LexicalTool(isleFN)
    # Skip files that were already processed in a previous run.
    existFNList = utils.findFiles(outputPath, filterPaths=".txt")
    for fn in utils.findFiles(tgInfoPath, filterExt=".txt",
                              skipIfNameInList=existFNList):
        # NOTE(review): redundant with skipIfNameInList above; belt and
        # suspenders in case the listing went stale.
        if os.path.exists(join(outputPath, fn)):
            continue
        print(fn)
        dataList = utils.openCSV(tgInfoPath, fn)
        dataList = [row[2] for row in dataList]  # start, stop, tmpLabel
        outputList = []
        for tmpLabel in dataList:
            if tmpLabel not in skipList:
                # maxFlag=True: take the pronunciation with the most phones.
                syllableCount, phoneCount = isletool.getNumPhones(isleDict,
                                                                  tmpLabel,
                                                                  maxFlag=True)
            else:
                syllableCount, phoneCount = 0, 0
            outputList.append("%d,%d" % (syllableCount, phoneCount))
        outputTxt = "\n".join(outputList)
        with open(join(outputPath, fn), "w") as fd:
            fd.write(outputTxt)
def manualPhoneCountForEpochs(manualCountsPath, tgInfoPath, epochPath,
                              outputPath):
    """Aggregate per-interval syllable/phone counts into per-epoch totals.

    Intervals straddling an epoch boundary contribute proportionally to
    the time they spend inside the epoch. Writes one
    "syllables,phones,speechDuration" line per epoch, per file.
    """
    utils.makeDir(outputPath)
    # Resume: skip files already written to outputPath.
    skipList = utils.findFiles(outputPath, filterExt=".txt")
    for fn in utils.findFiles(tgInfoPath, filterExt=".txt",
                              skipIfNameInList=skipList):
        epochList = utils.openCSV(epochPath, fn)
        tgInfo = utils.openCSV(tgInfoPath, fn)
        manualCounts = utils.openCSV(manualCountsPath, fn)
        epochOutputList = []
        for epochTuple in epochList:  # Epoch num, start, stop
            epochStart, epochStop = float(epochTuple[1]), float(epochTuple[2])
            # Find all of the intervals that are at least partially
            # contained within the current epoch
            epochSyllableCount = 0
            epochPhoneCount = 0
            speechDuration = 0
            for info, counts in utils.safeZip([tgInfo, manualCounts],
                                              enforceLength=True):
                start, stop = float(info[0]), float(info[1])
                syllableCount, phoneCount = float(counts[0]), float(counts[1])
                # Accounts for intervals that straddle an epoch boundary
                multiplicationFactor = percentInside(start, stop,
                                                     epochStart, epochStop)
                speechDuration += (stop - start) * multiplicationFactor
                epochSyllableCount += syllableCount * multiplicationFactor
                epochPhoneCount += phoneCount * multiplicationFactor
            epochOutputList.append("%f,%f,%f" % (epochSyllableCount,
                                                 epochPhoneCount,
                                                 speechDuration))
        with open(join(outputPath, fn), "w") as fd:
            fd.write("\n".join(epochOutputList))
| [
"659338505@qq.com"
] | 659338505@qq.com |
64d3e39bc45f23a960c5d92793d0f167df8476e7 | b366806c99ac30e77789f80417978902e25628da | /boto3_exceptions/workmail.py | ef7317ccd08714c962e809adfcdbf095c1236e52 | [
"MIT"
] | permissive | siteshen/boto3_exceptions | 9027b38c238030859572afec7f96323171596eb7 | d6174c2577c9d4b17a09a89cd0e4bd1fe555b26b | refs/heads/master | 2020-04-19T03:15:02.525468 | 2019-10-23T07:37:36 | 2019-10-23T07:37:36 | 167,928,540 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,172 | py | import boto3
# NOTE: building the client requires AWS credentials/region to be resolvable
# at import time; it is only used to grab the modeled exception classes.
exceptions = boto3.client('workmail').exceptions
# Re-export the WorkMail exception classes at module level so callers can
# catch them without constructing a client themselves.
DirectoryServiceAuthenticationFailedException = exceptions.DirectoryServiceAuthenticationFailedException
DirectoryUnavailableException = exceptions.DirectoryUnavailableException
EmailAddressInUseException = exceptions.EmailAddressInUseException
EntityAlreadyRegisteredException = exceptions.EntityAlreadyRegisteredException
EntityNotFoundException = exceptions.EntityNotFoundException
EntityStateException = exceptions.EntityStateException
InvalidConfigurationException = exceptions.InvalidConfigurationException
InvalidParameterException = exceptions.InvalidParameterException
InvalidPasswordException = exceptions.InvalidPasswordException
MailDomainNotFoundException = exceptions.MailDomainNotFoundException
MailDomainStateException = exceptions.MailDomainStateException
NameAvailabilityException = exceptions.NameAvailabilityException
OrganizationNotFoundException = exceptions.OrganizationNotFoundException
OrganizationStateException = exceptions.OrganizationStateException
ReservedNameException = exceptions.ReservedNameException
UnsupportedOperationException = exceptions.UnsupportedOperationException
| [
"xiaojiang@actwill.com.cn"
] | xiaojiang@actwill.com.cn |
270085783dfae395dfed329ee1efc4d371fe5491 | bccd16717d20d673cb514d6ac68e624c2c4dae88 | /sdk/python/pulumi_gcp/endpoints/outputs.py | bc1adc66e71939c4a0506decc6499354e78b3c93 | [
"MPL-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | permissive | dimpu47/pulumi-gcp | e78d228f7c2c929ad3e191331b75c6e4c4cc4fa9 | 38355de300a5768e11c49d344a8165ba0735deed | refs/heads/master | 2023-07-07T13:00:15.682157 | 2020-09-23T18:43:11 | 2020-09-23T18:43:11 | 173,437,663 | 0 | 0 | Apache-2.0 | 2023-07-07T01:05:58 | 2019-03-02T11:06:19 | Go | UTF-8 | Python | false | false | 5,387 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
from . import outputs
# Public API of this generated module.
__all__ = [
    'ServiceApi',
    'ServiceApiMethod',
    'ServiceEndpoint',
    'ServiceIamBindingCondition',
    'ServiceIamMemberCondition',
]
@pulumi.output_type
class ServiceApi(dict):
    """Generated output type describing an Endpoints service API.

    All fields are optional; set values are stored via pulumi.set.
    """
    def __init__(__self__, *,
                 methods: Optional[List['outputs.ServiceApiMethod']] = None,
                 name: Optional[str] = None,
                 syntax: Optional[str] = None,
                 version: Optional[str] = None):
        if methods is not None:
            pulumi.set(__self__, "methods", methods)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if syntax is not None:
            pulumi.set(__self__, "syntax", syntax)
        if version is not None:
            pulumi.set(__self__, "version", version)
    @property
    @pulumi.getter
    def methods(self) -> Optional[List['outputs.ServiceApiMethod']]:
        return pulumi.get(self, "methods")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def syntax(self) -> Optional[str]:
        return pulumi.get(self, "syntax")
    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        return pulumi.get(self, "version")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceApiMethod(dict):
    """Generated output type describing a single method of a service API."""
    def __init__(__self__, *,
                 name: Optional[str] = None,
                 request_type: Optional[str] = None,
                 response_type: Optional[str] = None,
                 syntax: Optional[str] = None):
        if name is not None:
            pulumi.set(__self__, "name", name)
        if request_type is not None:
            pulumi.set(__self__, "request_type", request_type)
        if response_type is not None:
            pulumi.set(__self__, "response_type", response_type)
        if syntax is not None:
            pulumi.set(__self__, "syntax", syntax)
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="requestType")
    def request_type(self) -> Optional[str]:
        return pulumi.get(self, "request_type")
    @property
    @pulumi.getter(name="responseType")
    def response_type(self) -> Optional[str]:
        return pulumi.get(self, "response_type")
    @property
    @pulumi.getter
    def syntax(self) -> Optional[str]:
        return pulumi.get(self, "syntax")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceEndpoint(dict):
    """Generated output type describing a service endpoint (name/address)."""
    def __init__(__self__, *,
                 address: Optional[str] = None,
                 name: Optional[str] = None):
        if address is not None:
            pulumi.set(__self__, "address", address)
        if name is not None:
            pulumi.set(__self__, "name", name)
    @property
    @pulumi.getter
    def address(self) -> Optional[str]:
        return pulumi.get(self, "address")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        return pulumi.get(self, "name")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceIamBindingCondition(dict):
    """Generated output type for an IAM binding condition
    (expression and title required, description optional)."""
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        return pulumi.get(self, "expression")
    @property
    @pulumi.getter
    def title(self) -> str:
        return pulumi.get(self, "title")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceIamMemberCondition(dict):
    """Generated output type for an IAM member condition
    (expression and title required, description optional)."""
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        return pulumi.get(self, "expression")
    @property
    @pulumi.getter
    def title(self) -> str:
        return pulumi.get(self, "title")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| [
"public@paulstack.co.uk"
] | public@paulstack.co.uk |
d8327f24f64ffc940857d0a3ef0f3bfcf4d1ceb2 | 8ca19f1a31070738b376c0370c4bebf6b7efcb43 | /office365/communications/operations/update_recording_status.py | 562e461d09952648e16c8272843d786b126ccdf4 | [
"MIT"
] | permissive | vgrem/Office365-REST-Python-Client | 2ef153d737c6ed5445ba1e446aeaec39c4ef4ed3 | cbd245d1af8d69e013c469cfc2a9851f51c91417 | refs/heads/master | 2023-09-02T14:20:40.109462 | 2023-08-31T19:14:05 | 2023-08-31T19:14:05 | 51,305,798 | 1,006 | 326 | MIT | 2023-08-28T05:38:02 | 2016-02-08T15:24:51 | Python | UTF-8 | Python | false | false | 203 | py | from office365.communications.operations.comms import CommsOperation
class UpdateRecordingStatusOperation(CommsOperation):
    """Response format of an update-recording-status action; extends CommsOperation
    without adding members of its own."""
| [
"vvgrem@gmail.com"
] | vvgrem@gmail.com |
9383aacff23eec6d52cef01ce4e047d49be74540 | 2ea5efd9ccc926e368e7132d7709f48265dae5f3 | /Codes/02_Input.py | 6290817714e68706d9ccddf2c6420490316a7f6a | [] | no_license | ravi4all/PythonWE_Morning_2020 | a843f3fde886b0be42d5f935daecada17cf4ff1c | 87d4f84ef8142ac2c7e08d70a9fab359da08287b | refs/heads/master | 2020-12-14T19:26:37.538793 | 2020-03-21T08:00:51 | 2020-03-21T08:00:51 | 234,846,451 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | # by default python takes input in strung format
# input() always returns a string, so string concatenation works directly.
name = input("Enter your name : ")
print("Hello "+name)

# Cast the string returned by input() to int before doing arithmetic.
num_1 = int(input("Enter first number : "))
num_2 = int(input("Enter second number : "))
result = num_1 + num_2
print("Sum is",result)

# Multiline print: a triple-quoted string keeps its embedded newlines.
print("""
1. Add
2. Sub
3. Mul
4. Div
""")
| [
"noreply@github.com"
] | ravi4all.noreply@github.com |
b6ce66b2673cdd275ddff44c827d484ceec0c153 | 4fe0ed5e592641b272aa2167ae591155a9cad416 | /pyqtgraph_ex/data_tree.py | 929a705c3d51eebc6dc80cf3b7130a49086f8e52 | [] | no_license | AlexandreMarcotte/test_code | cf715caee730cfdafa7cf97bd011ac15443872f3 | 07e115055befd55d4598dd8a4b33bbdd00ba6f5a | refs/heads/master | 2021-06-07T05:06:12.085390 | 2019-05-06T23:45:38 | 2019-05-06T23:45:38 | 137,810,297 | 0 | 0 | null | 2021-06-01T23:44:40 | 2018-06-18T21:50:39 | Python | UTF-8 | Python | false | false | 722 | py | # -*- coding: utf-8 -*-
"""
Simple use of DataTreeWidget to display a structure of nested dicts, lists, and arrays
"""
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
import numpy as np
# Qt application must exist before any widget is created.
app = QtGui.QApplication([])

# Sample nested structure: lists, dicts and a numpy array, to exercise the tree view.
d = {
    'list1': [1,2,3,4,5,6, {'nested1': 'aaaaa', 'nested2': 'bbbbb'}, "seven"],
    'dict1': {
        'x': 1,
        'y': 2,
        'z': 'three'
    },
    # NOTE(review): the key claims 20x20 but the array is 10x10 — label mismatch; confirm intent.
    'array1 (20x20)': np.ones((10,10))
}

tree = pg.DataTreeWidget(data=d)
tree.show()
tree.setWindowTitle('pyqtgraph example: DataTreeWidget')
tree.resize(600,600)

# Start the Qt event loop only when run as a script in a non-interactive session.
if __name__ == '__main__':
    import sys
    if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
        QtGui.QApplication.instance().exec_()
"alexandre.marcotte.1094@gmail.com"
] | alexandre.marcotte.1094@gmail.com |
9979efac15eedc30be1b3f32a462d0c9599d248b | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/eventgrid/get_domain_topic.py | cbdaf3633482a6ab8964a94e6381f42ff672dead | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 4,056 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetDomainTopicResult',
'AwaitableGetDomainTopicResult',
'get_domain_topic',
]
@pulumi.output_type
class GetDomainTopicResult:
    """
    Domain Topic.
    """
    def __init__(__self__, id=None, name=None, provisioning_state=None, system_data=None, type=None):
        # Defensive type validation for values coming back from the RPC layer.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified identifier of the resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the resource.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        Provisioning state of the domain topic.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        The system metadata relating to Domain Topic resource.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Type of the resource.
        """
        return pulumi.get(self, "type")
class AwaitableGetDomainTopicResult(GetDomainTopicResult):
    """Awaitable variant: lets callers `await` the result object directly."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns this method into a generator, which is
        # what makes the object awaitable; it resolves immediately to a copy.
        if False:
            yield self
        return GetDomainTopicResult(
            id=self.id,
            name=self.name,
            provisioning_state=self.provisioning_state,
            system_data=self.system_data,
            type=self.type)
def get_domain_topic(domain_name: Optional[str] = None,
                     domain_topic_name: Optional[str] = None,
                     resource_group_name: Optional[str] = None,
                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDomainTopicResult:
    """
    Domain Topic.
    API Version: 2020-06-01.

    :param str domain_name: Name of the domain.
    :param str domain_topic_name: Name of the topic.
    :param str resource_group_name: The name of the resource group within the user's subscription.
    :param pulumi.InvokeOptions opts: Options controlling the invoke; a default is created when omitted.
    """
    __args__ = dict()
    __args__['domainName'] = domain_name
    __args__['domainTopicName'] = domain_topic_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous provider invoke; the typed result is unpacked field-by-field below.
    __ret__ = pulumi.runtime.invoke('azure-native:eventgrid:getDomainTopic', __args__, opts=opts, typ=GetDomainTopicResult).value

    return AwaitableGetDomainTopicResult(
        id=__ret__.id,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        system_data=__ret__.system_data,
        type=__ret__.type)
| [
"noreply@github.com"
] | morrell.noreply@github.com |
2933cb872811c4d984c9ae6d1e0c0d305ec26d27 | fc0eda8560a26c88b790d236070ed0559d0dc4a4 | /leetcode/basicDS06_tree/b04_lc105_build_tree.py | 72aa63933b0ddc9b7bc22fb7b3cf578bb94f79e5 | [] | no_license | pankypan/DataStructureAndAlgo | b4bd417a16cdb594bbed2ca0220dbd63eb60f3c1 | 6c5d40d57d378994236549f8dea906c75121eadf | refs/heads/master | 2021-08-03T01:22:08.442709 | 2021-07-19T14:56:44 | 2021-07-19T14:56:44 | 279,599,190 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | # https://leetcode-cn.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/
from typing import List
class TreeNode:
    """Binary-tree node: a value plus left/right child links (initially absent)."""
    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class Solution:
    def buildTree(self, preorder: List[int], inorder: List[int]) -> TreeNode:
        """Rebuild a binary tree from its preorder and inorder traversals.

        Assumes node values are unique (required for the ``inorder.index``
        lookup). Returns None for empty input.
        """
        if not preorder:
            return None
        root_val = preorder[0]
        root = TreeNode(root_val)
        # Locate the root in `inorder` ONCE (the original called .index four
        # times per node). Everything left of idx is the left subtree (size
        # idx), everything right of it the right subtree.
        idx = inorder.index(root_val)
        root.left = self.buildTree(preorder[1:idx + 1], inorder[:idx])
        root.right = self.buildTree(preorder[idx + 1:], inorder[idx + 1:])
        return root
return root
if __name__ == '__main__':
    # No demo/driver code; module is intended to be imported.
    pass
| [
"1356523334@qq.com"
] | 1356523334@qq.com |
84049e1555a83e8bb2650ff6e600098232396841 | 271812d493b7e23550cbc17490d48a882c3179b1 | /brubeck/timekeeping.py | 8ba9e2e3df86fff907271bcd9a853c743de493b3 | [] | no_license | droot/brubeck | 69a5d86fc996ba367bda8d2bc9eb1075a36701fd | 35a44d62d590a5108eb4d5ba2f9f15f780cb99ce | refs/heads/master | 2021-01-16T22:49:46.263829 | 2011-09-06T14:18:49 | 2011-09-06T14:18:49 | 2,334,831 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,181 | py | import time
from datetime import datetime
from dateutil.parser import parse
from dictshield.fields import LongField
###
### Main Time Function
###
def curtime():
    """This function is the central method for getting the current time. It
    represents the time in milliseconds and the timezone is UTC.
    """
    # Python 2 ``long``; under Python 3 this name no longer exists — confirm
    # the target runtime before porting.
    return long(time.time() * 1000)
###
### Converstion Helpers
###
def datestring_to_millis(ds):
    """Takes a string representing the date and converts it to milliseconds
    since epoch.
    """
    # `parse` comes from dateutil (module-level import): fuzzy date-string parsing.
    dt = parse(ds)
    return datetime_to_millis(dt)
def datetime_to_millis(dt):
    """Convert a datetime instance to milliseconds since the epoch.

    Uses time.mktime, so naive datetimes are interpreted in local time.
    """
    epoch_seconds = time.mktime(dt.timetuple())
    return epoch_seconds * 1000  # milliseconds
def millis_to_datetime(ms):
    """Return the datetime equivalent of `ms` milliseconds since the epoch."""
    return datetime.fromtimestamp(ms / 1000.0)
###
### Neckbeard date parsing (fuzzy!)
###
def prettydate(d):
    """Human-friendly relative-time string for a naive UTC datetime.

    Adapted from
    http://stackoverflow.com/questions/410221/natural-relative-days-in-python
    """
    delta = datetime.utcnow() - d
    days = delta.days
    secs = delta.seconds
    # Far past (or future) dates fall back to an absolute date string.
    if days > 7 or days < 0:
        return d.strftime('%d %b %y')
    if days == 1:
        return '1 day ago'
    if days > 1:
        return '{} days ago'.format(days)
    # Same-day buckets, coarsest last.
    if secs <= 1:
        return 'just now'
    if secs < 60:
        return '{} seconds ago'.format(secs)
    if secs < 120:
        return '1 minute ago'
    if secs < 3600:
        return '{} minutes ago'.format(secs/60)
    if secs < 7200:
        return '1 hour ago'
    return '{} hours ago'.format(secs/3600)
###
### Custom DictShield Field
###
class MillisecondField(LongField):
    """High precision time field.
    """
    def __set__(self, instance, value):
        """__set__ is overriden to allow accepting date strings as input.
        dateutil is used to parse strings into milliseconds.
        """
        # Python 2 idiom (`unicode`); strings are converted, everything else is
        # stored as-is under LongField semantics.
        if isinstance(value, (str, unicode)):
            value = datestring_to_millis(value)
        instance._data[self.field_name] = value
| [
"jd@j2labs.net"
] | jd@j2labs.net |
4e3e1bd04cab8cfc321de73eaf5575d9c21fcb92 | 6b4f38370ce1126a7f74e13c2012ab238a01df93 | /azure-mgmt-sql/azure/mgmt/sql/operations/recoverable_databases_operations.py | d6bf1308fcf3724fd6009628fc5a06a4b35b5127 | [
"MIT"
] | permissive | action/azure-sdk-for-python | 52d8a278bfb2fbc9c7e11297e3bd21c604f906b1 | f06553e45451f065c87ee9ed503ac4be81e64a71 | refs/heads/master | 2020-12-03T02:13:52.566291 | 2017-06-30T18:42:49 | 2017-06-30T18:42:49 | 95,917,797 | 1 | 0 | null | 2017-06-30T19:25:58 | 2017-06-30T19:25:58 | null | UTF-8 | Python | false | false | 7,933 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class RecoverableDatabasesOperations(object):
    """RecoverableDatabasesOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: The API version to use for the request. Constant value: "2014-04-01".
    """

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Fixed API version for this operation group; not configurable per call.
        self.api_version = "2014-04-01"

        self.config = config

    def get(
            self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
        """Gets a Recoverable Database, which is a resource representing a
        database's Geo backup.

        :param resource_group_name: The name of the resource group that
         contains the resource. You can obtain this value from the Azure
         Resource Manager API or the portal.
        :type resource_group_name: str
        :param server_name: The name of the server.
        :type server_name: str
        :param database_name: The name of the database
        :type database_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RecoverableDatabase
         <azure.mgmt.sql.models.RecoverableDatabase>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/recoverableDatabases/{databaseName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serverName': self._serialize.url("server_name", server_name, 'str'),
            'databaseName': self._serialize.url("database_name", database_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Unique per-request id for server-side tracing/correlation.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('RecoverableDatabase', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def list_by_server(
            self, resource_group_name, server_name, custom_headers=None, raw=False, **operation_config):
        """Returns a list of Recoverable Databases.

        :param resource_group_name: The name of the resource group that
         contains the resource. You can obtain this value from the Azure
         Resource Manager API or the portal.
        :type resource_group_name: str
        :param server_name: The name of the server.
        :type server_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RecoverableDatabasePaged
         <azure.mgmt.sql.models.RecoverableDatabasePaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Paging callback: invoked by the Paged collection, first with no
        # next_link (initial request) and then with each follow-up link.
        def internal_paging(next_link=None, raw=False):

            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/recoverableDatabases'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'serverName': self._serialize.url("server_name", server_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                # Follow-up pages: the server-provided link already embeds the query.
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        deserialized = models.RecoverableDatabasePaged(internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = models.RecoverableDatabasePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
ee2019bff22f3d1aaa540ff78f7cad8ea8ce1e34 | 461c02a8aa79654dc85a1750a29661c95f2c3939 | /src/devilry/devilry/apps/developertools/tests.py | 948fdd077cfa007c11b86aa97a2d172767148ef6 | [] | no_license | espenak/devilry-django | 13bfdc5625218a453c336f296aff6a22d18ae03f | 0e033ebf44c03d864d0457918cf221cfcc704652 | refs/heads/master | 2021-01-23T21:11:01.651800 | 2013-08-05T13:01:23 | 2013-08-05T13:01:23 | 1,915,118 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | import unittest
from .management.commands.dev_build_extjs import SimpleJsFile
class TestJsFile(unittest.TestCase):
def testMultilineCommentPattern(self):
self.assertEquals(SimpleJsFile.MULTILINE_COMMENT_PATT.sub('', 'hello /**some \n\n comments \n\t here*/world'),
'hello world')
def testSingleLineCommentPattern(self):
self.assertEquals(SimpleJsFile.SINGLELINE_COMMENT_PATT.sub('', 'hello // this is a test\nworld'),
'hello \nworld')
| [
"post@espenak.net"
] | post@espenak.net |
b1210e5ee7ae62fa2c2d7ee61167ac2dc2fc80cf | 2f55769e4d6bc71bb8ca29399d3809b6d368cf28 | /Miniconda2/Lib/site-packages/sklearn/neighbors/setup.py | 92a2feb67ef0ea142e80503de7cee7e03cb2ba54 | [] | no_license | jian9695/GSV2SVF | e5ec08b2d37dbc64a461449f73eb7388de8ef233 | 6ed92dac13ea13dfca80f2c0336ea7006a6fce87 | refs/heads/master | 2023-03-02T03:35:17.033360 | 2023-02-27T02:01:48 | 2023-02-27T02:01:48 | 199,570,103 | 9 | 16 | null | 2022-10-28T14:31:05 | 2019-07-30T03:47:41 | Python | UTF-8 | Python | false | false | 1,300 | py | import os
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the ``neighbors`` subpackage."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('neighbors', parent_package, top_path)

    # libm is only linked on POSIX platforms.
    libraries = ['m'] if os.name == 'posix' else []

    numpy_include = numpy.get_include()

    # ball_tree and kd_tree share an identical extension recipe.
    for module in ('ball_tree', 'kd_tree'):
        config.add_extension(module,
                             sources=[module + '.pyx'],
                             include_dirs=[numpy_include],
                             libraries=libraries)

    # dist_metrics additionally needs numpy's private headers.
    config.add_extension('dist_metrics',
                         sources=['dist_metrics.pyx'],
                         include_dirs=[numpy_include,
                                       os.path.join(numpy_include, 'numpy')],
                         libraries=libraries)

    config.add_extension('typedefs',
                         sources=['typedefs.pyx'],
                         include_dirs=[numpy_include],
                         libraries=libraries)

    config.add_subpackage('tests')

    return config
| [
"JLiang@esri.com"
] | JLiang@esri.com |
102e52e69cd85959fcc145cd9158976a012b9b5a | 40132307c631dccbf7aa341eb308f69389715c73 | /OLD/idmt/maya/ZoomWhiteDolphin/zm_batchrender.py | 202ca2a0e7840da7848d6ffed9d438ffb477b17d | [] | no_license | Bn-com/myProj_octv | be77613cebc450b1fd6487a6d7bac991e3388d3f | c11f715996a435396c28ffb4c20f11f8e3c1a681 | refs/heads/master | 2023-03-25T08:58:58.609869 | 2021-03-23T11:17:13 | 2021-03-23T11:17:13 | 348,676,742 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,607 | py | # -*- coding: utf-8 -*-
'''
Created on 2013-8-5
@author: liangyu
'''
import maya.cmds as mc
import maya.mel as mel
def zm_batchFX():
    """Import the baked shot camera and the shot's wave/beach FX scene, build the
    fx_waveBeach render layer, and apply render settings (Maya side effects only)."""
    # Import the baked shot camera
    from idmt.maya.py_common import sk_infoConfig
    reload(sk_infoConfig)
    shotInfo = sk_infoConfig.sk_infoConfig().checkShotInfo()
    camFile = '//file-cluster/GDC/Projects/ZoomWhiteDolphin/Project/scenes/Animation/episode_'+shotInfo[1]+'/episode_camera/zm_'+shotInfo[1]+'_'+shotInfo[2]+'_cam.ma'
    cam='cam_'+shotInfo[1]+'_'+shotInfo[2]+'_baked'
    if mc.ls(cam):
        mc.delete(cam)
    try:
        mc.file(camFile, i=1)
    except:
        print u'请确保有_bake相机'
        mc.error(u'请确保有_bake相机')

    # Import the wave/beach FX scene file matching this shot
    from idmt.maya.ZoomWhiteDolphin import zm_AutoRenderLayer_ZoomWhiteDolphin
    reload(zm_AutoRenderLayer_ZoomWhiteDolphin)
    shot_info=zm_AutoRenderLayer_ZoomWhiteDolphin.zmRLConfig().ReadEXcle()
    shot_sence=shot_info[4]
    filepath='Z:/Projects/ZoomWhiteDolphin/ZoomWhiteDolphin_Scratch/VFX/waveBeachAutoRender/WavesBeach/'
    sign=''
    fileslist=mc.getFileList(folder= filepath)
    if fileslist:
        for list in fileslist:
            if list.split(".")[0]==shot_sence:
                sign=list.split(".")[0]
    if mc.ls('waveBeach'):
        mc.delete('waveBeach')
    if sign:
        objFile='Z:/Projects/ZoomWhiteDolphin/ZoomWhiteDolphin_Scratch/VFX/waveBeachAutoRender/WavesBeach/'+shot_sence+'.mb'
        mc.file(objFile, i=1)
    else:
        print u'请确认文件夹下有相应镜头的场景文件'

    # Build the render layer from the imported waveBeach nodes
    layerName = 'fx_waveBeach'
    rlObjs=mc.ls('waveBeach')
    print rlObjs
    if mc.ls(layerName):
        mc.delete(layerName)
    if rlObjs:
        mc.createRenderLayer(rlObjs, name=layerName, noRecurse=1, makeCurrent=1)
    else:
        print u'没有渲染物体'

    # Render settings: make the baked camera renderable, disable persp
    zm_AutoRenderLayer_ZoomWhiteDolphin.zmRLConfig().zmRLCommonConfig()
    zm_AutoRenderLayer_ZoomWhiteDolphin.zmRLConfig().mentalRayProductionLevel()
    camShape = mc.listRelatives(mc.ls(cam, type='transform')[0], ni=1, s=1)[0]
    if mc.ls(cam, type='transform'):
        mc.setAttr((camShape + '.renderable'), 1)
    try:
        mc.setAttr(('perspShape.renderable'), 0)
    except:
        pass

    # Switch back to the MASTER (default) render layer and mark it non-renderable
    mel.eval('editRenderLayerGlobals -currentRenderLayer "defaultRenderLayer"')
    mc.setAttr("defaultRenderLayer.renderable", 0)
def zmbatchFXwautoCreate():
    """Run zm_batchFX, save the scene under the render-layer naming convention,
    then submit the check-in via the zwMusterCheckin2 MEL command."""
    print ('=================================================================')
    print '====================!!!Start AutoRenderLayer!!!===================='
    from idmt.maya.ZoomWhiteDolphin import zm_batchrender
    reload(zm_batchrender)
    zm_batchrender.zm_batchFX()

    # Save the scene to the local render-layer path with the standard file name
    from idmt.maya.py_common import sk_infoConfig
    reload(sk_infoConfig)
    shotInfo = sk_infoConfig.sk_infoConfig().checkShotInfo()
    pathLocal = sk_infoConfig.sk_infoConfig().checkRenderLayerLocalPath()
    fileName = pathLocal + shotInfo[0] + '_' + shotInfo[1] + '_' + shotInfo[2]
    fileType = '_render_ef_c001.mb'
    fileName = fileName + fileType
    mc.file(rename=fileName)
    mc.file(save=1)
    mel.eval('zwMusterCheckin2 "" "" 0 0 0;')
    print '=======================!!!All Done!!!======================='
    print ('===========================================================')
| [
"snakelonely@outlook.com"
] | snakelonely@outlook.com |
594f066131d1f193284788d929b0b60281ba0d9e | 54020c34ed23a086e2bfbfb874a400f3a11293cb | /ST_vars_and_ic.py | 4761d35359cfcd43ec36389e847eb4c71dc719e3 | [] | no_license | ModelDBRepository/187605 | bd26e6b9af407f157a1d9baae42a9d4fe6157a20 | 739605333f3768f9684b217f52a375e6ddd32bb1 | refs/heads/master | 2020-05-29T18:27:58.771998 | 2019-05-31T04:56:54 | 2019-05-31T04:56:54 | 189,300,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,243 | py | # -*- coding: utf-8 -*-
# (c) 2016 - Ilya Prokin - isprokin@gmail.com - https://sites.google.com/site/ilyaprokin
# INRIA Rhone-Alpes
# STDP model : variables' names and initial values
import numpy as np
# Initial conditions for the STDP model's state variables, keyed by name.
ST_vars_and_ic = {
    'h_caL13': 0.99182137906713796,
    'm_caL13': 0.0036977671871038817,
    'o_AMPA': 0.0,
    'd_AMPA': 0.0,
    'o_NMDA': 0.0,
    'h_CICR': 0.82466766689469506,
    'Ca_cyt': 0.12132718966407073,
    'Ca_ER': 63.348087686853646,
    'IP3': 0.057291400446753571,
    'DAG': 0.005734867663641929,
    'DAGLP': 4.1969621599776083e-07,
    'twoAG': 3.2085896623941232e-06,
    'AEA': 0.0061033848099783438,
    'fpre': 1.0,
    'I1P': 0.042380592866431144,
    'PP1': 0.00093939509311232795,
    'V': -69.999016204528218,
    'o_CB1R': 3.4373437854140236e-07,
    'd_CB1R': 0.002994487796947427
}

# Total number of ODE states: the named variables above plus the 13 CaMKII states.
NEQ = len(ST_vars_and_ic)+13

# Initial conditions for the 13 CaMKII states (order matches the model's layout).
CaMKII_ic=np.array([0.23316029213700182, 0.0034298074889746086, 0.00028889779878196254, 0.00013756133483052541, 3.6365976788029681e-05, 4.1274017451676494e-06, 4.2498580055485264e-06, 1.2513942987290664e-07, 3.2696082960591099e-07, 4.5484170099234244e-08, 3.078127923587743e-08, 2.7970211543431621e-09, 1.3221817318283754e-11])

# Map a variable name to its index in the full state vector (offset by the 13
# CaMKII states). NOTE(review): `SK` is bound to .keys() at definition time; this
# relies on Python 2 list semantics — dict_keys has no .index() in Python 3.
si = lambda x, SK=ST_vars_and_ic.keys(): 13+SK.index(x)
| [
"tom.morse@yale.edu"
] | tom.morse@yale.edu |
a9992f4f561abbeb7842d4945ff18b0ff57b5493 | 0b8575873d696562c8279c9361433d38626bc6cc | /qa/rpc-tests/python-bogcoinrpc/bogcoinrpc/authproxy.py | e686f50b95493df4ad6b8326839167e87f4e7056 | [
"MIT"
] | permissive | colombiacoin/newbogcoin | 1be9b8f432a1281457b33a0313cef8f0127c3ea8 | d36069738e7e514b3f6d8c5b58bd4692b70b1a2e | refs/heads/master | 2020-07-24T15:27:04.311029 | 2019-09-13T12:16:24 | 2019-09-13T12:16:24 | 207,967,982 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,784 | py |
"""
Copyright 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("BogcoinRPC")
class JSONRPCException(Exception):
    """Raised when the JSON-RPC server returns an error object; the raw error
    dict is kept on ``self.error``."""
    def __init__(self, rpc_error):
        Exception.__init__(self)
        self.error = rpc_error
def EncodeDecimal(o):
    """``json.dumps(default=...)`` hook: encode Decimal amounts, reject others.

    Rounds to 8 decimal places and returns a float so the json encoder can
    serialize it. The original returned the rounded Decimal itself, which the
    json module still cannot serialize, so encoding any Decimal failed.

    Raises TypeError for non-Decimal objects, per the ``default`` contract.
    """
    if isinstance(o, decimal.Decimal):
        # round() on a Decimal yields a Decimal; float() makes it JSON-encodable.
        return float(round(o, 8))
    raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy(object):
    """JSON-RPC client proxy with HTTP Basic auth and a persistent connection.

    Attribute access returns a child proxy bound to that method name (see
    __getattr__), so ``proxy.getinfo()`` issues the ``getinfo`` RPC.
    """
    # Class-level counter shared by all proxies; used as the JSON-RPC request id.
    __id_count = 0

    def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
        self.__service_url = service_url
        self.__service_name = service_name
        self.__url = urlparse.urlparse(service_url)
        if self.__url.port is None:
            port = 80
        else:
            port = self.__url.port
        (user, passwd) = (self.__url.username, self.__url.password)
        # Py2/py3 compat: encode only if the credential is a text string.
        # NOTE(review): if the URL has no user:pass these stay None and the
        # b':' concatenation below raises — confirm URLs always embed credentials.
        try:
            user = user.encode('utf8')
        except AttributeError:
            pass
        try:
            passwd = passwd.encode('utf8')
        except AttributeError:
            pass
        authpair = user + b':' + passwd
        self.__auth_header = b'Basic ' + base64.b64encode(authpair)

        if connection:
            # Callables re-use the connection of the original proxy
            self.__conn = connection
        elif self.__url.scheme == 'https':
            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
                                                  None, None, False,
                                                  timeout)
        else:
            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
                                                 False, timeout)

    def __getattr__(self, name):
        """Return a child proxy for ``name`` (dotted names accumulate)."""
        if name.startswith('__') and name.endswith('__'):
            # Python internal stuff
            raise AttributeError
        if self.__service_name is not None:
            name = "%s.%s" % (self.__service_name, name)
        return AuthServiceProxy(self.__service_url, name, connection=self.__conn)

    def __call__(self, *args):
        """Issue the JSON-RPC call for the bound service name and return its result."""
        AuthServiceProxy.__id_count += 1

        log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self.__service_name,
                                 json.dumps(args, default=EncodeDecimal)))
        postdata = json.dumps({'version': '1.1',
                               'method': self.__service_name,
                               'params': args,
                               'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
        self.__conn.request('POST', self.__url.path, postdata,
                            {'Host': self.__url.hostname,
                             'User-Agent': USER_AGENT,
                             'Authorization': self.__auth_header,
                             'Content-type': 'application/json'})

        response = self._get_response()
        if response['error'] is not None:
            raise JSONRPCException(response['error'])
        elif 'result' not in response:
            raise JSONRPCException({
                'code': -343, 'message': 'missing JSON-RPC result'})
        else:
            return response['result']

    def _batch(self, rpc_call_list):
        """POST a pre-built list of JSON-RPC call dicts as one batch request."""
        postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal)
        log.debug("--> "+postdata)
        self.__conn.request('POST', self.__url.path, postdata,
                            {'Host': self.__url.hostname,
                             'User-Agent': USER_AGENT,
                             'Authorization': self.__auth_header,
                             'Content-type': 'application/json'})

        return self._get_response()

    def _get_response(self):
        """Read and JSON-decode the pending HTTP response; floats become Decimal."""
        http_response = self.__conn.getresponse()
        if http_response is None:
            raise JSONRPCException({
                'code': -342, 'message': 'missing HTTP response from server'})

        responsedata = http_response.read().decode('utf8')
        response = json.loads(responsedata, parse_float=decimal.Decimal)
        if "error" in response and response["error"] is None:
            log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal)))
        else:
            log.debug("<-- "+responsedata)
        return response
| [
"rishabhshukla@opulasoft.com"
] | rishabhshukla@opulasoft.com |
45202ab545dae9546344b844321a47b93b3fe7ab | 5bc369d49b16bc46e23b76621144223dc4226997 | /model/valueset.py | 204d668f16ce93ac8d577d91ca0b5aedbefd6d55 | [
"MIT"
] | permissive | beda-software/fhir-py-experements | 90d8e802f92f9e691d47d6ea4b33fda47957383a | 363cfb894fa6f971b9be19340cae1b0a3a4377d8 | refs/heads/master | 2022-12-17T05:19:59.294901 | 2020-02-26T03:54:13 | 2020-02-26T03:54:13 | 241,292,789 | 0 | 0 | MIT | 2022-12-08T03:38:55 | 2020-02-18T06:53:02 | Python | UTF-8 | Python | false | false | 6,429 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.1-9346c8cc45 (http://hl7.org/fhir/StructureDefinition/ValueSet) on 2020-02-03.
# 2020, SMART Health IT.
import sys
from dataclasses import dataclass, field
from typing import ClassVar, Optional, List
from .backboneelement import BackboneElement
from .codeableconcept import CodeableConcept
from .coding import Coding
from .contactdetail import ContactDetail
from .domainresource import DomainResource
from .fhirdate import FHIRDate
from .identifier import Identifier
from .usagecontext import UsageContext
@dataclass
class ValueSetExpansionParameter(BackboneElement):
    """ Parameter that controlled the expansion process.

    A parameter that controlled the expansion process. These parameters may be
    used by users of expanded value sets to check whether the expansion is
    suitable for a particular purpose, or to pick the correct expansion.
    """
    resource_type: ClassVar[str] = "ValueSetExpansionParameter"
    name: str = None
    # The value* fields form a FHIR choice group: at most one carries the value
    # (metadata one_of_many='value').
    valueString: Optional[str] = field(default=None, metadata=dict(one_of_many='value',))
    valueBoolean: Optional[bool] = field(default=None, metadata=dict(one_of_many='value',))
    valueInteger: Optional[int] = field(default=None, metadata=dict(one_of_many='value',))
    valueDecimal: Optional[float] = field(default=None, metadata=dict(one_of_many='value',))
    valueUri: Optional[str] = field(default=None, metadata=dict(one_of_many='value',))
    valueCode: Optional[str] = field(default=None, metadata=dict(one_of_many='value',))
    valueDateTime: Optional[FHIRDate] = field(default=None, metadata=dict(one_of_many='value',))
@dataclass
class ValueSetExpansionContains(BackboneElement):
    """ Codes in the value set.

    The codes that are contained in the value set expansion.
    """
    resource_type: ClassVar[str] = "ValueSetExpansionContains"
    system: Optional[str] = None
    abstract: Optional[bool] = None
    inactive: Optional[bool] = None
    version: Optional[str] = None
    code: Optional[str] = None
    display: Optional[str] = None
    designation: Optional[List["ValueSetComposeIncludeConceptDesignation"]] = None
    # Recursive: an expansion entry may nest further contained codes.
    contains: Optional[List["ValueSetExpansionContains"]] = None
@dataclass
class ValueSetComposeIncludeConceptDesignation(BackboneElement):
    """ Additional representations for this concept.
    Additional representations for this concept when used in this value set -
    other languages, aliases, specialized purposes, used for particular
    purposes, etc.
    """
    resource_type: ClassVar[str] = "ValueSetComposeIncludeConceptDesignation"
    language: Optional[str] = None
    use: Optional[Coding] = None
    # The designation text itself. Required by FHIR; None default only for
    # dataclass field ordering.
    value: str = None
@dataclass
class ValueSetComposeIncludeConcept(BackboneElement):
    """ A concept defined in the system.
    Specifies a concept to be included or excluded.
    """
    resource_type: ClassVar[str] = "ValueSetComposeIncludeConcept"
    # The code to include/exclude. Required by FHIR; None default only for
    # dataclass field ordering.
    code: str = None
    display: Optional[str] = None
    designation: Optional[List[ValueSetComposeIncludeConceptDesignation]] = None
@dataclass
class ValueSetComposeIncludeFilter(BackboneElement):
    """ Select codes/concepts by their properties (including relationships).
    Select concepts by specify a matching criterion based on the properties
    (including relationships) defined by the system, or on filters defined by
    the system. If multiple filters are specified, they SHALL all be true.
    """
    resource_type: ClassVar[str] = "ValueSetComposeIncludeFilter"
    # All three parts of a filter are required by FHIR; the None defaults only
    # satisfy dataclass field ordering.
    property: str = None
    op: str = None
    value: str = None
@dataclass
class ValueSetComposeInclude(BackboneElement):
    """ Include one or more codes from a code system or other value set(s).
    """
    resource_type: ClassVar[str] = "ValueSetComposeInclude"
    system: Optional[str] = None
    version: Optional[str] = None
    # Explicit concepts, property filters, or referenced value sets; per the
    # FHIR definition these are alternative ways of selecting codes.
    concept: Optional[List[ValueSetComposeIncludeConcept]] = None
    filter: Optional[List[ValueSetComposeIncludeFilter]] = None
    valueSet: Optional[List[str]] = None
@dataclass
class ValueSetCompose(BackboneElement):
    """ Content logical definition of the value set (CLD).
    A set of criteria that define the contents of the value set by including or
    excluding codes selected from the specified code system(s) that the value
    set draws from. This is also known as the Content Logical Definition (CLD).
    """
    resource_type: ClassVar[str] = "ValueSetCompose"
    lockedDate: Optional[FHIRDate] = None
    inactive: Optional[bool] = None
    # At least one include is required by FHIR, hence the non-Optional type
    # with an empty-list factory.
    include: List[ValueSetComposeInclude] = field(default_factory=list)
    exclude: Optional[List[ValueSetComposeInclude]] = None
@dataclass
class ValueSetExpansion(BackboneElement):
    """ Used when the value set is "expanded".
    A value set can also be "expanded", where the value set is turned into a
    simple collection of enumerated codes. This element holds the expansion, if
    it has been performed.
    """
    resource_type: ClassVar[str] = "ValueSetExpansion"
    identifier: Optional[str] = None
    # When the expansion was produced. Required by FHIR; None default only for
    # dataclass field ordering.
    timestamp: FHIRDate = None
    total: Optional[int] = None
    offset: Optional[int] = None
    parameter: Optional[List[ValueSetExpansionParameter]] = None
    contains: Optional[List[ValueSetExpansionContains]] = None
@dataclass
class ValueSet(DomainResource):
    """ A set of codes drawn from one or more code systems.
    A ValueSet resource instance specifies a set of codes drawn from one or
    more code systems, intended for use in a particular context. Value sets
    link between [CodeSystem](codesystem.html) definitions and their use in
    [coded elements](terminologies.html).
    """
    resource_type: ClassVar[str] = "ValueSet"
    url: Optional[str] = None
    identifier: Optional[List[Identifier]] = None
    version: Optional[str] = None
    name: Optional[str] = None
    title: Optional[str] = None
    # Publication status code. Required by FHIR; None default only for
    # dataclass field ordering.
    status: str = None
    experimental: Optional[bool] = None
    date: Optional[FHIRDate] = None
    publisher: Optional[str] = None
    contact: Optional[List[ContactDetail]] = None
    description: Optional[str] = None
    useContext: Optional[List[UsageContext]] = None
    jurisdiction: Optional[List[CodeableConcept]] = None
    immutable: Optional[bool] = None
    purpose: Optional[str] = None
    copyright: Optional[str] = None
    # Definition (compose) and, on the following field, the computed
    # expansion of the value set.
    compose: Optional[ValueSetCompose] = None
expansion: Optional[ValueSetExpansion] = None | [
"ir4y.ix@gmail.com"
] | ir4y.ix@gmail.com |
f904c5c7289553d7bb7d0f03975aa6101230c77f | bfbe642d689b5595fc7a8e8ae97462c863ba267a | /bin/Python27/Lib/site-packages/scipy-0.14.0-py2.7-win32.egg/scipy/sparse/linalg/isolve/tests/test_lsmr.py | 519d4498fffe9e1262b2432c530da07a2eddc333 | [
"MIT",
"LicenseRef-scancode-other-permissive"
] | permissive | mcanthony/meta-core | 0c0a8cde1669f749a4880aca6f816d28742a9c68 | 3844cce391c1e6be053572810bad2b8405a9839b | refs/heads/master | 2020-12-26T03:11:11.338182 | 2015-11-04T22:58:13 | 2015-11-04T22:58:13 | 45,806,011 | 1 | 0 | null | 2015-11-09T00:34:22 | 2015-11-09T00:34:22 | null | UTF-8 | Python | false | false | 4,754 | py | """
Copyright (C) 2010 David Fong and Michael Saunders
Distributed under the same license as Scipy
Testing Code for LSMR.
03 Jun 2010: First version release with lsmr.py
David Chin-lung Fong clfong@stanford.edu
Institute for Computational and Mathematical Engineering
Stanford University
Michael Saunders saunders@stanford.edu
Systems Optimization Laboratory
Dept of MS&E, Stanford University.
"""
from __future__ import division, print_function, absolute_import
from numpy import arange, eye, zeros, ones, sqrt, transpose, hstack
from numpy.linalg import norm
from numpy.testing import run_module_suite, assert_almost_equal
from scipy.sparse import coo_matrix
from scipy.sparse.linalg.interface import aslinearoperator
from scipy.sparse.linalg import lsmr
class TestLSMR:
def setUp(self):
self.n = 10
self.m = 10
def assertCompatibleSystem(self, A, xtrue):
Afun = aslinearoperator(A)
b = Afun.matvec(xtrue)
x = lsmr(A, b)[0]
assert_almost_equal(norm(x - xtrue), 0, decimal=5)
def testIdentityACase1(self):
A = eye(self.n)
xtrue = zeros((self.n, 1))
self.assertCompatibleSystem(A, xtrue)
def testIdentityACase2(self):
A = eye(self.n)
xtrue = ones((self.n,1))
self.assertCompatibleSystem(A, xtrue)
def testIdentityACase3(self):
A = eye(self.n)
xtrue = transpose(arange(self.n,0,-1))
self.assertCompatibleSystem(A, xtrue)
def testBidiagonalA(self):
A = lowerBidiagonalMatrix(20,self.n)
xtrue = transpose(arange(self.n,0,-1))
self.assertCompatibleSystem(A,xtrue)
class TestLSMRReturns:
    """Check the diagnostic values returned by lsmr against direct formulas."""

    def setUp(self):
        self.n = 10
        self.A = lowerBidiagonalMatrix(20, self.n)
        self.xtrue = transpose(arange(self.n, 0, -1))
        self.Afun = aslinearoperator(self.A)
        self.b = self.Afun.matvec(self.xtrue)
        self.returnValues = lsmr(self.A, self.b)

    def testNormr(self):
        # normr must equal the true residual norm ||b - A x||.
        x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
        residual = self.b - self.Afun.matvec(x)
        assert_almost_equal(normr, norm(residual))

    def testNormar(self):
        # normar must equal ||A^T (b - A x)||.
        x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
        residual = self.b - self.Afun.matvec(x)
        assert_almost_equal(normar, norm(self.Afun.rmatvec(residual)))

    def testNormx(self):
        # normx must equal ||x||.
        x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
        assert_almost_equal(normx, norm(x))
def lowerBidiagonalMatrix(m, n):
    """Build the leading m*n submatrix of a lower-bidiagonal test matrix.

    The underlying pattern has 1, 2, 3, ... on the main diagonal and
    1, 2, 3, ... on the first subdiagonal:

        A = [ 1
              1 2
                2 3
                  3 4
                      ...
                        n ]

    suitably padded by zeros.

    04 Jun 2010: First version for distribution with lsmr.py
    """
    # Number of entries on the main diagonal and on the subdiagonal.
    diag_count = min(m, n)
    sub_count = m - 1 if m <= n else n
    diag_idx = arange(diag_count, dtype=int)
    sub_idx = arange(sub_count, dtype=int)
    # Main-diagonal entry (i, i) holds i+1; subdiagonal entry (i+1, i)
    # also holds i+1.
    row = hstack((diag_idx, sub_idx + 1))
    col = hstack((diag_idx, sub_idx))
    data = hstack((diag_idx + 1.0, sub_idx + 1.0))
    return coo_matrix((data, (row, col)), shape=(m, n))
def lsmrtest(m, n, damp):
    """Verbose testing of lsmr.

    Builds the m*n lower-bidiagonal system with known solution
    xtrue = [n, n-1, ..., 1], runs lsmr with show=1, then prints the head
    and tail of the computed x and the estimated vs. true residual norms.
    """
    A = lowerBidiagonalMatrix(m,n)
    xtrue = arange(n,0,-1, dtype=float)
    Afun = aslinearoperator(A)
    b = Afun.matvec(xtrue)
    atol = 1.0e-7
    btol = 1.0e-7
    conlim = 1.0e+10
    itnlim = 10*n
    show = 1
    x, istop, itn, normr, normar, norma, conda, normx \
      = lsmr(A, b, damp, atol, btol, conlim, itnlim, show)
    j1 = min(n,5)
    j2 = max(n-4,1)
    print(' ')
    print('First elements of x:')
    # Locals renamed from `str`/`str2`: the original shadowed the builtin str.
    head = ['%10.4f' % (xi) for xi in x[0:j1]]
    print(''.join(head))
    print(' ')
    print('Last elements of x:')
    tail = ['%10.4f' % (xi) for xi in x[j2-1:]]
    print(''.join(tail))
    r = b - Afun.matvec(x)
    # True (damped) residual norm, for comparison with lsmr's estimate.
    r2 = sqrt(norm(r)**2 + (damp*norm(x))**2)
    print(' ')
    est_line = 'normr (est.) %17.10e' % (normr)
    true_line = 'normr (true) %17.10e' % (r2)
    print(est_line)
    print(true_line)
    print(' ')
if __name__ == "__main__":
    # Script mode: run the verbose demo first, then the module's test suite.
    # Comment out the next line to run unit tests only
    lsmrtest(20,10,0)
    run_module_suite()
| [
"kevin.m.smyth@gmail.com"
] | kevin.m.smyth@gmail.com |
ba29ed82314cae6ed3ee61c684bfe1f5a68b82f6 | 5ca5a7120c3c147b3ae86c2271c60c82745997ea | /my_selenium/web_driver_three/data_frame/pageObjects/LoginPage.py | ba2b5ecb7dc39b416962ddc7f40bc3d108534d64 | [] | no_license | JR1QQ4/auto_test | 6b9ea7bd317fd4338ac0964ffd4042b293640af3 | 264b991b4dad72986e2aeb1a30812baf74e42bc6 | refs/heads/main | 2023-03-21T01:32:29.192030 | 2021-03-16T14:07:11 | 2021-03-16T14:07:11 | 321,591,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,437 | py | #!/usr/bin/python
# -*- coding:utf-8 -*-
from my_selenium.web_driver_three.data_frame.util.ObjectMap import *
from my_selenium.web_driver_three.data_frame.util.parse_configuration_file import ParseConfigFile
from selenium import webdriver
class LoginPage(object):
    """Page object for the 163 mail (mail.163.com) login page.

    Element locators are read from the "163mail_login" section of the
    project configuration file; each entry is a "locateType>locator"
    string (e.g. "xpath>//input[@name='email']").
    """

    def __init__(self, driver: webdriver):
        self.driver = driver
        self.parseCF = ParseConfigFile()
        # Dict mapping lower-cased option names to "locateType>locator".
        self.loginOPtions = self.parseCF.getItemsSection("163mail_login")

    def _getElement(self, optionName):
        """Resolve optionName via the config section and locate the element.

        The former per-method ``except Exception as e: raise e`` wrappers
        were no-ops that only obscured tracebacks; errors now propagate
        unchanged, which is behaviorally identical.
        """
        locate_type, locator = self.loginOPtions[optionName.lower()].split(">")
        return get_element(self.driver, locate_type, locator)

    def switchToFrame(self):
        """Enter the login iframe that hosts the credential inputs."""
        frame = self._getElement("loginPage.frame")
        self.driver.switch_to.frame(frame)

    def switchToDefaultFrame(self):
        """Return from the login iframe to the top-level document."""
        self.driver.switch_to.default_content()

    def userNameObj(self):
        """Return the user-name input element."""
        return self._getElement("loginPage.username")

    def passwordObj(self):
        """Return the password input element."""
        return self._getElement("loginPage.password")

    def loginButton(self):
        """Return the login (submit) button element."""
        return self._getElement("loginPage.loginButton")
# if __name__ == '__main__':
# from selenium import webdriver
# driver = webdriver.Chrome(executable_path=r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe')
# driver.get("https://mail.163.com/")
# import time
# time.sleep(5)
# login = LoginPage(driver)
# login.switchToFrame()
# login.userNameObj().send_keys("")
# login.passwordObj().send_keys("")
# login.loginButton().click()
# login.switchToDefaultFrame()
# time.sleep(5)
# driver.quit()
| [
"chenjunrenyx@163.com"
] | chenjunrenyx@163.com |
20234811a48b9100c4a4d892de7cbdf8b671601c | fc3f784c8d00f419b11cbde660fe68a91fb080ca | /algoritm/20상반기 코딩테스트/네트워크연결/bj3780.py | 81a9c28d91d9f9bda43b57b0f9ea4cab4770f1c5 | [] | no_license | choo0618/TIL | 09f09c89c8141ba75bf92657ac39978913703637 | 70437a58015aecee8f3d86e6bfd0aa8dc11b5447 | refs/heads/master | 2021-06-25T07:01:34.246642 | 2020-12-21T04:57:13 | 2020-12-21T04:57:13 | 163,782,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | import sys
sys.stdin = open('bj3780.txt','r')
def find(x):
    # Weighted union-find "find": P[x] is x's parent, Dis[x] the edge length
    # from x to its parent; a root satisfies P[x] == x.
    # Returns the total accumulated distance from x to its root.
    if P[x]==x:return Dis[x]
    # Recurse first: afterwards Dis[P[x]] holds the parent's full
    # distance-to-root, so adding it makes Dis[x] the full distance too.
    Dis[x]+=find(P[x])
    # After the recursive call the parent was already re-pointed at the root,
    # so P[P[x]] is the root: this completes full path compression while
    # keeping Dis[x] consistent with the new (direct) parent link.
    P[x]=P[P[x]]
    return Dis[x]
T=int(input())
for t in range(T):
    N=int(input())
    # Fresh disjoint-set forest per test case: every node starts as its own
    # root at distance 0.
    P=list(range(N+1))
    Dis=[0]*(N+1)
    while True:
        L=list(map(str,input().split()))
        # A line with a single token terminates the current test case.
        if len(L)==1:break
        if L[0]=='E':print(find(int(L[1])))
        else:
            # Any other command carries two numbers (presumably 'I a b'):
            # attach a's cluster under b with edge length |a-b| mod 1000.
            a,b=int(L[1]),int(L[2])
            Dis[a]=abs(a-b)%1000
            P[a]=b
| [
"choo0618@naver.com"
] | choo0618@naver.com |
78b0a50ab9ca8a2b3bdda54f39c9760a8e3ba0f5 | cda2c95ee6167a34ce9ba3ea25707469a2c357ca | /neural_augmented_simulator/old-code/data-collection/3-collect_mujoco_episodes_cheetah.py | 4496daf7cbcffb9a7fc72bdaaf3d5687de2c0805 | [] | no_license | fgolemo/neural-augmented-simulator | f43d9a88187fbef478aba9b4399eaa59d8795746 | eb02f20d92e6775824dbac221771f8b8c6dda582 | refs/heads/master | 2020-06-23T15:50:35.957495 | 2020-03-06T21:16:34 | 2020-03-06T21:16:34 | 198,666,041 | 0 | 1 | null | 2019-12-04T22:19:27 | 2019-07-24T15:49:48 | Jupyter Notebook | UTF-8 | Python | false | false | 5,037 | py | #import torch
import math
import h5py
from fuel.datasets.hdf5 import H5PYDataset
import gym
import gym_throwandpush
import numpy as np
from scipy.misc import imresize
from utils.buffer_images import BufferImages as Buffer
import matplotlib.pyplot as plt
from tqdm import tqdm
from hyperdash import Experiment
env = gym.make('HalfCheetah2Pixel-v0')
env2 = gym.make('HalfCheetah2Pixel-v0')
env2.env.env._init( # real robot
torques={
"bthigh": 120,
"bshin": 90,
"bfoot": 60,
"fthigh": 120,
"fshin": 60,
"ffoot": 30
},
colored=False
)
env.env.env._init( # simulator
torques={
"bthigh": 600,
"bshin": 18,
"bfoot": 300,
"fthigh": 24,
"fshin": 300,
"ffoot": 6
},
colored=True
)
image_dim = (128, 128, 3)
observation_dim = int(env.observation_space[0].shape[0])
action_dim = int(env.action_space.shape[0])
print ("obs dim: {}, act dim: {}".format(observation_dim, action_dim))
rng = np.random.RandomState(seed=22)
max_steps = 1000
episode_length = 300
split = 0.90
action_steps = 5
# Creating the h5 dataset
name = '/Tmp/mujoco_data1_cheetah.h5'
assert 0 < split <= 1
size_train = math.floor(max_steps * split)
size_val = math.ceil(max_steps * (1 - split))
f = h5py.File(name, mode='w')
images = f.create_dataset('images', (size_train+size_val, episode_length) + image_dim, dtype='uint8')
observations = f.create_dataset('obs', (size_train+size_val, episode_length, observation_dim), dtype='float32')
actions = f.create_dataset('actions', (size_train+size_val, episode_length, action_dim), dtype='float32')
s_transition_img = f.create_dataset('s_transition_img', (size_train+size_val, episode_length) + image_dim, dtype='uint8')
r_transition_img = f.create_dataset('r_transition_img', (size_train+size_val, episode_length) + image_dim, dtype='uint8')
s_transition_obs = f.create_dataset('s_transition_obs', (size_train+size_val, episode_length, observation_dim), dtype='float32')
r_transition_obs = f.create_dataset('r_transition_obs', (size_train+size_val, episode_length, observation_dim), dtype='float32')
reward_sim = f.create_dataset('reward_sim', (size_train+size_val,episode_length), dtype='float32')
reward_real = f.create_dataset('reward_real', (size_train+size_val,episode_length), dtype='float32')
split_dict = {
'train': {
'images': (0, size_train),
'obs': (0, size_train),
'actions': (0, size_train),
's_transition_img': (0, size_train),
'r_transition_img': (0, size_train),
's_transition_obs': (0, size_train),
'r_transition_obs': (0, size_train),
'reward_sim': (0, size_train),
'reward_real': (0, size_train)
},
'valid': {
'images': (size_train, size_train+size_val),
'obs': (size_train, size_train+size_val),
'actions': (size_train, size_train+size_val),
's_transition_img': (size_train, size_train+size_val),
'r_transition_img': (size_train, size_train+size_val),
's_transition_obs': (size_train, size_train+size_val),
'r_transition_obs': (size_train, size_train+size_val),
'reward_sim': (size_train, size_train+size_val),
'reward_real': (size_train, size_train+size_val),
}
}
f.attrs['split'] = H5PYDataset.create_split_array(split_dict)
def match_env(ev1, ev2):
    """Copy ev2's full MuJoCo state (qpos, qvel) into ev1.

    Keeps the simulator env (ev1) in lockstep with the "real robot" env
    (ev2) so both step from identical states.
    """
    # set env1 (simulator) to that of env2 (real robot)
    ev1.env.env.set_state(
        ev2.env.env.model.data.qpos.ravel(),
        ev2.env.env.model.data.qvel.ravel()
    )
i = 0
# hyperdash experiment used only for remote progress reporting.
exp = Experiment("dataset cheetah")
for i in tqdm(range(max_steps)):
    exp.metric("episode", i)
    obs = env.reset()
    obs2 = env2.reset()
    # Start both envs from the exact same MuJoCo state.
    match_env(env, env2)
    for j in range(episode_length):
        # env.render()
        # env2.render()
        # A new random action is sampled every `action_steps` steps and held
        # constant in between.
        if j % action_steps == 0:
            action = env.action_space.sample()
        new_obs, reward, done, info = env.step(action)
        new_obs2, reward2, done2, info2 = env2.step(action)
        # print (j, done, new_obs[0][0])
        # Each record stores (pre-step state, action, sim transition,
        # real transition); obs[0] is the state vector, obs[1] the frame.
        images[i, j, :, :, :] = imresize(obs[1], [128, 128, 3])
        observations[i, j, :] = obs[0]
        actions[i, j, :] = action
        s_transition_img[i, j, :, :, :] = imresize(new_obs[1], [128, 128, 3])
        r_transition_img[i, j, :, :, :] = imresize(new_obs2[1], [128, 128, 3])
        s_transition_obs[i, j, :] = new_obs[0]
        r_transition_obs[i, j, :] = new_obs2[0]
        # NOTE(review): these assign a scalar to the whole episode row of the
        # (max_steps, episode_length) dataset, overwriting it every step;
        # reward_sim[i, j] / reward_real[i, j] was presumably intended —
        # verify before relying on the stored rewards.
        reward_sim[i] = reward
        reward_real[i] = reward2
        # we have to set the state to be the old state in the next timestep.
        # Otherwise the old state is constant
        obs = new_obs
        # Re-synchronise the simulator to the real env after every step.
        match_env(env, env2)
        if done2:
            # print("Episode finished after {} timesteps".format(t+1))
            break
    if i % 200 == 0:
        print("Buffer currently filled at: {}%".format(int(i*100./max_steps)))
    if i % 100 == 0:
        print ("{} done".format(i))
        f.flush()
f.flush()
f.close()
print('Created h5 dataset with {} elements'.format(max_steps))
| [
"fgolemo@gmail.com"
] | fgolemo@gmail.com |
1f076e31be7002240dc6cc7edc06f527821ec557 | de4d88db6ea32d20020c169f734edd4b95c3092d | /aiotdlib/api/functions/set_custom_language_pack.py | dd86e04e684d5f47e8ed95dcffb27b3d5a653339 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | thiagosm/aiotdlib | 5cc790a5645f7e4cc61bbd0791433ed182d69062 | 4528fcfca7c5c69b54a878ce6ce60e934a2dcc73 | refs/heads/main | 2023-08-15T05:16:28.436803 | 2021-10-18T20:41:27 | 2021-10-18T20:41:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py | # =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import LanguagePackInfo
from ..types import LanguagePackString
class SetCustomLanguagePack(BaseObject):
    """
    Adds or changes a custom local language pack to the current localization target
    :param info: Information about the language pack. Language pack ID must start with 'X', consist only of English letters, digits and hyphens, and must not exceed 64 characters. Can be called before authorization
    :type info: :class:`LanguagePackInfo`
    :param strings: Strings of the new language pack
    :type strings: :class:`list[LanguagePackString]`
    """
    # Constant TDLib type discriminator, serialized under the "@type" key.
    ID: str = Field("setCustomLanguagePack", alias="@type")
    info: LanguagePackInfo
    strings: list[LanguagePackString]
    @staticmethod
    def read(q: dict) -> SetCustomLanguagePack:
        # construct() builds the model without pydantic validation, since q
        # comes straight from TDLib and is trusted to match the schema.
        return SetCustomLanguagePack.construct(**q)
| [
"pylakey@protonmail.com"
] | pylakey@protonmail.com |
2be4b98c022d59fb74c3e543370f9f90e68cf785 | d28f0c984bbd4f80b770259ceb884f38e1cc0db8 | /bin/find-image-dirs.py | 54be7c595d65e0f9836b50b9be5cb1327028f05c | [] | no_license | cloudmesh-community/book | 435a96e2a8d2b8438428880b5f39a56d2c1c63b7 | f971bc1d15c82375d4c765b33c6fbe383b5819de | refs/heads/main | 2022-11-09T16:57:26.495231 | 2022-10-27T21:27:37 | 2022-10-27T21:27:37 | 135,431,595 | 29 | 134 | null | 2022-02-01T02:04:03 | 2018-05-30T11:14:59 | Jupyter Notebook | UTF-8 | Python | false | false | 494 | py | #!/usr/bin/env python
import sys
import glob
import os
# Collect (relative to the repo root) every chapter directory that contains an
# images/ subdirectory, then print them as a colon-separated search path.
directories = [".", "chapters"]
for root, dirs, files in os.walk("../chapters", topdown=False):
    for name in dirs:
        path = os.path.join(root, name)
        # NOTE(review): substring test also matches names that merely contain
        # "images" (e.g. "myimages") and can append the same parent twice —
        # confirm this is acceptable for the consumers of this path list.
        if 'images' in path:
            path = os.path.dirname(path)
            # hack to remove ../ should in future use pathlib no time to implement
            path = path.replace("../","")
            directories.append(path)
print (":".join(directories))
| [
"laszewski@gmail.com"
] | laszewski@gmail.com |
a7ddf9fb5c1a07cb43829f558c8bec12271f05fa | 2391ff81d33e05efb1f38f7a67eff13dbb6a7b23 | /open_seq2seq/model/slstm.py | a93beee9507c8799bf6946c38afd75c4d0d30998 | [
"MIT"
] | permissive | matanhs/OpenSeq2Seq | 3b79222e114db75c1bf0f6b4b1eb0d231b8f0d0b | 540a1a230eff7c4cefcbb094ddc65aa11f64c9b3 | refs/heads/master | 2021-08-31T01:49:01.519514 | 2017-12-20T02:43:24 | 2017-12-20T04:47:16 | 113,621,881 | 1 | 0 | null | 2017-12-08T22:10:20 | 2017-12-08T22:10:20 | null | UTF-8 | Python | false | false | 6,106 | py | """Implement https://arxiv.org/abs/1709.02755
Copy from LSTM, and make it functionally correct with minimum code change
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import rnn_cell
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
_BIAS_VARIABLE_NAME = "biases" if tf.__version__ < "1.2.0" else "bias"
_WEIGHTS_VARIABLE_NAME = "weights" if tf.__version__ < "1.2.0" else "kernel"
class BasicSLSTMCell(rnn_cell.RNNCell):
  """Basic SLSTM recurrent network cell.
  The implementation is based on: https://arxiv.org/abs/1709.02755.
  """
  def __init__(self, num_units, forget_bias=1.0,
               state_is_tuple=True, activation=None, reuse=None):
    """Initialize the basic SLSTM cell.
    Args:
      num_units: int, The number of units in the SLSTM cell.
      forget_bias: float, The bias added to forget gates (see above).
        Must set to `0.0` manually when restoring from CudnnLSTM-trained
        checkpoints.
      state_is_tuple: If True, accepted and returned states are 2-tuples of
        the `c_state` and `m_state`. If False, they are concatenated
        along the column axis. The latter behavior will soon be deprecated.
      activation: Activation function of the inner states. Default: `tanh`.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope. If not `True`, and the existing scope already has
        the given variables, an error is raised.
    """
    super(BasicSLSTMCell, self).__init__(_reuse=reuse)
    if not state_is_tuple:
      logging.warn("%s: Using a concatenated state is slower and will soon be "
                   "deprecated. Use state_is_tuple=True.", self)
    self._num_units = num_units
    self._forget_bias = forget_bias
    self._state_is_tuple = state_is_tuple
    self._activation = activation or math_ops.tanh
  @property
  def state_size(self):
    # Either an (c, h) tuple of sizes or their concatenation along axis 1.
    return (rnn_cell.LSTMStateTuple(self._num_units, self._num_units)
            if self._state_is_tuple else 2 * self._num_units)
  @property
  def output_size(self):
    return self._num_units
  def call(self, inputs, state):
    """Run one step of the SLSTM cell.
    Args:
      inputs: `2-D` tensor with shape `[batch_size x input_size]`.
      state: An `LSTMStateTuple` of state tensors, each shaped
        `[batch_size x self.state_size]`, if `state_is_tuple` has been set to
        `True`. Otherwise, a `Tensor` shaped
        `[batch_size x 2 * self.state_size]`.
    Returns:
      A pair containing the new hidden state, and the new state (either a
      `LSTMStateTuple` or a concatenated state, depending on
      `state_is_tuple`).
    """
    sigmoid = math_ops.sigmoid
    # Parameters of gates are concatenated into one multiply for efficiency.
    if self._state_is_tuple:
      c, h = state
    else:
      c, h = array_ops.split(value=state, num_or_size_splits=2, axis=1)
    #concat = _linear([inputs, h], 4 * self._num_units, True)
    # Unlike a standard LSTM (the commented-out line above), the gate
    # pre-activations depend on the current inputs only, not on h — this is
    # the simplified recurrence of arXiv 1709.02755.
    concat = _linear(inputs, 4 * self._num_units, True)
    # i = input_gate, j = new_input, f = forget_gate, o = output_gate
    i, j, f, o = array_ops.split(value=concat, num_or_size_splits=4, axis=1)
    new_c = (
        c * sigmoid(f + self._forget_bias) + sigmoid(i) * self._activation(j))
    new_h = self._activation(new_c) * sigmoid(o)
    if self._state_is_tuple:
      new_state = rnn_cell.LSTMStateTuple(new_c, new_h)
    else:
      new_state = array_ops.concat([new_c, new_h], 1)
    return new_h, new_state
def _linear(args,
            output_size,
            bias,
            bias_initializer=None,
            kernel_initializer=None):
  """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
  Args:
    args: a 2D Tensor or a list of 2D, batch x n, Tensors.
    output_size: int, second dimension of W[i].
    bias: boolean, whether to add a bias term or not.
    bias_initializer: starting value to initialize the bias
      (default is all zeros).
    kernel_initializer: starting value to initialize the weight.
  Returns:
    A 2D Tensor with shape [batch x output_size] equal to
    sum_i(args[i] * W[i]), where W[i]s are newly created matrices.
  Raises:
    ValueError: if some of the arguments has unspecified or wrong shape.
  """
  if args is None or (nest.is_sequence(args) and not args):
    raise ValueError("`args` must be specified")
  if not nest.is_sequence(args):
    args = [args]
  # Calculate the total size of arguments on dimension 1.
  total_arg_size = 0
  shapes = [a.get_shape() for a in args]
  for shape in shapes:
    if shape.ndims != 2:
      raise ValueError("linear is expecting 2D arguments: %s" % shapes)
    if shape[1].value is None:
      raise ValueError("linear expects shape[1] to be provided for shape %s, "
                       "but saw %s" % (shape, shape[1]))
    else:
      total_arg_size += shape[1].value
  dtype = [a.dtype for a in args][0]
  # Now the computation.
  scope = vs.get_variable_scope()
  with vs.variable_scope(scope) as outer_scope:
    # One kernel variable covers all concatenated inputs: matmul of the
    # concatenation equals the sum of the per-arg products.
    weights = vs.get_variable(
        _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size],
        dtype=dtype,
        initializer=kernel_initializer)
    if len(args) == 1:
      res = math_ops.matmul(args[0], weights)
    else:
      res = math_ops.matmul(array_ops.concat(args, 1), weights)
    if not bias:
      return res
    with vs.variable_scope(outer_scope) as inner_scope:
      # The bias lives in the same scope but must not inherit any variable
      # partitioner (it is a small 1-D variable).
      inner_scope.set_partitioner(None)
      if bias_initializer is None:
        bias_initializer = init_ops.constant_initializer(0.0, dtype=dtype)
      biases = vs.get_variable(
          _BIAS_VARIABLE_NAME, [output_size],
          dtype=dtype,
          initializer=bias_initializer)
    return nn_ops.bias_add(res, biases)
| [
"okuchaiev@nvidia.com"
] | okuchaiev@nvidia.com |
1e067717037ea679c74fa4d55b9080f7baba735a | bade79e88dd32b42b4841a1e1a94eddfc86652bf | /communication/communicationWindow.py | b1145de35a61d32076eb03a2f4d120f36de0285e | [] | no_license | a452669850/DCSNEW | 927171b10f455b396b50e8400001efcdb5fd1217 | 7578b6b18e021108c3e8b31c5dab2a17ac16a79d | refs/heads/master | 2023-01-04T11:25:09.787625 | 2020-10-20T13:16:54 | 2020-10-20T13:16:54 | 305,712,694 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,057 | py | import sys
from pathlib import Path
sys.path.append('D:\\dcstms')
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QApplication, QMainWindow, QMdiArea, QHBoxLayout
from communication import skio, iomapping
from communication.view.IntermediateVariable import intermediateVarWindow
from communication.view.databaseManagement import databaseManageWindow
from communication.view.deviceVariables import deviceVarWindow
from communication.view.myTree import TreeDockWidget
from communication.view.systemParameter import sysParameterWindow
path = Path(__file__).absolute().parent.parent.joinpath('static')
class comWindow(QMainWindow):
    """Main window of the DCS automated-test tool ("工程管理器").

    Shows a navigation tree in a left dock widget; clicking a tree item
    opens the matching sub-window inside a QMdiArea (at most one at a time).
    """

    def __init__(self):
        super().__init__()
        self.setWindowTitle('Dcs自动化测试软件-[工程管理器]')
        # Initialise the IO layer from the bundled demo project before any
        # sub-window is created (the sub-windows read from it).
        skio.setup(path.joinpath('demo'))
        iomapping.setup_Current()
        self.createMenue()
        self.initUI()

    def initUI(self):
        """Create the central MDI area and the navigation-tree dock widget."""
        layout = QHBoxLayout()
        self.items = TreeDockWidget(self)
        # The tree emits the key of the clicked item; windowAction maps the
        # key to a sub-window.
        self.items.tree_Signal.connect(self.windowAction)
        self.items.setFloating(False)
        self.mdi = QMdiArea()
        self.setCentralWidget(self.mdi)
        self.addDockWidget(Qt.LeftDockWidgetArea, self.items)
        self.setLayout(layout)

    def createMenue(self):
        """Build the menu bar and wire each menu to its trigger handler.

        (Method name kept as-is — existing code calls ``createMenue``.)
        """
        self.menubar = self.menuBar()
        self.viewMenu1 = self.menubar.addMenu('&工程')
        self.viewMenu2 = self.menubar.addMenu('&查看')
        self.viewMenu3 = self.menubar.addMenu('&工具')
        self.viewMenu4 = self.menubar.addMenu('&操作')
        self.viewMenu5 = self.menubar.addMenu('&帮助')
        for label in ('新建工程', '打开', '保存', '退出'):
            self.viewMenu1.addAction(label)
        for label in ('工具栏', '状态栏', '工作区', '显示区', '编辑'):
            self.viewMenu2.addAction(label)
        for label in ('模拟', '运行', '下载工程', '上传工程', '标准modbus点表', '模板导入'):
            self.viewMenu3.addAction(label)
        for label in ('增加', '追加', '行拷', '列拷', '修改', '删除', '导出', '导入'):
            self.viewMenu4.addAction(label)
        for label in ('帮助', '关于'):
            self.viewMenu5.addAction(label)
        self.viewMenu1.triggered.connect(self.menueAction1)
        self.viewMenu2.triggered.connect(self.menueAction2)
        self.viewMenu3.triggered.connect(self.menueAction3)
        self.viewMenu4.triggered.connect(self.menueAction4)
        self.viewMenu5.triggered.connect(self.menueAction5)

    def windowAction(self, text):
        """Open the sub-window for tree key *text*, replacing any open one.

        Fixes a defect in the original: after the "no window open" branch
        added a sub-window, the following ``len(...) == 1`` branch also
        fired, closing the fresh window and building it a second time —
        which started intermediateVarWindow's worker thread twice. The
        window is now built exactly once.
        """
        factories = {
            'sysParameter': (sysParameterWindow, False),
            'comEqu': (deviceVarWindow, False),
            # Both keys open the intermediate-variable window, whose worker
            # thread must be started after construction (as in the original).
            'deviceVar': (intermediateVarWindow, True),
            'intermediateVar': (intermediateVarWindow, True),
            'databaseManage': (databaseManageWindow, False),
        }
        # Keep at most one sub-window; the original closed the open window
        # even when the key was unknown, so close first.
        for existing in self.mdi.subWindowList():
            existing.close()
        entry = factories.get(text)
        if entry is None:
            return
        window_cls, starts_thread = entry
        sub = window_cls()
        if starts_thread:
            sub.threadings.start()
        self.mdi.addSubWindow(sub)
        sub.showMaximized()

    def menueAction1(self):
        print(1)

    def menueAction2(self):
        print(2)

    def menueAction3(self):
        print(3)

    def menueAction4(self):
        print(4)

    def menueAction5(self):
        print(5)
if __name__ == '__main__':
    # Standard Qt bootstrap: one QApplication, show the main window, then
    # hand control to the event loop until the user quits.
    app = QApplication(sys.argv)
    ex = comWindow()
    ex.show()
    sys.exit(app.exec_())
| [
"a452669850@outlook.com"
] | a452669850@outlook.com |
0d6be0c4188a1d84cc9f5b5b16671e49c1e36a28 | eb66f122da246af7e5f342c5a3be3a02d0957d04 | /oldBoy/upload_client.py | b80d3719e98c46debe6404bff1dbc93271e267d8 | [] | no_license | zhchwolf/pylearn | 00838be6f5f68176ba3d0131f223839469a16a55 | 5dc2d39a90dd064ac88669d4535ad08b082bf04f | refs/heads/master | 2022-08-27T03:25:10.854780 | 2019-09-27T09:38:47 | 2019-09-27T09:38:47 | 157,136,924 | 0 | 1 | null | 2022-08-06T05:22:44 | 2018-11-12T00:56:12 | Python | UTF-8 | Python | false | false | 690 | py | import socket
import os
sk = socket.socket()
address =('127.0.0.1',8000)
sk.connect(address)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
while True:
inp = input('>>>') # post|123.jpg
cmd,path = inp.split('|')
path = os.path.join(BASE_DIR,path)
filename = os.path.basename(path)
file_size = os.stat(path).st_size
file_info = 'post|%s|%s' % (filename,file_size)
sk.sendall(bytes(file_info,'utf8'))
with open(path,'rb') as f:
has_send = 0
while has_send != file_size:
data = f.read(1024)
sk.sendall(data)
has_send += len(data)
f.close()
print('upload finished.')
sk.close() | [
"zhchwolf@sina.com"
] | zhchwolf@sina.com |
47afd7e643d3c1fb17daceaa3e205522b12cb9b4 | 13ce655f82b93fb4089b29e62a8e33dd7ff05493 | /src/wai/json/error/_RequiredDisallowed.py | 5305a58f8080b5fe1c0cc7bf695504fde72de1f4 | [
"MIT"
] | permissive | waikato-datamining/wai-json | 603b90b13155114bbfb60b40f45100248c03d710 | cb013fb16e7c1b8d91e040a387a143d29d4ced96 | refs/heads/master | 2021-01-07T15:06:22.957223 | 2020-03-17T23:59:14 | 2020-03-17T23:59:14 | 241,736,670 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 211 | py | from ._JSONPropertyError import JSONPropertyError
class RequiredDisallowed(JSONPropertyError):
"""
Error type for when trying to set a property as required,
but it is not allowed.
"""
pass
| [
"coreytsterling@gmail.com"
] | coreytsterling@gmail.com |
59789cb9c983d28dc952d267e6db4245284bedb2 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/132/usersdata/260/41472/submittedfiles/al14.py | c446200a7c5705038df4fc037e84245344b06221 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | # -*- coding: utf-8 -*-
n=int(input("digite o número de pessoas:"))
altura=0
for i in range (1,n+1,1):
alturas=int(input("digite a altura dessas pessoa:"+str(i)))
total=alturas+total
media=total//n
print(media)
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
017d4a1b0484ade3ebe995b017041b678fbe1c5d | 5c00df958c8010c3bea2e5d5ef63d63642526495 | /setup.py | 278b6215ba952ddf33240cd2b2bf597c3adf6e25 | [
"MIT"
] | permissive | ppdebreuck/matbench | 32e9be021349c8bcb04efd992205f21e9850912f | 4bc373d86671e17aeba3ecdd8a49c3ad555d4513 | refs/heads/main | 2023-08-25T19:05:55.508133 | 2021-10-11T15:39:47 | 2021-10-11T15:39:47 | 393,661,109 | 0 | 0 | MIT | 2021-08-07T11:11:38 | 2021-08-07T11:11:38 | null | UTF-8 | Python | false | false | 1,468 | py | import os
from setuptools import setup, find_packages
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(MODULE_DIR, "requirements.txt"), "r") as f:
requirements = f.read().replace(" ", "").split("\n")
# source of version is in the constants file
VERSION_FILE = os.path.join(MODULE_DIR, "matbench/constants.py")
token = "VERSION = "
with open(VERSION_FILE, "r") as f:
version = None
for line in f.readlines():
if token in line:
version = line.replace(token, "").strip()
# Double quotes are contained in the read line, remove them
version = version.replace("\"", "")
if __name__ == "__main__":
setup(
name='matbench',
version=version,
description='a machine learning benchmark for materials science',
long_description="A machine learning benchmark for materials science. "
"https://github.com/hackingmaterials/matbench",
url='https://github.com/hackingmaterials/matbench',
author=['Alex Dunn', 'Anubhav Jain'],
author_email='ardunn@lbl.gov',
license='modified BSD',
packages=find_packages(where="."),
package_data={
"matbench": ["*.json"],
"matbench.tests": ["*.json"]
},
zip_safe=False,
install_requires=requirements,
extras_require={},
test_suite='matbench',
tests_require='tests',
include_package_data=True
)
| [
"ardunn@lbl.gov"
] | ardunn@lbl.gov |
62f74a9cfefeb751139c8a1fb60850c830101bfb | 287792543e5f15cd912661ffe0575d4fc3d03a49 | /backend/src/baserow/core/registries.py | c5f640f0e9fe3b433d04f42104a84869f0fef686 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | jacklicn/baserow | 27f7566a164127d8b4571be4493447347d8aa3ed | 978d9462ededbaa96674a6653028ba19876ea273 | refs/heads/master | 2023-04-02T19:06:20.961729 | 2021-04-08T16:00:37 | 2021-04-08T16:00:37 | 357,790,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,506 | py | from .registry import (
Instance, Registry, ModelInstanceMixin, ModelRegistryMixin, APIUrlsRegistryMixin,
APIUrlsInstanceMixin, ImportExportMixin
)
from .exceptions import ApplicationTypeAlreadyRegistered, ApplicationTypeDoesNotExist
class Plugin(APIUrlsInstanceMixin, Instance):
"""
This abstract class represents a custom plugin that can be added to the plugin
registry. It must be extended so customisation can be done. Each plugin can register
urls to the root and to the api.
The added API urls will be available under the namespace 'api'. So if a url
with name 'example' is returned by the method it will available under
reverse('api:example').
Example:
from django.http import HttpResponse
from baserow.core.registries import Plugin, plugin_registry
def page_1(request):
return HttpResponse('Page 2')
class ExamplePlugin(Plugin):
type = 'a-unique-type-name'
# Will be added to the root.
def get_urls(self):
return [
url(r'^page-1$', page_1, name='page_1')
]
# Will be added to the API.
def get_api_urls(self):
return [
path('application-type/', include(api_urls, namespace=self.type)),
]
plugin_registry.register(ExamplePlugin())
"""
def get_urls(self):
"""
If needed root urls related to the plugin can be added here.
Example:
def get_urls(self):
from . import api_urls
return [
path('some-url/', include(api_urls, namespace=self.type)),
]
# api_urls.py
from django.conf.urls import url
urlpatterns = [
url(r'some-view^$', SomeView.as_view(), name='some_view'),
]
:return: A list containing the urls.
:rtype: list
"""
return []
def user_created(self, user, group, group_invitation):
"""
A hook that is called after a new user has been created. This is the place to
create some data the user can start with. A group has already been created
for the user to that one is passed as a parameter.
:param user: The newly created user.
:type user: User
:param group: The newly created group for the user.
:type group: Group
:param group_invitation: Is provided if the user has signed up using a valid
group invitation token.
:type group_invitation: GroupInvitation
"""
class PluginRegistry(APIUrlsRegistryMixin, Registry):
"""
With the plugin registry it is possible to register new plugins. A plugin is an
abstraction made specifically for Baserow. It allows a plugin developer to
register extra api and root urls.
"""
name = 'plugin'
@property
def urls(self):
"""
Returns a list of all the urls that are in the registered instances. They
are going to be added to the root url config.
:return: The urls of the registered instances.
:rtype: list
"""
urls = []
for types in self.registry.values():
urls += types.get_urls()
return urls
class ApplicationType(APIUrlsInstanceMixin, ModelInstanceMixin, ImportExportMixin,
Instance):
"""
This abstract class represents a custom application that can be added to the
application registry. It must be extended so customisation can be done. Each
application will have his own model that must extend the Application model, this is
needed so that the user can set custom settings per application instance he has
created.
The added API urls will be available under the namespace 'api'. So if a url
with name 'example' is returned by the method it will available under
reverse('api:example').
Example:
from baserow.core.models import Application
from baserow.core.registries import ApplicationType, application_type_registry
class ExampleApplicationModel(Application):
pass
class ExampleApplication(ApplicationType):
type = 'a-unique-type-name'
model_class = ExampleApplicationModel
def get_api_urls(self):
return [
path('application-type/', include(api_urls, namespace=self.type)),
]
application_type_registry.register(ExampleApplication())
"""
instance_serializer_class = None
"""This serializer that is used to serialize the instance model."""
def pre_delete(self, application):
"""
A hook that is called before the application instance is deleted.
:param application: The application model instance that needs to be deleted.
:type application: Application
"""
def export_serialized(self, application):
"""
Exports the application to a serialized dict that can be imported by the
`import_serialized` method. The dict is JSON serializable.
:param application: The application that must be exported.
:type application: Application
:return: The exported and serialized application.
:rtype: dict
"""
return {
'id': application.id,
'name': application.name,
'order': application.order,
'type': self.type
}
def import_serialized(self, group, serialized_values, id_mapping):
"""
Imports the exported serialized application by the `export_serialized` as a new
application to a group.
:param group: The group that the application must be added to.
:type group: Group
:param serialized_values: The exported serialized values by the
`export_serialized` method.
:type serialized_values: dict`
:param id_mapping: The map of exported ids to newly created ids that must be
updated when a new instance has been created.
:type id_mapping: dict
:return: The newly created application.
:rtype: Application
"""
if 'applications' not in id_mapping:
id_mapping['applications'] = {}
serialized_copy = serialized_values.copy()
application_id = serialized_copy.pop('id')
serialized_copy.pop('type')
application = self.model_class.objects.create(group=group, **serialized_copy)
id_mapping['applications'][application_id] = application.id
return application
class ApplicationTypeRegistry(APIUrlsRegistryMixin, ModelRegistryMixin, Registry):
"""
With the application registry it is possible to register new applications. An
application is an abstraction made specifically for Baserow. If added to the
registry a user can create new instances of that application via the app and
register api related urls.
"""
name = 'application'
does_not_exist_exception_class = ApplicationTypeDoesNotExist
already_registered_exception_class = ApplicationTypeAlreadyRegistered
# A default plugin and application registry is created here, this is the one that is
# used throughout the whole Baserow application. To add a new plugin or application use
# these registries.
plugin_registry = PluginRegistry()
application_type_registry = ApplicationTypeRegistry()
| [
"bramw@protonmail.com"
] | bramw@protonmail.com |
58b07d55f2442753f0175fb19c79c359c7b655d5 | 2f0d56cdcc4db54f9484b3942db88d79a4215408 | /.history/Python_Learning/Condiion_20200410231844.py | 807f9f0e50b806d27bf01dda56519bf8f565b699 | [] | no_license | xiangxing98/xiangxing98.github.io | 8571c8ee8509c0bccbb6c2f3740494eedc53e418 | 23618666363ecc6d4acd1a8662ea366ddf2e6155 | refs/heads/master | 2021-11-17T19:00:16.347567 | 2021-11-14T08:35:01 | 2021-11-14T08:35:01 | 33,877,060 | 7 | 1 | null | 2017-07-01T16:42:49 | 2015-04-13T15:35:01 | HTML | UTF-8 | Python | false | false | 3,178 | py | import time
print('如果你想拥有读心术,那选择X教授')
time.sleep(2)
print('如果你想干扰地球磁场,那选择万磁王')
time.sleep(2)
print('如果你想急速自愈能力,野兽般的感知能力,那选择金刚狼')
time.sleep(2)
print('如果你想拥有拥有念力移位和心电感应,那选择凤凰女')
time.sleep(2)
print('如果你想拥有拥有能随意控制气候的能力,那选择暴风女')
time.sleep(2)
print('那么,如果让你来选择的话,你想选择哪个人物?')
time.sleep(2)
print('请在以下六个选项【1 X教授 ;2 万磁王;3 金刚狼 ;4 凤凰女;5 暴风女 ;】中,选择你最想成为的人物吧!')
time.sleep(3)
answer = input('请将对应数字输入在冒号后: ')
if answer == '1':
print('我是教授,通过其能力剥夺并控制他人的思维同时操纵他人的行动。')
time.sleep(3)
elif answer == '2':
print('我X万磁王,通过干扰地球磁场达到飞行的能力。')
time.sleep(3)
elif answer == '3':
print('我是金刚狼,天生双臂长有可伸出体外的利爪')
time.sleep(3)
elif answer == '4':
print('我是凤凰女,预知未来,并能抗拒他人的精神攻击。')
time.sleep(3)
elif answer == '5':
print('我是暴风女,被称作天气女神。')
time.sleep(3)
# 为钱包赋值
money = 1000
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
# 结果:显示‘金刚狼带凤凰女去吃日料’的结果
print('金刚狼带凤凰女去吃日料')
# IndentationError: expected an indented block (缩进错误)
# if
# 为钱包赋值
money = 500
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
# 结果:显示‘你去吃日料’的结果
print('金刚狼带凤凰女去吃日料')
# if else
# 为钱包赋值
money = 1000
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
print('金刚狼带凤凰女去吃日料')
# 条件:当不满足if条件,执行else条件下语句
else:
print('金刚狼带凤凰女去吃KFC')
# 赋值语句:为体重赋值
weight = 98
# if else
# 条件:满足体重超过90(包含90)
if weight>=90:
# 结果:显示‘跑步’的结果
print('她就每天去跑步')
# 条件:当赋值不满足if条件时,执行else下的命令
else:
# 结果:显示‘散步’的结果
print('她就每天去散步')
# 为钱包赋值
money = 999
# if elseif else
# 条件:如果有1000块钱以上(包含1000块),就去吃日料
if money>=1000:
print('金刚狼带凤凰女去吃日料')
# 条件:如果有800-1000块钱之间(包含800块)
elif money>=800:
print('金刚狼带凤凰女去吃披萨')
#不满足条件
else:
print('金刚狼带凤凰女去吃KFC')
# 今天晚上,小K要加班了。如果加班时间超过10点(包含10点),小K就打车回家;如果加班时间在8点-10点之间(包含8点),小K坐地铁回家;如果加班时间早于8点,小K骑单车回家。 | [
"xiangxing985529@163.com"
] | xiangxing985529@163.com |
3886165407d0ba5f66efa43613de88aafdc77bbe | 5e8e4ca2205ccecf3b5b1ce12ae202883d4a5237 | /anchore_engine/clients/policy_engine/__init__.py | 481856d7a178a192d14edd75658cd2fa2d624924 | [
"Apache-2.0"
] | permissive | jasonwilk/anchore-engine | e6fb35060942688164e93a099559aa9be8900502 | 3b587a597be985cf5895f4a249418855d4be3fae | refs/heads/master | 2020-03-28T04:02:00.007092 | 2018-09-06T14:55:43 | 2018-09-06T14:55:43 | 147,689,738 | 0 | 0 | Apache-2.0 | 2018-09-06T14:53:27 | 2018-09-06T14:52:01 | Python | UTF-8 | Python | false | false | 1,577 | py | import random
import anchore_engine.clients.common
from anchore_engine.subsys.discovery import get_endpoints
from .generated import DefaultApi, configuration, ApiClient
from anchore_engine.subsys import logger
import anchore_engine.configuration.localconfig
SERVICE_NAME = 'policy_engine'
def get_client(host=None, user=None, password=None, verify_ssl=None):
"""
Returns an initialize client withe credentials and endpoint set properly
:param host: hostname including port for the destination, will be looked up if not provided
:param user: username for the request auth
:param password: password for the request auth
:return: initialized client object
"""
if not host:
try:
endpoint = anchore_engine.clients.common.get_service_endpoint((user, password), SERVICE_NAME)
if endpoint:
host = endpoint
else:
raise Exception("cannot find endpoint for service: {}".format(SERVICE_NAME))
except Exception as err:
raise err
config = configuration.Configuration()
if host:
config.host = host
if user:
config.username = user
if password:
config.password = password
if verify_ssl == None:
localconfig = anchore_engine.configuration.localconfig.get_config()
verify_ssl = localconfig.get('internal_ssl_verify', True)
config.verify_ssl = verify_ssl
a = ApiClient(configuration=config)
c = DefaultApi(api_client=a)
#configuration.api_client = None
return c
| [
"nurmi@anchore.com"
] | nurmi@anchore.com |
8153e6afaf7e26c90c5d6e9b2a1aa321af0382d5 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /grafeas/v1/grafeas-v1-py/grafeas/grafeas_v1/types/deployment.py | 69ac2dac990e1a4f42b3f2fae3580581b4827d37 | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,995 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package='grafeas.v1',
manifest={
'DeploymentNote',
'DeploymentOccurrence',
},
)
class DeploymentNote(proto.Message):
r"""An artifact that can be deployed in some runtime.
Attributes:
resource_uri (Sequence[str]):
Required. Resource URI for the artifact being
deployed.
"""
resource_uri = proto.RepeatedField(
proto.STRING,
number=1,
)
class DeploymentOccurrence(proto.Message):
r"""The period during which some deployable was active in a
runtime.
Attributes:
user_email (str):
Identity of the user that triggered this
deployment.
deploy_time (google.protobuf.timestamp_pb2.Timestamp):
Required. Beginning of the lifetime of this
deployment.
undeploy_time (google.protobuf.timestamp_pb2.Timestamp):
End of the lifetime of this deployment.
config (str):
Configuration used to create this deployment.
address (str):
Address of the runtime element hosting this
deployment.
resource_uri (Sequence[str]):
Output only. Resource URI for the artifact
being deployed taken from the deployable field
with the same name.
platform (grafeas.grafeas_v1.types.DeploymentOccurrence.Platform):
Platform hosting this deployment.
"""
class Platform(proto.Enum):
r"""Types of platforms."""
PLATFORM_UNSPECIFIED = 0
GKE = 1
FLEX = 2
CUSTOM = 3
user_email = proto.Field(
proto.STRING,
number=1,
)
deploy_time = proto.Field(
proto.MESSAGE,
number=2,
message=timestamp_pb2.Timestamp,
)
undeploy_time = proto.Field(
proto.MESSAGE,
number=3,
message=timestamp_pb2.Timestamp,
)
config = proto.Field(
proto.STRING,
number=4,
)
address = proto.Field(
proto.STRING,
number=5,
)
resource_uri = proto.RepeatedField(
proto.STRING,
number=6,
)
platform = proto.Field(
proto.ENUM,
number=7,
enum=Platform,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
0251b865943a31f95d2d8fcca8ff63221627c2f1 | 59e59cbc24650b557f41c041cbeb8dad10c4d2b1 | /01 Array/169. Majority Element.py | 718cf01d75ceb6a3273c129b36510ee9634562bf | [] | no_license | stungkit/Leetcode-Data-Structures-Algorithms | 5345211f4ceb7dc7651360f0ca0a7f48f2434556 | a3a341369a8acd86c29f8fba642f856d6ea2fd0a | refs/heads/master | 2023-07-26T18:14:17.993112 | 2021-09-13T15:40:47 | 2021-09-13T15:40:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,566 | py | # Given an array of size n, find the majority element. The majority element is the element that appears more than ⌊ n/2 ⌋ times.
# You may assume that the array is non-empty and the majority element always exist in the array.
# Example 1:
# Input: [3,2,3]
# Output: 3
# Example 2:
# Input: [2,2,1,1,1,2,2]
# Output: 2
class Solution:
def majorityElement(self, nums):
nums.sort()
return nums[len(nums)//2]
# Time: O(nlgn)
# Space: O(1) or O(n) which depends on if we want to save the sorted list
from collections import Counter
class Solution:
def majorityElement(self, nums: List[int]) -> int:
count = Counter(nums)
for k,v in count.items():
if v > len(nums)//2:
return k
return -1
# Time: O(n)
# Space: O(n)
class Solution:
def majorityElement(self, nums: List[int]) -> int:
d = {}
for i in nums:
if i not in d:
d[i] = 1
else:
d[i] += 1
for k, v in d.items():
if v > len(nums)//2:
return k
return -1
# Time: O(n)
# Space: O(n)
class Solution:
def majorityElement(self, nums: List[int]) -> int:
d = {}
for i in nums:
d[i] = d.get(i, 0) + 1
for k, v in d.items():
if v > len(nums)//2:
return k
return -1
# Time: O(n)
# Space: O(n) | [
"noreply@github.com"
] | stungkit.noreply@github.com |
fc679ef509cb54b6f6526cdeede624b74324deaa | e5f194129752f3f89eed53478416d2c92cde0259 | /.cache/Microsoft/Python Language Server/stubs.v4/RAbd3hOzVRIjl5T9N_Z-WymMnylM5TJUMWmbys-xAPg=/speedups.cpython-38-x86_64-linux-gnu.pyi | 227c530185cbd8d826a94731aef6899a79524680 | [] | no_license | stepin-s/st | 1677fc25cb42c36afd76d2e3a48a1c0a5daf1b93 | b4cf346a446d57210197ee7f6f809cbc0a5b8799 | refs/heads/master | 2023-07-27T17:37:39.268414 | 2021-05-25T12:08:10 | 2021-05-25T12:08:10 | 405,090,749 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | pyi | __doc__ = None
__file__ = '/home/st/.local/lib/python3.8/site-packages/tornado/speedups.cpython-38-x86_64-linux-gnu.so'
__name__ = 'tornado.speedups'
__package__ = 'tornado'
def websocket_mask():
pass
| [
"stpn.s@yandex.ru"
] | stpn.s@yandex.ru |
3af7a3b933bf513f93b14404512dd68ad5d4de48 | a689a72d3699883d7b58bd4ee3103373270bd0d5 | /BOJ/Python/BOJ1020.py | 12f8f79a460103398fcfb7620d39798d82a6ee6c | [] | no_license | Oizys18/Algo | 4670748c850dc9472b6cfb9f828a3ccad9c18981 | 45caafe22a8a8c9134e4ff3b227f5f0be94eefe7 | refs/heads/master | 2022-05-11T08:35:06.812539 | 2022-05-07T01:30:41 | 2022-05-07T01:30:41 | 202,690,024 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | import sys
sys.stdin = open('BOJ1020.txt','r')
from pprint import pprint as pp
time = [int(i) for i in input()]
N = len(time)
cnt = {
1:2,
2:5,
3:5,
4:4,
5:5,
6:6,
7:3,
8:7,
9:5,
0:6
}
def count(time):
total = 0
for t in time:
total += cnt[t]
return total
def solve(time):
if count(time) == N:
return time
else:
solve(time+1)
| [
"oizys18@gmail.com"
] | oizys18@gmail.com |
38528b4c400b8e661dc7ca3250f4afcfea523742 | f1e9f557c5d724dcabbfa17903de93bb82767e35 | /py_ffmpeg_snapshot_mp4.py | 260b61a4ce66d6d9b5ea7477b98639ad3e9ab092 | [] | no_license | gregsheu/python | e5e9ff83dc0ce90541591e726c940e8a1f71a3d4 | 4a77295d58a522974ee85b201ab99cdbe410fd08 | refs/heads/master | 2023-08-18T08:30:15.611727 | 2023-08-08T06:55:44 | 2023-08-08T06:55:44 | 181,270,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,075 | py | import requests
import urllib
import ffmpeg
import os
import time
import threading
from requests.auth import HTTPDigestAuth
def convert_jpgmp4(ip):
t = 0
k = 'KingSolarman'
while t < 6:
t = t + 1
for i in range(1, 5):
try:
#r1 = ffmpeg.input('rtsp://admin:admin12345@' + str(v['ip']) + ':554/cam/realmonitor?channel=%s&subtype=0' % i, rtsp_transport = 'tcp')
r1 = ffmpeg.input('rtsp://admin:admin12345@%s:554/cam/realmonitor?channel=%s&subtype=0' % (ip, i), rtsp_transport = 'tcp')
(
ffmpeg
.output(r1, k + '%s-%s.jpg' % (t, i), vframes=1)
.overwrite_output()
.run()
)
except:
print('error on %s cam' % i)
#for t in range(1, 5):
for i in range(1, 5):
(
ffmpeg.input('./KingSolarman*-%s.jpg' % i, pattern_type='glob', framerate=1)
#.crop(0, 0, 1280, 720)
.filter('scale',1280, 720)
.filter('setsar', sar=1/1)
.drawtext('King Solarman Inc.', 10, 400, fontcolor='red', fontsize=48, fontfile='/usr/share/fonts/truetype/freefont/FreeSansBold.ttf')
.output('ks-gif-%s.mp4' % i, t=5, r=29.97)
.overwrite_output()
.run()
)
v1 = ffmpeg.input('ks-gif-%s.mp4' % i)
(
ffmpeg
.concat(
v1.setpts('PTS-STARTPTS'),
#a1.filter('atrim', 45, 55).filter('asetpts', 'PTS-STARTPTS').filter('volume', 0.8),
v=1,
a=0,
)
.output('KingSolarmanTW-%s.mp4' % i)
.overwrite_output()
.run()
)
def make_tile(ip):
in1 = None
in2 = None
in3 = None
in4 = None
k = 'KingSolarmanFront'
for i in range(1, 5):
try:
#r1 = ffmpeg.input('rtsp://admin:admin12345@' + str(v['ip']) + ':554/cam/realmonitor?channel=%s&subtype=0' % i, rtsp_transport = 'tcp')
r1 = ffmpeg.input('rtsp://admin:admin12345@%s:554/cam/realmonitor?channel=%s&subtype=0' % (ip, i), rtsp_transport = 'tcp')
(
ffmpeg
.output(r1, k + '-%s.jpg' % i, vframes=1)
.overwrite_output()
.run()
)
except:
print('error on %s cam' % i)
in1 = ffmpeg.input(k + '-1.jpg')
in2 = ffmpeg.input(k + '-2.jpg')
in3 = ffmpeg.input(k + '-3.jpg')
in4 = ffmpeg.input(k + '-4.jpg')
in5 = ffmpeg.input(k + '-t1.jpg')
in6 = ffmpeg.input(k + '-t2.jpg')
(
ffmpeg
.concat(
in1.filter('scale', '1280', '720'),
in2.filter('scale', '1280', '720'),
)
.filter('tile', '1x2')
.filter('setsar', '16', '9')
.output(k + '-t1.jpg')
.overwrite_output()
.run()
)
(
ffmpeg
.concat(
in3.filter('scale', '1280', '720'),
in4.filter('scale', '1280', '720'),
)
.filter('tile', '1x2')
.filter('setsar', '16', '9')
.output(k + '-t2.jpg')
.overwrite_output()
.run()
)
(
ffmpeg
.concat(
in5,
in6,
)
.filter('tile', '2x1')
.filter('setsar', '16', '9')
.output(k + '-tile.jpg')
.overwrite_output()
.run()
)
def convert_dav(ip, i, eventstart, eventend):
newtime = eventstart.replace(' ', '')
newtime = newtime.replace(':', '')
payload = {'action': 'startLoad', 'channel': i, 'startTime': eventstart, 'endTime': eventend, 'subtype': '0'}
param = urllib.parse.urlencode(payload, quote_via=urllib.parse.quote)
video_url = 'http://%s/cgi-bin/loadfile.cgi?' % ip
user = 'admin'
password = 'admin12345'
video_resp = requests.get(video_url, params=param, auth=HTTPDigestAuth(user, password), stream=True)
with open('tripvideo-%s-%s.dav' % (i, newtime), 'wb') as f:
f.write(video_resp.content)
tripvideo = 'tripvideo-%s-%s.dav' % (i, newtime)
r = ffmpeg.input(tripvideo)
(
ffmpeg
.output(r, tripvideo[0:-4]+'.mp4', format='mp4')
.overwrite_output()
.run()
)
def main():
ip = '166.149.88.121'
#ip = '192.168.1.109'
#make_tile(ip)
#convert_jpgmp4(ip)
t = 0
cur_time = time.time()
eventstart = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(cur_time-2))
eventend = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(cur_time+5))
curtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(cur_time))
print(curtime)
print(eventstart)
print(eventend)
time.sleep(11)
convert_dav(ip, 1, eventstart, eventend)
convert_dav(ip, 2, eventstart, eventend)
convert_dav(ip, 3, eventstart, eventend)
convert_dav(ip, 4, eventstart, eventend)
#t1 = threading.Thread(target=get_con_dav, args=(ip, 1, eventstart, eventend,))
#t1.start()
if __name__ == '__main__':
main()
| [
"greg@mymacpro.bluefinops.io"
] | greg@mymacpro.bluefinops.io |
034aa04bd4df8e40af2fcc06330674f531b2564b | 1061216c2c33c1ed4ffb33e6211565575957e48f | /python-blueplanet/app/openapi_server/models/realm_domain.py | 5d2b636ee80ac8d5bad11d57f9865d8f0818a6ce | [] | no_license | MSurfer20/test2 | be9532f54839e8f58b60a8e4587348c2810ecdb9 | 13b35d72f33302fa532aea189e8f532272f1f799 | refs/heads/main | 2023-07-03T04:19:57.548080 | 2021-08-11T19:16:42 | 2021-08-11T19:16:42 | 393,920,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,622 | py | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from app.openapi_server.models.base_model_ import Model
from openapi_server import util
class RealmDomain(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, domain: str=None, allow_subdomains: bool=None): # noqa: E501
"""RealmDomain - a model defined in Swagger
:param domain: The domain of this RealmDomain. # noqa: E501
:type domain: str
:param allow_subdomains: The allow_subdomains of this RealmDomain. # noqa: E501
:type allow_subdomains: bool
"""
self.swagger_types = {
'domain': str,
'allow_subdomains': bool
}
self.attribute_map = {
'domain': 'domain',
'allow_subdomains': 'allow_subdomains'
}
self._domain = domain
self._allow_subdomains = allow_subdomains
@classmethod
def from_dict(cls, dikt) -> 'RealmDomain':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The RealmDomain of this RealmDomain. # noqa: E501
:rtype: RealmDomain
"""
return util.deserialize_model(dikt, cls)
@property
def domain(self) -> str:
"""Gets the domain of this RealmDomain.
The new allowed domain. # noqa: E501
:return: The domain of this RealmDomain.
:rtype: str
"""
return self._domain
@domain.setter
def domain(self, domain: str):
"""Sets the domain of this RealmDomain.
The new allowed domain. # noqa: E501
:param domain: The domain of this RealmDomain.
:type domain: str
"""
self._domain = domain
@property
def allow_subdomains(self) -> bool:
"""Gets the allow_subdomains of this RealmDomain.
Whether subdomains are allowed for this domain. # noqa: E501
:return: The allow_subdomains of this RealmDomain.
:rtype: bool
"""
return self._allow_subdomains
@allow_subdomains.setter
def allow_subdomains(self, allow_subdomains: bool):
"""Sets the allow_subdomains of this RealmDomain.
Whether subdomains are allowed for this domain. # noqa: E501
:param allow_subdomains: The allow_subdomains of this RealmDomain.
:type allow_subdomains: bool
"""
self._allow_subdomains = allow_subdomains
| [
"suyash.mathur@research.iiit.ac.in"
] | suyash.mathur@research.iiit.ac.in |
853844b8902491afe57cebbbb509d1c6a63f6d96 | 8a1144dd38388992c7e35a4cc84002e381f2cf1f | /python/django_fundamentals/login_and_registration/apps/login_registration/migrations/0001_initial.py | 9ea1b5fcdbc6512bea58f815ae55408d3fa91aa6 | [] | no_license | vin792/dojo_assignments | 18472e868610bacbd0b5141a5322628f4afefb5b | 449b752f92df224285bfd5d03901a3692a98562e | refs/heads/master | 2021-01-20T00:20:09.896742 | 2017-05-26T17:37:09 | 2017-05-26T17:37:09 | 82,735,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 20:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=255)),
('last_name', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('hash_password', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
),
]
| [
"vin792@gmail.com"
] | vin792@gmail.com |
21816f17050910e9e18ccd56d0411ceab4b65bd8 | a8139ccd50a27861d3c5a4168fd0e4b351c0a514 | /material/code/advanced_oop_and_python_topics/4_ManagedAttributeDemo/ManagedAttributeDemo2.py | 79ffd4b1f17b51ffde72f399ac413b1c616988d1 | [] | no_license | shambhand/pythontraining | a124aa1485c3ce0e589fc2cd93c1e991746432e4 | 24dd923e2b2c07c70500775e3665e2a527240329 | refs/heads/master | 2021-05-17T22:54:45.331127 | 2019-01-11T03:12:59 | 2019-01-11T03:12:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | # Attribute properties are inherited by the
# derived class
import sys
class Person:
def __init__(self, name):
self._name = name
def getName(self):
print('fetch...')
return self._name
def setName(self, value):
print('change...')
self._name = value
def delName(self):
print('remove...')
del self._name
name = property(getName, setName, delName, "name property docs")
class Employee (Person):
pass
def main ():
bob = Employee('Bob Smith')
print(bob.name)
bob.name = 'Robert Smith'
print(bob.name)
del bob.name
print('-'*20)
sue = Employee('Sue Jones')
print(sue.name)
print(Person.name.__doc__)
sys.exit (0)
main ()
| [
"amit2766@gmail.com"
] | amit2766@gmail.com |
4ffaf02529c48182b78088a71725d0fd48b1e682 | b945118b1344f05921af1f9974e5d6e370967253 | /cloudflu/r0.5/cloudflu/amazon/apps/reservation_rm.py | e97806bf40cf663f3ed7a01019d84af190e32399 | [
"Apache-2.0"
] | permissive | asimurzin/balloon-testing | aab85316d2a63c3a497b5afe46467c78c17a2691 | 35d72685a319fa66ee7006841b75c54bd62434e0 | refs/heads/master | 2016-09-06T20:16:59.157112 | 2011-07-26T11:51:49 | 2011-07-26T11:51:49 | 989,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,803 | py | #!/usr/bin/env python
#--------------------------------------------------------------------------------------
## Copyright 2010 Alexey Petrov
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
## See http://sourceforge.net/apps/mediawiki/cloudflu
##
## Author : Alexey Petrov
##
#--------------------------------------------------------------------------------------
"""
Deletes the appointed Amazon EC2 reservation and release all its incorporated resources
"""
#--------------------------------------------------------------------------------------
import cloudflu.common as common
from cloudflu.common import print_d, print_e, sh_command
from cloudflu.common import Timer, WorkerPool
from cloudflu import amazon
from cloudflu.amazon import ec2
#--------------------------------------------------------------------------------------
def execute( the_ec2_conn, the_cluster_id ) :
    """Terminate every instance of the given EC2 reservation and release
    the key pair and security group that belong to it.

    :param the_ec2_conn: open boto EC2 connection
    :param the_cluster_id: reservation (cluster) identifier to remove
    """
    # Resolve the reservation; report and bail out if it cannot be found.
    try:
        a_reservation = ec2.use.get_reservation( the_ec2_conn, the_cluster_id )
        print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )
    except Exception, exc:
        print_e( '%s\n' % exc, False )
        return
    # Security-group lookup is best effort: failures are only logged.
    a_security_group = None
    try:
        a_security_group = ec2.use.get_security_group( the_ec2_conn, a_reservation )
        print_d( "< %r > : %s\n" % ( a_security_group, a_security_group.rules ) )
    except:
        from cloudflu.common import print_traceback
        print_traceback()
        pass
    # All instances of a reservation share one key pair; remember its local
    # identity file before terminating the instances.
    an_instance = a_reservation.instances[ 0 ]
    an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
    for an_instance in a_reservation.instances:
        an_instance.terminate()
        pass
    # Best-effort cleanup of the key pair, both remote and on disk.
    try:
        the_ec2_conn.delete_key_pair( an_instance.key_name )
        import os; os.remove( an_identity_file )
    except:
        from cloudflu.common import print_traceback
        print_traceback()
        pass
    # Best-effort removal of the security group.
    # NOTE(review): if the lookup above failed, a_security_group is None and
    # this raises AttributeError, silently swallowed by the bare except.
    try:
        the_ec2_conn.delete_security_group( a_security_group.name )
    except:
        from cloudflu.common import print_traceback
        print_traceback()
        pass
    # Wait until the last instance reaches 'terminated'.
    # NOTE(review): this is a busy-wait (no sleep between update() polls).
    print_d( '%s ' % an_instance.update() )
    while an_instance.update() != 'terminated' :
        print_d( '.' )
        continue
    print_d( ' %s\n' % an_instance.update() )
    pass
#--------------------------------------------------------------------------------------
def main() :
    """Command-line entry point: parse options, then delete each requested
    reservation concurrently via a worker pool."""
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    from reservation_rm_options import usage_description as usage_description_options
    an_usage_description += usage_description_options()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    from reservation_rm_options import extract as extract_options
    a_cluster_ids = extract_options( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    # Echo an equivalent command line so runs can be reproduced verbatim.
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    from reservation_rm_options import compose as compose_options
    a_call = "%s %s" % ( an_engine, compose_options( a_cluster_ids ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    # One worker per reservation: deletions run in parallel.
    a_worker_pool = WorkerPool( len( a_cluster_ids ) )
    for a_cluster_id in a_cluster_ids :
        a_worker_pool.charge( execute, ( an_ec2_conn, a_cluster_id ) )
        pass
    a_worker_pool.shutdown()
    a_worker_pool.join()
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    # There are no - it is a terminal step
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
#------------------------------------------------------------------------------------------
# Standard script entry-point guard.
if __name__ == '__main__' :
    main()
    pass
#------------------------------------------------------------------------------------------
| [
"asimurzin@gmail.com"
] | asimurzin@gmail.com |
ee05403c9afac4b0de051068d81af1443f0da820 | 9b0babcf6849e11b3f208e702d2b36fd049f63f2 | /Nov03/04Pattern638.py | 3c7b60988e41f0991146ca7c16ac43e3bf4cb094 | [] | no_license | nish235/PythonPrograms | d5ec56647d06136aef9501d732e7da32e82f3947 | f657c1263098665a50b1b1fcbfc49bea6ce7af6f | refs/heads/main | 2023-03-18T23:02:34.974009 | 2021-03-13T06:18:00 | 2021-03-13T06:18:00 | 302,834,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | n = 5
# Draw a 'V' of letters on an n x 2n canvas: both branches start at
# column n and walk outward one column per row.
a = n  # left branch column; moves one step left each row
b = n  # right branch column; moves one step right each row
for x in range(1, n+1):
    for y in range(1, n * 2 + 1):
        if y == a or y == b:
            # chr(a+64): 1 -> 'A' ... 5 -> 'E'; both branches print the
            # letter indexed by the left counter ``a``.
            print(chr(a+64), end="")
        else:
            print(" ", end="")
    a -= 1
    b += 1
    print()
| [
"noreply@github.com"
] | nish235.noreply@github.com |
7b52b03525110b80daf809be6771e62af13a65de | b1ddcf4bac9ca603a7a2333912eb29da8bf2cb7b | /ReadOnlyModelViewset/ReadOnlyModelViewset/wsgi.py | f5e6d48aa8a7cf1cd91df179630338e674686911 | [] | no_license | sankethalake/django_practice | e9477ae0beee4923cd6758cc6d37517ea5979610 | 9877304f0c6415ae8979e5cc13a49559155fdd9d | refs/heads/main | 2023-07-07T07:07:35.598657 | 2021-08-14T06:26:23 | 2021-08-14T06:26:23 | 389,917,128 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | """
WSGI config for ReadOnlyModelViewset project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ReadOnlyModelViewset.settings')
application = get_wsgi_application()
| [
"sankethalake@gmail.com"
] | sankethalake@gmail.com |
eef03c572aa4c91aa3ca9b0b876c4ab34a9a2947 | 677ccdf2a0459179f03ef543c83d52f3b3174b98 | /django_app/wishlist/apis/wishlist.py | 060cb3d166eb89bc386295e20f2cbc039960e8f1 | [] | no_license | wps5-airbnb/airbnb | 6c4de1d5d6e7f9b2bd9190a9a84b8e19e845b761 | 68068def521bd911fbc462cb10eae23aa7b9b436 | refs/heads/master | 2021-01-01T16:18:43.854381 | 2018-10-29T06:22:18 | 2018-10-29T06:22:18 | 97,808,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,374 | py | from rest_framework import authentication, status
from rest_framework.response import Response
from rest_framework.views import APIView
from house.models import House
from house.serializers.house import HouseSerializer
from ..models import Wishlist
__all__ = [
'WishlistView',
]
class WishlistView(APIView):
    """Toggle (POST) and list (GET) the requesting user's liked houses."""
    authentication_classes = (authentication.TokenAuthentication,)

    def post(self, request):
        """Toggle a wishlist entry for the house given by ``?house=<pk>``.

        Creates the like when absent (201) and removes it when present (200).
        NOTE(review): a missing/non-numeric ``house`` parameter or an unknown
        pk raises (KeyError/ValueError/DoesNotExist -> 500); consider
        returning 400/404 instead.
        """
        user = request.user
        house_pk = int(request.query_params['house'])
        house = House.objects.get(pk=house_pk)
        instance, created = Wishlist.objects.get_or_create(
            liker=user,
            house=house,
        )
        if not created:
            # A second like toggles the existing entry off.
            instance.delete()
            return Response(
                '{}유저가 pk={}인 하우스의 좋아요를 취소하였습니다.'.format(user.username, house.pk),
                status=status.HTTP_200_OK,
            )
        else:
            return Response(
                '{}유저가 pk={}인 하우스를 좋아합니다.'.format(user.username, house.pk),
                status=status.HTTP_201_CREATED,
            )

    def get(self, request):
        """Return the user's liked houses, most recently liked first."""
        user = request.user
        like_houses = user.get_wishlist.order_by('wishlist__created_date').reverse()
        serializer = HouseSerializer(like_houses, many=True)
        return Response(serializer.data)
| [
"gaius827@gmail.com"
] | gaius827@gmail.com |
3b8f928c5a21247a282fb340146ff95376ee343a | ad05a747dc8f23822c51b02486b29c5cd2001369 | /releases/cws_toolbox/cws_tbx_1.4.6/cws_toolbox/transform_lidar/remove_blanks.py | 107baf1cd1f503a0dda18928dbbd91e908e0ea23 | [] | no_license | nickrsan/sierra-code-library | 9e2bcb51135b2f33c1ff4230ec21c0f30ab19c2c | 91abf016b63da901d4cc182f4761fe1d7f46cfe4 | refs/heads/master | 2022-04-11T10:53:17.439366 | 2020-03-24T00:32:06 | 2020-03-24T00:32:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,043 | py | import sys
import arcpy
from cws_toolbox.transform_lidar.cwslidar import *
class func_wrapper:
    """Callback object handed to process_data(): filters LiDAR text lines.

    Drops point-count-only lines, copies the remaining rows through to
    ``output``, and tracks the min/max of the fourth (signed intensity)
    column via the supplied ``mm`` tracker.
    """

    def __init__(self):
        pass

    def processor(self, data, mm, output=None):  # mm is the minmax object
        # Header row for the filtered x/y/z/intensity/RGB table.
        output.write("x y z i r g b\n")
        for line in data:
            match = re.search('^(\d*)\s*$', line)  # line that is only digits
            if match is not None and match.group(0) is not None:
                continue  # point-count lines carry no coordinates; skip them
            else:
                try:
                    # Capture the fourth whitespace-separated field (signed
                    # intensity) and feed it to the min/max tracker.
                    newmatch = re.search('^\d+\.?\d*\s+\d+\.?\d*\s+\d+\.?\d*\s+(-?\d+)\s+', line)
                    if newmatch is not None and newmatch.group(1) is not None:
                        mm.track(newmatch.group(1))
                    else:
                        log("Problem matching line for intensity minmax")
                except:
                    log("Problem reading and scaling intensity")
            # Pass the original line through unchanged.
            output.write("%s" % line)
        log(mm.report(return_text = True))
# Script driver: read the output-folder tool parameter, configure the
# toolbox, then stream the LiDAR text through the filter above.
output_dir = arcpy.GetParameterAsText(1)
setup(output_dir)
blanker = func_wrapper()
process_data(blanker)
shutdown()
"nickrsan@users.noreply.github.com"
] | nickrsan@users.noreply.github.com |
95f0622de154b99f837e5337ee28442eac324b12 | bb7909c8b1906d43836d798e3bf6e5d3aa12a2ea | /groovebox/utils/__init__.py | 2d84afcc8d1cf4cd0542eec3dbb20bca74d8ac5c | [] | no_license | thegroovebox/api.groovebox.org | 9e55002f22196ae4a94b05d3e6297de0d9d33c4f | 99701d24e686b5d2d0c339e5dbe6cb74a939b3d9 | refs/heads/master | 2021-01-22T14:11:44.163885 | 2016-07-30T07:47:21 | 2016-07-30T07:47:21 | 38,858,649 | 4 | 2 | null | 2016-07-30T07:47:21 | 2015-07-10T03:22:56 | Python | UTF-8 | Python | false | false | 1,795 | py | #!/usr/bin/env python
# -*-coding: utf-8 -*-
"""
utils
~~~~~
Various utilities (not groovebox specific)
:copyright: (c) 2015 by Mek
:license: see LICENSE for more details.
"""
from datetime import datetime, date
import json
import string
def subdict(d, keys, required=True):
    """Return a new dict containing only *keys* taken from *d*.

    :param d: source mapping
    :param keys: iterable of keys to keep
    :param required: when True (default) every key must be present in *d*
        (a missing key raises ``KeyError``); when False, missing keys are
        silently skipped
    :return: a new ``dict`` restricted to the requested keys
    :raises KeyError: if *required* is True and a key is absent from *d*
    """
    if required:
        return {k: d[k] for k in keys}
    return {k: d[k] for k in keys if k in d}
def time2sec(t):
    """Convert a track-length value to seconds.

    ``"m:ss"`` inputs yield an ``int`` number of seconds.
    NOTE(review): for dotted (``"x.y"``) and plain inputs the *string*
    portion is returned unchanged (not an int) -- callers appear to rely
    on this mixed return type.
    """
    text = str(t)
    if ":" in text:
        minutes, seconds = text.split(":")
        return int(minutes) * 60 + int(seconds)
    if "." in text:
        return text.split(".")[0]
    return text
class DatetimeEncoder(json.JSONEncoder):
    """JSON encoder that serialises datetimes and dates as ISO-style text."""

    def default(self, obj):
        # Check datetime before date: datetime is a subclass of date.
        if isinstance(obj, datetime):
            return obj.strftime('%Y-%m-%dT%H:%M:%SZ')
        if isinstance(obj, date):
            return obj.strftime('%Y-%m-%d')
        return super().default(obj)
class Numcoder(object):
    """Bijective base-62 codec for non-negative integers.

    The digit alphabet is A-Z, a-z, 0-9, so 0 encodes as ``'A'``.
    """

    ALPHABET = string.ascii_uppercase + string.ascii_lowercase + string.digits
    ALPHABET_REVERSE = {c: i for i, c in enumerate(ALPHABET)}
    BASE = len(ALPHABET)

    @classmethod
    def encode_many(cls, *ns, delim="$"):
        """Encode several integers, joined by *delim*."""
        return delim.join(str(cls.encode(n)) for n in ns)

    @classmethod
    def encode(cls, n):
        """Encode a single non-negative integer as a base-62 string."""
        digits = []
        while True:
            n, rem = divmod(n, cls.BASE)
            digits.append(cls.ALPHABET[rem])
            if not n:
                break
        digits.reverse()
        return ''.join(digits)

    @classmethod
    def decode_many(cls, n, delim='$'):
        """Decode a *delim*-joined string back into a list of integers."""
        return [cls.decode(part) for part in n.split(delim)]

    @classmethod
    def decode(cls, s):
        """Decode a base-62 string back into an integer."""
        value = 0
        for ch in s:
            value = value * cls.BASE + cls.ALPHABET_REVERSE[ch]
        return value
| [
"michael.karpeles@gmail.com"
] | michael.karpeles@gmail.com |
32d7879356d81e175fd2276ed3fce8cab3d00e97 | 34ed44cdbbb641c6ace37b5caa05a850b5928980 | /impression_client/backends.py | 1ac8234b840f4300a722991080bc09b743d9a274 | [
"MIT"
] | permissive | gregschmit/django-impression-client | 627dfe89a2053e9965dc7e3ad8069e16dad55e4f | a2f4328024a67865eccaeff79567320842ab5d5c | refs/heads/master | 2020-12-01T13:35:35.092702 | 2019-12-29T02:17:15 | 2019-12-29T02:17:15 | 230,642,917 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,287 | py | """
This module implements our remote email backend.
"""
import requests
from django.core.mail.backends.base import BaseEmailBackend
from .settings import get_setting
class RemoteEmailBackend(BaseEmailBackend):
    """
    This backend sends a RESTful request to the target Impression server, and allows
    that remote installation of Impression to send the email(s). This backend will send
    the remote server the raw from/to/cc/bcc fields, however it's up to the remote
    service if it will trust you enough to use these fields.
    """

    @staticmethod
    def send_message(message):
        """
        Send a RESTful request to the target impression server and return the response.
        """
        # Get target/token: prefer the configured RemoteImpressionServer row,
        # falling back to the settings defaults when none is available.
        try:
            from .models import RemoteImpressionServer
            target, token = RemoteImpressionServer.get_target_and_token()
        except RuntimeError:
            target = get_setting("IMPRESSION_CLIENT_DEFAULT_TARGET")
            token = get_setting("IMPRESSION_CLIENT_DEFAULT_TOKEN")

        # build headers
        headers = {"Authorization": "Token {}".format(token)}

        # A first "to" entry containing no '@' is treated as the remote
        # service name rather than a recipient address.
        if message.to and not "@" in message.to[0]:
            service_name = message.to[0]
            to_emails = message.to[1:]
        else:
            service_name = get_setting("IMPRESSION_CLIENT_DEFAULT_SERVICE")
            to_emails = message.to

        # send the request
        payload = {
            "service_name": service_name,
            "subject": message.subject,
            "body": message.body,
            "from": message.from_email,
            "to": to_emails or [],
        }
        if message.cc:
            payload["cc"] = message.cc
        if message.bcc:
            payload["bcc"] = message.bcc
        return requests.post(target, data=payload, headers=headers)

    def send_messages(self, email_messages):
        """
        For each email message, send RESTful request to the remote server and return the
        number which returned non-error response codes.
        """
        count = 0
        for msg in email_messages:
            response = self.send_message(msg)
            # Response.ok is True for status < 400; booleans add as 0/1.
            count += response.ok
        return count
| [
"schmitgreg@gmail.com"
] | schmitgreg@gmail.com |
641d4a77c65b117c530880486b96976994997758 | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/core/framework/variable_pb2.py | d039b2253deae01739d32df4de63b92afd561ed4 | [] | no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:44f055eb2279cd13afd13956996ab45ffe6357c2bc77bbe30d0d98318245f426
size 11678
| [
"business030301@gmail.com"
] | business030301@gmail.com |
d09592cff798e5f6b30194c839b285354a53054a | 28a124b6a2f22a53af3b6bb754e77af88b4138e1 | /DJANGO/DjangoRestFramework/intro/comment/api/views.py | ec8c91dabbf7cefb14cd4267f581b6dfe6440a51 | [] | no_license | mebaysan/LearningKitforBeginners-Python | f7c6668a9978b52cad6cc2b969990d7bbfedc376 | 9e1a47fb14b3d81c5b009b74432902090e213085 | refs/heads/master | 2022-12-21T03:12:19.892857 | 2021-06-22T11:58:27 | 2021-06-22T11:58:27 | 173,840,726 | 18 | 4 | null | 2022-12-10T03:00:22 | 2019-03-04T23:56:27 | Python | UTF-8 | Python | false | false | 1,924 | py | from rest_framework.generics import CreateAPIView, ListAPIView, DestroyAPIView, RetrieveUpdateAPIView
from comment.models import Comment
from comment.api.serializers import CommentCreateSerializer, CommentListSerializer, CommentDeleteUpdateSerializer
from comment.api.permissions import IsOwner
from rest_framework.permissions import IsAuthenticated
from comment.api.paginators import CommentPaginator
from rest_framework.mixins import UpdateModelMixin, DestroyModelMixin
class CommentCreateAPIView(CreateAPIView):
    """Create a comment; the author is always the authenticated requester."""
    queryset = Comment.objects.all()
    serializer_class = CommentCreateSerializer

    def perform_create(self, serializer):
        # Ignore any client-supplied user field: bind to the request user.
        serializer.save(user=self.request.user)
class CommentListAPIView(ListAPIView):
    """Paginated list of top-level comments; ``?q=<post-id>`` filters by post."""
    # queryset = Comment.objects.all()
    serializer_class = CommentListSerializer
    pagination_class = CommentPaginator

    def get_queryset(self):
        # Default: only root comments (replies are excluded via parent=None).
        queryset = Comment.objects.filter(parent=None)
        query = self.request.GET.get('q')
        if query:
            # NOTE(review): with ?q= set, the parent=None restriction is
            # dropped, so replies are included too -- confirm this is intended.
            queryset = Comment.objects.filter(post=query)
        return queryset
# class CommentDeleteAPIView(DestroyAPIView, UpdateModelMixin):
# class CommentDeleteAPIView(DestroyAPIView, UpdateModelMixin):
class CommentDeleteAPIView(DestroyAPIView):
    """Delete a comment; only its authenticated owner may do so."""
    queryset = Comment.objects.all()
    serializer_class = CommentDeleteUpdateSerializer
    lookup_field = 'pk'
    permission_classes = [IsAuthenticated, IsOwner]

    # def put(self, request, *args, **kwargs):  # if a PUT request reaches this view
    #     return self.update(request, *args, **kwargs)
class CommentUpdateAPIView(RetrieveUpdateAPIView, DestroyModelMixin):
    """Retrieve, update or delete a single comment (owner only)."""
    queryset = Comment.objects.all()
    serializer_class = CommentDeleteUpdateSerializer
    lookup_field = 'pk'
    permission_classes = [IsAuthenticated, IsOwner]

    def delete(self, request, *args, **kwargs):
        # BUG FIX: the Response produced by DestroyModelMixin.destroy() was
        # discarded, so DELETE returned None and Django raised an error
        # instead of answering 204 No Content.
        return self.destroy(request, *args, **kwargs)
| [
"menesbaysan@gmail.com"
] | menesbaysan@gmail.com |
eb9f93c41f17e6c67016513077e632e91c2443b2 | 8ee5dcbdbd407eb5f294d430813b16eca22f571c | /data/HW5/hw5_308.py | 2c3b780ef113894c2152aaba21d6a3219530477f | [] | no_license | MAPLE-Robot-Subgoaling/IPT | 5e60e2ee4d0a5688bc8711ceed953e76cd2ad5d9 | f512ea3324bfdceff8df63b4c7134b5fcbb0514e | refs/heads/master | 2021-01-11T12:31:00.939051 | 2018-08-13T23:24:19 | 2018-08-13T23:24:19 | 79,373,489 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 622 | py | def main():
    # Prompt for the box geometry and the two drawing symbols.
    width = int(input("Please enter the width of the box: "))
    height = int(input("Please enter the height of the box: "))
    outline = str(input("Please enter a symbol for the box outline: "))
    fill = str(input("Please enter a symbol for the box fill: "))
    # Build the box row by row: border rows and columns use ``outline``,
    # interior cells use ``fill``.
    for i in range(height):
        line = ""
        for k in range(width):
            if i == 0 or i == height-1:
                line = line + outline
            else:
                if k == 0 or k == width-1:
                    line = line + outline
                else:
                    line = line + fill
        print(line)

main()
| [
"mneary1@umbc.edu"
] | mneary1@umbc.edu |
c56630841c8602bde83bb5f0a0ab2c6ffcd7ceb2 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/pa1/sample/def_func_nonlocal-67.py | 097609d7e0d61bbab6660375189cf84a37495645 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 172 | py |
def foo(x:int) -> bool:
    """Return whether *x* is greater than the nonlocal value set by bar().

    bar() runs first (binding the nonlocal ``a`` to 2) because the left
    operand of ``>`` is evaluated before the right operand is loaded.
    """
    a:int = 0
    b:int = 1
    def bar(y: int) -> int:
        nonlocal a
        a = 2
        return y
    # BUG FIX: the original line contained an unexpanded ``$Parameters``
    # template placeholder; the intended argument is ``x``.
    return bar(x) > a

foo(1)
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
75f658448722da512c74e14e4e1266419cd6b885 | b2b9be47ce8c2cd0985eb39fa4489e149c78a085 | /nintendo/common/http.py | 3562997ac828fce35d724486d11d1eb836251c34 | [
"MIT"
] | permissive | SmmServer/NintendoClients | 9e15801f73d35877b5e3a47830c2a5f2778547ad | e1d2fec34e460cd87330a8cc886e54479d701469 | refs/heads/master | 2022-09-10T13:06:35.800799 | 2019-08-29T17:23:46 | 2019-08-29T17:23:46 | 218,143,700 | 0 | 3 | null | 2019-10-28T20:56:14 | 2019-10-28T20:56:13 | null | UTF-8 | Python | false | false | 5,291 | py |
from . import socket, signal, util, scheduler
import logging
logger = logging.getLogger(__name__)
class HTTPFormData:
    """Parsed ``key=value&key=value`` form/query data.

    Values are stored verbatim -- no percent-decoding is performed.
    """

    def __init__(self):
        self.fields = {}

    def parse(self, data):
        """Parse *data* into ``fields``; log and return False on a field
        that lacks an '=' separator."""
        for field in data.split("&"):
            if "=" not in field:
                logger.warning("Malformed form parameter")
                return False
            key, value = field.split("=", 1)
            self.fields[key] = value
        return True

    def __contains__(self, item):
        return item in self.fields

    def __getitem__(self, item):
        return self.fields[item]
class HTTPRequest:
    """A single parsed HTTP request plus the client socket it arrived on."""

    def __init__(self, client):
        self.client = client
        self.method = None
        self.path = None
        self.version = None
        self.headers = util.CaseInsensitiveDict()
        self.body = ""
        self.params = HTTPFormData()  # query-string parameters
        self.form = HTTPFormData()    # urlencoded body fields

    def process(self):
        """Split query parameters off the path and decode form bodies.

        Returns False when the query string or the form body is malformed.
        """
        if "?" in self.path:
            self.path, params = self.path.split("?", 1)
            if not self.params.parse(params):
                return False
        # BUG FIX: compare only the media type so values such as
        # "application/x-www-form-urlencoded; charset=UTF-8" are still
        # recognised (the header may carry parameters after a semicolon).
        content_type = self.headers.get("Content-Type") or ""
        if content_type.split(";")[0].strip() == "application/x-www-form-urlencoded":
            if not self.form.parse(self.body):
                return False
        return True
RESPONSE_TEMPLATE = "%s %i %s\r\n%s\r\n"

class HTTPResponse:
    """Builder for a serialised HTTP response."""

    # Reason phrases for the statuses this server emits; anything else
    # falls back to "Unknown" in encode().
    status_names = {
        200: "OK",
        400: "Bad Request",
        401: "Unauthorized",
        403: "Forbidden",
        404: "Not Found",
        405: "Method Not Allowed",
        500: "Internal Server Error"
    }

    def __init__(self, status):
        self.version = "HTTP/1.1"
        self.status = status
        self.headers = util.CaseInsensitiveDict()
        self.body = ""

    def encode(self):
        """Serialise the status line, headers and body to ASCII bytes."""
        self.headers["Content-Length"] = len(self.body)
        headers = ""
        for key, value in self.headers.items():
            headers += "%s: %s\r\n" % (key, value)
        # BUG FIX: use .get() so an unlisted status code no longer raises
        # KeyError while building the response.
        header = RESPONSE_TEMPLATE % (
            self.version, self.status,
            self.status_names.get(self.status, "Unknown"),
            headers
        )
        return (header + self.body).encode("ascii")
class HTTPState:
    """Per-connection parser: accumulates received bytes into complete HTTP
    requests and emits each one through ``message_event``."""

    # Return codes used by the state callbacks below.
    RESULT_OK = 0
    RESULT_INCOMPLETE = 1
    RESULT_ERROR = 2

    def __init__(self, socket):
        self.socket = socket
        self.buffer = b""
        # ``state`` points at the parsing stage to run next.
        self.state = self.state_header
        self.event = scheduler.add_socket(self.handle_recv, socket)
        self.request = HTTPRequest(socket)
        self.message_event = signal.Signal()

    def handle_recv(self, data):
        """Feed received bytes into the state machine; close on parse error."""
        if not data:
            # Empty read: the peer closed the connection.
            scheduler.remove(self.event)
            return
        self.buffer += data
        result = self.state()
        # Keep parsing while whole pipelined requests remain in the buffer.
        while self.buffer and result == self.RESULT_OK:
            result = self.state()
        if result == self.RESULT_ERROR:
            logger.warning("Failed to parse HTTP request")
            response = HTTPResponse(400)
            self.socket.send(response.encode())
            scheduler.remove(self.event)
            self.socket.close()

    def finish(self):
        """Complete the current request, notify listeners, reset for the next."""
        if not self.request.process():
            return self.RESULT_ERROR
        self.message_event(self.request)
        self.request = HTTPRequest(self.socket)
        self.state = self.state_header
        return self.RESULT_OK

    def handle_header(self, data):
        """Parse the request line and headers from the raw header bytes."""
        try:
            lines = data.decode("ascii").splitlines()
        except UnicodeDecodeError:
            logger.warning("Failed to decode HTTP request")
            return self.RESULT_ERROR
        # Request line is METHOD PATH VERSION -- exactly three fields.
        fields = lines[0].split()
        if len(fields) != 3:
            logger.warning("Invalid HTTP request")
            return self.RESULT_ERROR
        self.request.method = fields[0]
        self.request.path = fields[1]
        self.request.version = fields[2]
        for header in lines[1:]:
            if not ": " in header:
                logger.warning("Invalid HTTP request header")
                return self.RESULT_ERROR
            key, value = header.split(": ", 1)
            # Header names are stored lower-cased.
            self.request.headers[key.lower()] = value
        if "Content-Length" in self.request.headers:
            if not util.is_numeric(self.request.headers["Content-Length"]):
                logger.warning("Invalid Content-Length header")
                return self.RESULT_ERROR
            # A body follows: switch to the body-collecting state.
            self.state = self.state_body
        else:
            # No body: the request is complete as soon as the headers end.
            return self.finish()
        return self.RESULT_OK

    def state_header(self):
        """Wait for the blank line that terminates the header block."""
        if b"\r\n\r\n" in self.buffer:
            header, self.buffer = self.buffer.split(b"\r\n\r\n", 1)
            return self.handle_header(header)
        return self.RESULT_INCOMPLETE

    def state_body(self):
        """Collect exactly Content-Length bytes of request body."""
        length = int(self.request.headers["Content-Length"])
        if len(self.buffer) < length:
            return self.RESULT_INCOMPLETE
        try:
            self.request.body = self.buffer[:length].decode("ascii")
        except UnicodeDecodeError:
            logger.warning("Failed to decode HTTP request body")
            return self.RESULT_ERROR
        self.buffer = self.buffer[length:]
        return self.finish()
class HTTPServer:
    """Plain or SSL TCP listener that parses HTTP and dispatches to handle()."""

    def __init__(self, ssl, server=None):
        self.ssl = ssl
        self.server = server
        if not self.server:
            # Build a default socket server matching the requested transport.
            if ssl:
                self.server = socket.SocketServer(socket.TYPE_SSL)
            else:
                self.server = socket.SocketServer(socket.TYPE_TCP)

    def set_certificate(self, cert, key):
        # Forwarded to the underlying socket server (meaningful in SSL mode).
        self.server.set_certificate(cert, key)

    def start(self, host, port):
        """Bind the listener and register it with the scheduler."""
        logger.info("Starting HTTP server at %s:%i", host, port)
        self.server.start(host, port)
        scheduler.add_server(self.handle_conn, self.server)

    def handle_conn(self, socket):
        """Accept callback: attach an HTTP parser to the new connection."""
        address = socket.remote_address()
        logger.debug("New HTTP connection: %s:%i", address[0], address[1])
        state = HTTPState(socket)
        state.message_event.add(self.handle_req)

    def handle_req(self, request):
        """Parsed-request callback: run handle() and send its response."""
        logger.debug("Received HTTP request: %s %s", request.method, request.path)
        response = self.handle(request)
        logger.debug("Sending HTTP response (%i)", response.status)
        request.client.send(response.encode())

    def handle(self, request):
        # Hook for subclasses: must return an HTTPResponse.
        pass
| [
"ymarchand@me.com"
] | ymarchand@me.com |
3f0d5dd795c63efece0b18782e3ac5fed79b6dfd | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.5_rd=0.5_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=9/params.py | c43dddd0986e94512fb896453546bc28545453ac | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | {'cpus': 4,
'duration': 30,
'final_util': '2.516595',
'max_util': '2.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.5',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'GSN-EDF',
'trial': 9,
'utils': 'uni-medium-3'}
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
d05fc8d82d239460c8948bcb2604441baf39c5b5 | 32c56293475f49c6dd1b0f1334756b5ad8763da9 | /google-cloud-sdk/lib/surface/iot/devices/credentials/describe.py | c6e806c9fe1370cae03c45a6ce6d6cc152c4def6 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | bopopescu/socialliteapp | b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494 | 85bb264e273568b5a0408f733b403c56373e2508 | refs/heads/master | 2022-11-20T03:01:47.654498 | 2020-02-01T20:29:43 | 2020-02-01T20:29:43 | 282,403,750 | 0 | 0 | MIT | 2020-07-25T08:31:59 | 2020-07-25T08:31:59 | null | UTF-8 | Python | false | false | 2,028 | py | # -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud iot devices credentials describe` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudiot import devices
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iot import flags
from googlecloudsdk.command_lib.iot import resource_args
from googlecloudsdk.command_lib.iot import util
class Describe(base.DescribeCommand):
  """Show details about a specific device credential."""

  detailed_help = {
      'EXAMPLES':
          """\
        To describe the first credential of a device in region 'us-central1', run:

          $ {command} --region=us-central1 --registry=my-registry --device=my-device 0
        """,
  }

  @staticmethod
  def Args(parser):
    # The device is supplied via flags (not positionally); the positional
    # argument is the credential index within the device's credential list.
    resource_args.AddDeviceResourceArg(parser,
                                       'to which the credential belongs',
                                       positional=False)
    flags.GetIndexFlag('credential', 'to describe').AddToParser(parser)

  def Run(self, args):
    """Fetch the device and return the credential at ``args.index``.

    Raises util.BadCredentialIndexError when the index is out of range.
    """
    client = devices.DevicesClient()
    device_ref = args.CONCEPTS.device.Parse()
    credentials = client.Get(device_ref).credentials
    try:
      return credentials[args.index]
    except IndexError:
      raise util.BadCredentialIndexError(device_ref.Name(), credentials,
                                         args.index)
| [
"jonathang132298@gmail.com"
] | jonathang132298@gmail.com |
f086c3c60ee2807d697de9ada4ec168df5ca413c | 9eaa2c64a777bd24a3cccd0230da5f81231ef612 | /study/1905/month01/code/Stage1/day15/exercise02.py | 5a783276705d8406f1385d3bdd97764fd8574235 | [
"MIT"
] | permissive | Dython-sky/AID1908 | 4528932f2ca66b844d8a3fcab5ed8bf84d20eb0c | 46cd54a7b36b5f009974f2bbb7005a4ad440ca1a | refs/heads/master | 2022-04-14T12:23:30.426270 | 2020-04-01T18:05:19 | 2020-04-01T18:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | """
练习2:根据生日(年月日),计算活了多少天
思路
年月日 --> 时间
当前时间 --> 出生时间
计算天
"""
import time
def life_days(year, month, day):
    """Return how many whole days have passed since the given birth date.

    :param year: birth year
    :param month: birth month
    :param day: birth day
    :return: elapsed days as an int
    """
    birth = time.mktime(
        time.strptime("{}-{}-{}".format(year, month, day), "%Y-%m-%d"))
    elapsed = time.time() - birth
    return int(elapsed / 60 / 60 // 24)

result = life_days(1998, 4, 4)
print(result)
| [
"dong_1998_dream@163.com"
] | dong_1998_dream@163.com |
578fcf3f4a42677b26d57fc4c57d9470a1df953a | a37c93503ecb205b16f860664629a41b7c48250e | /initiation/migrations/0003_auto_20200913_0148.py | baad8b906b1e677c71d249c5286e339f7fb44344 | [] | no_license | samuelitwaru/PMS | f86681eaec2e34142447c3e66ab8d0939f4e0dd0 | 7bf7c9c511dd727479020540eef2a86ef561369e | refs/heads/master | 2023-01-10T04:35:16.852447 | 2020-10-22T09:03:48 | 2020-10-22T09:03:48 | 290,486,771 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,491 | py | # Generated by Django 2.2.3 on 2020-09-13 01:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Move requisition line items into a dedicated Item model.

    Drops Requisition.file_attachment and the Specification->Requisition
    link; adds an Item model (which now carries the file attachment) and
    points Specification at Item instead.
    """

    dependencies = [
        ('initiation', '0002_auto_20200912_0001'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='requisition',
            name='file_attachment',
        ),
        migrations.RemoveField(
            model_name='specification',
            name='requisition',
        ),
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
                ('quantity', models.IntegerField()),
                ('unit_of_measure', models.CharField(max_length=64)),
                ('unit_cost', models.IntegerField()),
                ('description', models.CharField(max_length=512)),
                ('file_attachment', models.FileField(null=True, upload_to='')),
                ('requisition_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='initiation.Requisition')),
            ],
        ),
        migrations.AddField(
            model_name='specification',
            name='item_id',
            # default=1 is a throwaway value needed to backfill existing rows;
            # preserve_default=False keeps it out of the model definition.
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='initiation.Item'),
            preserve_default=False,
        ),
    ]
| [
"samuelitwaru@gmail.com"
] | samuelitwaru@gmail.com |
86df83c6ac010a214be5f8c2906b4e742ea73a87 | 5a17226264d9abe7ff99c7da76512551b3df86a5 | /leetcode_algorithm/hacker_rank_easy_grid_challenge.py | 3e9122d701492f26548736c95604398bd5809d3f | [] | no_license | ccubc/DS_self_learning | cfddf6a02926884fae5a0b2798a20a4470674101 | 40725ee6d699c19c25dfbd81363564e15707c448 | refs/heads/small_potato | 2021-06-30T18:35:04.463732 | 2021-02-12T05:59:01 | 2021-02-12T05:59:01 | 219,366,357 | 3 | 2 | null | 2020-11-12T04:34:16 | 2019-11-03T21:13:49 | Jupyter Notebook | UTF-8 | Python | false | false | 1,656 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 10 12:50:41 2020
gridChallenge
Given a square grid of characters in the range ascii[a-z], rearrange elements of each row alphabetically, ascending. Determine if the columns are also in ascending alphabetical order, top to bottom. Return YES if they are or NO if they are not.
For example, given:
a b c
a d e
e f g
The rows are already in alphabetical order. The columns a a e, b d f and c e g are also in alphabetical order, so the answer would be YES. Only elements within the same row can be rearranged. They cannot be moved to a different row.
@author: chengchen
"""
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the gridChallenge function below.
def gridChallenge(grid):
    """Sort each row of the grid alphabetically, then report whether every
    column is in non-decreasing order top-to-bottom.

    :param grid: list of equal-length strings of lowercase letters
    :return: 'YES' if all columns are sorted after row-sorting, else 'NO'
    """
    # BUG FIX / cleanup: the original left several debug print() calls in
    # place and round-tripped every character through ord() unnecessarily --
    # strings compare lexicographically already.
    rows = [sorted(row) for row in grid]
    for upper, lower in zip(rows, rows[1:]):
        if any(a > b for a, b in zip(upper, lower)):
            return 'NO'
    return 'YES'
if __name__ == '__main__':
    # HackerRank harness: results are written to the file named by the
    # OUTPUT_PATH environment variable.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')

    t = int(input())  # number of test cases

    for t_itr in range(t):
        n = int(input())  # grid dimension (n rows)

        grid = []

        for _ in range(n):
            grid_item = input()
            grid.append(grid_item)

        result = gridChallenge(grid)

        fptr.write(result + '\n')

    fptr.close()
| [
"chengchen1215@gmail.com"
] | chengchen1215@gmail.com |
6c60eb36eeb0a4af34726a6095e3e87304254470 | e7300321d37036463fabb4e959d4b22389d3bca8 | /snmpresponder/macro.py | 7c793889037ca6b6e33d4d54f07f1dcf729e02c0 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | etingof/snmpresponder | e9b3d3ad4ff06be599f6984a280692dda389fa99 | 6e69f6168b0993cbc36e83cb44245c8776f7427a | refs/heads/master | 2023-03-27T16:56:02.021452 | 2019-12-15T18:23:27 | 2019-12-15T18:23:27 | 164,206,184 | 7 | 2 | BSD-2-Clause | 2019-01-30T22:49:07 | 2019-01-05T10:50:45 | Python | UTF-8 | Python | false | false | 544 | py | #
# This file is part of snmpresponder software.
#
# Copyright (c) 2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/snmpresponder/license.html
#
def expandMacro(option, context):
    """Substitute ``${key}`` placeholders in *option* using *context*.

    Substitution is applied one context key at a time, so a value that
    itself contains a placeholder can be expanded by a later key.
    A false-y *option* (``None``, empty string) is returned unchanged.
    """
    expanded = option
    for key, value in context.items():
        if expanded and '${' in expanded:
            expanded = expanded.replace('${%s}' % key, str(value))
    return expanded
def expandMacros(options, context):
    """Expand macros in every element of *options*; returns a new list."""
    return [expandMacro(option, context) for option in options]
| [
"etingof@gmail.com"
] | etingof@gmail.com |
9befa19f962e8ba4171f6cb4701bc4fcf00e3c6f | fc27e1e21ad4891b1d4e769170671da1a4d32ed2 | /aliyun-python-sdk-cbn/aliyunsdkcbn/request/v20170912/CreateCenRequest.py | fb1574c13c4f51a6a4ae5ea1896c4783115ff5c7 | [
"Apache-2.0"
] | permissive | yonzhan2/aliyun-openapi-python-sdk | 3d05f7e83aeb286ad553a6a36c42ce932a1ece3e | e64873f9b528e1a83e3ea27d583f3f7998e7650b | refs/heads/master | 2020-04-11T10:22:48.511973 | 2018-12-13T09:29:21 | 2018-12-13T09:29:21 | 161,712,443 | 1 | 0 | null | 2018-12-14T00:52:39 | 2018-12-14T00:52:39 | null | UTF-8 | Python | false | false | 2,226 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CreateCenRequest(RpcRequest):
    """Request object for the Alibaba Cloud CBN ``CreateCen`` RPC API
    (product 'Cbn', version 2017-09-12).

    Generated SDK wrapper: each get_/set_ pair below maps one query
    parameter of the underlying call.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'CreateCen','cbn')

    # --- Ownership / authentication parameters -------------------------
    def get_ResourceOwnerId(self):
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self,ResourceOwnerId):
        self.add_query_param('ResourceOwnerId',ResourceOwnerId)

    def get_ResourceOwnerAccount(self):
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
        self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)

    # ClientToken guarantees idempotence of the CreateCen call.
    def get_ClientToken(self):
        return self.get_query_params().get('ClientToken')

    def set_ClientToken(self,ClientToken):
        self.add_query_param('ClientToken',ClientToken)

    def get_OwnerAccount(self):
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self,OwnerAccount):
        self.add_query_param('OwnerAccount',OwnerAccount)

    # --- CEN instance attributes ---------------------------------------
    def get_Name(self):
        return self.get_query_params().get('Name')

    def set_Name(self,Name):
        self.add_query_param('Name',Name)

    def get_Description(self):
        return self.get_query_params().get('Description')

    def set_Description(self,Description):
        self.add_query_param('Description',Description)

    def get_OwnerId(self):
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self,OwnerId):
        self.add_query_param('OwnerId',OwnerId)
"haowei.yao@alibaba-inc.com"
] | haowei.yao@alibaba-inc.com |
ee77ce284052c960131e82d10421401c2060f43e | 594f60b6a536b831d0df38abea7f0ffc0a2fd3cb | /utils/caller_var_name.py | 3ef1f66a6d0fe81c11dad3400acbaf9a1f8b60a5 | [] | no_license | mh70cz/py | 1478439fe939076cca3a30be2f2d29fb4e8a3cd9 | 7fc23f2133624c787e1dd4856322d48251cc6f0e | refs/heads/master | 2022-08-12T06:08:30.720164 | 2022-08-08T23:16:19 | 2022-08-08T23:16:19 | 95,386,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py | """ get the original variable name of a variable passed to a function """
# https://stackoverflow.com/questions/2749796/how-to-get-the-original-variable-name-of-variable-passed-to-a-function
# %%
import traceback
# %%
def get_arg_var_name(var):
    """Best-effort: return the source-level name of the variable the caller
    passed as the argument.

    Works by reading the caller's source line from the traceback and taking
    the text between the last ``(`` and the trailing ``)``.  Fragile by
    design: reliable only for simple single-argument call sites such as
    ``name = get_arg_var_name(foo)`` executed from a real source file.
    """
    stack = traceback.extract_stack()
    # stack[-1] is this frame; stack[-2] is the caller's frame summary.
    filename, lineno, function_name, code = stack[-2]
    # Bug fix: the original split on the hard-coded literal "func(", which
    # never matches an actual call like "get_arg_var_name(foo)", so the
    # whole line (minus a trailing ")") was returned.  Splitting on the
    # last "(" works for any call name.
    arg_var_name = code.rpartition("(")[2].strip(")")
    return arg_var_name
# Demo: prints the caller-side variable name of the argument ("foo").
foo = "myfoo"
arg_var_name = get_arg_var_name(foo)
print (arg_var_name)
# %%
| [
"mh70@mh70.cz"
] | mh70@mh70.cz |
c58a77cf8ea4664d61a385917b75010f625b5d7e | 0f1c6902dcd6c067a059b4d155d21a885da07a94 | /pre_commit/languages/docker_image.py | 980c6ef3376f6398379306c27f11b5ffa1688a16 | [
"MIT"
] | permissive | andrewhare/pre-commit | 618985e86df8c2db1dde912ca98d9d20137bddea | 1c641b1c28ecc1005f46fdc76db4bbb0f67c82ac | refs/heads/master | 2021-01-06T23:30:11.927884 | 2020-02-18T18:53:53 | 2020-02-18T18:53:53 | 241,513,355 | 0 | 0 | MIT | 2020-02-19T02:20:45 | 2020-02-19T02:20:44 | null | UTF-8 | Python | false | false | 667 | py | from typing import Sequence
from typing import Tuple
from pre_commit.hook import Hook
from pre_commit.languages import helpers
from pre_commit.languages.docker import assert_docker_available
from pre_commit.languages.docker import docker_cmd
# Module-level hooks of the pre-commit "language" plugin API.  docker_image
# hooks need no managed environment, so the generic helpers are reused.
ENVIRONMENT_DIR = None
get_default_version = helpers.basic_get_default_version
healthy = helpers.basic_healthy
install_environment = helpers.no_install
def run_hook(
        hook: Hook,
        file_args: Sequence[str],
        color: bool,
) -> Tuple[int, bytes]:  # pragma: windows no cover
    """Run *hook* inside its docker image, passing *file_args* via xargs.

    :param hook: the configured pre-commit hook to execute.
    :param file_args: filenames to hand to the hook command.
    :param color: whether the hook should produce colored output.
    :return: ``(exit_code, output_bytes)`` from the hook process.
    """
    assert_docker_available()
    # Prefix the hook's own command with the ``docker run ...`` invocation.
    cmd = docker_cmd() + hook.cmd
    return helpers.run_xargs(hook, cmd, file_args, color=color)
| [
"asottile@umich.edu"
] | asottile@umich.edu |
4149bd0580c1c224bd8b0ae991d8f5da06923ea1 | 3123d95637dd6628e7cc58ec1711c965b8aa911c | /dataset_clustering/dataset_clustering/sample_map.py | bd65f8ddc77fe62f2d7ba26b1827233ef81e24f3 | [] | no_license | ConorFWild/pandda_2_tools | d9e27e33d231982dfaf09644a81a0259df332762 | 9b95c3005153a3ae8ba2bcffc699a07a139ca6df | refs/heads/master | 2021-05-24T11:52:30.107356 | 2020-04-06T16:01:28 | 2020-04-06T16:01:28 | 253,547,288 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,020 | py | import numpy as np
import mdc3
from mdc3.types.real_space import interpolate_uniform_grid
class Sampler:
    """Callable bundling the arguments of :func:`sample_map` so a map can be
    sampled lazily (e.g. dispatched to a worker) around a residue centre.
    """

    def __init__(self, xmap,
                 alignment_moving_to_ref,
                 res_centre_coords,
                 grid_params=[20, 20, 20],
                 offset=[10, 10, 10],
                 ):
        # NOTE(review): the mutable default lists are shared across calls;
        # harmless as long as they are never mutated in place — confirm.
        self.xmap = xmap
        self.alignment_moving_to_ref = alignment_moving_to_ref
        self.res_centre_coords = res_centre_coords
        self.grid_params = grid_params
        self.offset = offset

    def __call__(self):
        # Delegate to the module-level sampling routine with stored args.
        return sample_map(self.xmap,
                          self.alignment_moving_to_ref,
                          self.res_centre_coords,
                          self.grid_params,
                          self.offset,
                          )
def sample_map(xmap,
               alignment_moving_to_ref,
               res_centre_coords,
               grid_params=[20, 20, 20],
               offset=[10, 10, 10],
               ):
    """Sample *xmap* on a uniform grid centred near a residue, in the moving
    protein's frame, and return the values as a numpy array.

    ``alignment_moving_to_ref.rotran`` supplies a (rotation, translation)
    pair — presumably a Bio.PDB-style superimposition result; confirm.
    NOTE(review): mutable default lists are shared between calls; they are
    only read here, so this is benign.
    """
    # Align and Get RTop to moving protein frame from alignment
    moving_to_ref_translation = alignment_moving_to_ref.rotran[1]
    rotation = alignment_moving_to_ref.rotran[0]
    # Rotate the half-extent offset so the grid is centred, not cornered,
    # on the residue position.
    rotated_offset = np.matmul(rotation, offset)
    translation = moving_to_ref_translation - res_centre_coords - rotated_offset
    # Interpolate NX map in moving protein frame
    nxmap = interpolate_uniform_grid(xmap,
                                     translation,
                                     np.transpose(rotation),
                                     grid_params=grid_params,
                                     )
    nxmap_data = nxmap.export_numpy()
    return nxmap_data
| [
"conor.wild@sky.com"
] | conor.wild@sky.com |
673dd580d8d3ff7e098704d6b2d67bf18eb392a0 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03128/s516473124.py | 57bfe64ffea6df0a31f4b06a5dc26e6f1fbefec3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 853 | py | # ABC 118 D
def resolve():
    """AtCoder ABC 118 D: build the largest integer using exactly N
    matchsticks, drawing digits only from the allowed set A.

    Reads N, M and the M allowed digits from stdin; prints the answer.
    """
    N, M = map(int, input().split())
    A = list(map(int, input().split()))
    # Try larger digits first during reconstruction.
    A.sort(reverse=True)
    # costs[d] = matchsticks needed to draw digit d.
    costs = [0, 2, 5, 5, 4, 5, 6, 3, 7, 6]
    # dp[i] = max number of digits drawable with exactly i matchsticks.
    dp = [-1] * (N+1)
    dp[0] = 0
    for i in range(1, N+1):
        for a in A:
            if i - costs[a] < 0:
                continue
            # NOTE(review): this transition does not guard against
            # dp[i-costs[a]] == -1 (unreachable state becomes 0); it relies
            # on the problem's guarantees / the reconstruction filter below
            # — confirm before reusing elsewhere.
            dp[i] = max(dp[i-costs[a]] + 1, dp[i])
    # Greedy reconstruction: at each step pick the largest digit that keeps
    # the remaining stick count consistent with dp.
    ans = ""
    remain = dp[N]
    match = N
    minCosts = 10
    for a in A:
        minCosts = min(minCosts, costs[a])
    while match > 0:
        for a in A:
            # Skip choices leaving a positive remainder too small to spend.
            if match - costs[a] < 0 or 1<= match - costs[a] < minCosts:
                continue
            if dp[match-costs[a]] == remain-1:
                ans += str(a)
                match -= costs[a]
                remain -= 1
                break
    print(ans)
if __name__ == "__main__":
    # Entry point: run the solver when executed as a script.
    resolve()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
c1eb625c0c3ac788a3bdba3802974dba9f5a8fec | 149baa65329d0e13ae3189b8127d2eff5f5fdf77 | /bot_ws/devel/lib/python2.7/dist-packages/rbt_baxter_msgs/msg/_AssemblyStates.py | a9e882da171b4221de92892bbf8f8e1754454571 | [] | no_license | mtbthebest/imitation_learning | 20b990aa7396fecbe5433c7703f353bf99fa5f2c | 4c08192e31062f69056cc36efffb7a2ce0264244 | refs/heads/master | 2020-05-21T17:05:26.567273 | 2019-09-16T12:20:19 | 2019-09-16T12:20:19 | 186,111,790 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,949 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from rbt_baxter_msgs/AssemblyStates.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import rbt_baxter_msgs.msg
class AssemblyStates(genpy.Message):
  """Autogenerated ROS message: parallel arrays of assembly names and their
  AssemblyState records.  Generated by genpy — do not hand-edit the logic.
  """
  # Metadata used by ROS for type identification and md5-based
  # message-compatibility negotiation.
  _md5sum = "63427318d41dbd2077c105027ad82a2b"
  _type = "rbt_baxter_msgs/AssemblyStates"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """string[] names
AssemblyState[] states
================================================================================
MSG: rbt_baxter_msgs/AssemblyState
bool ready # true if enabled and ready to operate, e.g., not homing
bool enabled # true if enabled
bool stopped # true if stopped -- e-stop asserted
bool error # true if a component of the assembly has an error
#
# The following are specific to the robot top-level assembly:
uint8 estop_button # One of the following:
uint8 ESTOP_BUTTON_UNPRESSED = 0 # Robot is not stopped and button is not pressed
uint8 ESTOP_BUTTON_PRESSED = 1
uint8 ESTOP_BUTTON_UNKNOWN = 2 # STATE_UNKNOWN when estop was asserted by a non-user source
uint8 ESTOP_BUTTON_RELEASED = 3 # Was pressed, is now known to be released, but robot is still stopped.
#
uint8 estop_source # If stopped is true, the source of the e-stop. One of the following:
uint8 ESTOP_SOURCE_NONE = 0 # e-stop is not asserted
uint8 ESTOP_SOURCE_USER = 1 # e-stop source is user input (the red button)
uint8 ESTOP_SOURCE_UNKNOWN = 2 # e-stop source is unknown
uint8 ESTOP_SOURCE_FAULT = 3 # MotorController asserted e-stop in response to a joint fault
uint8 ESTOP_SOURCE_BRAIN = 4 # MotorController asserted e-stop in response to a lapse of the brain heartbeat
"""
  __slots__ = ['names','states']
  _slot_types = ['string[]','rbt_baxter_msgs/AssemblyState[]']

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       names,states
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(AssemblyStates, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.names is None:
        self.names = []
      if self.states is None:
        self.states = []
    else:
      self.names = []
      self.states = []

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      # names: uint32 count followed by length-prefixed utf-8 strings.
      length = len(self.names)
      buff.write(_struct_I.pack(length))
      for val1 in self.names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        if python3:
          buff.write(struct.pack('<I%sB'%length, length, *val1))
        else:
          buff.write(struct.pack('<I%ss'%length, length, val1))
      # states: uint32 count followed by six packed uint8 fields per entry.
      length = len(self.states)
      buff.write(_struct_I.pack(length))
      for val1 in self.states:
        _x = val1
        buff.write(_struct_6B.pack(_x.ready, _x.enabled, _x.stopped, _x.error, _x.estop_button, _x.estop_source))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      if self.states is None:
        self.states = None
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.names = []
      for i in range(0, length):
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1 = str[start:end].decode('utf-8')
        else:
          val1 = str[start:end]
        self.names.append(val1)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.states = []
      for i in range(0, length):
        val1 = rbt_baxter_msgs.msg.AssemblyState()
        _x = val1
        start = end
        end += 6
        (_x.ready, _x.enabled, _x.stopped, _x.error, _x.estop_button, _x.estop_source,) = _struct_6B.unpack(str[start:end])
        # bool fields arrive as uint8; normalize to Python bools.
        val1.ready = bool(val1.ready)
        val1.enabled = bool(val1.enabled)
        val1.stopped = bool(val1.stopped)
        val1.error = bool(val1.error)
        self.states.append(val1)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      length = len(self.names)
      buff.write(_struct_I.pack(length))
      for val1 in self.names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        if python3:
          buff.write(struct.pack('<I%sB'%length, length, *val1))
        else:
          buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.states)
      buff.write(_struct_I.pack(length))
      for val1 in self.states:
        _x = val1
        buff.write(_struct_6B.pack(_x.ready, _x.enabled, _x.stopped, _x.error, _x.estop_button, _x.estop_source))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      if self.states is None:
        self.states = None
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.names = []
      for i in range(0, length):
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1 = str[start:end].decode('utf-8')
        else:
          val1 = str[start:end]
        self.names.append(val1)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.states = []
      for i in range(0, length):
        val1 = rbt_baxter_msgs.msg.AssemblyState()
        _x = val1
        start = end
        end += 6
        (_x.ready, _x.enabled, _x.stopped, _x.error, _x.estop_button, _x.estop_source,) = _struct_6B.unpack(str[start:end])
        val1.ready = bool(val1.ready)
        val1.enabled = bool(val1.enabled)
        val1.stopped = bool(val1.stopped)
        val1.error = bool(val1.error)
        self.states.append(val1)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Pre-compiled struct codecs shared by the (de)serialization methods above.
_struct_I = genpy.struct_I
_struct_6B = struct.Struct("<6B")
| [
"mtbthebest11@gmail.com"
] | mtbthebest11@gmail.com |
befdbf9d49ecbacc576d8f26b2fa4fffff3aa151 | 82f36fc7f70a63499c9c29d031dc7b07a4e4ab88 | /vi/mappings.py | 9ba29b7af38016b203d195ebaede821278469584 | [
"MIT"
] | permissive | theicfire/Vintageous | f89ca1463b452c6a53eb1bd26595149a23359197 | 43cf37ac2cc2494f6f73102e4157442b5ae56925 | refs/heads/master | 2021-01-17T22:54:34.991987 | 2015-01-23T18:45:44 | 2015-01-23T18:45:44 | 18,224,457 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,431 | py | from Vintageous.vi import utils
from Vintageous.vi.keys import mappings
from Vintageous.vi.keys import seq_to_command
from Vintageous.vi.keys import to_bare_command_name
from Vintageous.vi.keys import KeySequenceTokenizer
from Vintageous.vi.utils import modes
from Vintageous.vi.cmd_base import cmd_types
from Vintageous.vi import variables
# Per-mode registries of user-defined key mappings, keyed by key sequence.
# Module-level shared state: every Mappings instance reads/mutates these.
_mappings = {
    modes.INSERT: {},
    modes.NORMAL: {},
    modes.VISUAL: {},
    modes.VISUAL_LINE: {},
    modes.OPERATOR_PENDING: {},
    modes.VISUAL_BLOCK: {},
    modes.SELECT: {},
}
class mapping_status:
    """Enum-like constants describing how a key sequence matched a mapping."""
    INCOMPLETE = 1
    COMPLETE = 2
class Mapping(object):
    """A (possibly partially matched) user key mapping.

    ``head`` is the portion of the sequence that matched, ``tail`` is the
    remainder, ``mapping`` is the target command name, and ``status`` is one
    of the ``mapping_status`` constants.
    """

    def __init__(self, head, mapping, tail, status):
        self.head = head
        self.tail = tail
        self.mapping = mapping
        self.status = status

    @property
    def sequence(self):
        """Full key sequence (head + tail); ValueError if either is unset."""
        try:
            full_sequence = self.head + self.tail
        except TypeError:
            raise ValueError('no mapping found')
        return full_sequence
class Mappings(object):
    """Resolves key sequences against user mappings and built-in commands.

    Operates on the module-level ``_mappings`` registries; ``state`` supplies
    the current mode, the partial key sequence typed so far, and a logger.
    """

    def __init__(self, state):
        self.state = state

    def _get_mapped_seqs(self, mode):
        # All user-mapped sequences for *mode*, sorted for stable iteration.
        return sorted(_mappings[mode].keys())

    def _find_partial_match(self, mode, seq):
        # Every mapped sequence that *seq* is a prefix of.
        return list(x for x in self._get_mapped_seqs(mode)
                    if x.startswith(seq))

    def _find_full_match(self, mode, seq):
        # Returns (name, mapping_dict) on an exact match, else (None, None).
        partials = self._find_partial_match(mode, seq)
        try:
            self.state.logger.info("[Mappings] checking partials {0} for {1}".format(partials, seq))
            name = list(x for x in partials if x == seq)[0]
            # FIXME: Possibly related to #613. We're not returning the view's
            # current mode.
            return (name, _mappings[mode][name])
        except IndexError:
            return (None, None)

    def expand(self, seq):
        # Placeholder; expansion of arbitrary positions is not implemented.
        pass

    def expand_first(self, seq):
        """Expand the leading user mapping in *seq*, if any.

        Returns a COMPLETE Mapping on a full match, an INCOMPLETE Mapping if
        *seq* is a proper prefix of some mapping, or None otherwise.
        """
        head = ''

        # Fast path: the whole sequence is an exact mapping.
        keys, mapped_to = self._find_full_match(self.state.mode, seq)
        if keys:
            self.state.logger.info("[Mappings] found full command: {0} -> {1}".format(keys, mapped_to))
            return Mapping(seq, mapped_to['name'], seq[len(keys):],
                           mapping_status.COMPLETE)

        # Otherwise grow the head one key at a time looking for a match.
        # NOTE(review): the unconditional `break` means only the first key
        # is ever tried here — confirm whether that is intentional.
        for key in KeySequenceTokenizer(seq).iter_tokenize():
            head += key
            keys, mapped_to = self._find_full_match(self.state.mode, head)
            if keys:
                self.state.logger.info("[Mappings] found full command: {0} -> {1}".format(keys, mapped_to))
                return Mapping(head, mapped_to['name'], seq[len(head):],
                               mapping_status.COMPLETE)
            else:
                break

        if self._find_partial_match(self.state.mode, seq):
            self.state.logger.info("[Mappings] found partial command: {0}".format(seq))
            return Mapping(seq, '', '', mapping_status.INCOMPLETE)

        return None

    # XXX: Provisional. Get rid of this as soon as possible.
    def can_be_long_user_mapping(self, key):
        # Returns (True, exact_match_or_None) when *key* prefixes a mapping;
        # (False, True) otherwise.  NOTE(review): the asymmetric second
        # element looks accidental — confirm callers only test element 0.
        full_match = self._find_full_match(self.state.mode, key)
        partial_matches = self._find_partial_match(self.state.mode, key)
        if partial_matches:
            self.state.logger.info("[Mappings] user mapping found: {0} -> {1}".format(key, partial_matches))
            return (True, full_match[0])
        self.state.logger.info("[Mappings] user mapping not found: {0} -> {1}".format(key, partial_matches))
        return (False, True)

    # XXX: Provisional. Get rid of this as soon as possible.
    def incomplete_user_mapping(self):
        # True when the partial sequence could still become a user mapping
        # (implicitly returns None otherwise).
        (maybe_mapping, complete) = \
            self.can_be_long_user_mapping(self.state.partial_sequence)
        if maybe_mapping and not complete:
            self.state.logger.info("[Mappings] incomplete user mapping {0}".format(self.state.partial_sequence))
            return True

    def resolve(self, sequence=None, mode=None, check_user_mappings=True):
        """
        Looks at the current global state and returns the command mapped to
        the available sequence. It may be a 'missing' command.

        @sequence
            If a @sequence is passed, it is used instead of the global state's.
            This is necessary for some commands that aren't name spaces but act
            as them (for example, ys from the surround plugin).

        @mode
            If different than `None`, it will be used instead of the global
            state's. This is necessary when we are in operator pending mode
            and we receive a new action. By combining the existing action's
            name with name of the action just received we could find a new
            action.

            For example, this is the case of g~~.
        """
        # we usually need to look at the partial sequence, but some commands do weird things,
        # like ys, which isn't a namespace but behaves as such sometimes.
        seq = sequence or self.state.partial_sequence
        seq = to_bare_command_name(seq)

        # TODO: Use same structure as in mappings (nested dicst).
        command = None
        if check_user_mappings:
            self.state.logger.info('[Mappings] checking user mappings')
            # TODO: We should be able to force a mode here too as, below.
            command = self.expand_first(seq)

        if command:
            self.state.logger.info('[Mappings] {0} equals command: {1}'.format(seq, command))
            return command
            # return {'name': command.mapping, 'type': cmd_types.USER}
        else:
            self.state.logger.info('[Mappings] looking up >{0}<'.format(seq))
            command = seq_to_command(self.state, seq, mode=mode)
            self.state.logger.info('[Mappings] got {0}'.format(command))
            return command

    def add(self, mode, new, target):
        # Register a user mapping; key-name variables in *new* are expanded.
        new = variables.expand_keys(new)
        _mappings[mode][new] = {'name': target, 'type': cmd_types.USER}

    def remove(self, mode, new):
        try:
            del _mappings[mode][new]
        except KeyError:
            raise KeyError('mapping not found')

    def clear(self):
        # NOTE(review): INSERT and SELECT registries are not cleared here —
        # confirm whether that is deliberate.
        _mappings[modes.NORMAL] = {}
        _mappings[modes.VISUAL] = {}
        _mappings[modes.VISUAL_LINE] = {}
        _mappings[modes.VISUAL_BLOCK] = {}
        _mappings[modes.OPERATOR_PENDING] = {}
| [
"guillermo.lopez@outlook.com"
] | guillermo.lopez@outlook.com |
67f6d7067c52342313b9cecb20a8d9484514d1d9 | 3ffeeae8a9a3245d8998d94aa08f680f00056cad | /669.修剪二叉搜索树.py | 1ea81092b2bc054b50373284e38fc7407cf9e7ad | [] | no_license | Ezi4Zy/leetcode | 6e293e5c07a7d8c3e38f9445ff24330134ef6c48 | 9d394cd2862703cfb7a7b505b35deda7450a692e | refs/heads/master | 2022-04-09T14:11:36.957861 | 2022-03-09T10:30:30 | 2022-03-09T10:30:30 | 57,290,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | #
# @lc app=leetcode.cn id=669 lang=python
#
# [669] 修剪二叉搜索树
#
# @lc code=start
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution(object):
    def trimBST(self, root, low, high):
        """
        :type root: TreeNode
        :type low: int
        :type high: int
        :rtype: TreeNode

        Remove every node whose value lies outside [low, high], preserving
        the relative structure of the remaining BST nodes.
        """
        # Guard-clause recursion instead of nested conditionals.
        if root is None:
            return None
        if root.val < low:
            # root and its entire left subtree are too small.
            return self.trimBST(root.right, low, high)
        if root.val > high:
            # root and its entire right subtree are too large.
            return self.trimBST(root.left, low, high)
        # root survives; trim both children in place.
        root.left = self.trimBST(root.left, low, high)
        root.right = self.trimBST(root.right, low, high)
        return root
# @lc code=end
| [
"Ezi4zy@163.com"
] | Ezi4zy@163.com |
daa21c4b97b6966cef4a6950ad18f755e47dd0be | 8f6c9203b260d832860aa6e434ab668e170cfb1d | /test/conftest.py | e855b32ee851a344a6029c086ad23319e6cab159 | [
"Apache-2.0",
"FSFAP",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | igalic/mod_md | 66ad0938d4ca71f995f61a7d487ce5ade0d8c5cc | 105fca1a8a87c46c105cb9b38d13f998318c1db2 | refs/heads/master | 2023-08-15T11:01:24.290650 | 2021-10-19T12:27:17 | 2021-10-19T12:27:17 | 419,234,407 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,118 | py | import logging
import os
import time
from datetime import timedelta
import pytest
from md_certs import CertificateSpec, MDTestCA
from md_conf import HttpdConf
from md_env import MDTestEnv
from md_acme import MDPebbleRunner, MDBoulderRunner
def pytest_report_header(config, startdir):
    """pytest hook: one-line summary of the mod_md / apache test setup."""
    env = MDTestEnv()
    return "mod_md: {version} [apache: {aversion}({prefix}), mod_{ssl}, ACME server: {acme}]".format(
        version=env.md_version,
        prefix=env.prefix,
        aversion=env.get_httpd_version(),
        ssl=env.get_ssl_type(),
        acme=env.acme_server,
    )
@pytest.fixture(scope="session")
def env(pytestconfig) -> MDTestEnv:
    """Session-wide test environment: console logging, a private test CA
    with pre-issued certificates, and apache teardown at session end.
    """
    # Route INFO-level logging to the console for the whole session.
    level = logging.INFO
    console = logging.StreamHandler()
    console.setLevel(level)
    console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
    logging.getLogger('').addHandler(console)
    logging.getLogger('').setLevel(level=level)
    env = MDTestEnv(pytestconfig=pytestconfig)
    env.apache_error_log_clear()
    # Certificates the tests rely on, including an already-expired one.
    cert_specs = [
        CertificateSpec(domains=['localhost'], key_type='rsa2048'),
        CertificateSpec(domains=env.domains, key_type='rsa4096'),
        CertificateSpec(domains=env.expired_domains, key_type='rsa2048',
                        valid_from=timedelta(days=-91),
                        valid_to=timedelta(days=-1)),
    ]
    ca = MDTestCA.create_root(name=env.http_tld,
                              store_dir=os.path.join(env.server_dir, 'ca'), key_type="rsa4096")
    ca.issue_certs(cert_specs)
    env.set_ca(ca)
    yield env
    # Teardown: restore a default config and stop apache.
    HttpdConf(env).install()
    assert env.apache_stop() == 0
    #env.apache_errors_check()
@pytest.fixture(scope="session")
def acme(env):
    """Session-wide ACME test server (pebble or boulder), stopped on exit.

    Yields None when the configured ACME server type is unknown.
    """
    acme_server = None
    if env.acme_server == 'pebble':
        acme_server = MDPebbleRunner(env, configs={
            'default': os.path.join(env.server_dir, 'conf/pebble.json'),
            'eab': os.path.join(env.server_dir, 'conf/pebble-eab.json'),
        })
    elif env.acme_server == 'boulder':
        acme_server = MDBoulderRunner(env)
    yield acme_server
    if acme_server is not None:
        acme_server.stop()
| [
"stefan.eissing@greenbytes.de"
] | stefan.eissing@greenbytes.de |
db2aa6414dc0fb9d7936579b117bce3f7587c9cc | ef4749b76a1dbb79e664c8fe1c3a13c6197c1557 | /seleniumbase/fixtures/constants.py | 37a036def8261360cebd61de644111f9964dfc17 | [
"MIT"
] | permissive | PrabhuLoganathan/Python-SeleniumBase | 78f493ad4433c33b9e3171036b9edf1f1c2fd336 | 86fc61ad083b6dd5c7cce18b7ecfb87f73f16f56 | refs/heads/master | 2021-09-02T03:27:20.327020 | 2017-12-24T23:40:42 | 2017-12-24T23:40:42 | 115,760,528 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | """
This class containts some frequently-used constants
"""
class Environment:
    """Names of the deployment environments a test run can target."""
    QA = "qa"
    STAGING = "staging"
    PRODUCTION = "production"
    MASTER = "master"
    LOCAL = "local"
    TEST = "test"
class Files:
    """Folder names used for files downloaded during test runs."""
    DOWNLOADS_FOLDER = "downloaded_files"
    ARCHIVED_DOWNLOADS_FOLDER = "archived_files"
class ValidBrowsers:
    """Browser identifiers accepted on the command line."""
    valid_browsers = ["firefox", "ie", "edge", "safari", "chrome", "phantomjs"]
class Browser:
    """Canonical browser identifiers plus per-browser version bookkeeping.

    VERSION / LATEST start out as all-None tables and are filled in at
    runtime once browser versions are detected.
    """
    FIREFOX = "firefox"
    INTERNET_EXPLORER = "ie"
    EDGE = "edge"
    SAFARI = "safari"
    GOOGLE_CHROME = "chrome"
    PHANTOM_JS = "phantomjs"
    HTML_UNIT = "htmlunit"

    VERSION = dict.fromkeys(
        ["firefox", "ie", "edge", "safari", "chrome", "phantomjs", "htmlunit"])
    LATEST = dict.fromkeys(
        ["firefox", "ie", "edge", "safari", "chrome", "phantomjs", "htmlunit"])
class State:
    """Possible result states for an executed test case."""
    NOTRUN = "NotRun"
    ERROR = "Error"
    FAILURE = "Fail"
    PASS = "Pass"
    SKIP = "Skip"
    BLOCKED = "Blocked"
    DEPRECATED = "Deprecated"
| [
"mdmintz@gmail.com"
] | mdmintz@gmail.com |
b2d7423066c6259571568c9d3141fefe06959f0d | cf06fdf4fada6a39c661b890c184d2a3dea9d23f | /programs/pgm0A_12.py | 88945b331e21e4f0f5437ce6c44e593feda16b48 | [
"Apache-2.0"
] | permissive | danielsunzhongyuan/python_practice | f11d582c2147ad3d07ace1b31e6d7ace1da31e7c | 79bc88db1c52ee2f5607f6f9fec1bbacea2804ff | refs/heads/master | 2021-01-25T13:12:07.950283 | 2019-06-12T09:36:02 | 2019-06-12T09:36:02 | 123,542,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 525 | py | #
# This file contains the Python code from Program A.12 of
# "Data Structures and Algorithms
# with Object-Oriented Design Patterns in Python"
# by Bruno R. Preiss.
#
# Copyright (c) 2003 by Bruno R. Preiss, P.Eng. All rights reserved.
#
# http://www.brpreiss.com/books/opus7/programs/pgm0A_12.txt
#
class Rectangle(GraphicalObject):
    """Axis-aligned rectangle graphical object (book listing A.12).

    Stores its height and width; position handling is delegated to the
    GraphicalObject base class via *center*.
    """

    def __init__(self, center, height, width):
        super(Rectangle, self).__init__(center)
        self._height = height
        self._width = width

    def draw(self):
        # Intentionally a stub in the book listing.
        pass
| [
"sunzhongyuan@lvwan.com"
] | sunzhongyuan@lvwan.com |
27024f2ab6b8e5c801b2575bf0e11ff404c9d1a7 | 8457291a531a08e6249c674f6b4943388d110f77 | /程序员面试经典/面试题 16.01. 交换数字 .py | a2f484604c3ee24053fd93149faa8f6601c014d8 | [
"Apache-2.0"
] | permissive | oscarhscc/algorithm-with-python | e190511844aaeacff14381000e371945f74aba14 | 2d67e2da11045199b9ab6c0b2f01e239255196ef | refs/heads/master | 2020-12-03T12:31:37.764545 | 2020-11-13T03:01:04 | 2020-11-13T03:01:04 | 231,317,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | '''
编写一个函数,不用临时变量,直接交换numbers = [a, b]中a与b的值。
示例:
输入: numbers = [1,2]
输出: [2,1]
提示:
numbers.length == 2
'''
class Solution(object):
    def swapNumbers(self, numbers):
        """
        :type numbers: List[int]
        :rtype: List[int]

        Swap the two entries in place without a temporary variable,
        using the classic add/subtract trick, and return the list.
        """
        numbers[0] = numbers[0] + numbers[1]
        numbers[1] = numbers[0] - numbers[1]
        numbers[0] = numbers[0] - numbers[1]
        return numbers
"noreply@github.com"
] | oscarhscc.noreply@github.com |
5c0f0d1e84727690497ad03f096246c4ec7d966e | 12fd778ef6756a3fcb6dc5fc2b59a0ba4a4cb911 | /aosd/downloader/update.py | 9eb2992674871673ee079c11be67eff8590bcf47 | [] | no_license | jevinskie/AOS-Downloader | ce66b024485d3a3eb05e89da8f7077cc96b49f2f | d956c8652f0a3caab3a56f441218b766b08e9130 | refs/heads/master | 2021-01-18T14:49:20.887713 | 2015-12-19T23:37:27 | 2015-12-19T23:37:27 | 57,457,504 | 1 | 0 | null | 2016-04-30T19:06:41 | 2016-04-30T19:06:41 | null | UTF-8 | Python | false | false | 1,151 | py | from ..helpers.logging_helper import logging_helper
import os
from .utilities import utilities
from .releases import releases
from .config import config
from .manager import manager
class update(object):
    """Refreshes the locally cached package metadata plists."""

    @classmethod
    def fetch(cls):
        """Download hashes.plist, releases.plist and, if the release list
        arrived, one lookup plist per known release type.
        """
        logging_helper.getLogger().info('Updating package data...')
        hashes_plist_url = os.path.join(config.getUpdateURL(), 'hashes.plist')
        hashes_plist_path = utilities.getlookupplistpath('hashes')
        manager.DownloadFileFromURLToPath(hashes_plist_url, hashes_plist_path)
        release_plist_url = os.path.join(config.getUpdateURL(), 'releases.plist')
        release_plist_path = utilities.getreleaseplistpath()
        manager.DownloadFileFromURLToPath(release_plist_url, release_plist_path)
        # Only fan out to per-release-type plists if releases.plist landed.
        if os.path.exists(release_plist_path) == True:
            for release_type in releases.get():
                release_type_plist_url = os.path.join(config.getUpdateURL(), release_type+'.plist')
                release_type_plist_path = utilities.getlookupplistpath(release_type)
                manager.DownloadFileFromURLToPath(release_type_plist_url, release_type_plist_path)
| [
"me@samdmarshall.com"
] | me@samdmarshall.com |
e7a8ebdffb9c5478b1fc4030ede408f87828012e | 09fc5379e5ecafc66eee7eac5ba8cf51244c7aa9 | /min_max.py | a95e8b684a7360eda42ca80a1a91de96fbacd8cb | [] | no_license | aquibjamal/hackerrank_solutions | 8a9317afc3fc8c6b2f4d0542e5c979ad2c93f849 | bc27ea787743893c60d00c517ce133b6cea84942 | refs/heads/master | 2020-07-24T07:40:23.877863 | 2020-02-29T02:03:55 | 2020-02-29T02:03:55 | 207,850,323 | 0 | 0 | null | 2020-02-29T02:03:56 | 2019-09-11T15:50:47 | Python | UTF-8 | Python | false | false | 251 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 12 02:21:03 2019
@author: aquib
"""
import numpy
# HackerRank "Min and Max": read an N x M integer grid from stdin, take the
# minimum along each row (axis 1), then print the maximum of those minima.
N,M=map(int,input().split())
x=numpy.array([input().split() for _ in range(N)], int)
mn=numpy.min(x,1)
mx=numpy.max(mn)
print(mx)
| [
"noreply@github.com"
] | aquibjamal.noreply@github.com |
35607a345ddf69b06bdf279af5cf89bc391593ae | 46096a52c39cb6b32923e6a968a5538579e6d73b | /18nov20_tae.py | 9fad529a54004e123a2e73d140a7772b2d699b3d | [] | no_license | JoinNova/practice_py | 16671c4b91af77a492a089b1d7a48d029da91cf8 | 5fbd8ac0b5c46c68d2a84b7e35454982ae6d637a | refs/heads/master | 2020-04-16T18:19:02.709408 | 2019-01-15T08:35:23 | 2019-01-15T08:35:23 | 165,814,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | from selenium import webdriver
from bs4 import BeautifulSoup

# Drive a Chrome session to Google's "top sights" page for Jeju travel spots
# and print the first ten sight-name headings.
driver = webdriver.Chrome()
driver.implicitly_wait(3)
driver.get('http://www.google.com')
# BUG FIX: the original wrote ``.click`` without parentheses, which is a
# no-op attribute access -- the search box was never actually clicked.
driver.find_element_by_xpath('//*[@id="tsf"]/div[2]/div[1]/div[1]/div/div[1]/input').click()
driver.find_element_by_name('q').send_keys('제주 여행지')
driver.find_element_by_xpath('//*[@id="tsf"]/div[2]/div/div[3]/center/input[1]').click()
driver.get('https://www.google.com/destination/map/topsights?q=%EC%A0%9C%EC%A3%BC+%EC%97%AC%ED%96%89%EC%A7%80&site=search&output=search&dest_mid=/m/01rffr&sa=X&ved=2ahUKEwjYuNLUhOLeAhUDzbwKHQA9CQsQzTooATAjegQIChAv')
driver.implicitly_wait(4)
html = driver.page_source
soup = BeautifulSoup(html, 'html.parser')
# Parse the headings once (the original re-ran findAll on every loop
# iteration) and stop after ten without raising IndexError when fewer exist.
for name_tag in soup.findAll("h2", {"class": "NbdpWc"})[:10]:
    print(name_tag)
| [
"noreply@github.com"
] | JoinNova.noreply@github.com |
1ac32f1684de191dbb46b89dec66aee789ae73d8 | b7492b70e345e248f5562393ce4fe98e043ea1b9 | /data/config.py | c4c2e33bd25591173e8d4b7ae12282cf4b748bcd | [] | no_license | wobjtushisui/S3FD_pytorch | 3037176716d0b29d1f86f43da133614d451f619b | d5616dc1a08d11a670497b68a9a26cbd97adaed1 | refs/heads/master | 2020-12-29T05:04:08.873873 | 2019-11-18T09:13:56 | 2019-11-18T09:13:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,085 | py | #-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import os
from easydict import EasyDict
import numpy as np
_C = EasyDict()
# Public alias: the rest of the project reads settings through ``cfg``.
cfg = _C
# data augmentation config
_C.expand_prob = 0.5
_C.expand_max_ratio = 4
_C.hue_prob = 0.5
_C.hue_delta = 18
_C.contrast_prob = 0.5
_C.contrast_delta = 0.5
_C.saturation_prob = 0.5
_C.saturation_delta = 0.5
_C.brightness_prob = 0.5
_C.brightness_delta = 0.125
_C.data_anchor_sampling_prob = 0.5
_C.min_face_size = 6.0
_C.apply_distort = True
_C.apply_expand = False
# Per-channel pixel mean, reshaped to (3, 1, 1) so it broadcasts over
# channel-first image arrays.  Values look like the classic VGG BGR means --
# presumably the loader feeds BGR images; confirm against the data pipeline.
_C.img_mean = np.array([104., 117., 123.])[:, np.newaxis, np.newaxis].astype(
    'float32')
_C.resize_width = 640
_C.resize_height = 640
_C.scale = 1 / 127.0
_C.anchor_sampling = True
_C.filter_min_face = True
# train config
#_C.LR_STEPS = (120, 198, 250)
_C.MAX_STEPS = 200000
# Iteration milestones within MAX_STEPS -- presumably learning-rate decay
# points; confirm against the training loop.
_C.LR_STEPS = (80000,100000,120000)
_C.EPOCHES = 300
# anchor config
# Six detection levels: level i has FEATURE_MAPS[i] cells at stride STEPS[i]
# with anchor size ANCHOR_SIZES[i].  Note FEATURE_MAPS[i] * STEPS[i] ==
# INPUT_SIZE (640) for every level.
_C.FEATURE_MAPS = [160, 80, 40, 20, 10, 5]
_C.INPUT_SIZE = 640
_C.STEPS = [4, 8, 16, 32, 64, 128]
_C.ANCHOR_SIZES = [16, 32, 64, 128, 256, 512]
_C.CLIP = False
# Presumably SSD-style box-encoding variances (center, size) -- confirm
# against the box-coder implementation.
_C.VARIANCE = [0.1, 0.2]
# detection config
_C.NMS_THRESH = 0.3
_C.NMS_TOP_K = 5000
_C.TOP_K = 750
_C.CONF_THRESH = 0.05
# loss config
# NOTE(review): looks like a hard-negative-mining ratio (3 negatives per
# positive) -- confirm against the loss implementation.
_C.NEG_POS_RATIOS = 3
_C.NUM_CLASSES = 2
_C.USE_NMS = True
# dataset config
_C.HOME = '/home/robin/datasets/widerface'
# hand config
_C.HAND = EasyDict()
_C.HAND.TRAIN_FILE = './data/hand_train.txt'
_C.HAND.VAL_FILE = './data/hand_val.txt'
_C.HAND.DIR = '/home/data/lj/egohands/'
_C.HAND.OVERLAP_THRESH = 0.35
# face config
_C.FACE = EasyDict()
_C.FACE.TRAIN_FILE = './data/face_train.txt'
_C.FACE.VAL_FILE = './data/face_val.txt'
_C.FACE.FDDB_DIR = '/home/data/lj/FDDB'
_C.FACE.WIDER_DIR = '/home/robin/datasets/widerface'
_C.FACE.AFW_DIR = '/home/data/lj/AFW'
_C.FACE.PASCAL_DIR = '/home/data/lj/PASCAL_FACE'
_C.FACE.OVERLAP_THRESH = [0.1, 0.35, 0.5]
# head config
_C.HEAD = EasyDict()
_C.HEAD.DIR = '/home/data/lj/VOCHead/'
_C.HEAD.OVERLAP_THRESH = [0.1, 0.35, 0.5]
| [
"jianzhnie@126.com"
] | jianzhnie@126.com |
4fe30dd6b39027cb6a61d01ba798443b17b266b1 | 2c74bb301f1ed83b79254944183ac5a18a639fdf | /tests/components/sensibo/conftest.py | 48c9317a5cb1150d6d6a59e6b99082a44cd6f666 | [
"Apache-2.0"
] | permissive | Adminiuga/home-assistant | 5bec93007ddac1a268cc359bf7e48530c5f73b38 | dcf68d768e4f628d038f1fdd6e40bad713fbc222 | refs/heads/dev | 2023-02-22T22:03:31.013931 | 2022-11-09T00:27:20 | 2022-11-09T00:27:20 | 123,929,062 | 5 | 4 | Apache-2.0 | 2023-02-22T06:14:31 | 2018-03-05T14:11:09 | Python | UTF-8 | Python | false | false | 2,342 | py | """Fixtures for the Sensibo integration."""
from __future__ import annotations
import json
from typing import Any
from unittest.mock import patch
from pysensibo import SensiboClient
from pysensibo.model import SensiboData
import pytest
from homeassistant.components.sensibo.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from . import ENTRY_CONFIG
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.fixture
async def load_int(hass: HomeAssistant, get_data: SensiboData) -> MockConfigEntry:
    """Set up the Sensibo integration in Home Assistant.

    Creates a mock config entry (schema ``version=2``), patches every
    upstream Sensibo API call so setup never touches the network, runs the
    entry through normal setup, and returns it for the test to inspect.
    """
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        source=SOURCE_USER,
        data=ENTRY_CONFIG,
        entry_id="1",
        unique_id="username",
        version=2,
    )
    config_entry.add_to_hass(hass)
    # The coordinator refresh gets canned device data; the two util-level
    # calls return the device ids / username the setup flow expects.
    with patch(
        "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
        return_value=get_data,
    ), patch(
        "homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
        return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]},
    ), patch(
        "homeassistant.components.sensibo.util.SensiboClient.async_get_me",
        return_value={"result": {"username": "username"}},
    ):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()
    return config_entry
@pytest.fixture(name="get_data")
async def get_data_from_library(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any]
) -> SensiboData:
    """Retrieve data from upstream Sensibo library.

    Feeds the raw fixture JSON through the real ``pysensibo`` parsing code
    (only the HTTP-level ``async_get_devices`` is patched), so tests get a
    genuine ``SensiboData`` object.
    """
    client = SensiboClient("123467890", aioclient_mock.create_session(hass.loop))
    with patch("pysensibo.SensiboClient.async_get_devices", return_value=load_json):
        output = await client.async_get_devices_data()
    # Close the session explicitly; the client has no public close API.
    await client._session.close()  # pylint: disable=protected-access
    return output
return output
@pytest.fixture(name="load_json", scope="session")
def load_json_from_fixture() -> dict[str, Any]:
    """Load fixture with json data and return.

    Session-scoped: the fixture file is read and parsed only once per test
    session.  Returns the raw parsed dict (the original ``SensiboData``
    return annotation was wrong -- parsing into SensiboData happens in the
    ``get_data`` fixture).
    """
    data_fixture = load_fixture("data.json", "sensibo")
    json_data: dict[str, Any] = json.loads(data_fixture)
    return json_data
| [
"noreply@github.com"
] | Adminiuga.noreply@github.com |
e7e9f173951e4d55eb410c0db31bc9387a456169 | 01733042e84a768b77f64ec24118d0242b2f13b8 | /uhd_restpy/testplatform/sessions/ixnetwork/topology/requestid_4bb823de2302ea46c48b53652c8059b5.py | 9127928006cf5c5a3d16daeb18104885ad73ebf9 | [
"MIT"
] | permissive | slieberth/ixnetwork_restpy | e95673905854bc57e56177911cb3853c7e4c5e26 | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | refs/heads/master | 2023-01-04T06:57:17.513612 | 2020-10-16T22:30:55 | 2020-10-16T22:30:55 | 311,959,027 | 0 | 0 | NOASSERTION | 2020-11-11T12:15:34 | 2020-11-11T12:06:00 | null | UTF-8 | Python | false | false | 1,793 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class RequestId(Base):
    """
    The RequestId class encapsulates a required requestId resource which will be retrieved from the server every time the property is accessed.
    """

    # No per-instance attributes beyond what Base provides.
    __slots__ = ()
    # Wire name of this resource in the REST SDM tree.
    _SDM_NAME = 'requestId'
    # Maps Python-side attribute names to server-side attribute names.
    _SDM_ATT_MAP = {
        'Count': 'count',
    }

    def __init__(self, parent):
        """Attach this resource under *parent* in the resource hierarchy."""
        super(RequestId, self).__init__(parent)

    @property
    def Count(self):
        """
        Returns
        -------
        - number: total number of values
        """
        # Base._get_attribute fetches the value from the server on every
        # access (no client-side caching for this resource).
        return self._get_attribute(self._SDM_ATT_MAP['Count'])
| [
"andy.balogh@keysight.com"
] | andy.balogh@keysight.com |
b70ba83ab08390530f1a3a3b414fe878234a746f | e86e65380c81e2459c803a51719124faaf1ad660 | /theano_workspace/function.py | edc95a835248d2a70b12e6fda55ad8f5a54ee096 | [] | no_license | jaberg/theano_workspace | 83e79a45216a1a3e98521c823eef8cd698c714dd | 913e2d443cd23b2a4c746e7cd12d3a97947fee63 | refs/heads/master | 2021-01-20T11:59:51.899982 | 2013-05-03T05:49:48 | 2013-05-03T05:49:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py | # XXX COMPLETELY UNTESTED, DEFINITELY NOT FUNCTIONAL
class Function(object):
    """
    Special case of Workspace for implementing a single callable expression.

    Wraps a workspace ``ws`` so that calling the object assigns ``args`` to
    ``inputs``, runs the compiled update named ``fn_name``, and reads back
    the values stored under ``dests``.

    TODO: Provides support for structuring outputs as nested list, dict, etc.
    """

    def __init__(self, ws, inputs, outputs, dests, fn_name):
        self._ws = ws
        self._inputs = inputs
        self._outputs = outputs
        self._dests = dests
        self._fn_name = fn_name

    def __call__(self, *args):
        assert len(self._inputs) == len(args)
        for var, val in zip(self._inputs, args):
            self._ws[var] = val
        self._ws.compiled_updates[self._fn_name]()
        # TODO: unflatten dictionaries, singles, nested stuff, etc.
        # BUG FIX: the original returned ``self[var]`` but Function defines
        # no __getitem__ (TypeError at runtime); results live in the
        # workspace, mirroring the ``self._ws[var] = val`` writes above.
        return [self._ws[var] for var in self._dests]
def function(inputs, outputs, ws_cls=Workspace):
    """Build a callable that evaluates ``outputs`` from ``inputs``.

    Allocates a workspace, registers every input and destination variable,
    compiles the update mapping destinations to output expressions under the
    name ``'__call__'``, and wraps it all in a ``Function``.
    """
    # NOTE(review): ``Workspace`` is neither defined nor imported in this
    # module, so evaluating this default argument raises NameError at import
    # time -- confirm the intended import (file header says "DEFINITELY NOT
    # FUNCTIONAL").
    ws = ws_cls()
    dests = [o.type() for o in outputs]
    for var in inputs + dests:
        ws[var] = None
    ws.add_compiled_update('__call__', zip(dests, outputs))
    return Function(ws, inputs, outputs, dests, '__call__')
| [
"james.bergstra@gmail.com"
] | james.bergstra@gmail.com |
c70dbc1ca64f2452a61d661e32d7cec9f0a63094 | facf0d5a66ba0b762f4d74bb99e568a948a76bc3 | /chat/consumers.py | 416465a4185b0c805d5e81ad97db4b384f057649 | [] | no_license | KennyDaktyl/chat_ws | bddd9bb042a31a2a54d5ac0a916a13c87649e634 | 0dfd5c2dd3a3252635adff18b54a70d7e6c77b50 | refs/heads/main | 2023-03-06T20:35:18.035734 | 2021-02-19T12:57:59 | 2021-02-19T12:57:59 | 340,362,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,216 | py | # import json
# from asgiref.sync import async_to_sync
# from channels.generic.websocket import AsyncWebsocketConsumer
# from datetime import datetime
# from django.utils import timezone
# from .models import Message
# from django.conf import settings
# from account.models import Profile
# from django.contrib.auth import get_user_model
# User = get_user_model()
# class ChatConsumer(AsyncWebsocketConsumer):
# async def fetch_messages(self, data):
# messages = Message.last_30_messages()
# content = {
# 'messages': self.messages_to_json(messages)
# }
# self.send_chat_message(content)
# def new_message(self, data):
# author = User.objects.get(username=data['from'])
# author_user = Profile.objects.get(user__username=author.username)
# message = Message.objects.create(timestamp=datetime.now(), author=author_user,
# content=data['message'])
# content = {
# 'command': 'new_message',
# 'message': self.message_to_json(message)
# }
# return self.send_chat_message(content)
# commands = {
# 'fetch_messages': fetch_messages,
# 'new_message': new_message
# }
# def messages_to_json(self, messages):
# result = []
# for message in messages:
# result.append(self.message_to_json(message))
# return result
# def message_to_json(self, message):
# return {
# 'author': message.author.user.username,
# 'content': message.content,
# 'timestamp': str(message.timestamp.strftime("%Y-%m-%d, %H:%M"))
# }
# async def connect(self):
# self.room_name = self.scope['url_route']['kwargs']['room_name']
# self.room_group_name = 'chat_%s' % self.room_name
# await self.channel_layer.group_add(
# self.room_group_name,
# self.channel_name
# )
# await self.accept()
# async def disconnect(self, close_code):
# await self.channel_layer.group_discard(
# self.room_group_name,
# self.channel_name
# )
# # Receive message from WebSocket
# async def receive(self, text_data):
# data = json.loads(text_data)
# await self.commands[data['command']](self, data)
# async def send_chat_message(self, message):
# await self.channel_layer.group_send(
# self.room_group_name,
# {
# 'type': 'chat_message',
# 'message': message,
# }
# )
# async def send_message(self, message):
# await self.send(text_data=json.dumps(message))
# # Receive message from room group
# async def chat_message(self, event):
# message = event['message']
# await self.send(text_data=json.dumps(message))
# chat/consumers.py
import json
from channels.generic.websocket import AsyncWebsocketConsumer
class ChatConsumer(AsyncWebsocketConsumer):
    """Websocket consumer that relays chat messages within a room group."""

    async def connect(self):
        """Accept the socket and subscribe it to the room's channel group."""
        self.room_name = self.scope['url_route']['kwargs']['room_name']
        self.room_group_name = f'chat_{self.room_name}'
        await self.channel_layer.group_add(
            self.room_group_name,
            self.channel_name,
        )
        await self.accept()

    async def disconnect(self, close_code):
        """Unsubscribe the socket from the room's channel group."""
        await self.channel_layer.group_discard(
            self.room_group_name,
            self.channel_name,
        )

    async def receive(self, text_data):
        """Fan an incoming client message out to every consumer in the group."""
        payload = json.loads(text_data)
        await self.channel_layer.group_send(
            self.room_group_name,
            {'type': 'chat_message', 'message': payload['message']},
        )

    async def chat_message(self, event):
        """Forward a group-broadcast message down this websocket."""
        await self.send(text_data=json.dumps({'message': event['message']}))
"krakow@miktelgsm.pl"
] | krakow@miktelgsm.pl |
779879dc262374b487ad7cf2413495b8e0a213e6 | 63ce62eb08e54c0e1fc93787d23d30ed38a80d19 | /giveaways/urls.py | 5f529a455d759445a26dc5a21a77515ac9ca1487 | [] | no_license | prettyirrelevant/giveawayy | 597581fee7e4e8ab209014594c670660827661e4 | 81e345466ffe9887a262caa0b5fd23d95d620f53 | refs/heads/main | 2023-08-24T11:40:17.916940 | 2021-10-17T23:47:01 | 2021-10-17T23:47:01 | 408,027,551 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | from django.urls import path
from . import views

# Namespace used for reversing, e.g. ``reverse("giveaways:create-giveaway")``.
app_name = "giveaways"

# Literal segments ("new/", "search/") are listed before the ``<slug>/``
# catch-all: Django matches top-down, so the order prevents "new" or
# "search" from being swallowed as a slug.
urlpatterns = [
    path("giveaways/new/", views.CreateGiveawayView.as_view(), name="create-giveaway"),
    path("giveaways/search/", views.SearchGiveawayView.as_view(), name="search-giveaway"),
    path("giveaways/<slug>/", views.DisplayGiveawayView.as_view(), name="view-giveaway"),
    path("giveaways/<slug>/join/", views.JoinGiveawayView.as_view(), name="join-giveaway"),
]
| [
"ienioladewumi@gmail.com"
] | ienioladewumi@gmail.com |
70a516f5a592ce86deea9bbd83cf59612cedb837 | 0869d7edac80e8aebe951682a2cc311a083eade3 | /Python/tdw/proc_gen/arrangements/side_table.py | 0b2ade721bc8dd0ff79ebba56f2188ac280ad961 | [
"BSD-2-Clause"
] | permissive | threedworld-mit/tdw | 7d5b4453832647733ff91ad7a7ce7ec2320454c1 | 9df96fba455b327bb360d8dd5886d8754046c690 | refs/heads/master | 2023-09-01T11:45:28.132298 | 2023-08-31T16:13:30 | 2023-08-31T16:13:30 | 245,492,977 | 427 | 75 | BSD-2-Clause | 2023-09-14T17:36:12 | 2020-03-06T18:42:09 | Python | UTF-8 | Python | false | false | 3,372 | py | from typing import List
from tdw.tdw_utils import TDWUtils
from tdw.cardinal_direction import CardinalDirection
from tdw.proc_gen.arrangements.arrangement_along_wall import ArrangementAlongWall
class SideTable(ArrangementAlongWall):
    """
    A small side table with objects on it.

    - The side table model is chosen randomly; see `SideTable.MODEL_CATEGORIES["side_table"]`.
    - The side table is placed next to a wall.
      - The side table's position is automatically adjusted to set it flush to the wall.
      - The side table is automatically rotated so that it faces away from the wall.
      - The side table's (x, z) positional coordinates are offset by a factor; see `SideTable.DEPTH_FACTOR` and `SIDE_TABLE.LENGTH_FACTOR`.
    - The side table will have a rectangular arrangement of objects on top of it.
      - The objects are chosen randomly; see `SideTable.ON_TOP_OF["side_table"]`.
      - The objects are positioned in a rectangular grid on the table with random rotations and positional perturbations; see: `SideTable.CELL_SIZE`, `SideTable.CELL_DENSITY`, `SideTable.WIDTH_SCALE`, and `SideTable.DEPTH_SCALE`.
    - The side table is non-kinematic.
    """

    """:class_var
    Offset the distance from the wall by this factor.
    """
    DEPTH_FACTOR: float = 1.05
    """:class_var
    Offset the distance along the wall by this factor.
    """
    LENGTH_FACTOR: float = 1.25
    """:class_var
    The size of each cell in the side table rectangular arrangement. This controls the minimum size of objects and the density of the arrangement.
    """
    CELL_SIZE: float = 0.05
    """:class_var
    The probability from 0 to 1 of a "cell" in the side table rectangular arrangement being empty. Lower value = a higher density of small objects.
    """
    CELL_DENSITY: float = 0.4
    """:class
    When adding objects, the width of the side table is assumed to be `actual_width * WIDTH_SCALE`. This prevents objects from being too close to the edges of the side table.
    """
    WIDTH_SCALE: float = 0.8
    """:class
    When adding objects, the depth of the side table is assumed to be `actual_depth * DEPTH_SCALE`. This prevents objects from being too close to the edges of the side table.
    """
    DEPTH_SCALE: float = 0.8

    def get_commands(self) -> List[dict]:
        # Spawn the table plus a rectangular grid of surface objects; the
        # table itself is non-kinematic (physics-enabled).
        return self._add_object_with_other_objects_on_top(kinematic=False,
                                                          cell_size=SideTable.CELL_SIZE,
                                                          density=SideTable.CELL_DENSITY,
                                                          x_scale=SideTable.WIDTH_SCALE,
                                                          z_scale=SideTable.DEPTH_SCALE)

    def get_length(self) -> float:
        # Extents index 2 -- presumably the model's z extent -- scaled up so
        # neighboring arrangements keep some clearance along the wall.
        return TDWUtils.get_bounds_extents(bounds=self._record.bounds)[2] * SideTable.LENGTH_FACTOR

    def _get_depth(self) -> float:
        # Extents index 0 -- presumably the model's x extent -- padded so the
        # table sits slightly away from the wall.
        return TDWUtils.get_bounds_extents(bounds=self._record.bounds)[0] * SideTable.DEPTH_FACTOR

    def _get_rotation(self) -> float:
        # Yaw in degrees per wall so the table faces away from it:
        # north=0, east=90, south=180, west=270.
        if self._wall == CardinalDirection.north:
            return 0
        elif self._wall == CardinalDirection.east:
            return 90
        elif self._wall == CardinalDirection.south:
            return 180
        else:
            return 270

    def _get_category(self) -> str:
        # Model-library category used for random model selection.
        return "side_table"
| [
"alters@mit.edu"
] | alters@mit.edu |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.