blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e6823d1c327a49f0d484cae4b19d90a619c5311 | eaa781cc52a1a8686cd50424132744a300ce2fda | /shell/essay.py | 349342a9af55273a8978fd7add6d96451404e286 | [] | no_license | hustmonk/kdd2014 | 2d7f1d7393cccfaff95f8fc399a213ba588c7f02 | bf4f41af61e5ab08eb0b08aee990c04a7afbef0d | refs/heads/master | 2021-01-02T23:07:03.632621 | 2014-08-13T00:02:45 | 2014-08-13T00:02:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Last modified:
#申请论文信息
#essay info
"""docstring
"""
import csv
__revision__ = '0.1'
class Essay:
def _normal(self, list):
new_list = []
for x in list:
x = x.lower()
newx = []
for ch in x:
if ('a' <= ch and ch <= 'z') or ('0' <= ch and ch <= '9') or ch == ' ':
newx.append(ch)
else:
newx.append(" ")
new_list.append("".join(newx))
return new_list
def __init__(self, debug = False):
data_dir = '../data/'
filename = "essays.csv"
reader = csv.reader(file(data_dir + filename, 'rb'))
reader.next()
self.resources_feature = {}
idx = 0
for line in reader:
pid = line[0]
self.resources_feature[pid] = " ".join(self._normal(line[2:])).decode("utf-8")
if debug:
if idx > 1000:
break
idx = idx + 1
if __name__ == '__main__':
essay = Essay()
| [
"liujingminghust@163.com"
] | liujingminghust@163.com |
23a05b1cda180f9c05216f1af7274c13c0307cd6 | 223c3cf4281427e41ce6dc8d2501e157e05d8f25 | /results/run_check_epochs/0x1.5426135e0ddf4p+30/script.py | c013f937d02bf76521ab27987b3b27a8ab2b8650 | [
"BSD-2-Clause"
] | permissive | chri4354/meg_perceptual_decision_symbols | 94160d81035be283a4ade13dc8f488447b6773f6 | 34ed840c95d6fbedbf911c1b1506da383da77eb9 | refs/heads/master | 2021-01-15T11:46:02.968578 | 2015-03-29T19:12:40 | 2015-03-29T19:12:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,711 | py | import sys
import mkl
import os.path as op
import mne
from meeg_preprocessing.utils import setup_provenance
from ambiguity.conditions import get_events
import matplotlib.pyplot as plt
import warnings
import numpy as np
from scripts.config import (
data_path,
subjects,
results_dir,
open_browser
)
report, run_id, results_dir, logger = setup_provenance(
script=__file__, results_dir=results_dir)
#
# if len(sys.argv) > 1:
# subjects = [sys.argv[1]]
# mkl.set_num_threads(1)
for subject in subjects:
# STIM =====================================================================
epochs_fname = op.join(data_path, 'MEG', subject,
'stim_lock-{}-epo.fif'.format(subject))
bhv_fname = op.join(data_path, 'behavior',
'{}_behaviorMEG.mat'.format(subject[-2:]))
# Read Mat file
events = get_events(bhv_fname, 'stim_lock')
mat = np.array(events['trigger_value'].astype(list))
# Read events from epochs
epochs = mne.read_epochs(epochs_fname)
fiff = epochs.events[:,2]
# Checkup procedure
if len(mat) > len(fiff):
# XXX Here need procedure to correct issue
raise(error)
warnings.warn('too many events in mat as compared to fiff')
mat = mat[0:len(fiff)]
if len(mat) < len(fiff):
raise(error)
warnings.warn('too many events in fiff as compared to mat')
fiff = fiff[0:len(mat)]
if np.any(mat != fiff):
index = np.where((mat - fiff) != 0.)[0][0]
warnings.warn('{}: Problem with trigger {}.'.format(subject, index))
# Report
fig, (ax1, ax2) = plt.subplots(2, 1, sharey=True)
ax1.plot(mat)
ax1.plot(fiff + np.max(mat) + 1.0)
ax2.set_title('triggers from mat & fiff')
ax2.plot(mat - fiff)
ax2.set_title('mat - fiff')
report.add_figs_to_section(fig, 'Stim triggers', subject)
# plt.show()
# MOTOR ====================================================================
epochs_fname = op.join(data_path, 'MEG', subject,
'motor_lock-{}-epo.fif'.format(subject))
# Read Mat file
events = get_events(bhv_fname, 'motor_lock')
mat = np.array(events['motor_side'].astype(list))
# Read events from epochs
epochs = mne.read_epochs(epochs_fname)
fiff = 1 + (epochs.events[:,2] < 2 ** 14)
if len(mat) > len(fiff):
# XXX Here need procedure to correct issue
raise(error)
warnings.warn('too many events in mat as compared to fiff')
mat = mat[0:len(fiff)]
if len(mat) < len(fiff):
raise(error)
warnings.warn('too many events in fiff as compared to mat')
fiff = fiff[0:len(mat)]
# rm = list()
# index = np.where((mat - fiff[0:len(mat)]) != 0.)[0]
# while (len(index) > 0) and ((len(mat) + len(rm)) <= len(fiff)):
# print(rm)
# rm.append(index[0] + len(rm))
# sel = [i for i in range(0,len(mat)+len(rm)) if i not in rm]
# index = np.where((mat - fiff[sel]) != 0.)[0]
# epochs = epochs[sel]
# warnings.warn('Found {} unwanted epochs. Correcting and resaving {} epochs...'.format(len(rm), subject))
# fiff = 1 + (epochs.events[:,2] < 2 ** 14)
# epochs.save(op.join(data_path, 'MEG', subject, 'stim_lock-{}-epo.fif'.format(subject)))
fig, (ax1, ax2) = plt.subplots(2, 1, sharey=True)
ax1.plot(mat)
ax1.plot(fiff + np.max(mat) + 1.0)
ax2.set_title('triggers from mat & fiff')
ax2.plot(mat - fiff)
ax2.set_title('mat - fiff')
report.add_figs_to_section(fig, 'Motor triggers', subject)
# plt.show()
report.save(open_browser=open_browser)
| [
"jeanremi.king+github@gmail.com"
] | jeanremi.king+github@gmail.com |
a25456d0b47ab6ab15ab64b06f610459fed2cc9e | ad13583673551857615498b9605d9dcab63bb2c3 | /output/instances/nistData/atomic/long/Schema+Instance/NISTXML-SV-IV-atomic-long-whiteSpace-1-2.py | edbc3af912f4107c5b5261ff2d179988d496fe2f | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 267 | py | from output.models.nist_data.atomic.long.schema_instance.nistschema_sv_iv_atomic_long_white_space_1_xsd.nistschema_sv_iv_atomic_long_white_space_1 import NistschemaSvIvAtomicLongWhiteSpace1
obj = NistschemaSvIvAtomicLongWhiteSpace1(
value=-141811240377451630
)
| [
"tsoulloftas@gmail.com"
] | tsoulloftas@gmail.com |
79f0bc35dead901f383ef6d61b759a98596cf035 | 13f6df9b74df10c7054cbf826e6e3538012493c9 | /ajenti/plugins/supervisor/client.py | 5edf2c37045040caf22f82a43dc36f22e470eb02 | [] | no_license | zennro/ajenti | 6906bf285a72b35a485555d5a2296f04717d4bae | 0d40cbb38117a2018607d21a138a83bf6581d729 | refs/heads/master | 2021-01-18T20:15:10.615198 | 2014-03-29T17:39:35 | 2014-03-29T17:39:35 | 18,260,276 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,723 | py | import subprocess
from ajenti.api import *
from ajenti.plugins.services.api import Service, ServiceManager
@plugin
class SupervisorServiceManager (ServiceManager):
def test(self):
return subprocess.call(['supervisorctl', 'status']) == 0
def run(self, *cmds):
return subprocess.check_output(['supervisorctl'] + list(cmds))
def get_all(self):
r = []
try:
lines = self.run('status').splitlines()
except:
return []
for l in lines:
if l:
l = l.split(None, 2)
s = SupervisorService()
s.name = l[0]
s.running = len(l) > 2 and l[1] == 'RUNNING'
s.status = l[2] if len(l) > 2 else ''
r.append(s)
return r
def fill(self, programs):
for p in programs:
p.status = ''
p.icon = ''
for s in self.get_all():
for p in programs:
if p.name == s.name:
p.running = s.running
p.status = s.status
p.icon = 'play' if p.running else None
class SupervisorService (Service):
source = 'supervisord'
def __init__(self):
self.name = None
self.running = False
def run(self, *cmds):
return subprocess.check_output(['supervisorctl'] + list(cmds))
@property
def icon(self):
return 'play' if self.running else None
def start(self):
self.run('start', self.name)
def stop(self):
self.run('stop', self.name)
def restart(self):
self.run('restart', self.name)
def tail(self, id):
return self.run('tail', self.name)
| [
"e@ajenti.org"
] | e@ajenti.org |
cbc2ed0f38928c2471f097b3803e8dddb7b91602 | 0809ea2739d901b095d896e01baa9672f3138825 | /without_restapim/testApp/utils.py | 93e40d370280d08463e8ec4a6eabb6bf09a80774 | [] | no_license | Gagangithub1988/djangoprojects | dd001f2184e78be2fb269dbfdc8e3be1dd71ce43 | ea236f0e4172fbf0f71a99aed05ed7c7b38018e2 | refs/heads/master | 2022-11-15T23:46:46.134247 | 2020-07-15T06:37:51 | 2020-07-15T06:37:51 | 273,479,403 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | import json
def is_json(data):
try:
p_data=json.loads(data)
valid=True
except ValueError:
valid=False
return valid | [
"djangopython1988@gmail.com"
] | djangopython1988@gmail.com |
449428cd24acaff229144d38164a47472ec94748 | f207164511f0dfe3f01f6e0c21fd7548e626397f | /toolkit/toolkit.mozbuild | ebdca4a1aed9c6fad7483c3e0a71d58cff634e1d | [] | no_license | PortableApps/palemoon27 | 24dbac1a4b6fe620611f4fb6800a29ae6f008d37 | 3d7e107cc639bc714906baad262a3492372e05d7 | refs/heads/master | 2023-08-15T12:32:23.822300 | 2021-10-11T01:54:45 | 2021-10-11T01:54:45 | 416,058,642 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,400 | mozbuild | # vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
if CONFIG['LIBXUL_SDK']:
error('toolkit.mozbuild is not compatible with --enable-libxul-sdk=')
if CONFIG['MOZ_SANDBOX']:
DIRS += ['/security/sandbox']
DIRS += [
# Depends on NSS and NSPR, and must be built after sandbox or else B2G emulator
# builds fail.
'/security/certverifier',
# Depends on certverifier
'/security/apps',
]
# the signing related bits of libmar depend on nss
if CONFIG['MOZ_UPDATER']:
DIRS += ['/modules/libmar']
DIRS += [
'/config/external/freetype2',
'/xpcom',
'/modules/libpref',
'/intl',
'/netwerk',
]
if CONFIG['MOZ_AUTH_EXTENSION']:
DIRS += ['/extensions/auth']
if CONFIG['MOZ_UPDATER']:
DIRS += ['/other-licenses/bsdiff']
# Gecko/Core components.
DIRS += [
'/ipc',
'/js/ipc',
'/hal',
'/js/xpconnect',
'/intl/chardet',
'/media/libyuv',
'/modules/libjar',
'/storage',
]
if CONFIG['MOZ_PERMISSIONS']:
DIRS += [
'/extensions/cookie',
'/extensions/permissions',
]
DIRS += [
'/rdf',
]
if CONFIG['MOZ_WEBRTC']:
DIRS += [
'/media/webrtc',
'/media/mtransport',
]
if CONFIG['MOZ_OMX_PLUGIN']:
DIRS += [
'/media/omx-plugin/lib/ics/libutils',
'/media/omx-plugin/lib/ics/libstagefright',
'/media/omx-plugin/lib/ics/libvideoeditorplayer',
'/media/omx-plugin/lib/gb/libutils',
'/media/omx-plugin/lib/gb/libstagefright',
'/media/omx-plugin/lib/gb/libstagefright_color_conversion',
'/media/omx-plugin/lib/gb235/libstagefright',
'/media/omx-plugin',
'/media/omx-plugin/gb',
'/media/omx-plugin/gb235',
'/media/omx-plugin/lib/hc/libstagefright',
'/media/omx-plugin/hc',
'/media/omx-plugin/kk',
]
if CONFIG['ENABLE_TESTS']:
DIRS += ['/testing/specialpowers']
DIRS += [
'/testing/gtest',
'/uriloader',
'/caps',
'/parser',
'/gfx',
'/image',
'/dom',
'/view',
'/widget',
'/content',
'/editor',
'/layout',
'/docshell',
'/embedding',
'/xpfe/appshell'
]
# This needs to be built after the gfx/ directory
# to ensure all dependencies for skia (e.g. mozalloc, xpcom)
# have been built
if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android':
DIRS += ['/other-licenses/skia-npapi']
if CONFIG['MOZ_UNIVERSALCHARDET']:
DIRS += ['/extensions/universalchardet']
if CONFIG['ACCESSIBILITY']:
DIRS += ['/accessible']
else:
DIRS += ['/accessible/ipc']
# toolkit
DIRS += ['/profile']
# This must precede xpfe.
if CONFIG['MOZ_JPROF']:
DIRS += ['/tools/jprof']
DIRS += [
'/tools/profiler',
'/xpfe/components',
]
if CONFIG['MOZ_ENABLE_XREMOTE']:
DIRS += ['/widget/xremoteclient']
if CONFIG['MOZ_SPELLCHECK']:
DIRS += ['/extensions/spellcheck']
DIRS += [
'/security/manager',
'/toolkit',
]
if CONFIG['MOZ_PREF_EXTENSIONS']:
DIRS += ['/extensions/pref']
DIRS += [
'/services',
'/startupcache',
'/js/ductwork/debugger',
'/other-licenses/snappy',
]
if CONFIG['MOZ_GIO_COMPONENT']:
DIRS += ['/extensions/gio']
DIRS += [
'/toolkit/library/StaticXULComponentsEnd',
'/toolkit/library',
]
if CONFIG['MOZ_ENABLE_GNOME_COMPONENT']:
DIRS += ['/toolkit/system/gnome']
# if QtNetwork is present, it will do its own network monitoring
if not CONFIG['MOZ_ENABLE_QTNETWORK'] and CONFIG['MOZ_ENABLE_DBUS']:
DIRS += ['/toolkit/system/dbus']
if CONFIG['ENABLE_MARIONETTE'] or CONFIG['MOZ_WIDGET_TOOLKIT'] not in ('gonk', 'android'):
DIRS += ['/testing/marionette']
DIRS += [
'/tools/quitter',
'/media/gmp-clearkey/0.1',
]
if CONFIG['ENABLE_TESTS']:
DIRS += [
'/testing/mochitest',
'/testing/xpcshell',
'/testing/tools/screenshot',
'/testing/profiles',
'/testing/mozbase',
'/testing/modules',
'/testing/runtimes',
'/testing/web-platform',
]
if CONFIG['MOZ_WEBRTC'] and CONFIG['MOZ_WIDGET_TOOLKIT'] != 'gonk':
DIRS += [
'/media/webrtc/signaling/test',
'/media/webrtc/signaling/test/standalone',
'/media/mtransport/test',
]
| [
"roytam@gmail.com"
] | roytam@gmail.com |
36cd06d92565b7898b01d87fe29079a787f22e91 | 1ee910d6602123eb1328f56419b04e31b3761b6b | /lib/python3.5/site-packages/twilio/rest/ip_messaging/v2/service/user/__init__.py | bd5acc2db6d505f60bb14bf59362bb134b9091e5 | [
"MIT"
] | permissive | mraza007/Pizza-or-Not-a-Pizza | 7fc89e0905c86fbd3c77a9cc834a4b6098912aeb | 6ad59d046adbd6be812c7403d9cb8ffbdbd6b0b8 | refs/heads/master | 2022-12-15T15:47:34.779838 | 2018-07-04T02:28:56 | 2018-07-04T02:28:56 | 127,992,302 | 30 | 4 | MIT | 2022-11-22T00:43:51 | 2018-04-04T01:56:26 | Python | UTF-8 | Python | false | false | 17,468 | py | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.ip_messaging.v2.service.user.user_binding import UserBindingList
from twilio.rest.ip_messaging.v2.service.user.user_channel import UserChannelList
class UserList(ListResource):
""" """
def __init__(self, version, service_sid):
"""
Initialize the UserList
:param Version version: Version that contains the resource
:param service_sid: The service_sid
:returns: twilio.rest.chat.v2.service.user.UserList
:rtype: twilio.rest.chat.v2.service.user.UserList
"""
super(UserList, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, }
self._uri = '/Services/{service_sid}/Users'.format(**self._solution)
def create(self, identity, role_sid=values.unset, attributes=values.unset,
friendly_name=values.unset):
"""
Create a new UserInstance
:param unicode identity: The identity
:param unicode role_sid: The role_sid
:param unicode attributes: The attributes
:param unicode friendly_name: The friendly_name
:returns: Newly created UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserInstance
"""
data = values.of({
'Identity': identity,
'RoleSid': role_sid,
'Attributes': attributes,
'FriendlyName': friendly_name,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return UserInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def stream(self, limit=None, page_size=None):
"""
Streams UserInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.chat.v2.service.user.UserInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists UserInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.chat.v2.service.user.UserInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of UserInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserPage
"""
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return UserPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of UserInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return UserPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a UserContext
:param sid: The sid
:returns: twilio.rest.chat.v2.service.user.UserContext
:rtype: twilio.rest.chat.v2.service.user.UserContext
"""
return UserContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __call__(self, sid):
"""
Constructs a UserContext
:param sid: The sid
:returns: twilio.rest.chat.v2.service.user.UserContext
:rtype: twilio.rest.chat.v2.service.user.UserContext
"""
return UserContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.IpMessaging.V2.UserList>'
class UserPage(Page):
""" """
def __init__(self, version, response, solution):
"""
Initialize the UserPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param service_sid: The service_sid
:returns: twilio.rest.chat.v2.service.user.UserPage
:rtype: twilio.rest.chat.v2.service.user.UserPage
"""
super(UserPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of UserInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v2.service.user.UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserInstance
"""
return UserInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.IpMessaging.V2.UserPage>'
class UserContext(InstanceContext):
""" """
def __init__(self, version, service_sid, sid):
"""
Initialize the UserContext
:param Version version: Version that contains the resource
:param service_sid: The service_sid
:param sid: The sid
:returns: twilio.rest.chat.v2.service.user.UserContext
:rtype: twilio.rest.chat.v2.service.user.UserContext
"""
super(UserContext, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'sid': sid, }
self._uri = '/Services/{service_sid}/Users/{sid}'.format(**self._solution)
# Dependents
self._user_channels = None
self._user_bindings = None
def fetch(self):
"""
Fetch a UserInstance
:returns: Fetched UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return UserInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the UserInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete('delete', self._uri)
def update(self, role_sid=values.unset, attributes=values.unset,
friendly_name=values.unset):
"""
Update the UserInstance
:param unicode role_sid: The role_sid
:param unicode attributes: The attributes
:param unicode friendly_name: The friendly_name
:returns: Updated UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserInstance
"""
data = values.of({'RoleSid': role_sid, 'Attributes': attributes, 'FriendlyName': friendly_name, })
payload = self._version.update(
'POST',
self._uri,
data=data,
)
return UserInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
@property
def user_channels(self):
"""
Access the user_channels
:returns: twilio.rest.chat.v2.service.user.user_channel.UserChannelList
:rtype: twilio.rest.chat.v2.service.user.user_channel.UserChannelList
"""
if self._user_channels is None:
self._user_channels = UserChannelList(
self._version,
service_sid=self._solution['service_sid'],
user_sid=self._solution['sid'],
)
return self._user_channels
@property
def user_bindings(self):
"""
Access the user_bindings
:returns: twilio.rest.chat.v2.service.user.user_binding.UserBindingList
:rtype: twilio.rest.chat.v2.service.user.user_binding.UserBindingList
"""
if self._user_bindings is None:
self._user_bindings = UserBindingList(
self._version,
service_sid=self._solution['service_sid'],
user_sid=self._solution['sid'],
)
return self._user_bindings
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.IpMessaging.V2.UserContext {}>'.format(context)
class UserInstance(InstanceResource):
""" """
def __init__(self, version, payload, service_sid, sid=None):
"""
Initialize the UserInstance
:returns: twilio.rest.chat.v2.service.user.UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserInstance
"""
super(UserInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload['sid'],
'account_sid': payload['account_sid'],
'service_sid': payload['service_sid'],
'attributes': payload['attributes'],
'friendly_name': payload['friendly_name'],
'role_sid': payload['role_sid'],
'identity': payload['identity'],
'is_online': payload['is_online'],
'is_notifiable': payload['is_notifiable'],
'date_created': deserialize.iso8601_datetime(payload['date_created']),
'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
'joined_channels_count': deserialize.integer(payload['joined_channels_count']),
'links': payload['links'],
'url': payload['url'],
}
# Context
self._context = None
self._solution = {'service_sid': service_sid, 'sid': sid or self._properties['sid'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: UserContext for this UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserContext
"""
if self._context is None:
self._context = UserContext(
self._version,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The sid
:rtype: unicode
"""
return self._properties['sid']
@property
def account_sid(self):
"""
:returns: The account_sid
:rtype: unicode
"""
return self._properties['account_sid']
@property
def service_sid(self):
"""
:returns: The service_sid
:rtype: unicode
"""
return self._properties['service_sid']
@property
def attributes(self):
"""
:returns: The attributes
:rtype: unicode
"""
return self._properties['attributes']
@property
def friendly_name(self):
"""
:returns: The friendly_name
:rtype: unicode
"""
return self._properties['friendly_name']
@property
def role_sid(self):
"""
:returns: The role_sid
:rtype: unicode
"""
return self._properties['role_sid']
@property
def identity(self):
"""
:returns: The identity
:rtype: unicode
"""
return self._properties['identity']
@property
def is_online(self):
"""
:returns: The is_online
:rtype: bool
"""
return self._properties['is_online']
@property
def is_notifiable(self):
"""
:returns: The is_notifiable
:rtype: bool
"""
return self._properties['is_notifiable']
@property
def date_created(self):
"""
:returns: The date_created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The date_updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def joined_channels_count(self):
"""
:returns: The joined_channels_count
:rtype: unicode
"""
return self._properties['joined_channels_count']
@property
def links(self):
"""
:returns: The links
:rtype: unicode
"""
return self._properties['links']
@property
def url(self):
"""
:returns: The url
:rtype: unicode
"""
return self._properties['url']
def fetch(self):
"""
Fetch a UserInstance
:returns: Fetched UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the UserInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def update(self, role_sid=values.unset, attributes=values.unset,
friendly_name=values.unset):
"""
Update the UserInstance
:param unicode role_sid: The role_sid
:param unicode attributes: The attributes
:param unicode friendly_name: The friendly_name
:returns: Updated UserInstance
:rtype: twilio.rest.chat.v2.service.user.UserInstance
"""
return self._proxy.update(role_sid=role_sid, attributes=attributes, friendly_name=friendly_name, )
@property
def user_channels(self):
"""
Access the user_channels
:returns: twilio.rest.chat.v2.service.user.user_channel.UserChannelList
:rtype: twilio.rest.chat.v2.service.user.user_channel.UserChannelList
"""
return self._proxy.user_channels
@property
def user_bindings(self):
"""
Access the user_bindings
:returns: twilio.rest.chat.v2.service.user.user_binding.UserBindingList
:rtype: twilio.rest.chat.v2.service.user.user_binding.UserBindingList
"""
return self._proxy.user_bindings
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.IpMessaging.V2.UserInstance {}>'.format(context)
| [
"muhammadraza0047@gmail.com"
] | muhammadraza0047@gmail.com |
57eb78a0554396f29509498063511bd588bb9f1f | f2d99d437f29af7bd2d4a5ea8af57f49e3d30871 | /test/assignment/module_attribute.py | f003021d6d9dd2f1123f626046504736d9312929 | [] | no_license | terrence2/millipede | 6d68a09773f83e304702d72a38475946590e3079 | 3e1d63899f54f1154cb6a0bc3634815623803246 | refs/heads/master | 2020-04-01T14:28:06.810699 | 2011-05-30T20:15:30 | 2011-05-30T20:15:30 | 839,748 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | class A: pass
a = A()
a.b = A()
a.b.c = A()
a.b.c.d = A()
print(a.__class__.__name__)
print(a.b.__class__.__name__)
print(a.b.c.__class__.__name__)
print(a.b.c.d.__class__.__name__)
#out: A
#out: A
#out: A
#out: A
| [
"terrence@zettabytestorage.com"
] | terrence@zettabytestorage.com |
a6d78dcee4b7d9d0b627e8613bc50a7083969f3b | 92abae05cb2c31f4f4f228844ff8ecfc5439e098 | /Generators/GEN_GLCM_PREPROCESS.py | c141ca99775a719a86f5ce463c86bcbea82ed86a | [] | no_license | lukkascost/py_Crosswalk | 1530133e0c808be433bdb00cbcda810b689df6ce | 287db1583bf21696cd9f307498a5fa2e1cb69c1b | refs/heads/master | 2021-06-03T16:33:38.139919 | 2020-01-29T16:12:39 | 2020-01-29T16:12:39 | 140,041,089 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,663 | py | import cv2
import numpy as np
from MachineLearn.Classes.Extractors.GLCM import GLCM
MIN_BITS = 8
MAX_BITS = 8
MIN_DECIMATION = 20
MAX_DECIMATION = 100
PATH_TO_IMAGES_FOLDER = '../database-Crosswalk/Preprocessed/'
PATH_TO_SAVE_FEATURES = 'GLCM_FILES/EXP_01/'
for nbits in range(MIN_BITS, MAX_BITS + 1):
for k in range(MIN_DECIMATION, MAX_DECIMATION + 1):
listGLCM = []
for quantity in [[1, 50], [2, 50], [3, 50], [4, 150]]:
for image in range(1, quantity[1] + 1):
img = cv2.imread(PATH_TO_IMAGES_FOLDER+"c{:d}_p1_{:d}.JPG".format(quantity[0], image), 0)
""" DECIMATION """
klist = [x for x in range(0, img.shape[0], k)]
klist2 = [x for x in range(0, img.shape[1], k)]
img = img[klist]
img = img[:, klist2]
""" CHANGING IMAGE TO VALUES BETWEEN 0 AND 2**NBITS"""
img = img / 2 ** (8 - nbits)
""" GENERATING FEATURES FOR GLCM """
oGlcm = GLCM(img, nbits)
oGlcm.generateCoOccurenceHorizontal()
oGlcm.normalizeCoOccurence()
oGlcm.calculateAttributes()
""" ADDING FEATURES IN ARRAY FOR SAVE IN FILE """
listGLCM.append(oGlcm.exportToClassfier("Class " + str(quantity[0])))
print nbits, k, quantity[0], image
listGLCM = np.array(listGLCM)
""" SAVE FILE WITH FEATURES, DECIMATION WITH STEP = k AND CORRELATION MATRIX WITH nbits BITS. """
np.savetxt(PATH_TO_SAVE_FEATURES+"FEATURES_M{}_CM{}b.txt".format(k, nbits), listGLCM, fmt="%s", delimiter=',')
| [
"lucas.costa@lit.ifce.edu.br"
] | lucas.costa@lit.ifce.edu.br |
0bba2d08714c610787e7f98c5cdf1edbf1dfeff9 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/438/usersdata/314/99574/submittedfiles/pico.py | 2fe6a6464d97f450c55ab04953a46384b1186e82 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | # -*- coding: utf-8 -*-
def pico(lista):
maior=lista[0]
meio=lista.index(lista[i])
for i in range(0,len(lista)-1,1):
if lista[i]>maior:
maior=lista[i]
crescente=True
if meio>lista[i]:
decrescente=True
else:
decrescente=False
crescente=False
n=int(input('Digite n: '))
lista=[]
for i in range(0,n,1):
lista.append(int(input('Digite a quantidade de elementos da lista: ')))
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
bfe7efa037054787430565e8e7dd7e67369deef0 | 9eb35d6df7b0490d556623f84dba12bb05f30ee2 | /comp_stat_little_tricks/welfords_variance.py | 879fe04be37135f1677925987a51fe5785db7a74 | [
"MIT"
] | permissive | FelSiq/statistics-related | 0b4442bd19338c5b0da7dcf5ecd53eb304dcd3f8 | ee050202717fc368a3793b195dea03687026eb1f | refs/heads/master | 2021-11-24T12:31:08.660652 | 2021-11-03T23:42:39 | 2021-11-03T23:42:39 | 211,089,869 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 848 | py | import typing as t
def welfords_var(vals: t.Sequence[t.Union[int, float]]) -> float:
    """Powerful one-pass method for computing array variance.

    Implements Welford's online update: the running mean and the running
    sum of squared deviations are maintained together, avoiding the
    catastrophic cancellation of the naive two-sum formula.  Returns the
    sample variance (ddof=1).
    """
    mean = 0
    sq_dev_sum = 0
    for count, value in enumerate(vals, 1):
        delta = value - mean
        mean += delta / count
        sq_dev_sum += delta * (value - mean)
    return sq_dev_sum / (len(vals) - 1)
if __name__ == "__main__":
    import numpy as np

    # Fixed seed so the self-check is reproducible.
    np.random.seed(1444)

    # Large-magnitude integers plus small noise: stresses numerical
    # stability of the one-pass update.
    for _ in range(500):
        sample = (np.random.randint(-999999, 999999, size=1000) +
                  2.0 * np.random.random(size=1000) - 1.0)
        assert np.allclose(welfords_var(sample), sample.var(ddof=1))

    # Small values in [-1, 1).
    for _ in range(500):
        sample = 2.0 * np.random.random(size=1000) - 1.0
        assert np.allclose(welfords_var(sample), sample.var(ddof=1))
| [
"felipe.siqueira@usp.br"
] | felipe.siqueira@usp.br |
4fc95827b4a6c449e2add1cbc38c5c2192e31a9e | d8f43a5eebda6dec3e07520e2719f5ccf645210a | /day09/__init__.py | b5cb543237c5ee58caf3ddea6c5f9d3a6084e1e2 | [] | no_license | sisul1204/Python60 | 1f7db60c448fd9c0487341f4768bbf1363a69eff | c3675cc02d79102b0b2ee73be6fa4c1645981640 | refs/heads/master | 2022-12-14T11:03:08.653333 | 2020-08-18T00:56:50 | 2020-08-18T00:56:50 | 278,508,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106 | py | #!/usr/bin/env python
#-*-coding:utf-8-*-
# Author:sisul
# time: 2020/7/8 15:38
# file: __init__.py.py | [
"lizhipeng1021@163.com"
] | lizhipeng1021@163.com |
c95eafdcb80903dfb6648b489d3951c8372de95c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02775/s052208557.py | 8fef45c5cc9e573263a2006aaadb8f57669aedf5 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | import sys
def main():
    """Digit DP over the decimal string S read from stdin.

    dp[i][0]/dp[i][1] track the minimal cost over the first i digits
    without / with a carry of one propagated from the lower digits;
    the answer is dp[len(S)][0].
    """
    input=sys.stdin.readline  # shadow builtin input with the faster reader
    S=input().strip()
    dp=[[0,0] for i in range(len(S)+1)]
    # Starting in the carry state costs one (the carried note itself).
    dp[0][1]=1
    for i in range(1,len(S)+1):
        for j in (0,1):
            if j==0:
                # No carry out: pay digit d directly, or arrive from the
                # carry state where 10-d comes back as change.
                dp[i][0]=min(dp[i-1][0]+int(S[i-1]),dp[i-1][1]+10-int(S[i-1]))
            elif j==1:
                # Carry out: paying d+1 here, or one fewer change note
                # when already in the carry state.
                dp[i][1]=min(dp[i-1][0]+int(S[i-1])+1,dp[i-1][1]+10-int(S[i-1])-1)
    print(dp[len(S)][0])
if __name__=="__main__":
    main()
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
be521724694bccb38164c968ce2d4e190a3ce385 | cea03b578f0f6207afe5056611090848ab76bd23 | /model/verify_huisu.py | e987f6282e8d3cd45c0c5d6d7e1b57863d2ca583 | [] | no_license | swq90/stock | fa295f4fa0bf6a4d8afe8a71c02cc54fc7442bcd | a49ae395de82ecdfa38220f4fdbcaf4da6a39719 | refs/heads/master | 2021-07-14T03:42:21.950897 | 2020-10-14T11:52:24 | 2020-10-14T11:52:24 | 215,325,863 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,617 | py | import datetime
from sqlalchemy import create_engine
import pandas as pd
import stock.util.stockfilter as sfilter
import stock.util.sheep as sheep
engine = create_engine('postgresql://nezha:nezha@10.0.0.5:5432/stock', echo=False)
def get_data(start_date, end_date):
    """Load daily quotes and limit prices for [start_date, end_date] from
    PostgreSQL, drop excluded stocks and return the two tables merged on
    (ts_code, trade_date).

    Relies on the module-level ``engine`` connection and the project's
    ``sfilter`` helper.
    """
    excluded = sfilter.StockFilter().stock_basic(end_date, name="st|ST", market="科创板")
    started = datetime.datetime.now()
    bounds = {'start': start_date, 'end': end_date}
    raw_data = pd.read_sql_query(
        'select * from daily where (trade_date>=%(start)s and trade_date<=%(end)s)',
        params=bounds, con=engine)
    stk_limit = pd.read_sql_query(
        'select * from stk_limit where (trade_date>=%(start)s and trade_date<=%(end)s)',
        params=bounds, con=engine)
    # Report how long the two queries took.
    print(datetime.datetime.now() - started)
    raw_data.drop_duplicates(inplace=True)
    stk_limit.drop_duplicates(inplace=True)
    print('交易数据%s,包含%s个交易日,涨停数据%s' % (raw_data.shape, raw_data['trade_date'].unique().shape, stk_limit.shape))
    # Keep only stocks that are not in the exclusion list.
    raw_data = raw_data[raw_data["ts_code"].isin(excluded['ts_code']) == False]
    return raw_data.merge(stk_limit, on=['ts_code', 'trade_date'])
def verify(up, data):
    """Run sheep.wool and sheep.wool2 on the same input, dump both results
    to CSV for manual diffing and report how long each implementation took.

    The original captured t1/t2/t3 but never used them and ended with a
    bare print() that only emitted a blank line.
    """
    t1 = datetime.datetime.now()
    s1 = sheep.wool(up, data)
    t2 = datetime.datetime.now()
    s2 = sheep.wool2(up, data)
    t3 = datetime.datetime.now()
    s1.to_csv('s1.csv')
    s2.to_csv('s2.csv')
    # Report the elapsed time of each implementation.
    print('wool: %s wool2: %s' % (t2 - t1, t3 - t2))
# Ad-hoc smoke run: compare wool() and wool2() on Feb-Apr 2019 data.
t=get_data('20190220','20190430')
# Rows that closed exactly at their daily up limit ("limit-up" stocks).
s=t[t['close']==t['up_limit']]
# Per-day count of limit-up stocks, dumped for inspection.
pd.DataFrame(s.groupby('trade_date')['ts_code'].count()).to_csv('count.csv')
verify(s,t)
| [
"shaowenqin620@163.com"
] | shaowenqin620@163.com |
c99a1bdd8fc9c1068b09146888230fdd761ab4bb | 6f6465903edbb0587a43fbef9c3a6776c948d9b3 | /IntroducaoAoKNN/KNNComPandasESKTlearn.py | 3441ee1659085dbe72e658130ac2c775726d6418 | [] | no_license | borin98/CursoDeMachineLearning | 5871beccd2e09f2fc5d51e40370c11c730f056c1 | 6ead1db41c27009207a32658089bc7a790a06be8 | refs/heads/master | 2020-03-25T09:04:13.436480 | 2018-08-30T02:52:32 | 2018-08-30T02:52:32 | 143,645,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,382 | py | import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from math import sqrt
"""
Função que mexe com um arquivo CSV sobre sapatos que funciona da seguinte forma :
Coluna 0 : tamanho do sapato
Coluna 1 : Peso da pessoa
Coluna 2 : Tipo de pessoa ( Senior ou fourth - primario )
"""
def main():
    """Train a K-NN classifier on shoe size/height and report its accuracy.

    Fixes over the original:
    - ``np.sum(predicao == yTeste)`` broadcast the (n,) prediction vector
      against the (n, 1) label matrix, producing an (n, n) comparison and
      a meaningless hit count; the labels are flattened first now.
    - ``DataFrame.as_matrix`` was removed in pandas 1.0; ``.values`` is
      the drop-in replacement.
    - The total row count was hardcoded as 11329; it is derived from the
      test set instead.
    """
    # Load the train and test data.
    dadosTreino = pd.read_csv("train.csv")
    dadosTeste = pd.read_csv("test.csv")

    # Build numpy arrays for features and labels.
    col1 = ["shoe size", "height"]
    col2 = ["class"]
    xTreino = dadosTreino[col1].values
    yTreino = dadosTreino[col2].values
    xTeste = dadosTeste[col1].values
    yTeste = dadosTeste[col2].values

    # Rule-of-thumb k: square root of the total sample count.
    k = int(sqrt(len(dadosTreino) + len(dadosTeste)))

    knn = KNeighborsClassifier(n_neighbors=k, weights="distance")
    knn.fit(xTreino, yTreino.ravel())

    predicao = knn.predict(xTeste)
    # Compare 1-D against 1-D; the original compared (n,) to (n, 1).
    acertos = np.sum(predicao == yTeste.ravel())
    porcentagemAcertos = knn.score(xTeste, yTeste)

    print("Quantidade de dados acertados : {} acertos de {} dados \n\n".format(acertos, len(yTeste)))
    print("Porcentagem de acertos : {} %\n\n".format(porcentagemAcertos * 100))

main()
| [
"borinmacedo@gmail.com"
] | borinmacedo@gmail.com |
9c7abc977eb54a42f4e5543185cd6e3b8c9f8758 | 3a4fbde06794da1ec4c778055dcc5586eec4b7d2 | /_google_app_engine-projects/The_GAE_SWF_Project/pyamf/tests/remoting/test_client.py | 5f00bec64c391375aed4ff144a451f1ebd65751f | [
"MIT"
] | permissive | raychorn/svn_python-django-projects | 27b3f367303d6254af55c645ea003276a5807798 | df0d90c72d482b8a1e1b87e484d7ad991248ecc8 | refs/heads/main | 2022-12-30T20:36:25.884400 | 2020-10-15T21:52:32 | 2020-10-15T21:52:32 | 304,455,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,639 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2008 The PyAMF Project.
# See LICENSE for details.
"""
Test for Remoting client.
@author: U{Nick Joyce<mailto:nick@boxdesign.co.uk>}
@since: 0.1.0
"""
import unittest
import pyamf
from pyamf import remoting
from pyamf.remoting import client
class ServiceMethodProxyTestCase(unittest.TestCase):
    """Tests for client.ServiceMethodProxy."""

    def test_create(self):
        x = client.ServiceMethodProxy('a', 'b')
        self.assertEquals(x.service, 'a')
        self.assertEquals(x.name, 'b')

    def test_call(self):
        tc = self

        class TestService(object):
            def __init__(self, s, args):
                self.service = s
                self.args = args

            def _call(self, service, *args):
                # The original used assertTrue(a, b), whose second
                # argument is the failure *message* — it passed for any
                # truthy a and never compared anything. Use real
                # equality checks instead.
                tc.assertEquals(self.service, service)
                tc.assertEquals(self.args, list(args))

        x = client.ServiceMethodProxy(None, None)
        ts = TestService(x, [1, 2, 3])
        x.service = ts
        # Calling the proxy must forward to service._call(proxy, *args).
        x(1, 2, 3)

    def test_str(self):
        x = client.ServiceMethodProxy('spam', 'eggs')
        self.assertEquals(str(x), 'spam.eggs')
        # With no method name, only the service name is shown.
        x = client.ServiceMethodProxy('spam', None)
        self.assertEquals(str(x), 'spam')
class ServiceProxyTestCase(unittest.TestCase):
    """Tests for client.ServiceProxy, using dummy gateways that record the
    requests the proxy hands them and replay canned responses."""
    def test_create(self):
        x = client.ServiceProxy('spam', 'eggs')
        self.assertEquals(x._gw, 'spam')
        self.assertEquals(x._name, 'eggs')
        # auto_execute defaults to True.
        self.assertEquals(x._auto_execute, True)
        x = client.ServiceProxy('hello', 'world', True)
        self.assertEquals(x._gw, 'hello')
        self.assertEquals(x._name, 'world')
        self.assertEquals(x._auto_execute, True)
        x = client.ServiceProxy(ord, chr, False)
        self.assertEquals(x._gw, ord)
        self.assertEquals(x._name, chr)
        self.assertEquals(x._auto_execute, False)
    def test_getattr(self):
        # Arbitrary attribute access yields a method proxy of that name.
        x = client.ServiceProxy(None, None)
        y = x.spam
        self.assertTrue(isinstance(y, client.ServiceMethodProxy))
        self.assertEquals(y.name, 'spam')
    def test_call(self):
        # auto_execute=True: calling a method proxy must add the request
        # and immediately run it through execute_single().
        class DummyGateway(object):
            def __init__(self, tc):
                self.tc = tc
            def addRequest(self, method_proxy, *args):
                self.tc.assertEquals(method_proxy, self.method_proxy)
                self.tc.assertEquals(args, self.args)
                self.request = {'method_proxy': method_proxy, 'args': args}
                return self.request
            def execute_single(self, request):
                self.tc.assertEquals(request, self.request)
                return pyamf.ASObject(body=None)
        gw = DummyGateway(self)
        x = client.ServiceProxy(gw, 'test')
        y = x.spam
        gw.method_proxy = y
        gw.args = ()
        y()
        gw.args = (1, 2, 3)
        y(1, 2, 3)
    def test_service_call(self):
        # Calling the service proxy itself produces a method proxy with
        # name=None bound to this service.
        class DummyGateway(object):
            def __init__(self, tc):
                self.tc = tc
            def addRequest(self, method_proxy, *args):
                self.tc.assertEquals(method_proxy.service, self.x)
                self.tc.assertEquals(method_proxy.name, None)
                return pyamf.ASObject(method_proxy=method_proxy, args=args)
            def execute_single(self, request):
                return pyamf.ASObject(body=None)
        gw = DummyGateway(self)
        x = client.ServiceProxy(gw, 'test')
        gw.x = x
        x()
    def test_pending_call(self):
        # auto_execute=False: the call only queues the request and returns
        # the very same wrapper object produced by addRequest().
        class DummyGateway(object):
            def __init__(self, tc):
                self.tc = tc
            def addRequest(self, method_proxy, *args):
                self.tc.assertEquals(method_proxy, self.method_proxy)
                self.tc.assertEquals(args, self.args)
                self.request = pyamf.ASObject(method_proxy=method_proxy, args=args)
                return self.request
        gw = DummyGateway(self)
        x = client.ServiceProxy(gw, 'test', False)
        y = x.eggs
        gw.method_proxy = y
        gw.args = ()
        res = y()
        self.assertEquals(id(gw.request), id(res))
    def test_str(self):
        x = client.ServiceProxy(None, 'test')
        self.assertEquals(str(x), 'test')
class RequestWrapperTestCase(unittest.TestCase):
    """Tests for client.RequestWrapper."""

    def test_create(self):
        wrapper = client.RequestWrapper(1, 2, 3, 4)
        self.assertEquals(wrapper.gw, 1)
        self.assertEquals(wrapper.id, 2)
        self.assertEquals(wrapper.service, 3)
        self.assertEquals(wrapper.args, (4,))

    def test_str(self):
        wrapper = client.RequestWrapper(None, '/1', None, None)
        self.assertEquals(str(wrapper), '/1')

    def test_null_response(self):
        # Without a response, accessing .result must raise.
        wrapper = client.RequestWrapper(None, None, None, None)
        self.assertRaises(AttributeError, getattr, wrapper, 'result')

    def test_set_response(self):
        wrapper = client.RequestWrapper(None, None, None, None)
        reply = pyamf.ASObject(body='spam.eggs')
        wrapper.setResponse(reply)
        self.assertEquals(wrapper.response, reply)
        # .result exposes the response body.
        self.assertEquals(wrapper.result, 'spam.eggs')
class DummyResponse(object):
    """Canned HTTP response mimicking the httplib response interface."""
    tc = None
    def __init__(self, status, body, headers=()):
        self.status = status
        self.body = body
        self.headers = headers
    def getheader(self, header):
        """Return the value of the named header, or None when absent."""
        if header not in self.headers:
            return None
        return self.headers[header]
    def read(self, x=None):
        """Return the whole body, or only its first ``x`` characters."""
        return self.body if x is None else self.body[:x]
class DummyConnection(object):
    """Stand-in HTTP connection: asserts the outgoing request matches the
    expectations set by the test, then replays a canned response."""
    # Set by the test before use:
    tc = None              # the TestCase doing the asserting
    expected_value = None  # expected encoded AMF request body
    expected_url = None    # expected request path
    response = None        # DummyResponse to hand back
    def request(self, method, url, value):
        # The client must POST the exact envelope to the exact path.
        self.tc.assertEquals(method, 'POST')
        self.tc.assertEquals(url, self.expected_url)
        self.tc.assertEquals(value, self.expected_value)
    def getresponse(self):
        return self.response
class RemotingServiceTestCase(unittest.TestCase):
    """Tests for client.RemotingService: URL handling, request queueing,
    AMF envelope construction and response processing. The hex literals
    below are exact AMF0 wire encodings and must not be altered."""
    def test_create(self):
        self.assertRaises(TypeError, client.RemotingService)
        x = client.RemotingService('http://example.org')
        self.assertEquals(x.url, ('http', 'example.org', '', '', '', ''))
        # amf version
        x = client.RemotingService('http://example.org', pyamf.AMF3)
        self.assertEquals(x.amf_version, pyamf.AMF3)
        # client type
        x = client.RemotingService('http://example.org', pyamf.AMF3,
            pyamf.ClientTypes.FlashCom)
        self.assertEquals(x.client_type, pyamf.ClientTypes.FlashCom)
    def test_schemes(self):
        # http defaults to port 80, https to 443; anything else is invalid.
        x = client.RemotingService('http://example.org')
        self.assertEquals(x.connection.port, 80)
        x = client.RemotingService('https://example.org')
        self.assertEquals(x.connection.port, 443)
        self.assertRaises(ValueError, client.RemotingService,
            'ftp://example.org')
    def test_port(self):
        x = client.RemotingService('http://example.org:8080')
        self.assertEquals(x.connection.port, 8080)
    def test_get_service(self):
        x = client.RemotingService('http://example.org')
        y = x.getService('spam')
        self.assertTrue(isinstance(y, client.ServiceProxy))
        self.assertEquals(y._name, 'spam')
        self.assertEquals(y._gw, x)
        self.assertRaises(TypeError, x.getService, 1)
    def test_add_request(self):
        # Request ids are allocated sequentially as '/1', '/2', ...
        gw = client.RemotingService('http://spameggs.net')
        self.assertEquals(gw.request_number, 1)
        self.assertEquals(gw.requests, [])
        service = gw.getService('baz')
        wrapper = gw.addRequest(service, 1, 2, 3)
        self.assertEquals(gw.requests, [wrapper])
        self.assertEquals(wrapper.gw, gw)
        self.assertEquals(gw.request_number, 2)
        self.assertEquals(wrapper.id, '/1')
        self.assertEquals(wrapper.service, service)
        self.assertEquals(wrapper.args, (1, 2, 3))
        # add 1 arg
        wrapper2 = gw.addRequest(service, None)
        self.assertEquals(gw.requests, [wrapper, wrapper2])
        self.assertEquals(wrapper2.gw, gw)
        self.assertEquals(gw.request_number, 3)
        self.assertEquals(wrapper2.id, '/2')
        self.assertEquals(wrapper2.service, service)
        self.assertEquals(wrapper2.args, (None,))
        # add no args
        wrapper3 = gw.addRequest(service)
        self.assertEquals(gw.requests, [wrapper, wrapper2, wrapper3])
        self.assertEquals(wrapper3.gw, gw)
        self.assertEquals(gw.request_number, 4)
        self.assertEquals(wrapper3.id, '/3')
        self.assertEquals(wrapper3.service, service)
        self.assertEquals(wrapper3.args, tuple())
    def test_remove_request(self):
        # Requests can be removed by wrapper or by (service, *args);
        # removing an unknown request raises LookupError.
        gw = client.RemotingService('http://spameggs.net')
        self.assertEquals(gw.requests, [])
        service = gw.getService('baz')
        wrapper = gw.addRequest(service, 1, 2, 3)
        self.assertEquals(gw.requests, [wrapper])
        gw.removeRequest(wrapper)
        self.assertEquals(gw.requests, [])
        wrapper = gw.addRequest(service, 1, 2, 3)
        self.assertEquals(gw.requests, [wrapper])
        gw.removeRequest(service, 1, 2, 3)
        self.assertEquals(gw.requests, [])
        self.assertRaises(LookupError, gw.removeRequest, service, 1, 2, 3)
    def test_get_request(self):
        # Lookup works by str(wrapper), literal id string, or wrapper.id.
        gw = client.RemotingService('http://spameggs.net')
        service = gw.getService('baz')
        wrapper = gw.addRequest(service, 1, 2, 3)
        wrapper2 = gw.getRequest(str(wrapper))
        self.assertEquals(wrapper, wrapper2)
        wrapper2 = gw.getRequest('/1')
        self.assertEquals(wrapper, wrapper2)
        wrapper2 = gw.getRequest(wrapper.id)
        self.assertEquals(wrapper, wrapper2)
    def test_get_amf_request(self):
        gw = client.RemotingService('http://example.org', pyamf.AMF3,
            pyamf.ClientTypes.FlashCom)
        service = gw.getService('baz')
        method_proxy = service.gak
        wrapper = gw.addRequest(method_proxy, 1, 2, 3)
        envelope = gw.getAMFRequest([wrapper])
        self.assertEquals(envelope.amfVersion, pyamf.AMF3)
        self.assertEquals(envelope.clientType, pyamf.ClientTypes.FlashCom)
        self.assertEquals(envelope.keys(), ['/1'])
        request = envelope['/1']
        self.assertEquals(request.target, 'baz.gak')
        self.assertEquals(request.body, [1, 2, 3])
        # Building from gw.requests must give the same envelope.
        envelope2 = gw.getAMFRequest(gw.requests)
        self.assertEquals(envelope2.amfVersion, pyamf.AMF3)
        self.assertEquals(envelope2.clientType, pyamf.ClientTypes.FlashCom)
        self.assertEquals(envelope2.keys(), ['/1'])
        request = envelope2['/1']
        self.assertEquals(request.target, 'baz.gak')
        self.assertEquals(request.body, [1, 2, 3])
    def test_execute_single(self):
        gw = client.RemotingService('http://example.org/x/y/z')
        dc = DummyConnection()
        gw.connection = dc
        dc.tc = self
        service = gw.getService('baz', auto_execute=False)
        wrapper = service.gak()
        response = DummyResponse(200, '\x00\x00\x00\x00\x00\x01\x00\x0b/1/onRe'
            'sult\x00\x04null\x00\x00\x00\x00\x00\x02\x00\x05hello', {
            'Content-Type': 'application/x-amf', 'Content-Length': 50})
        response.tc = self
        dc.expected_url = '/x/y/z'
        dc.expected_value = '\x00\x00\x00\x00\x00\x01\x00\x07baz.gak\x00' + \
            '\x02/1\x00\x00\x00\x00\x0a\x00\x00\x00\x00'
        dc.response = response
        gw.execute_single(wrapper)
        # The executed request is removed from the queue.
        self.assertEquals(gw.requests, [])
        # A second call gets the next request id ('/2').
        wrapper = service.gak()
        response = DummyResponse(200, '\x00\x00\x00\x00\x00\x01\x00\x0b/2/onRe'
            'sult\x00\x04null\x00\x00\x00\x00\x00\x02\x00\x05hello', {
            'Content-Type': 'application/x-amf'})
        response.tc = self
        dc.expected_url = '/x/y/z'
        dc.expected_value = '\x00\x00\x00\x00\x00\x01\x00\x07baz.gak\x00' + \
            '\x02/2\x00\x00\x00\x00\n\x00\x00\x00\x00'
        dc.response = response
        gw.execute_single(wrapper)
    def test_execute(self):
        # Two queued requests go out in a single two-body AMF envelope.
        gw = client.RemotingService('http://example.org/x/y/z')
        dc = DummyConnection()
        gw.connection = dc
        dc.tc = self
        baz = gw.getService('baz', auto_execute=False)
        spam = gw.getService('spam', auto_execute=False)
        wrapper = baz.gak()
        wrapper2 = spam.eggs()
        response = DummyResponse(200, '\x00\x00\x00\x00\x00\x02\x00\x0b/1/onRe'
            'sult\x00\x04null\x00\x00\x00\x00\x00\x02\x00\x05hello\x00\x0b/2/o'
            'nResult\x00\x04null\x00\x00\x00\x00\x00\x02\x00\x05hello', {
            'Content-Type': 'application/x-amf'})
        response.tc = self
        dc.expected_url = '/x/y/z'
        dc.expected_value = '\x00\x00\x00\x00\x00\x02\x00\x09spam.eggs\x00' + \
            '\x02/2\x00\x00\x00\x00\x0a\x00\x00\x00\x00' + \
            '\x00\x07baz.gak\x00\x02/1\x00\x00\x00\x00\x0a\x00\x00\x00\x00'
        dc.response = response
        gw.execute()
        self.assertEquals(gw.requests, [])
    def test_get_response(self):
        # Non-200 statuses and wrong content types raise RemotingError.
        gw = client.RemotingService('http://example.org/amf-gateway')
        dc = DummyConnection()
        gw.connection = dc
        response = DummyResponse(200, '\x00\x00\x00\x00\x00\x00', {
            'Content-Type': 'application/x-amf'})
        dc.response = response
        gw._getResponse()
        response = DummyResponse(404, '', {})
        dc.response = response
        self.assertRaises(remoting.RemotingError, gw._getResponse)
        # bad content type
        response = DummyResponse(200, '\x00\x00\x00\x00\x00\x00',
            {'Content-Type': 'text/html'})
        dc.response = response
        self.assertRaises(remoting.RemotingError, gw._getResponse)
    def test_credentials(self):
        # setCredentials() stores a Credentials header that is copied
        # into every outgoing envelope.
        gw = client.RemotingService('http://example.org/amf-gateway')
        self.assertFalse('Credentials' in gw.headers)
        gw.setCredentials('spam', 'eggs')
        self.assertTrue('Credentials' in gw.headers)
        self.assertEquals(gw.headers['Credentials'],
            {'userid' : u'spam', 'password': u'eggs'})
        envelope = gw.getAMFRequest([])
        self.assertTrue('Credentials' in envelope.headers)
        cred = envelope.headers['Credentials']
        self.assertEquals(envelope.headers['Credentials'], gw.headers['Credentials'])
    def test_append_url_header(self):
        # An AppendToGatewayUrl response header extends the gateway url.
        gw = client.RemotingService('http://example.org/amf-gateway')
        dc = DummyConnection()
        gw.connection = dc
        response = DummyResponse(200, '\x00\x00\x00\x01\x00\x12AppendToGatewayUrl'
            '\x01\x00\x00\x00\x00\x02\x00\x05hello\x00\x00', {
            'Content-Type': 'application/x-amf'})
        dc.response = response
        response = gw._getResponse()
        self.assertEquals(gw.original_url, 'http://example.org/amf-gatewayhello')
    def test_replace_url_header(self):
        # A ReplaceGatewayUrl response header swaps the gateway url.
        gw = client.RemotingService('http://example.org/amf-gateway')
        dc = DummyConnection()
        gw.connection = dc
        response = DummyResponse(200, '\x00\x00\x00\x01\x00\x11ReplaceGatewayUrl'
            '\x01\x00\x00\x00\x00\x02\x00\x10http://spam.eggs\x00\x00', {
            'Content-Type': 'application/x-amf'})
        dc.response = response
        response = gw._getResponse()
        self.assertEquals(gw.original_url, 'http://spam.eggs')
def suite():
    """Collect every test case in this module into a single suite."""
    cases = (
        ServiceMethodProxyTestCase,
        ServiceProxyTestCase,
        RequestWrapperTestCase,
        RemotingServiceTestCase,
    )
    result = unittest.TestSuite()
    for case in cases:
        result.addTest(unittest.makeSuite(case))
    return result
if __name__ == '__main__':
    # Running this module directly executes suite() via unittest's CLI.
    unittest.main(defaultTest='suite')
| [
"raychorn@gmail.com"
] | raychorn@gmail.com |
19272d5e378571a0887ba59caf9a71b741c098a6 | 45129489b5556a70d3caa6020b0f035de8019a94 | /probn/04.03/24.py | 68ac6d9678cb2e0dd08b41a05723b6f8e1d864cd | [] | no_license | trofik00777/EgeInformatics | 83d853b1e8fd1d1a11a9d1f06d809f31e6f986c0 | 60f2587a08d49ff696f50b68fe790e213c710b10 | refs/heads/main | 2023-06-06T13:27:34.627915 | 2021-06-23T20:15:28 | 2021-06-23T20:15:28 | 362,217,172 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | s = input()
s = s.replace("C", "D")
print(s)
a = [len(i) for i in s.split("D")]
print(max(a))
| [
"noreply@github.com"
] | trofik00777.noreply@github.com |
0d4b8f8cacf0f2d8d6475dca0953ff0640291571 | 79c67ec1a5bececc030c222d7469e73c0dc775eb | /life/migrations/0004_auto_20151017_1106.py | 65632b5077c0f0249461420b9d73d05740b1ea54 | [] | no_license | evz/awildlife | 64b29ddcc6644514cd6536b5c1f67dca7d5114db | 39de34006ece41119efef006a73974ce020ae360 | refs/heads/master | 2021-01-10T11:50:59.810504 | 2015-11-07T18:50:57 | 2015-11-07T18:50:57 | 44,484,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add nullable image_height/image_width integer columns to life.Event
    so the stored image's dimensions can be kept alongside it."""
    dependencies = [
        ('life', '0003_event_image'),
    ]
    operations = [
        # Both fields are nullable so existing rows migrate without a default.
        migrations.AddField(
            model_name='event',
            name='image_height',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='event',
            name='image_width',
            field=models.IntegerField(null=True),
        ),
    ]
| [
"eric.vanzanten@gmail.com"
] | eric.vanzanten@gmail.com |
e103327d601a8b3a4868d771c2abc2d13e759ea2 | 74d664fe1785119a35def3a2a297f0babe912048 | /modules/environment.py | d0a8339d80e2db96bf541dedb74863a4cad68711 | [] | no_license | zhong1234/chapter7 | cc3b15e7585fd23c026acbda05419d8c8d6c1030 | 4d117ebf5a8a2f49a99443b9df6dad299ae31337 | refs/heads/master | 2018-07-11T19:51:00.851986 | 2018-06-01T08:50:06 | 2018-06-01T08:50:06 | 112,837,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | import os
def run(**args):
    """Print a marker line and return the process environment as a string.

    Fixes over the original: the def line was missing its colon (a
    SyntaxError) and the body used the Python 2 print statement; both
    are corrected here. ``args`` is accepted (and ignored) to keep the
    keyword-argument interface of the module's plugin convention.
    """
    print("in environment module.")
    return str(os.environ)
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
d66169dc3f67e1581b590b9b6fb6cc8c4f62d277 | b05761d771bb5a85d39d370c649567c1ff3eb089 | /venv/lib/python3.10/site-packages/Cryptodome/PublicKey/RSA.py | 1db68bb441ec67e7ece371601847eb838a092739 | [] | no_license | JawshyJ/Coding_Practice | 88c49cab955eab04609ec1003b6b8c20f103fc06 | eb6b229d41aa49b1545af2120e6bee8e982adb41 | refs/heads/master | 2023-02-19T10:18:04.818542 | 2023-02-06T21:22:58 | 2023-02-06T21:22:58 | 247,788,631 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | /home/runner/.cache/pip/pool/98/83/40/f0f4f056ecdf2bdda87416dd1290a31161e7d6ebcbd56f24e292240f78 | [
"37465112+JawshyJ@users.noreply.github.com"
] | 37465112+JawshyJ@users.noreply.github.com |
c09e9222d65ec6663cd9d3ff0e6ef114d5147ac7 | 9ba0771dcef17191483798b4d18f78dbb7b4c27e | /camacq/plugins/leica/helper.py | d3f6d5c7f229b25f26bc63a1e8c5f792be4cd92a | [
"Apache-2.0"
] | permissive | CellProfiling/cam_acq | fd035831bbdd7d6e6bfc2e85fea1b838829eacb8 | 8cf99cb738353c052b93e7ff1dbd5951f65808c2 | refs/heads/master | 2023-08-31T01:46:58.390960 | 2023-08-28T07:12:05 | 2023-08-28T07:12:05 | 35,374,025 | 5 | 1 | Apache-2.0 | 2023-09-13T21:44:38 | 2015-05-10T14:23:34 | Python | UTF-8 | Python | false | false | 1,929 | py | """Helper functions for Leica api."""
from pathlib import Path, PureWindowsPath
from leicaimage import experiment
def find_image_path(relpath, root):
    """Resolve a Windows-style relative path from the server against root.

    The server reports backslash-separated paths; each component is
    re-joined below ``root`` using the local path conventions.

    Parameters
    ----------
    relpath : str
        A relative path to the image.
    root : str
        Path to directory where path should start.

    Returns
    -------
    str
        Return path to image.
    """
    result = Path(root)
    for part in PureWindowsPath(relpath).parts:
        result = result / part
    return str(result)
def get_field(path):
    """Get path to field from image path.

    Parameters
    ----------
    path : string
        Path to image.

    Returns
    -------
    str
        Return path to field directory of image.
    """
    image = experiment.Experiment(path)  # pylint: disable=no-member
    return image.dirname
def get_well(path):
    """Get path to well from image path.

    Parameters
    ----------
    path : string
        Path to image.

    Returns
    -------
    str
        Return path to well directory of image.
    """
    # The well directory is the parent of the image's field directory.
    field_dir = get_field(path)
    return experiment.Experiment(field_dir).dirname  # pylint: disable=no-member
def get_imgs(path, img_type="tif", search=""):
    """Get all images below path.

    Parameters
    ----------
    path : string
        Path to directory where to search for images.
    img_type : string
        A string representing the image file type extension.
    search : string
        A glob pattern string to use in the search.

    Returns
    -------
    list
        Return paths of all images found.
    """
    root = Path(path)
    if search:
        search = f"{search}*"
    patterns = ["slide", "chamber--", "field--", "image--"]
    # Only descend through the hierarchy levels that `path` does not
    # already include.
    levels = [f"{pattern}*" for pattern in patterns if pattern not in path]
    # Join as a plain string: the original accumulated a pathlib.Path,
    # and str(Path("")) == "." injected a spurious "." prefix into the
    # glob whenever every pattern already occurred in `path`, so nothing
    # matched at the image level.
    prefix = "/".join(levels)
    return list(root.glob(f"{prefix}{search}.{img_type}"))
| [
"noreply@github.com"
] | CellProfiling.noreply@github.com |
9e6e6ebc0d8bceed5f15987173cad13b3f0b7933 | 7d1d49560328f9b5588197abf2c623c304c0d95a | /src/datasets/mpii.py | f844b380ffa8e5ac8b779a29186730dd9c064817 | [] | no_license | peternara/adversarial-pose-pytorch | 1dc6ed10281844c59a827b77505f2ab55d906c16 | 12570ea03f3f2e8ecca7208997c99eb88da47824 | refs/heads/master | 2021-09-05T05:01:01.110501 | 2018-01-24T08:16:29 | 2018-01-24T08:16:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,590 | py | import os
import numpy as np
import h5py
import skimage as skim
import skimage.io as skio
import skimage.transform as sktf
import torch
import torch.utils.data
from .utils import rand, rnd, crop, fliplr_coords, transform, create_label
class MPII_Dataset(torch.utils.data.Dataset):
    """MPII human-pose dataset with train-time augmentation.

    __getitem__ returns (image, heatmaps[, small_image][, meta]); images
    are CHW float32 and heatmaps are one Gaussian per joint rendered at
    out_res. Annotations are loaded from {data_root}/mpii/{split}.h5.
    """
    def __init__(self, data_root, split,
                 inp_res=256, out_res=64, sigma=1,
                 scale_factor=0.25, rot_factor=30, return_meta=False, small_image=True):
        # inp_res/out_res: network input and heatmap resolutions.
        # sigma: Gaussian radius used when rendering joint heatmaps.
        # scale_factor/rot_factor: augmentation ranges (train split only).
        self.data_root = data_root
        self.split = split
        self.inp_res = inp_res
        self.out_res = out_res
        self.sigma = sigma
        self.scale_factor = scale_factor
        self.rot_factor = rot_factor
        self.return_meta = return_meta
        self.small_image = small_image
        self.nJoints = 16
        self.accIdxs = [0, 1, 2, 3, 4, 5, 10, 11, 14, 15] # joint idxs for accuracy calculation
        self.flipRef = [[0, 5], [1, 4], [2, 3], # noqa
                        [10, 15], [11, 14], [12, 13]]
        self.annot = {}
        tags = ['imgname', 'part', 'center', 'scale']
        f = h5py.File('{}/mpii/{}.h5'.format(data_root, split), 'r')
        for tag in tags:
            self.annot[tag] = np.asarray(f[tag]).copy()
        f.close()
    def _getPartInfo(self, index):
        """Return (joint coords, crop center, crop scale) for one sample."""
        # get a COPY
        pts = self.annot['part'][index].copy()
        c = self.annot['center'][index].copy()
        s = self.annot['scale'][index].copy()
        # Small adjustment so cropping is less likely to take feet out
        c[1] = c[1] + 15 * s
        s = s * 1.25
        return pts, c, s
    def _loadImage(self, index):
        """Load the image for `index` as a float array in [0, 1]."""
        impath = os.path.join(self.data_root, 'mpii/images', self.annot['imgname'][index].decode('utf-8'))
        im = skim.img_as_float(skio.imread(impath))
        return im
    def __getitem__(self, index):
        # NOTE: the order of the rand()/rnd() calls below defines the
        # augmentation stream; do not reorder.
        im = self._loadImage(index)
        pts, c, s = self._getPartInfo(index)
        r = 0
        if self.split == 'train':
            # scale and rotation
            s = s * (2 ** rnd(self.scale_factor))
            r = 0 if rand() < 0.6 else rnd(self.rot_factor)
            # flip LR
            if rand() < 0.5:
                im = im[:, ::-1, :]
                pts = fliplr_coords(pts, width=im.shape[1], matchedParts=self.flipRef)
                c[0] = im.shape[1] - c[0] # flip center point also
            # Color jitter
            im = np.clip(im * np.random.uniform(0.6, 1.4, size=3), 0, 1)
        # Prepare image
        im = crop(im, c, s, r, self.inp_res)
        if im.ndim == 2:
            im = np.tile(im, [1, 1, 3])
        if self.small_image:
            # small size image
            im_s = sktf.resize(im, [self.out_res, self.out_res], preserve_range=True)
        # (h, w, c) to (c, h, w)
        im = np.transpose(im, [2, 0, 1])
        if self.small_image:
            im_s = np.transpose(im_s, [2, 0, 1])
        # Prepare label
        labels = np.zeros((self.nJoints, self.out_res, self.out_res))
        new_pts = transform(pts.T, c, s, r, self.out_res).T
        for i in range(self.nJoints):
            # Joints with non-positive x are treated as unannotated and
            # left as all-zero heatmaps.
            if pts[i, 0] > 0:
                labels[i] = create_label(
                    labels.shape[1:],
                    new_pts[i],
                    self.sigma)
        ret_list = [im.astype(np.float32), labels.astype(np.float32)]
        if self.small_image:
            ret_list.append(im_s)
        if self.return_meta:
            meta = [pts, c, s, r]
            ret_list.append(meta)
        return tuple(ret_list)
    def __len__(self):
        return len(self.annot['imgname'])
| [
"roytseng.tw@gmail.com"
] | roytseng.tw@gmail.com |
55617353af8ff1014f3016dead5a6fc37ed86a7b | 4e30d990963870478ed248567e432795f519e1cc | /ciscoisesdk/models/validators/v3_1_1/jsd_a518d5655f69e8687c9c98740c6.py | f4a0c1ac6389f96f6b686e10bbb8edf3b73f4606 | [
"MIT"
] | permissive | CiscoISE/ciscoisesdk | 84074a57bf1042a735e3fc6eb7876555150d2b51 | f468c54998ec1ad85435ea28988922f0573bfee8 | refs/heads/main | 2023-09-04T23:56:32.232035 | 2023-08-25T17:31:49 | 2023-08-25T17:31:49 | 365,359,531 | 48 | 9 | MIT | 2023-08-25T17:31:51 | 2021-05-07T21:43:52 | Python | UTF-8 | Python | false | false | 8,194 | py | # -*- coding: utf-8 -*-
"""Identity Services Engine updateNetworkAccessTimeConditionById data model.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import json
from builtins import *
import fastjsonschema
from ciscoisesdk.exceptions import MalformedRequest
class JSONSchemaValidatorA518D5655F69E8687C9C98740C6(object):
"""updateNetworkAccessTimeConditionById request schema definition."""
def __init__(self):
super(JSONSchemaValidatorA518D5655F69E8687C9C98740C6, self).__init__()
self._validator = fastjsonschema.compile(json.loads(
'''{
"properties": {
"attributeName": {
"type": "string"
},
"attributeValue": {
"type": "string"
},
"children": {
"items": {
"properties": {
"conditionType": {
"enum": [
"ConditionAndBlock",
"ConditionAttributes",
"ConditionOrBlock",
"ConditionReference",
"LibraryConditionAndBlock",
"LibraryConditionAttributes",
"LibraryConditionOrBlock",
"TimeAndDateCondition"
],
"type": "string"
},
"isNegate": {
"type": "boolean"
},
"link": {
"properties": {
"href": {
"type": "string"
},
"rel": {
"enum": [
"next",
"previous",
"self",
"status"
],
"type": "string"
},
"type": {
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
},
"type": "array"
},
"conditionType": {
"enum": [
"ConditionAndBlock",
"ConditionAttributes",
"ConditionOrBlock",
"ConditionReference",
"LibraryConditionAndBlock",
"LibraryConditionAttributes",
"LibraryConditionOrBlock",
"TimeAndDateCondition"
],
"type": "string"
},
"datesRange": {
"properties": {
"endDate": {
"type": "string"
},
"startDate": {
"type": "string"
}
},
"required": [
"endDate",
"startDate"
],
"type": "object"
},
"datesRangeException": {
"properties": {
"endDate": {
"type": "string"
},
"startDate": {
"type": "string"
}
},
"required": [
"endDate",
"startDate"
],
"type": "object"
},
"description":
{
"type": "string"
},
"dictionaryName": {
"type": "string"
},
"dictionaryValue": {
"type": "string"
},
"hoursRange": {
"properties": {
"endTime": {
"type": "string"
},
"startTime": {
"type": "string"
}
},
"required": [
"endTime",
"startTime"
],
"type": "object"
},
"hoursRangeException": {
"properties": {
"endTime": {
"type": "string"
},
"startTime": {
"type": "string"
}
},
"required": [
"endTime",
"startTime"
],
"type": "object"
},
"id": {
"type": "string"
},
"isNegate": {
"type": "boolean"
},
"link": {
"properties": {
"href": {
"type": "string"
},
"rel": {
"enum": [
"next",
"previous",
"self",
"status"
],
"type": "string"
},
"type": {
"type": "string"
}
},
"required": [
"href"
],
"type": "object"
},
"name": {
"type": "string"
},
"operator": {
"enum": [
"contains",
"endsWith",
"equals",
"greaterOrEquals",
"greaterThan",
"in",
"ipEquals",
"ipGreaterThan",
"ipLessThan",
"ipNotEquals",
"lessOrEquals",
"lessThan",
"matches",
"notContains",
"notEndsWith",
"notEquals",
"notIn",
"notStartsWith",
"startsWith"
],
"type": "string"
},
"weekDays": {
"items": {
"enum": [
"Friday",
"Monday",
"Saturday",
"Sunday",
"Thursday",
"Tuesday",
"Wednesday"
],
"type": "string"
},
"type": "array"
},
"weekDaysException": {
"items": {
"enum": [
"Friday",
"Monday",
"Saturday",
"Sunday",
"Thursday",
"Tuesday",
"Wednesday"
],
"type": "string"
},
"type": "array"
}
},
"type": "object"
}'''.replace("\n" + ' ' * 16, '')
))
def validate(self, request):
try:
self._validator(request)
except fastjsonschema.exceptions.JsonSchemaException as e:
raise MalformedRequest(
'{} is invalid. Reason: {}'.format(request, e.message)
)
| [
"wastorga@altus.co.cr"
] | wastorga@altus.co.cr |
a17fb5bb9c1a3b110d7a6567ee80dd59963ffcfe | 9795e787a54d15f2f249a17b616fec3df67d4559 | /exception/custom_exceptions.py | 9fad21e41c70df88234ff44398cf49bea43a46cf | [] | no_license | gebbz03/PythonProject | 377b6ccf5eafa37dd157012ce499138370ba882f | c12f939cf194a4c541ee77e1f614ba9867ef7090 | refs/heads/master | 2020-04-02T22:16:11.082863 | 2018-10-30T05:49:22 | 2018-10-30T05:49:22 | 154,827,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | class VowelNotAccepted(Exception):
def __init__(self,message,status):
self.message=message
self.status=status
def check_chars(word):
for char in word:
if char.lower() in ['a','e','i','o','u']:
raise VowelNotAccepted('Vowel is not accepted',101)
return word
try:
print(check_chars("love"))
except Exception as e:
print("Error reason: ",e.message) | [
"gebb.freelancer@gmail.com"
] | gebb.freelancer@gmail.com |
8e9b7070944e7a936111d51786b3c0668cbafaea | 7ae44f6975561ff5542cd369dcd04d53093db887 | /Data Structures and Algorithms in Python/5_OPPS_2/predict_output_6.py | 6321e1342fd9c1d7d57d8c6ca335429b5fb3a179 | [] | no_license | ashisharora24/learning_tutorials_practice | 89208a77ad162265c6573ca4559ebf6f4a6f8f18 | 57f461908d0c4d58d831ec375c428179fa69cb3f | refs/heads/master | 2020-05-21T05:32:26.397725 | 2019-07-23T10:36:06 | 2019-07-23T10:36:06 | 185,923,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | class Circle(object):
def __str__(self):
return "This is a Circle Class"
c = Circle()
print(c)
| [
"ashisharora24@gmail.com"
] | ashisharora24@gmail.com |
91fe27a11835961dfd6dc4a2e9e411a0416da877 | e56214188faae8ebfb36a463e34fc8324935b3c2 | /test/test_boot_precision_policy_list.py | c36229afebd412dbf2266ec56b1e059db74964dc | [
"Apache-2.0"
] | permissive | CiscoUcs/intersight-python | 866d6c63e0cb8c33440771efd93541d679bb1ecc | a92fccb1c8df4332ba1f05a0e784efbb4f2efdc4 | refs/heads/master | 2021-11-07T12:54:41.888973 | 2021-10-25T16:15:50 | 2021-10-25T16:15:50 | 115,440,875 | 25 | 18 | Apache-2.0 | 2020-03-02T16:19:49 | 2017-12-26T17:14:03 | Python | UTF-8 | Python | false | false | 1,957 | py | # coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.boot_precision_policy_list import BootPrecisionPolicyList # noqa: E501
from intersight.rest import ApiException
class TestBootPrecisionPolicyList(unittest.TestCase):
"""BootPrecisionPolicyList unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testBootPrecisionPolicyList(self):
"""Test BootPrecisionPolicyList"""
# FIXME: construct object with mandatory attributes with example values
# model = intersight.models.boot_precision_policy_list.BootPrecisionPolicyList() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"ucs-build@github.com"
] | ucs-build@github.com |
4d35d4841261340af3b86fda5221b5d30c179c0b | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/learnbgame_hops/operators/misc/mirrormirror.py | 9a0a837b40c0ac02de7aea1f201f98a778a4bb09 | [] | no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,491 | py | import bpy
import mathutils
from ... preferences import get_preferences
from ... utils.context import ExecutionContext
# Do the Basic Union, Difference and Intersection operations
def operation(context, _operation, x, y, z, zx, zy, zz, direction, used_axis):
object = bpy.context.active_object
object.select_set(state=True)
if(len(bpy.context.selected_objects)) == 1: # one is selected , add mirror mod immediately to that object#
if object.type == "MESH":
if get_preferences().Hops_mirror_modes == "MODIFIER":
with ExecutionContext(mode="OBJECT", active_object=object):
mirror_mod = None
for modifier in object.modifiers:
if modifier.name == "hops_mirror":
mirror_mod = modifier
if mirror_mod is None:
mirror_mod = object.modifiers.new("hops_mirror", "MIRROR")
mirror_mod.use_clip = True
mirror_mod.use_axis[0] = False
mirror_mod.use_axis[1] = False
mirror_mod.use_axis[2] = False
elif get_preferences().Hops_mirror_modes == "BISECT":
with ExecutionContext(mode="OBJECT", active_object=object):
if get_preferences().Hops_mirror_direction == "+":
clear_inner = True
clear_outer = False
elif get_preferences().Hops_mirror_direction == "-":
clear_inner = False
clear_outer = True
bpy.ops.object.mode_set(mode='EDIT')
if object.hops.status == "CSTEP":
bpy.ops.mesh.reveal()
bpy.ops.mesh.select_all(action='SELECT')
#bisection happens
bpy.ops.mesh.bisect(plane_co=(x, y, z), plane_no=(zx, zy, zz), clear_inner=clear_inner, clear_outer=clear_outer)
bpy.ops.mesh.select_all(action='DESELECT')
if object.hops.status == "CSTEP":
bpy.ops.mesh.select_all(action='TOGGLE')
bpy.ops.mesh.hide(unselected=False)
bpy.ops.object.mode_set(mode='OBJECT')
object = bpy.context.active_object
mirror_mod = None
for modifier in object.modifiers:
if modifier.name == "hops_mirror":
mirror_mod = modifier
if mirror_mod is None:
mirror_mod = object.modifiers.new("hops_mirror", "MIRROR")
mirror_mod.use_clip = True
mirror_mod.use_axis[0] = False
mirror_mod.use_axis[1] = False
mirror_mod.use_axis[2] = False
elif get_preferences().Hops_mirror_modes == "SYMMETRY":
with ExecutionContext(mode="EDIT", active_object=object):
bpy.ops.mesh.select_all(action='SELECT')
if get_preferences().Hops_Mir2_symmetrize_type == "Machin3" and "MESHmachine" in bpy.context.preferences.addons.keys():
bpy.ops.machin3.symmetrize(axis=direction.split("_")[1], direction=direction.split("_")[0])
else:
bpy.ops.mesh.symmetrize(direction=direction)
bpy.ops.mesh.select_all(action='DESELECT')
if get_preferences().Hops_mirror_modes in {"BISECT", "MODIFIER"}:
if _operation == "MIRROR_X":
mirror_mod.use_axis[0] = True
if get_preferences().Hops_mirror_modes == "MODIFIER":
mirror_mod.use_bisect_axis[0] = True
if get_preferences().Hops_mirror_direction == "-":
mirror_mod.use_bisect_flip_axis[0] = True
else:
mirror_mod.use_bisect_flip_axis[0] = False
mirror_mod.show_on_cage = True
elif _operation == "MIRROR_Y":
mirror_mod.use_axis[1] = True
if get_preferences().Hops_mirror_modes == "MODIFIER":
mirror_mod.use_bisect_axis[1] = True
if get_preferences().Hops_mirror_direction == "-":
mirror_mod.use_bisect_flip_axis[1] = True
else:
mirror_mod.use_bisect_flip_axis[1] = False
mirror_mod.show_on_cage = True
elif _operation == "MIRROR_Z":
mirror_mod.use_axis[2] = True
if get_preferences().Hops_mirror_modes == "MODIFIER":
mirror_mod.use_bisect_axis[2] = True
if get_preferences().Hops_mirror_direction == "-":
mirror_mod.use_bisect_flip_axis[2] = True
else:
mirror_mod.use_bisect_flip_axis[2] = False
mirror_mod.show_on_cage = True
else:
if get_preferences().Hops_mirror_modes_multi == "VIA_ACTIVE":
with ExecutionContext(mode="OBJECT", active_object=object):
mirror_ob = bpy.context.active_object # last ob selected
for obj in bpy.context.selected_objects:
if obj != mirror_ob:
if obj.type == "MESH":
mirror_ob.select_set(state=False) # pop object from sel_stack
object = obj
mirror_mod_multi = None
for modifier in object.modifiers:
if modifier.name == "hops_mirror_via_active":
mirror_mod_multi = modifier
if mirror_mod_multi is None:
mirror_mod_multi = object.modifiers.new("hops_mirror_via_active", "MIRROR")
mirror_mod_multi.use_axis[0] = False
mirror_mod_multi.use_axis[1] = False
mirror_mod_multi.use_axis[2] = False
mirror_mod_multi.use_clip = True
mirror_mod_multi.mirror_object = mirror_ob
mirror_mod_multi.use_mirror_u = True
if _operation == "MIRROR_X":
mirror_mod_multi.use_axis[0] = True
if get_preferences().Hops_mirror_direction == "-":
mirror_mod_multi.use_bisect_axis[0] = True
else:
mirror_mod_multi.use_bisect_axis[0] = False
elif _operation == "MIRROR_Y":
mirror_mod_multi.use_axis[1] = True
if get_preferences().Hops_mirror_direction == "-":
mirror_mod_multi.use_bisect_axis[1] = True
else:
mirror_mod_multi.use_bisect_axis[1] = False
elif _operation == "MIRROR_Z":
mirror_mod_multi.use_axis[2] = True
if get_preferences().Hops_mirror_direction == "-":
mirror_mod_multi.use_bisect_axis[2] = True
else:
mirror_mod_multi.use_bisect_axis[2] = False
elif get_preferences().Hops_mirror_modes_multi == "SYMMETRY":
selected = bpy.context.selected_objects
for obj in selected:
print("aa")
bpy.context.view_layer.objects.active = obj
with ExecutionContext(mode="EDIT", active_object=obj):
bpy.ops.mesh.select_all(action='SELECT')
bpy.ops.mesh.symmetrize(direction=direction)
bpy.ops.mesh.select_all(action='DESELECT')
# mirror_ob.select = 1
# object.select = 1
bpy.context.view_layer.objects.active = object
# ------------------- OPERATOR CLASSES ------------------------------
# Mirror Tool
class HOPS_OT_MirrorX(bpy.types.Operator):
bl_idname = "hops.mirror_mirror_x"
bl_label = "Mirror X"
bl_description = "Mirror On the X Axis"
bl_options = {"REGISTER", "UNDO"}
def draw(self, context):
layout = self.layout
layout.prop(get_preferences(), "Hops_mirror_direction")
layout.prop(get_preferences(), "Hops_mirror_modes")
@classmethod
def poll(cls, context):
# selected = context.selected_objects
object = context.active_object
if object is None: return False
if object.mode in {"OBJECT", "EDIT"}:
return True
def execute(self, context):
x, y, z = bpy.context.object.location
zx, zy, zz = bpy.context.object.rotation_euler
if get_preferences().Hops_mirror_direction == "+":
direction = "POSITIVE_X"
elif get_preferences().Hops_mirror_direction == "-":
direction = "NEGATIVE_X"
used_axis = "X"
vec = mathutils.Vector((1, 0, 0))
mat = mathutils.Matrix.Rotation(zx, 4, "X")
vec.rotate(mat)
mat = mathutils.Matrix.Rotation(zy, 4, "Y")
vec.rotate(mat)
mat = mathutils.Matrix.Rotation(zz, 4, "Z")
vec.rotate(mat)
nx = vec[0]
ny = vec[1]
nz = vec[2]
operation(context, "MIRROR_X", x, y, z, nx, ny, nz, direction, used_axis)
return {'FINISHED'}
class HOPS_OT_MirrorY(bpy.types.Operator):
bl_idname = "hops.mirror_mirror_y"
bl_label = "Mirror Y"
bl_description = "Mirror On the Y Axis"
bl_options = {"REGISTER", "UNDO"}
def draw(self, context):
layout = self.layout
layout.prop(get_preferences(), "Hops_mirror_direction")
layout.prop(get_preferences(), "Hops_mirror_modes")
@classmethod
def poll(cls, context):
# selected = context.selected_objects
object = context.active_object
if object is None: return False
if object.mode in {"OBJECT", "EDIT"}:
return True
def execute(self, context):
x, y, z = bpy.context.object.location
zx, zy, zz = bpy.context.object.rotation_euler
if get_preferences().Hops_mirror_direction == "+":
direction = "POSITIVE_Y"
elif get_preferences().Hops_mirror_direction == "-":
direction = "NEGATIVE_Y"
used_axis = "Y"
vec = mathutils.Vector((0, 1, 0))
mat = mathutils.Matrix.Rotation(zx, 4, "X")
vec.rotate(mat)
mat = mathutils.Matrix.Rotation(zy, 4, "Y")
vec.rotate(mat)
mat = mathutils.Matrix.Rotation(zz, 4, "Z")
vec.rotate(mat)
nx = vec[0]
ny = vec[1]
nz = vec[2]
operation(context, "MIRROR_Y", x, y, z, nx, ny, nz, direction, used_axis)
return {'FINISHED'}
class HOPS_OT_MirrorZ(bpy.types.Operator):
bl_idname = "hops.mirror_mirror_z"
bl_label = "Mirror Z"
bl_description = "Mirror On the Z Axis"
bl_options = {"REGISTER", "UNDO"}
def draw(self, context):
layout = self.layout
layout.prop(get_preferences(), "Hops_mirror_direction")
layout.prop(get_preferences(), "Hops_mirror_modes")
@classmethod
def poll(cls, context):
object = context.active_object
if object is None: return False
if object.mode in {"OBJECT", "EDIT"}:
return True
def execute(self, context):
x, y, z = bpy.context.object.location
zx, zy, zz = bpy.context.object.rotation_euler
if get_preferences().Hops_mirror_direction == "+":
direction = "POSITIVE_Z"
elif get_preferences().Hops_mirror_direction == "-":
direction = "NEGATIVE_Z"
used_axis = "Z"
vec = mathutils.Vector((0, 0, 1))
mat = mathutils.Matrix.Rotation(zx, 4, "X")
vec.rotate(mat)
mat = mathutils.Matrix.Rotation(zy, 4, "Y")
vec.rotate(mat)
mat = mathutils.Matrix.Rotation(zz, 4, "Z")
vec.rotate(mat)
nx = vec[0]
ny = vec[1]
nz = vec[2]
operation(context, "MIRROR_Z", x, y, z, nx, ny, nz, direction, used_axis)
return {'FINISHED'}
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
d09996202930b38b56df754c8e5dd034958f4031 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/pfnet_chainer/chainer-master/cupy/testing/__init__.py | 7399762e4c91f5d702050e4014d3363e1b55177d | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 1,934 | py | from cupy.testing import array
from cupy.testing import attr
from cupy.testing import helper
from cupy.testing import parameterized
assert_allclose = array.assert_allclose
assert_array_almost_equal = array.assert_array_almost_equal
assert_array_almost_equal_nulp = array.assert_array_almost_equal_nulp
assert_array_max_ulp = array.assert_array_max_ulp
assert_array_equal = array.assert_array_equal
assert_array_list_equal = array.assert_array_list_equal
assert_array_less = array.assert_array_less
numpy_cupy_allclose = helper.numpy_cupy_allclose
numpy_cupy_array_almost_equal = helper.numpy_cupy_array_almost_equal
numpy_cupy_array_almost_equal_nulp = \
helper.numpy_cupy_array_almost_equal_nulp
numpy_cupy_array_max_ulp = helper.numpy_cupy_array_max_ulp
numpy_cupy_array_equal = helper.numpy_cupy_array_equal
numpy_cupy_array_list_equal = helper.numpy_cupy_array_list_equal
numpy_cupy_array_less = helper.numpy_cupy_array_less
numpy_cupy_raises = helper.numpy_cupy_raises
for_dtypes = helper.for_dtypes
for_all_dtypes = helper.for_all_dtypes
for_float_dtypes = helper.for_float_dtypes
for_signed_dtypes = helper.for_signed_dtypes
for_unsigned_dtypes = helper.for_unsigned_dtypes
for_int_dtypes = helper.for_int_dtypes
for_dtypes_combination = helper.for_dtypes_combination
for_all_dtypes_combination = helper.for_all_dtypes_combination
for_signed_dtypes_combination = helper.for_signed_dtypes_combination
for_unsigned_dtypes_combination = helper.for_unsigned_dtypes_combination
for_int_dtypes_combination = helper.for_int_dtypes_combination
for_orders = helper.for_orders
for_CF_orders = helper.for_CF_orders
with_requires = helper.with_requires
parameterize = parameterized.parameterize
product = parameterized.product
shaped_arange = helper.shaped_arange
shaped_reverse_arange = helper.shaped_reverse_arange
shaped_random = helper.shaped_random
NumpyError = helper.NumpyError
gpu = attr.gpu
multi_gpu = attr.multi_gpu
| [
"659338505@qq.com"
] | 659338505@qq.com |
a015cd16852be7d4367b84a80f44d6eb6db18e83 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/third_party/google/cloud/pubsublite/cloudpubsub/internal/ack_set_tracker_impl.py | 3222994e53e33b5ba1cad9c761ad3e563cb45561 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 2,672 | py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import deque
from typing import Optional
from google.api_core.exceptions import FailedPrecondition
from google.cloud.pubsublite.cloudpubsub.internal.sorted_list import SortedList
from google.cloud.pubsublite.cloudpubsub.internal.ack_set_tracker import AckSetTracker
from google.cloud.pubsublite.internal.wire.committer import Committer
from google.cloud.pubsublite_v1 import Cursor
class AckSetTrackerImpl(AckSetTracker):
_committer: Committer
_receipts: "deque[int]"
_acks: SortedList[int]
def __init__(self, committer: Committer):
super().__init__()
self._committer = committer
self._receipts = deque()
self._acks = SortedList()
def track(self, offset: int):
if len(self._receipts) > 0:
last = self._receipts[0]
if last >= offset:
raise FailedPrecondition(
f"Tried to track message {offset} which is before last tracked message {last}."
)
self._receipts.append(offset)
def ack(self, offset: int):
self._acks.push(offset)
prefix_acked_offset: Optional[int] = None
while len(self._receipts) != 0 and not self._acks.empty():
receipt = self._receipts.popleft()
ack = self._acks.peek()
if receipt == ack:
prefix_acked_offset = receipt
self._acks.pop()
continue
self._receipts.appendleft(receipt)
break
if prefix_acked_offset is None:
return
# Convert from last acked to first unacked.
cursor = Cursor()
cursor._pb.offset = prefix_acked_offset + 1
self._committer.commit(cursor)
async def clear_and_commit(self):
self._receipts.clear()
self._acks = SortedList()
await self._committer.wait_until_empty()
async def __aenter__(self):
await self._committer.__aenter__()
async def __aexit__(self, exc_type, exc_value, traceback):
await self._committer.__aexit__(exc_type, exc_value, traceback)
| [
"cloudsdk.mirror@gmail.com"
] | cloudsdk.mirror@gmail.com |
fca104b783bfc0e2592794845e82a2e7670d1953 | b0ed67c452f79da72120b185960bf06711695fdd | /while_loop/max_number.py | a50ae857badae0270d2df0df54ae12bdd4ae191b | [] | no_license | NikiDimov/SoftUni-Python-Basics | c0e1ae37867c1cfa264f8a19fdfba68b349df5d3 | f02045578930d03edbbd073995867eabfb171bbc | refs/heads/main | 2023-07-11T01:10:25.612754 | 2021-08-19T10:26:12 | 2021-08-19T10:26:12 | 345,221,145 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 121 | py | n = input()
my_nums = []
while not n == "Stop":
n = int(n)
my_nums.append(n)
n = input()
print(max(my_nums))
| [
"niki.dimov86@gmail.com"
] | niki.dimov86@gmail.com |
d8244c4d2a0e627a721f9626fcb9edc7c3ef3b0e | 3cd4e2aae2a3ee3f9002fea903a6695f9fd5d373 | /bigml/tests/read_configuration_steps.py | 4d14340f31677c9017ce140619a82c926a491344 | [
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] | permissive | jaykamau7/python | 1c2daf7222f12909563005701b02308b8b80c732 | faf718173e4a108ae8d500e82a6b4197fabbecb4 | refs/heads/master | 2023-02-28T13:29:59.759663 | 2021-02-07T14:10:20 | 2021-02-07T14:10:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 957 | py | # -*- coding: utf-8 -*-
#
# Copyright 2017-2020 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from .world import world
from nose.tools import eq_
from bigml.api import HTTP_OK
#@step(r'I get the configuration "(.*)"')
def i_get_the_configuration(step, configuration):
resource = world.api.get_configuration(configuration)
world.status = resource['code']
eq_(world.status, HTTP_OK)
world.configuration = resource['object']
| [
"merce@bigml.com"
] | merce@bigml.com |
0a77fbe43f734f778b70dd19d7af6633f82c5acc | 4c534dc33d548acf07edc3e7b826f4bfb9207030 | /lexicon/management/commands/populatetags.py | 247a11e8fedbee41aad11ff4954dd17c4615e3be | [] | no_license | Typecraft/valex-backend | a1fdbcdf0f761eca51f7762ec20e57a2feec234b | 7c7ca74851bf595e811ffba5b0f5b09fbd5ac19a | refs/heads/develop | 2022-12-18T08:22:47.545094 | 2017-10-24T09:00:58 | 2017-10-24T09:00:58 | 102,861,803 | 0 | 0 | null | 2022-12-08T00:38:08 | 2017-09-08T13:00:09 | Python | UTF-8 | Python | false | false | 3,141 | py | from functools import reduce
from django.core.management import BaseCommand
from lexicon.models import ValenceFrame
tags = [
"EXPL",
"EXPL+[INF:rais"
"EXPL+adpos",
"EXPL+adpos+PP[S",
"EXPL+adpos+S",
"EXPL+APpred+INF",
"EXPL+APpred+PP",
"EXPL+APpred+S",
"EXPL+INF",
"EXPL+INF:equiOBJ",
"EXPL+INF:equiSBJ",
"EXPL+NP",
"EXPL+NP+adpos",
"EXPL+NP+INF",
"EXPL+NP+INF:equiSBJ",
"EXPL+NP+NP",
"EXPL+NP+NP+INF",
"EXPL+NP+NP+S",
"EXPL+NP+S",
"EXPL+NPpred+INF",
"EXPL+NPpred+S",
"EXPL+PARTPpred+S",
"EXPL+PP",
"EXPL+PP+INF",
"EXPL+PP[INF",
"EXPL+PP[S",
"EXPL+PPpred+S",
"EXPL+S",
"INF",
"INF+APpred",
"INF+INF",
"INF+NP",
"INF+NP+NP",
"INF+NP+NPgen",
"INF+NP+PP",
"INF+NPdat+NP",
"INF+NPpred",
"INF+PARTPpred",
"INF+PPpred",
"inherentCompNP+NP+NP",
"NP",
"NP+adpos",
"NP+adpos+INF:equiSBJ",
"NP+adpos+INF:equiSBJ",
"NP+adpos+NP",
"NP+adpos+PARTPpred",
"NP+adpos+PP",
"NP+adpos+PP[INF:equiSBJ",
"NP+adpos+PP[INF:raisSBJ",
"NP+adpos+S",
"NP+ADVPpred",
"NP+APpred",
"NP+APpred+adpos",
"NP+EXPL+APpred+S",
"NP+EXPL+INF",
"NP+EXPL+S",
"NP+INF",
"NP+INF",
"NP+INF:equiSBJ",
"NP+INF:raisingOBJ",
"NP+INF:raisingSBJ",
"NP+NP",
"NP+NP++ADVPpredSBJ",
"NP+NP+ADVPpred",
"NP+NP+APpred",
"NP+NP+INF:equi:OBJ",
"NP+NP+INF:equi:SBJ",
"NP+NP+INF:rais:OBJ",
"NP+NP+INF:rais:SBJ",
"NP+NP+NP",
"NP+NP+NP+PP",
"NP+NP+NPpred",
"NP+NP+PP",
"NP+NP+PP[INF",
"NP+NP+PP[INF:equiOBJ",
"NP+NP+PP[INF:equiSBJ",
"NP+NP+PP[INF:raisOBJ",
"NP+NP+PP[S",
"NP+NP+PPpred",
"NP+NP+PRTP[INF:raisOBJ",
"NP+NP+PRTPpred",
"NP+NP+PRTPpred",
"NP+NP+PRTPpredSBJ",
"NP+NP+S",
"NP+NPdat+NP",
"NP+NPdat+NP+PP",
"NP+NPpred",
"NP+NPpred",
"NP+NPrefl",
"NP+NPrefl+adpos",
"NP+NPrefl+ADVpred",
"NP+NPrefl+APpred",
"Np+NPrefl+APpred+adpos",
"NP+NPrefl+INF",
"NP+NPrefl+INF:equi:OBJ",
"NP+NPrefl+INF:equiSBJ",
"NP+NPrefl+INF:raisSBJ",
"NP+NPrefl+NP",
"NP+NPrefl+NPpred",
"NP+NPrefl+PP",
"NP+NPrefl+PP[INF",
"NP+NPrefl+PP[INF:equiSBJ",
"NP+NPrefl+PP[INF:raisOBJ",
"NP+NPrefl+PPpred",
"NP+NPrefl+PRTP[INF:raisOBJ",
"NP+NPrefl+PRTPpred",
"NP+NPrefl+S",
"NP+phrasalVerb",
"NP+PP",
"NP+PP[INF",
"NP+PP[INF:equiSBJ",
"NP+PP[INF:raisSBJ",
"NP+PP[S",
"NP+PPpred",
"NP+PRTPpred",
"NP+PTCP",
"NP+S",
"NPdat",
"S",
"S+APpred",
"S+idiomatic",
"S+NP",
"S+NP+APpred",
"S+NP+NP",
"S+NP+S",
"S+NPpred",
"S+PARTPpred",
"S+PP",
"S+PP+NP",
"S+PPpred",
]
class Command(BaseCommand):
help = 'Populates the ValenceFrame table with tags'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
valence_frames = []
for tag in tags:
valence_frames.append(ValenceFrame(name=tag))
ValenceFrame.objects.bulk_create(valence_frames)
| [
"tormod.haugland@gmail.com"
] | tormod.haugland@gmail.com |
0f4cae1c82065decfb1458cf930ea6fa0d20184f | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_1480487_0/Python/TV4Fun/CodePython.py | d48048e4b9227440502b06435ae41e03eaa57447 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 905 | py | filename = raw_input("Enter the name of the input file: ")
f = open(filename, 'r')
outfile = open("outfile.txt", 'w')
t = int(f.readline())
for case in range(t):
x = [int(i) for i in f.readline().split()]
n = x.pop(0)
xsum = sum(x)
outfile.write('Case #' + str(case + 1) + ':')
minscore = 2.0 * xsum / n
changed = True
maylose = x[:]
while changed:
changed = False
for i in maylose:
if i > minscore:
maylose.remove(i)
if len(maylose) > 1:
minscore = (float(sum(maylose)) + xsum) / len(maylose)
changed = True
print minscore
for i in x:
if i >= minscore:
outfile.write(' 0.0')
else:
outfile.write(' ' + str(100.0 * float(minscore - i) / xsum))
outfile.write('\n')
outfile.close()
| [
"eewestman@gmail.com"
] | eewestman@gmail.com |
9185725abebca98dc093dd4d562ae61a619fb4cc | 29ed133feb870455ca619c9fa2ce9b7eb1dcc470 | /URIs/URI2968.py | aac803dbfada4346f0a947ba958b8c61eff6808a | [] | no_license | jrantunes/URIs-Python-3 | c5e676686a979b6bbfd10b8e7168a6d35fb8f6a2 | 4692f3fba4a1c9a0f51322a13e9e267d8b07ea3e | refs/heads/master | 2022-04-17T10:56:52.468275 | 2020-03-28T17:07:46 | 2020-03-28T17:07:46 | 250,395,664 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | #Hour for a Run
import math
v, n = input().split()
v = int(v)
n = int(n)
x = []
for i in range(1, 10):
r = math.ceil((n * v * i) / 10.0)
x.append(r)
print(x[0], x[1], x[2], x[3], x[4], x[5], x[6], x[7], x[8]) | [
"noreply@github.com"
] | jrantunes.noreply@github.com |
2e1f9b8a34984b316be5df9a700ccee570f03474 | 89cd8b77ad5171c336cc60b2133fe6468a6cb53f | /Module02/06-正则表达式/05-FilterLagou.py | 7a68143d021baee871dc9c51bfd6f9e213906972 | [
"MIT"
] | permissive | fenglihanxiao/Python | 75178f6b6b0c53345e1ed54226ea645216572d6c | 872baf3a3a5ee42740161152605ca2b1ddf4cd30 | refs/heads/master | 2021-05-23T18:49:20.656433 | 2020-04-29T01:06:21 | 2020-04-29T01:06:21 | 253,199,073 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,017 | py | import re
html_str = """
<div class="job_at">
<p>岗位职责:</p>
<p>完成推荐算法、数据统计、接口、后台等服务器端相关工作</p>
<p><br></p>
<p>必备要求:</p>
<p>良好的自我驱动力和职业素养,工作积极主动、结果导向</p>
<p> <br></p>
<p>技术要求:</p>
<p>1、一年以上 Python 开发经验,掌握面向对象分析和设计,了解设计模式</p>
<p>2、掌握HTTP协议,熟悉MVC、MVVM等概念以及相关WEB开发框架</p>
<p>3、掌握关系数据库开发设计,掌握 SQL,熟练使用 MySQL/PostgreSQL 中的一种<br></p>
<p>4、掌握NoSQL、MQ,熟练使用对应技术解决方案</p>
<p>5、熟悉 Javascript/CSS/HTML5,JQuery、React、Vue.js</p>
<p> <br></p>
<p>加分项:</p>
<p>大数据,数理统计,机器学习,sklearn,高性能,大并发。</p>
</div>
"""
ret = re.sub(r"<[^>]*>| ", " ", html_str)
print(ret) | [
"fenglihanxiao@qq.com"
] | fenglihanxiao@qq.com |
6297beb8ba52f92cb896761b185e0a4502499949 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2022_02_01/operations/_private_link_resources_operations.py | b8989a164064339b3ceb0b30e1c860798d4401ef | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 6,861 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-02-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/privateLinkResources",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class PrivateLinkResourcesOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.containerservice.v2022_02_01.ContainerServiceClient`'s
        :attr:`private_link_resources` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs):
        # Generated clients pass (client, config, serializer, deserializer)
        # positionally; otherwise they are expected as keyword arguments.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(
        self, resource_group_name: str, resource_name: str, **kwargs: Any
    ) -> _models.PrivateLinkResourcesListResult:
        """Gets a list of private link resources in the specified managed cluster.

        To learn more about private clusters, see:
        https://docs.microsoft.com/azure/aks/private-clusters.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PrivateLinkResourcesListResult or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2022_02_01.models.PrivateLinkResourcesListResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> exception mapping; callers can extend or override it
        # via the ``error_map`` keyword argument.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        # The caller may pin the service API version; defaults to 2022-02-01.
        api_version: Literal["2022-02-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-02-01"))
        cls: ClsType[_models.PrivateLinkResourcesListResult] = kwargs.pop("cls", None)

        # Build the HTTP GET request from the operation parameters.
        request = build_list_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.list.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        # Send the request through the client pipeline (non-streaming body).
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        # Anything other than 200 is an error for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("PrivateLinkResourcesListResult", pipeline_response)

        if cls:
            # Hand the caller's callback the raw pipeline response as well.
            return cls(pipeline_response, deserialized, {})

        return deserialized

    # URL template consumed by build_list_request via self.list.metadata.
    list.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/privateLinkResources"
    }
| [
"noreply@github.com"
] | kurtzeborn.noreply@github.com |
8ea4e4fe4fcab2f4511431a6d1a91c8e94bcd4cc | 029aa4fa6217dbb239037dec8f2e64f5b94795d0 | /数据结构练习/Python算法指南数据结构/123_删除链表中倒数第n个节点.py | e5895f57b7dca317281edff54280a7e8d0b3517a | [] | no_license | tonyyo/algorithm | 5a3f0bd4395a75703f9ee84b01e42a74283a5de9 | 60dd5281e7ce4dfb603b795aa194a67ff867caf6 | refs/heads/master | 2022-12-14T16:04:46.723771 | 2020-09-23T06:59:33 | 2020-09-23T06:59:33 | 270,216,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,439 | py | class ListNode(object):
def __init__(self, val, next=None):
    # val: the payload stored in this node; next: the following node,
    # or None when this is the tail of the list.
    self.val = val
    self.next = next
class Solution(object):
    """Remove the n-th node from the end of a singly linked list.

    Both methods assume 1 <= n <= length of the list and return the
    (possibly new) head of the modified list.
    """

    def removeNthFromEnd(self, head, n):
        """Two-pass-free offset walk behind a dummy head.

        A runner is advanced n steps first; when it falls off the end,
        the trailing pointer sits just before the node to delete.
        """
        dummy = ListNode(0)
        dummy.next = head
        trail = dummy
        runner = head
        for _ in range(n):
            runner = runner.next
        while runner is not None:
            runner = runner.next
            trail = trail.next
        # trail.next is the n-th node from the end; splice it out.
        trail.next = trail.next.next
        return dummy.next

    def removeNthFromEnd2(self, head, n):
        """Three-pointer variant: prev lags target by one node.

        probe leads target by n-1 nodes, so when probe reaches the tail,
        target is the node to remove and prev is its predecessor.
        """
        dummy = ListNode(0)
        dummy.next = head
        prev = dummy
        target = head
        probe = head
        for _ in range(n - 1):
            probe = probe.next
        while probe.next:
            probe = probe.next
            prev = prev.next
            target = target.next
        prev.next = target.next
        return dummy.next
# Entry point: build the list 1->2->3->4->5, remove the n-th node from
# the end, and print the values before and after.
if __name__ == "__main__":
    node1 = ListNode(1)
    node2 = ListNode(2)
    node3 = ListNode(3)
    node4 = ListNode(4)
    node5 = ListNode(5)
    node1.next = node2
    node2.next = node3
    node3.next = node4
    node4.next = node5
    list1 = []
    n = 1  # remove the 1st node from the end (the tail)
    # Create the solver object.
    solution = Solution()
    print("初始链表是:", [node1.val, node2.val, node3.val, node4.val, node5.val])
    newlist = solution.removeNthFromEnd2(node1, n)
    # Walk the resulting list, collecting values for display.
    while (newlist):
        list1.append(newlist.val)
        newlist = newlist.next
    print("最终链表是:", list1)
"1325338208@qq.com"
] | 1325338208@qq.com |
1d30a93b7b0fdf10bb8df12a279bd831e5c4df12 | 50f57af6bc95c755597074138ebef2200c0e0022 | /第二天作业/9.将首尾反转,自己写算法实现.py | c44f033e04040dda41193c2a113ad331fe4f9a76 | [] | no_license | qiaoy9377/python-base | f18666c1851f5d5f214b72723e04298471e55d63 | 6c7e4acf96d3582b85464efb8f9d5a6d6d8c2271 | refs/heads/main | 2023-04-05T05:42:40.021651 | 2021-04-20T12:53:10 | 2021-04-20T12:53:10 | 359,812,810 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | #有一堆字符串,‘abcdef’,将首尾反转,结果:fedcba,不能使用现有的函数或方法,自己写算法实现
#定义字符串
str1 = 'abcdef'
#创建一个空列表
list1 = []
#逆着循环取出字符串的数据,插入列表
#1、使用while循环
l = len(str1)
#循环变量--数据下标
i = l-1
#循环判断-判断下标大于等于0
while i >= 0:
#循环体--取数据插入列表
list1.append(str1[i])
#循环变量发生变化
i -= 1
print(''.join(list1))
#2、使用for循环实现
list2 = []
for i in str1[::-1]:
list2.append(i)
print(''.join(list2)) | [
"18335909377@163.com"
] | 18335909377@163.com |
99d3c391e4b1a82321546dbb646c812be3ae5c73 | 07c75f8717683b9c84864c446a460681150fb6a9 | /python-base/days10_code/demo04_text_a.py | 4f8d31d8f263d76e58733be3c497bd13c9224399 | [] | no_license | laomu/py_1709 | 987d9307d9025001bd4386381899eb3778f9ccd6 | 80630e6ac3ed348a2a6445e90754bb6198cfe65a | refs/heads/master | 2021-05-11T09:56:45.382526 | 2018-01-19T07:08:00 | 2018-01-19T07:08:00 | 118,088,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | # 以追加内容的方式打开文件
f = open("d:/shuaishuai.txt", "a")
# 向文件中追加数据
f.write("\r\n王帅:今晚我们去炸粮仓..")
# 关闭文件
f.close() | [
"1007821300@qq.com"
] | 1007821300@qq.com |
e81498d14e8c062e1e3e7a026d2372dea587c945 | a4844ab94268c60ccb3e58e3006bed8e187f4f9c | /decorator_eample.py | cad7c75d3b59eb3bbff7f455b05b056aa56d9c88 | [] | no_license | namitha89/python_works | 2d9eacaf0c73dcea5695c3446b0923610e963795 | d130f4f4f32bf40616fdb3e9eef518d58d2c6a51 | refs/heads/master | 2022-03-31T11:09:35.840452 | 2020-01-14T20:40:22 | 2020-01-14T20:40:22 | 115,792,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py | import time
def log(filename):
    """Decorator factory: time each call of the wrapped function and
    record ``[start, end, duration]`` (seconds since the epoch) to
    *filename*.

    Note: the file is opened with mode "w", so only the most recent
    call's timing survives; use mode "a" if a full history is wanted.
    """
    import functools  # local import keeps this block self-contained

    def inner(func):
        @functools.wraps(func)  # preserve func's __name__/__doc__
        def innermost(*args, **kwargs):
            start = time.time()
            result = func(*args, **kwargs)
            end = time.time()
            with open(filename, "w") as f:
                f.write(str([start, end, end - start]) + '\n')
            return result
        return innermost
    return inner
@log('myfile1.txt')
def view1(param):
    # Print the message once; call timing is written to myfile1.txt
    # by the @log decorator.
    print(param)
@log('myfile2.txt')
def view2(param_one, times=8):
    # Print the message ``times`` times; call timing is written to
    # myfile2.txt by the @log decorator.
    for i in range(times):
        print(param_one)
view2('hi am doing good', times=3) | [
"namigowda51@gmail.com"
] | namigowda51@gmail.com |
4fee9d06c23ab38f3284349a68a3ea91b4348d7f | e70a17e8a37847a961f19b136f3bbe74393fa2af | /RPI/src/video_stream_opencv/cfg/VideoStream.cfg | ed9b908047679eaf1c59bef03a222c023b12a809 | [
"MIT"
] | permissive | Mondiegus/ROS-4x4-CAR-AI | 1413ead6f46a8b16005abeea3e0b215caa45f27e | 124efe39168ce96eec13d57e644f4ddb6dfe2364 | refs/heads/Master | 2023-07-14T23:56:53.519082 | 2021-03-27T17:28:45 | 2021-03-27T17:28:45 | 334,233,839 | 0 | 0 | MIT | 2021-02-02T13:00:30 | 2021-01-29T18:46:16 | Makefile | UTF-8 | Python | false | false | 2,012 | cfg | #!/usr/bin/env python
from dynamic_reconfigure.parameter_generator_catkin import *
PKG = "video_stream_opencv"  # ROS package name passed to gen.generate()

# Collects every parameter declaration below into one reconfigure group.
gen = ParameterGenerator()
class LEVEL:
    """Reconfigure level constants used by the parameter declarations."""
    NORMAL, RUNNING = 0, 1
# Each gen.add call declares one reconfigurable parameter:
#   name, type, level, description, default[, min, max]
gen.add("camera_name", str_t, LEVEL.NORMAL, "Camera name", "camera")
gen.add("set_camera_fps", double_t, LEVEL.RUNNING, "Image Publish Rate", 30.0, 0.0, 1000.0)
gen.add("buffer_queue_size", int_t, LEVEL.NORMAL, "Buffer size for capturing frames", 100, 1, 1000)
gen.add("fps", double_t, LEVEL.RUNNING, "Image Publish Rate", 240.0, 0.0, 1000.0)
gen.add("frame_id", str_t, LEVEL.RUNNING, "Camera FrameID", "camera")
gen.add("camera_info_url", str_t, LEVEL.RUNNING, "Camera info URL", "")
gen.add("flip_horizontal", bool_t, LEVEL.NORMAL, "Flip image horizontally", False)
gen.add("flip_vertical", bool_t, LEVEL.NORMAL, "Flip image vertically", False)
# width/height of 0 appear to mean "keep the source resolution" — confirm
# against the node that consumes these parameters.
gen.add("width", int_t, LEVEL.RUNNING, "Target width", 0, 0, 10000)
gen.add("height", int_t, LEVEL.RUNNING, "Target height", 0, 0, 10000)
# Image-adjustment parameters are bounded to the [0.0, 1.0] range.
gen.add("brightness", double_t, LEVEL.RUNNING, "Target brightness", 0.5019607843137255, 0.0, 1.0)
gen.add("contrast", double_t, LEVEL.RUNNING, "Target contrast", 0.12549019607843137, 0.0, 1.0)
gen.add("hue", double_t, LEVEL.RUNNING, "Target hue", 0.5, 0.0, 1.0)
gen.add("saturation", double_t, LEVEL.RUNNING, "Target saturation", 0.64, 0.0, 1.0)
gen.add("auto_exposure", bool_t, LEVEL.RUNNING, "Target auto exposure", True)
gen.add("exposure", double_t, LEVEL.RUNNING, "Target exposure", 0.5, 0.0, 1.0)
gen.add("loop_videofile", bool_t, LEVEL.RUNNING, "Loop videofile", False)
gen.add("reopen_on_read_failure", bool_t, LEVEL.RUNNING, "Re-open camera device on read failure", False)
gen.add("output_encoding", str_t, LEVEL.NORMAL, "Output encoding", "bgr8")
# start_frame/stop_frame have no upper bound; stop_frame's -1 presumably
# means "play to the end" — verify against the consuming node.
gen.add("start_frame", int_t, LEVEL.RUNNING, "Start frame of the video ", 0, 0)
gen.add("stop_frame", int_t, LEVEL.RUNNING, "Stop frame of the video", -1, -1)

# Write out the generated dynamic_reconfigure sources for this package.
exit(gen.generate(PKG, PKG, "VideoStream"))
| [
"Mondiegus9@gmail.com"
] | Mondiegus9@gmail.com |
4c2077d7acf3d765cf747f4fb6fbcdf1dffc6276 | e36225e61d95adfabfd4ac3111ec7631d9efadb7 | /problems/CR/auto/problem9_CR.py | 9ab4523e3e2e108161cae160b7e968f1641b787e | [
"BSD-3-Clause"
] | permissive | sunandita/ICAPS_Summer_School_RAE_2020 | d2ab6be94ac508e227624040283e8cc6a37651f1 | a496b62185bcfdd2c76eb7986ae99cfa85708d28 | refs/heads/main | 2023-01-01T02:06:40.848068 | 2020-10-15T17:25:01 | 2020-10-15T17:25:01 | 301,263,711 | 5 | 2 | BSD-3-Clause | 2020-10-15T17:25:03 | 2020-10-05T01:24:08 | Python | UTF-8 | Python | false | false | 1,111 | py | __author__ = 'patras'
from domain_chargeableRobot import *
from timer import DURATION
from state import state
DURATION.TIME = {
'put': 2,
'take': 2,
'perceive': 3,
'charge': 5,
'move': 10,
'moveToEmergency': 5,
'moveCharger': 15,
'addressEmergency': 10,
'wait': 5,
}
DURATION.COUNTER = {
'put': 2,
'take': 2,
'perceive': 3,
'charge': 5,
'move': 10,
'moveToEmergency': 5,
'moveCharger': 15,
'addressEmergency': 10,
'wait': 5,
}
rv.LOCATIONS = [1, 2, 3, 4, 5, 6, 7, 8]
rv.EDGES = {1: [7], 2: [8], 3: [8], 4: [8], 5: [7], 6: [7], 7: [1, 5, 6, 8], 8: [2, 3, 4, 7]}
rv.OBJECTS=['o1']
rv.ROBOTS=['r1']
def ResetState():
    """Restore the shared simulation state to this problem's initial
    configuration: robot r1 at location 2 with charge 2 and no load,
    object o1 and charger c1 at location 1, nothing yet perceived.
    """
    state.loc = {'r1': 2}
    state.charge = {'r1': 2}
    state.load = {'r1': NIL}
    state.pos = {'c1': 1, 'o1': 1}
    # Every location starts with an empty container except location 1,
    # which holds o1.
    state.containers = {place: [] for place in rv.LOCATIONS}
    state.containers[1] = ['o1']
    state.emergencyHandling = {'r1': False, 'r2': False}
    # No location has been perceived yet.
    state.view = {place: False for place in rv.LOCATIONS}
# Scheduled task arrivals: release time -> list of task invocations.
tasks = {
    3: [['fetch', 'r1', 'o1']],
    5: [['emergency', 'r1', 2, 1]],
}

# No exogenous environment events in this problem instance.
eventsEnv = {
}
"sunandita.patra@gmail.com"
] | sunandita.patra@gmail.com |
b453edd28f0abe75121ce1976ba9a9c00c0b850a | c20f811f26afd1310dc0f75cb00992e237fdcfbd | /202-happy-number.py | f7b8c53ffaef5cd4e475b3ce7753353ed5dae8fe | [
"MIT"
] | permissive | dchentech/leetcode | 4cfd371fe4a320ab3e95925f1b5e00eed43b38b8 | 3111199beeaefbb3a74173e783ed21c9e53ab203 | refs/heads/master | 2022-10-21T09:59:08.300532 | 2016-01-04T03:21:16 | 2016-01-04T03:21:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,756 | py | """
Question:
Happy Number
Write an algorithm to determine if a number is "happy".
A happy number is a number defined by the following process: Starting with any positive integer, replace the number by the sum of the squares of its digits, and repeat the process until the number equals 1 (where it will stay), or it loops endlessly in a cycle which does not include 1. Those numbers for which this process ends in 1 are happy numbers.
Example: 19 is a happy number
1^2 + 9^2 = 82
8^2 + 2^2 = 68
6^2 + 8^2 = 100
1^2 + 0^2 + 0^2 = 1
Credits:
Special thanks to @mithmatt and @ts for adding this problem and creating all test cases.
Performance:
1. Total Accepted: 34378 Total Submissions: 104779 Difficulty: Easy
2. Your runtime beats 55.38% of python submissions.
"""
class Solution(object):
def isHappy(self, n):
"""
:type n: int
:rtype: bool
"""
result = self.compute(n)
if result["is_endless"]:
return False
return True
def compute(self, n):
num = n # n is already a positive integer
is_endless = False
same_nums = set([]) # check if it's already in a endless loop
while num != 1 and not is_endless:
num = sum(map(lambda i: i * i, map(int, list(str(num)))))
if num in same_nums:
is_endless = True
break
same_nums.add(num)
return {"num": num, "is_endless": is_endless}
assert Solution().compute(19)["num"] == 1
assert Solution().compute(0)["num"] == 0
assert Solution().compute(1)["num"] == 1
assert Solution().isHappy(19) is True
assert Solution().isHappy(0) is False
assert Solution().isHappy(1) is True
| [
"mvjome@gmail.com"
] | mvjome@gmail.com |
2120159042ebf409524843722f2a78301223752c | d42a9128898d504a9831f1afee3198c4677236c9 | /Level_2/기능개발.py | 012a8d1f581b77ca300f381fa928db03d99a1447 | [] | no_license | ketkat001/Programmers-coding | 6848a9c8cffd97b792cfc8856ec135b72af5d688 | 799baba8d66a9971b43233d231cecbf262b4ea27 | refs/heads/master | 2023-09-02T23:07:25.614820 | 2021-10-17T18:12:02 | 2021-10-17T18:12:02 | 235,016,879 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | def solution(progresses, speeds):
answer = []
while progresses:
time = 0
if (100 - progresses[0]) % speeds[0] != 0:
time += 1
time += (100 - progresses[0]) // speeds[0]
for i in range(len(progresses)):
progresses[i] += speeds[i] * time
temp, idx = 1, 1
while idx < len(progresses):
if progresses[idx] >= 100:
temp += 1
idx += 1
else:
break
answer.append(temp)
progresses = progresses[idx:]
speeds = speeds[idx:]
return answer
print(solution([99, 99, 99, 99, 99] , [3, 3, 3, 3, 3])) | [
"ketkat001@gmail.com"
] | ketkat001@gmail.com |
9f12561ad304b6686343604c8f927cda626574b8 | 26e91aead18d0fad6f5ce8fc4adf7d8e05a2f07f | /tests/util/datetime/calc/test_calculate_days_util.py | 53cf0307f8290841c7e426a19284122c18f21258 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | leathe/byceps | 40c1f8a1aab3521fcac45d88eab6364d448d4e67 | cd0c618af63fed1cd7006bb67da46eac0ddbb1c7 | refs/heads/master | 2020-12-02T09:02:51.087511 | 2019-12-14T17:00:22 | 2019-12-14T17:00:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 530 | py | """
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import date
import pytest
from byceps.util.datetime.calc import calculate_days_until
SOME_DATE = date(1994, 3, 18)
@pytest.mark.parametrize('today, expected', [
(date(2014, 3, 16), 2),
(date(2014, 3, 17), 1),
(date(2014, 3, 18), 0),
(date(2014, 3, 19), 364),
])
def test_calculate_days_until(today, expected):
actual = calculate_days_until(SOME_DATE, today)
assert actual == expected
| [
"homework@nwsnet.de"
] | homework@nwsnet.de |
f0f826009c7d7edec9d20ab836fbd2f002481a5f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03088/s886343007.py | cb0bffa8d09a57e497014975538a18cc8134c4f7 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py | N = int(input())
mod = 10 ** 9 + 7
# 直大さんの解説風
dp = [[[[0] * 4 for _ in range(4)] for _ in range(4)] for _ in range(N + 1)]
dp[0][3][3][3] = 1
for Length in range(N):
for i in range(4):
for j in range(4):
for k in range(4):
if dp[Length][i][j][k] == 0:
continue
for d in range(4): # 追加する文字
if d == 1 and j == 0 and k == 2:
continue
if d == 2:
if j == 0 and k == 1:
continue
if j == 1 and k == 0:
continue
if i == 0 and k == 1:
continue
if i == 0 and j == 1:
continue
dp[Length + 1][j][k][d] += dp[Length][i][j][k]
dp[Length + 1][j][k][d] %= mod
ans = 0
for i in range(4):
for j in range(4):
for k in range(4):
ans = (ans + dp[N][i][j][k]) % mod
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
161008cf4704dcdb1f26126b3a6ceb19798c3bf0 | 2a8a6327fb9a7ce8696aa15b197d5170661fb94f | /zuora_client/models/post_revenue_schedule_by_date_range_type.py | 12aa39cf71f88420db701acb1540ede72ff7edb4 | [] | no_license | moderndatainc/zuora-client | 8b88e05132ddf7e8c411a6d7dad8c0baabaa6dad | d50da49ce1b8465c76723496c2561a3b8ebdf07d | refs/heads/master | 2021-09-21T19:17:34.752404 | 2018-08-29T23:24:07 | 2018-08-29T23:24:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48,281 | py | # coding: utf-8
"""
Zuora API Reference
# Introduction Welcome to the reference for the Zuora REST API! <a href=\"http://en.wikipedia.org/wiki/REST_API\" target=\"_blank\">REST</a> is a web-service protocol that lends itself to rapid development by using everyday HTTP and JSON technology. The Zuora REST API provides a broad set of operations and resources that: * Enable Web Storefront integration from your website. * Support self-service subscriber sign-ups and account management. * Process revenue schedules through custom revenue rule models. * Enable manipulation of most objects in the Zuora Object Model. Want to share your opinion on how our API works for you? <a href=\"https://community.zuora.com/t5/Developers/API-Feedback-Form/gpm-p/21399\" target=\"_blank\">Tell us how you feel </a>about using our API and what we can do to make it better. ## Access to the API If you have a Zuora tenant, you can access the Zuora REST API via one of the following endpoints: | Tenant | Base URL for REST Endpoints | |-------------------------|-------------------------| |US Production | https://rest.zuora.com | |US API Sandbox | https://rest.apisandbox.zuora.com| |US Performance Test | https://rest.pt1.zuora.com | |EU Production | https://rest.eu.zuora.com | |EU Sandbox | https://rest.sandbox.eu.zuora.com | The Production endpoint provides access to your live user data. API Sandbox tenants are a good place to test code without affecting real-world data. If you would like Zuora to provision an API Sandbox tenant for you, contact your Zuora representative for assistance. **Note:** If you have a tenant in the Production Copy Environment, submit a request at <a href=\"http://support.zuora.com/\" target=\"_blank\">Zuora Global Support</a> to enable the Zuora REST API in your tenant and obtain the base URL for REST endpoints. 
If you do not have a Zuora tenant, go to <a href=\"https://www.zuora.com/resource/zuora-test-drive\" target=\"_blank\">https://www.zuora.com/resource/zuora-test-drive</a> and sign up for a Production Test Drive tenant. The tenant comes with seed data, including a sample product catalog. # API Changelog You can find the <a href=\"https://community.zuora.com/t5/Developers/API-Changelog/gpm-p/18092\" target=\"_blank\">Changelog</a> of the API Reference in the Zuora Community. # Authentication ## OAuth v2.0 Zuora recommends that you use OAuth v2.0 to authenticate to the Zuora REST API. Currently, OAuth is not available in every environment. See [Zuora Testing Environments](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/D_Zuora_Environments) for more information. Zuora recommends you to create a dedicated API user with API write access on a tenant when authenticating via OAuth, and then create an OAuth client for this user. See <a href=\"https://knowledgecenter.zuora.com/CF_Users_and_Administrators/A_Administrator_Settings/Manage_Users/Create_an_API_User\" target=\"_blank\">Create an API User</a> for how to do this. By creating a dedicated API user, you can control permissions of the API user without affecting other non-API users. If a user is deactivated, all of the user's OAuth clients will be automatically deactivated. Authenticating via OAuth requires the following steps: 1. Create a Client 2. Generate a Token 3. Make Authenticated Requests ### Create a Client You must first [create an OAuth client](https://knowledgecenter.zuora.com/CF_Users_and_Administrators/A_Administrator_Settings/Manage_Users#Create_an_OAuth_Client_for_a_User) in the Zuora UI. To do this, you must be an administrator of your Zuora tenant. This is a one-time operation. You will be provided with a Client ID and a Client Secret. Please note this information down, as it will be required for the next step. **Note:** The OAuth client will be owned by a Zuora user account. 
If you want to perform PUT, POST, or DELETE operations using the OAuth client, the owner of the OAuth client must have a Platform role that includes the \"API Write Access\" permission. ### Generate a Token After creating a client, you must make a call to obtain a bearer token using the [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) operation. This operation requires the following parameters: - `client_id` - the Client ID displayed when you created the OAuth client in the previous step - `client_secret` - the Client Secret displayed when you created the OAuth client in the previous step - `grant_type` - must be set to `client_credentials` **Note**: The Client ID and Client Secret mentioned above were displayed when you created the OAuth Client in the prior step. The [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) response specifies how long the bearer token is valid for. Call [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) again to generate a new bearer token. ### Make Authenticated Requests To authenticate subsequent API requests, you must provide a valid bearer token in an HTTP header: `Authorization: Bearer {bearer_token}` If you have [Zuora Multi-entity](https://www.zuora.com/developer/api-reference/#tag/Entities) enabled, you need to set an additional header to specify the ID of the entity that you want to access. You can use the `scope` field in the [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) response to determine whether you need to specify an entity ID. If the `scope` field contains more than one entity ID, you must specify the ID of the entity that you want to access. 
For example, if the `scope` field contains `entity.1a2b7a37-3e7d-4cb3-b0e2-883de9e766cc` and `entity.c92ed977-510c-4c48-9b51-8d5e848671e9`, specify one of the following headers: - `Zuora-Entity-Ids: 1a2b7a37-3e7d-4cb3-b0e2-883de9e766cc` - `Zuora-Entity-Ids: c92ed977-510c-4c48-9b51-8d5e848671e9` **Note**: For a limited period of time, Zuora will accept the `entityId` header as an alternative to the `Zuora-Entity-Ids` header. If you choose to set the `entityId` header, you must remove all \"-\" characters from the entity ID in the `scope` field. If the `scope` field contains a single entity ID, you do not need to specify an entity ID. ## Other Supported Authentication Schemes Zuora continues to support the following additional legacy means of authentication: * Use username and password. Include authentication with each request in the header: * `apiAccessKeyId` * `apiSecretAccessKey` Zuora recommends that you create an API user specifically for making API calls. See <a href=\"https://knowledgecenter.zuora.com/CF_Users_and_Administrators/A_Administrator_Settings/Manage_Users/Create_an_API_User\" target=\"_blank\">Create an API User</a> for more information. * Use an authorization cookie. The cookie authorizes the user to make calls to the REST API for the duration specified in **Administration > Security Policies > Session timeout**. The cookie expiration time is reset with this duration after every call to the REST API. To obtain a cookie, call the [Connections](https://www.zuora.com/developer/api-reference/#tag/Connections) resource with the following API user information: * ID * Password * For CORS-enabled APIs only: Include a 'single-use' token in the request header, which re-authenticates the user with each request. See below for more details. ### Entity Id and Entity Name The `entityId` and `entityName` parameters are only used for [Zuora Multi-entity](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Multi-entity \"Zuora Multi-entity\"). 
These are the legacy parameters that Zuora will only continue to support for a period of time. Zuora recommends you to use the `Zuora-Entity-Ids` parameter instead. The `entityId` and `entityName` parameters specify the Id and the [name of the entity](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Multi-entity/B_Introduction_to_Entity_and_Entity_Hierarchy#Name_and_Display_Name \"Introduction to Entity and Entity Hierarchy\") that you want to access, respectively. Note that you must have permission to access the entity. You can specify either the `entityId` or `entityName` parameter in the authentication to access and view an entity. * If both `entityId` and `entityName` are specified in the authentication, an error occurs. * If neither `entityId` nor `entityName` is specified in the authentication, you will log in to the entity in which your user account is created. To get the entity Id and entity name, you can use the GET Entities REST call. For more information, see [API User Authentication](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Multi-entity/A_Overview_of_Multi-entity#API_User_Authentication \"API User Authentication\"). ### Token Authentication for CORS-Enabled APIs The CORS mechanism enables REST API calls to Zuora to be made directly from your customer's browser, with all credit card and security information transmitted directly to Zuora. This minimizes your PCI compliance burden, allows you to implement advanced validation on your payment forms, and makes your payment forms look just like any other part of your website. For security reasons, instead of using cookies, an API request via CORS uses **tokens** for authentication. The token method of authentication is only designed for use with requests that must originate from your customer's browser; **it should not be considered a replacement to the existing cookie authentication** mechanism. 
See [Zuora CORS REST](https://knowledgecenter.zuora.com/DC_Developers/REST_API/A_REST_basics/G_CORS_REST \"Zuora CORS REST\") for details on how CORS works and how you can begin to implement customer calls to the Zuora REST APIs. See [HMAC Signatures](https://www.zuora.com/developer/api-reference/#operation/POSTHMACSignature \"HMAC Signatures\") for details on the HMAC method that returns the authentication token. # Requests and Responses ## Request IDs As a general rule, when asked to supply a \"key\" for an account or subscription (accountKey, account-key, subscriptionKey, subscription-key), you can provide either the actual ID or the number of the entity. ## HTTP Request Body Most of the parameters and data accompanying your requests will be contained in the body of the HTTP request. The Zuora REST API accepts JSON in the HTTP request body. No other data format (e.g., XML) is supported. ### Data Type ([Actions](https://www.zuora.com/developer/api-reference/#tag/Actions) and CRUD operations only) We recommend that you do not specify the decimal values with quotation marks, commas, and spaces. Use characters of `+-0-9.eE`, for example, `5`, `1.9`, `-8.469`, and `7.7e2`. Also, Zuora does not convert currencies for decimal values. ## Testing a Request Use a third party client, such as [curl](https://curl.haxx.se \"curl\"), [Postman](https://www.getpostman.com \"Postman\"), or [Advanced REST Client](https://advancedrestclient.com \"Advanced REST Client\"), to test the Zuora REST API. You can test the Zuora REST API from the Zuora API Sandbox or Production tenants. If connecting to Production, bear in mind that you are working with your live production data, not sample data or test data. ## Testing with Credit Cards Sooner or later it will probably be necessary to test some transactions that involve credit cards. 
For suggestions on how to handle this, see [Going Live With Your Payment Gateway](https://knowledgecenter.zuora.com/CB_Billing/M_Payment_Gateways/C_Managing_Payment_Gateways/B_Going_Live_Payment_Gateways#Testing_with_Credit_Cards \"C_Zuora_User_Guides/A_Billing_and_Payments/M_Payment_Gateways/C_Managing_Payment_Gateways/B_Going_Live_Payment_Gateways#Testing_with_Credit_Cards\" ). ## Concurrent Request Limits Zuora enforces tenant-level concurrent request limits. See <a href=\"https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Policies/Concurrent_Request_Limits\" target=\"_blank\">Concurrent Request Limits</a> for more information. ## Timeout Limit If a request does not complete within 120 seconds, the request times out and Zuora returns a Gateway Timeout error. ## Error Handling Responses and error codes are detailed in [Responses and errors](https://knowledgecenter.zuora.com/DC_Developers/REST_API/A_REST_basics/3_Responses_and_errors \"Responses and errors\"). # Pagination When retrieving information (using GET methods), the optional `pageSize` query parameter sets the maximum number of rows to return in a response. The maximum is `40`; larger values are treated as `40`. If this value is empty or invalid, `pageSize` typically defaults to `10`. The default value for the maximum number of rows retrieved can be overridden at the method level. If more rows are available, the response will include a `nextPage` element, which contains a URL for requesting the next page. If this value is not provided, no more rows are available. No \"previous page\" element is explicitly provided; to support backward paging, use the previous call. ## Array Size For data items that are not paginated, the REST API supports arrays of up to 300 rows. Thus, for instance, repeated pagination can retrieve thousands of customer accounts, but within any account an array of no more than 300 rate plans is returned. # API Versions The Zuora REST API are version controlled. 
Versioning ensures that Zuora REST API changes are backward compatible. Zuora uses a major and minor version nomenclature to manage changes. By specifying a version in a REST request, you can get expected responses regardless of future changes to the API. ## Major Version The major version number of the REST API appears in the REST URL. Currently, Zuora only supports the **v1** major version. For example, `POST https://rest.zuora.com/v1/subscriptions`. ## Minor Version Zuora uses minor versions for the REST API to control small changes. For example, a field in a REST method is deprecated and a new field is used to replace it. Some fields in the REST methods are supported as of minor versions. If a field is not noted with a minor version, this field is available for all minor versions. If a field is noted with a minor version, this field is in version control. You must specify the supported minor version in the request header to process without an error. If a field is in version control, it is either with a minimum minor version or a maximum minor version, or both of them. You can only use this field with the minor version between the minimum and the maximum minor versions. For example, the `invoiceCollect` field in the POST Subscription method is in version control and its maximum minor version is 189.0. You can only use this field with the minor version 189.0 or earlier. If you specify a version number in the request header that is not supported, Zuora will use the minimum minor version of the REST API. In our REST API documentation, if a field or feature requires a minor version number, we note that in the field description. You only need to specify the version number when you use the fields require a minor version. To specify the minor version, set the `zuora-version` parameter to the minor version number in the request header for the request call. For example, the `collect` field is in 196.0 minor version. 
If you want to use this field for the POST Subscription method, set the `zuora-version` parameter to `196.0` in the request header. The `zuora-version` parameter is case sensitive. For all the REST API fields, by default, if the minor version is not specified in the request header, Zuora will use the minimum minor version of the REST API to avoid breaking your integration. ### Minor Version History The supported minor versions are not serial. This section documents the changes made to each Zuora REST API minor version. The following table lists the supported versions and the fields that have a Zuora REST API minor version. | Fields | Minor Version | REST Methods | Description | |:--------|:--------|:--------|:--------| | invoiceCollect | 189.0 and earlier | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Generates an invoice and collects a payment for a subscription. 
| | collect | 196.0 and later | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Collects an automatic payment for a subscription. | | invoice | 196.0 and 207.0| [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Generates an invoice for a subscription. 
| | invoiceTargetDate | 196.0 and earlier | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\") |Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. | | invoiceTargetDate | 207.0 and earlier | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. | | targetDate | 207.0 and later | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\") |Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. 
| | targetDate | 211.0 and later | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. | | includeExisting DraftInvoiceItems | 196.0 and earlier| [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | Specifies whether to include draft invoice items in subscription previews. Specify it to be `true` (default) to include draft invoice items in the preview result. Specify it to be `false` to excludes draft invoice items in the preview result. | | includeExisting DraftDocItems | 207.0 and later | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | Specifies whether to include draft invoice items in subscription previews. 
Specify it to be `true` (default) to include draft invoice items in the preview result. Specify it to be `false` to excludes draft invoice items in the preview result. | | previewType | 196.0 and earlier| [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | The type of preview you will receive. The possible values are `InvoiceItem`(default), `ChargeMetrics`, and `InvoiceItemChargeMetrics`. | | previewType | 207.0 and later | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | The type of preview you will receive. The possible values are `LegalDoc`(default), `ChargeMetrics`, and `LegalDocChargeMetrics`. | | runBilling | 211.0 and later | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Generates an invoice or credit memo for a subscription. 
**Note:** Credit memos are only available if you have the Invoice Settlement feature enabled. | | invoiceDate | 214.0 and earlier | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date that should appear on the invoice being generated, as `yyyy-mm-dd`. | | invoiceTargetDate | 214.0 and earlier | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date through which to calculate charges on this account if an invoice is generated, as `yyyy-mm-dd`. | | documentDate | 215.0 and later | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date that should appear on the invoice and credit memo being generated, as `yyyy-mm-dd`. | | targetDate | 215.0 and later | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date through which to calculate charges on this account if an invoice or a credit memo is generated, as `yyyy-mm-dd`. | | memoItemAmount | 223.0 and earlier | [Create credit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_CreditMemoFromPrpc \"Create credit memo from charge\"); [Create debit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_DebitMemoFromPrpc \"Create debit memo from charge\") | Amount of the memo item. | | amount | 224.0 and later | [Create credit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_CreditMemoFromPrpc \"Create credit memo from charge\"); [Create debit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_DebitMemoFromPrpc \"Create debit memo from charge\") | Amount of the memo item. 
| | subscriptionNumbers | 222.4 and earlier | [Create order](https://www.zuora.com/developer/api-reference/#operation/POST_Order \"Create order\") | Container for the subscription numbers of the subscriptions in an order. | | subscriptions | 223.0 and later | [Create order](https://www.zuora.com/developer/api-reference/#operation/POST_Order \"Create order\") | Container for the subscription numbers and statuses in an order. | #### Version 207.0 and Later The response structure of the [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\") and [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") methods are changed. The following invoice related response fields are moved to the invoice container: * amount * amountWithoutTax * taxAmount * invoiceItems * targetDate * chargeMetrics # Zuora Object Model The following diagram presents a high-level view of the key Zuora objects. Click the image to open it in a new tab to resize it. <a href=\"https://www.zuora.com/wp-content/uploads/2017/01/ZuoraERD.jpeg\" target=\"_blank\"><img src=\"https://www.zuora.com/wp-content/uploads/2017/01/ZuoraERD.jpeg\" alt=\"Zuora Object Model Diagram\"></a> See the following articles for information about other parts of the Zuora business object model: * <a href=\"https://knowledgecenter.zuora.com/CB_Billing/Invoice_Settlement/D_Invoice_Settlement_Object_Model\" target=\"_blank\">Invoice Settlement Object Model</a> * <a href=\"https://knowledgecenter.zuora.com/BC_Subscription_Management/Orders/BA_Orders_Object_Model\" target=\"_blank\">Orders Object Model</a> You can use the [Describe object](https://www.zuora.com/developer/api-reference/#operation/GET_Describe) operation to list the fields of each Zuora object that is available in your tenant. When you call the operation, you must specify the API name of the Zuora object. 
The following table provides the API name of each Zuora object: | Object | API Name | |-----------------------------------------------|--------------------------------------------| | Account | `Account` | | Accounting Code | `AccountingCode` | | Accounting Period | `AccountingPeriod` | | Amendment | `Amendment` | | Application Group | `ApplicationGroup` | | Billing Run | <p>`BillingRun`</p><p>**Note:** The API name of this object is `BillingRun` in the [Describe object](https://www.zuora.com/developer/api-reference/#operation/GET_Describe) operation and Export ZOQL queries only. Otherwise, the API name of this object is `BillRun`.</p> | | Contact | `Contact` | | Contact Snapshot | `ContactSnapshot` | | Credit Balance Adjustment | `CreditBalanceAdjustment` | | Credit Memo | `CreditMemo` | | Credit Memo Application | `CreditMemoApplication` | | Credit Memo Application Item | `CreditMemoApplicationItem` | | Credit Memo Item | `CreditMemoItem` | | Credit Memo Part | `CreditMemoPart` | | Credit Memo Part Item | `CreditMemoPartItem` | | Credit Taxation Item | `CreditTaxationItem` | | Custom Exchange Rate | `FXCustomRate` | | Debit Memo | `DebitMemo` | | Debit Memo Item | `DebitMemoItem` | | Debit Taxation Item | `DebitTaxationItem` | | Discount Applied Metrics | `DiscountAppliedMetrics` | | Entity | `Tenant` | | Gateway Reconciliation Event | `PaymentGatewayReconciliationEventLog` | | Gateway Reconciliation Job | `PaymentReconciliationJob` | | Gateway Reconciliation Log | `PaymentReconciliationLog` | | Invoice | `Invoice` | | Invoice Adjustment | `InvoiceAdjustment` | | Invoice Item | `InvoiceItem` | | Invoice Item Adjustment | `InvoiceItemAdjustment` | | Invoice Payment | `InvoicePayment` | | Journal Entry | `JournalEntry` | | Journal Entry Item | `JournalEntryItem` | | Journal Run | `JournalRun` | | Order | `Order` | | Order Action | `OrderAction` | | Order ELP | `OrderElp` | | Order Item | `OrderItem` | | Order MRR | `OrderMrr` | | Order Quantity | `OrderQuantity` | | 
Order TCB | `OrderTcb` | | Order TCV | `OrderTcv` | | Payment | `Payment` | | Payment Application | `PaymentApplication` | | Payment Application Item | `PaymentApplicationItem` | | Payment Method | `PaymentMethod` | | Payment Method Snapshot | `PaymentMethodSnapshot` | | Payment Method Transaction Log | `PaymentMethodTransactionLog` | | Payment Method Update | `UpdaterDetail` | | Payment Part | `PaymentPart` | | Payment Part Item | `PaymentPartItem` | | Payment Run | `PaymentRun` | | Payment Transaction Log | `PaymentTransactionLog` | | Processed Usage | `ProcessedUsage` | | Product | `Product` | | Product Rate Plan | `ProductRatePlan` | | Product Rate Plan Charge | `ProductRatePlanCharge` | | Product Rate Plan Charge Tier | `ProductRatePlanChargeTier` | | Rate Plan | `RatePlan` | | Rate Plan Charge | `RatePlanCharge` | | Rate Plan Charge Tier | `RatePlanChargeTier` | | Refund | `Refund` | | Refund Application | `RefundApplication` | | Refund Application Item | `RefundApplicationItem` | | Refund Invoice Payment | `RefundInvoicePayment` | | Refund Part | `RefundPart` | | Refund Part Item | `RefundPartItem` | | Refund Transaction Log | `RefundTransactionLog` | | Revenue Charge Summary | `RevenueChargeSummary` | | Revenue Charge Summary Item | `RevenueChargeSummaryItem` | | Revenue Event | `RevenueEvent` | | Revenue Event Credit Memo Item | `RevenueEventCreditMemoItem` | | Revenue Event Debit Memo Item | `RevenueEventDebitMemoItem` | | Revenue Event Invoice Item | `RevenueEventInvoiceItem` | | Revenue Event Invoice Item Adjustment | `RevenueEventInvoiceItemAdjustment` | | Revenue Event Item | `RevenueEventItem` | | Revenue Event Item Credit Memo Item | `RevenueEventItemCreditMemoItem` | | Revenue Event Item Debit Memo Item | `RevenueEventItemDebitMemoItem` | | Revenue Event Item Invoice Item | `RevenueEventItemInvoiceItem` | | Revenue Event Item Invoice Item Adjustment | `RevenueEventItemInvoiceItemAdjustment` | | Revenue Event Type | `RevenueEventType` | | Revenue 
Schedule | `RevenueSchedule` | | Revenue Schedule Credit Memo Item | `RevenueScheduleCreditMemoItem` | | Revenue Schedule Debit Memo Item | `RevenueScheduleDebitMemoItem` | | Revenue Schedule Invoice Item | `RevenueScheduleInvoiceItem` | | Revenue Schedule Invoice Item Adjustment | `RevenueScheduleInvoiceItemAdjustment` | | Revenue Schedule Item | `RevenueScheduleItem` | | Revenue Schedule Item Credit Memo Item | `RevenueScheduleItemCreditMemoItem` | | Revenue Schedule Item Debit Memo Item | `RevenueScheduleItemDebitMemoItem` | | Revenue Schedule Item Invoice Item | `RevenueScheduleItemInvoiceItem` | | Revenue Schedule Item Invoice Item Adjustment | `RevenueScheduleItemInvoiceItemAdjustment` | | Subscription | `Subscription` | | Taxable Item Snapshot | `TaxableItemSnapshot` | | Taxation Item | `TaxationItem` | | Updater Batch | `UpdaterBatch` | | Usage | `Usage` | # noqa: E501
OpenAPI spec version: 2018-08-23
Contact: docs@zuora.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from zuora_client.models.post_revenue_schedule_by_date_range_type_revenue_event import POSTRevenueScheduleByDateRangeTypeRevenueEvent # noqa: F401,E501
from zuora_client.models.revenue_schedule_object_custom_fields import RevenueScheduleObjectCustomFields # noqa: F401,E501
class POSTRevenueScheduleByDateRangeType(object):
    """Request body for creating a revenue schedule over a date range.

    NOTE: This class was auto generated by the swagger code generator
    program from the Zuora OpenAPI spec.  Prefer regenerating from the
    spec over editing by hand when the API changes.

    Attributes:
        swagger_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    swagger_types = {
        'distribution_type': 'str',
        'notes': 'str',
        'recognition_end': 'date',
        'recognition_start': 'date',
        'revenue_event': 'POSTRevenueScheduleByDateRangeTypeRevenueEvent'
    }

    attribute_map = {
        'distribution_type': 'distributionType',
        'notes': 'notes',
        'recognition_end': 'recognitionEnd',
        'recognition_start': 'recognitionStart',
        'revenue_event': 'revenueEvent'
    }

    def __init__(self, distribution_type=None, notes=None, recognition_end=None, recognition_start=None, revenue_event=None):  # noqa: E501
        """POSTRevenueScheduleByDateRangeType - a model defined in Swagger

        :param distribution_type: Optional distribution strategy; see the
            ``distribution_type`` property for the allowed values.
        :param notes: Optional free-form notes about this record.
        :param recognition_end: Required recognition period end date.
        :param recognition_start: Required recognition period start date.
        :param revenue_event: Required revenue event payload
            (``POSTRevenueScheduleByDateRangeTypeRevenueEvent``).
        :raises ValueError: If a required field is ``None`` or if
            ``distribution_type`` is not one of the allowed values.
        """
        self._distribution_type = None
        self._notes = None
        self._recognition_end = None
        self._recognition_start = None
        self._revenue_event = None
        # This model is not polymorphic, so there is no discriminator.
        self.discriminator = None

        # Optional fields: only run the setters when a value was supplied.
        if distribution_type is not None:
            self.distribution_type = distribution_type
        if notes is not None:
            self.notes = notes
        # Required fields: always go through the setters, which reject None.
        self.recognition_end = recognition_end
        self.recognition_start = recognition_start
        self.revenue_event = revenue_event

    @property
    def distribution_type(self):
        """Gets the distribution_type of this POSTRevenueScheduleByDateRangeType.  # noqa: E501

        How you want to distribute the revenue.  One of:

        * ``Daily Distribution``: evenly across each day between the
          recognitionStart and recognitionEnd dates.
        * ``Monthly Distribution (Back Load)``: partial-month amount placed
          at the end only.
        * ``Monthly Distribution (Front Load)``: partial-month amount placed
          at the beginning only.
        * ``Monthly Distribution (Proration by Days)``: amount split between
          the two partial months.

        **Note:** The Monthly Distribution options require the "Monthly
        recognition over time" model enabled in **Settings > Finance >
        Manage Revenue Recognition Models** in the Zuora UI.

        :return: The distribution_type of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :rtype: str
        """
        return self._distribution_type

    @distribution_type.setter
    def distribution_type(self, distribution_type):
        """Sets the distribution_type of this POSTRevenueScheduleByDateRangeType.

        See the property docstring for the meaning of each value.

        :param distribution_type: The distribution_type of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :type: str
        :raises ValueError: If the value is not one of the allowed values.
        """
        allowed_values = ["Daily Distribution", "Monthly Distribution (Back Load)", "Monthly Distribution (Front Load)", "Monthly Distribution (Proration by Days)"]  # noqa: E501
        # None means "not set": the field is optional (mirrors __init__,
        # which skips assignment when no value is provided).
        if distribution_type is not None and distribution_type not in allowed_values:
            raise ValueError(
                "Invalid value for `distribution_type` ({0}), must be one of {1}"  # noqa: E501
                .format(distribution_type, allowed_values)
            )
        self._distribution_type = distribution_type

    @property
    def notes(self):
        """Gets the notes of this POSTRevenueScheduleByDateRangeType.  # noqa: E501

        Additional information about this record.  # noqa: E501

        :return: The notes of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :rtype: str
        """
        return self._notes

    @notes.setter
    def notes(self, notes):
        """Sets the notes of this POSTRevenueScheduleByDateRangeType.

        Additional information about this record.  # noqa: E501

        :param notes: The notes of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :type: str
        """
        self._notes = notes

    @property
    def recognition_end(self):
        """Gets the recognition_end of this POSTRevenueScheduleByDateRangeType.  # noqa: E501

        The end date of a recognition period in `yyyy-mm-dd` format.  The
        maximum difference between the `recognitionStart` and
        `recognitionEnd` date fields is equal to 250 multiplied by the
        length of an accounting period.  # noqa: E501

        :return: The recognition_end of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :rtype: date
        """
        return self._recognition_end

    @recognition_end.setter
    def recognition_end(self, recognition_end):
        """Sets the recognition_end of this POSTRevenueScheduleByDateRangeType.

        :param recognition_end: The recognition_end of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :type: date
        :raises ValueError: If the value is ``None`` (the field is required).
        """
        if recognition_end is None:
            raise ValueError("Invalid value for `recognition_end`, must not be `None`")  # noqa: E501
        self._recognition_end = recognition_end

    @property
    def recognition_start(self):
        """Gets the recognition_start of this POSTRevenueScheduleByDateRangeType.  # noqa: E501

        The start date of a recognition period in `yyyy-mm-dd` format.  If
        there is a closed accounting period between the `recognitionStart`
        and `recognitionEnd` dates, the revenue that would be placed in the
        closed accounting period is instead placed in the next open
        accounting period.  # noqa: E501

        :return: The recognition_start of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :rtype: date
        """
        return self._recognition_start

    @recognition_start.setter
    def recognition_start(self, recognition_start):
        """Sets the recognition_start of this POSTRevenueScheduleByDateRangeType.

        :param recognition_start: The recognition_start of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :type: date
        :raises ValueError: If the value is ``None`` (the field is required).
        """
        if recognition_start is None:
            raise ValueError("Invalid value for `recognition_start`, must not be `None`")  # noqa: E501
        self._recognition_start = recognition_start

    @property
    def revenue_event(self):
        """Gets the revenue_event of this POSTRevenueScheduleByDateRangeType.  # noqa: E501

        :return: The revenue_event of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :rtype: POSTRevenueScheduleByDateRangeTypeRevenueEvent
        """
        return self._revenue_event

    @revenue_event.setter
    def revenue_event(self, revenue_event):
        """Sets the revenue_event of this POSTRevenueScheduleByDateRangeType.

        :param revenue_event: The revenue_event of this POSTRevenueScheduleByDateRangeType.  # noqa: E501
        :type: POSTRevenueScheduleByDateRangeTypeRevenueEvent
        :raises ValueError: If the value is ``None`` (the field is required).
        """
        if revenue_event is None:
            raise ValueError("Invalid value for `revenue_event`, must not be `None`")  # noqa: E501
        self._revenue_event = revenue_event

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are serialized
        recursively, including models stored inside lists and dicts.
        """
        result = {}

        # dict.items() behaves identically on Python 2 and 3 here, so the
        # previous six.iteritems() indirection is unnecessary.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, POSTRevenueScheduleByDateRangeType):
            return False
        # Field-by-field comparison via the instance dicts.
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| [
"jairo.velasco@alertlogic.com"
] | jairo.velasco@alertlogic.com |
a35a19b9d67715a726dacdfc4e9dc71bcf0d3f70 | 13f5984be7be77852e4de29ab98d5494a7fc6767 | /100Cases/51-100/num52按位或.py | 7b1d3c7885a6b8ca4068d3022d343b79d8057e73 | [] | no_license | YuanXianguo/Python-Interview-Master | 4252514763fc3f563d9b94e751aa873de1719f91 | 2f73786e8c51dbd248341559de171e18f67f9bf2 | refs/heads/master | 2020-11-26T18:14:50.190812 | 2019-12-20T02:18:03 | 2019-12-20T02:18:03 | 229,169,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | """程序分析:0|0=0; 0|1=1; 1|0=1; 1|1=1"""
# Bitwise OR truth table: 0|0=0, 0|1=1, 1|0=1, 1|1=1.

# Octal literal 0o77 equals decimal 63, i.e. binary 0b111111.
a = 0o77
a_d = 63          # decimal form of `a`, kept for reference
a_b = 0b111111    # binary form of `a`, kept for reference

b = 3
b_b = 0b11        # binary form of `b`, kept for reference

# 0b111111 | 0b000011 == 0b111111 -- OR-ing in bits that are already
# set leaves the value unchanged.
num = a | b
num_b = 0b111111  # expected result written as a binary literal
print(num_b, num)

d = 7
d_b = 0b111       # binary form of `d`, kept for reference
# 0b111111 | 0b000111 == 0b111111 again: every bit of `d` is already set.
num_ = num | d
num_b_ = 0b111111
print(num_b_, num_)
| [
"736913978@qq.com"
] | 736913978@qq.com |
072ffb1a0289a7bf669e19f91d43bfb644b269de | d3c518b69525c04022ff76c583b4c31aae1e4295 | /tests/func/grammar/test_special_grammars.py | b04bfc98a47e74b30dfbc9f3986e1c5b5948384c | [
"Python-2.0",
"MIT"
] | permissive | boriel/parglare | 6714ed8c9c52b174f8c7fdf0bb986446ad2d55d9 | 74a6d98b6e510ae3c814c517924796c5dccefae0 | refs/heads/master | 2023-01-28T09:15:44.402669 | 2020-12-08T10:47:22 | 2020-12-08T10:47:22 | 103,997,403 | 0 | 0 | MIT | 2020-12-08T10:47:23 | 2017-09-18T22:13:24 | Python | UTF-8 | Python | false | false | 7,118 | py | # -*- coding: utf-8 -*-
"""
Test non-deterministic parsing.
"""
import pytest # noqa
import sys
from parglare import Parser, GLRParser, Grammar, SLR, LALR
from parglare.exceptions import ParseError, SRConflicts, RRConflicts
def test_lr_1_grammar():
    """LR(1) grammar from Knuth's 1965 paper "On the Translation of
    Languages from Left to Right".
    """
    grammar_def = """
    S: 'a' A 'd' | 'b' A 'd';
    A: 'c' A | 'c';
    """
    lang = Grammar.from_string(grammar_def)

    # The deterministic LR parser accepts both sentence forms.
    lr_parser = Parser(lang)
    lr_parser.parse("acccccccccd")
    lr_parser.parse("bcccccccccd")

    # GLR yields exactly one derivation per input -> the grammar is
    # unambiguous.
    glr_parser = GLRParser(lang)
    assert len(glr_parser.parse("accccccccd")) == 1
    assert len(glr_parser.parse("bccccccccd")) == 1
def test_slr_conflict():
    """
    Unambiguous grammar (from the Dragon Book) that is not SLR(1):
    SLR tables produce a shift/reduce conflict while LALR tables do not.
    """
    g = Grammar.from_string("""
    S: L '=' R | R;
    L: '*' R | 'id';
    R: L;
    """)

    # SLR(1) table construction must fail with a S/R conflict.
    with pytest.raises(SRConflicts):
        Parser(g, tables=SLR, prefer_shifts=False)

    # LALR(1) tables handle the same grammar without conflicts.
    Parser(g, tables=LALR, prefer_shifts=False)
def test_lalr_reduce_reduce_conflict():
    """
    Dragon Book grammar where naive LALR state merging would introduce a
    reduce/reduce conflict.  parglare uses an extended LALR state
    compression algorithm that avoids the problem, so table construction
    must succeed.
    """
    g = Grammar.from_string("""
    S: 'a' A 'd' | 'b' B 'd' | 'a' B 'e' | 'b' A 'e';
    A: C;
    B: C;
    C: 'c';
    """)

    # Must build without raising RRConflicts.
    Parser(g)
def test_nondeterministic_LR_raise_error():
    """Even-length palindromes over {0, 1}: a non-deterministic but
    unambiguous language.

    A deterministic LR parser implicitly prefers shifting over reducing
    EMPTY, so it consumes the whole input without ever guessing the
    middle of the palindrome and fails with a ParseError.

    GLR forks at every choice point, trying both the empty reduction and
    the shift; only the fork that reduced EMPTY exactly at the middle of
    the input survives, giving a single result.
    """
    g = Grammar.from_string("""
    S: A | B | EMPTY;
    A: '1' S '1';
    B: '0' S '0';
    """)

    with pytest.raises(ParseError):
        Parser(g).parse('0101000110001010')

    forest = GLRParser(g).parse('0101000110001010')
    assert len(forest) == 1
def test_cyclic_grammar_1():
    """
    Cyclic grammar from the paper "GLR Parsing for e-Grammers" by Rahman
    Nozohoor-Farshi.
    """
    g = Grammar.from_string("""
    S: A;
    A: S;
    A: 'x';
    """)

    # Deterministic LR construction hits a shift/reduce conflict.
    with pytest.raises(SRConflicts):
        Parser(g, prefer_shifts=False)

    # GLR copes with the cycle: x -> A -> S is the single derivation.
    assert len(GLRParser(g).parse('x')) == 1
@pytest.mark.skipif(sys.version_info < (3, 6),
                    reason="list comparison doesn't work "
                           "correctly in pytest 4.1")
def test_cyclic_grammar_2():
    """
    Cyclic grammar from the paper "GLR Parsing for e-Grammers" by Rahman
    Nozohoor-Farshi.
    """
    g = Grammar.from_string("""
    S: S S;
    S: 'x';
    S: EMPTY;
    """)

    # Deterministic LR construction fails with shift/reduce conflicts.
    with pytest.raises(SRConflicts):
        Parser(g, prefer_shifts=False)

    results = GLRParser(g).parse('xx')

    # All 11 valid derivations of 'xx' under this grammar.
    assert len(results) == 11
    expected = [
        ['x', 'x'],
        [[[], 'x'], 'x'],
        [[[], [[], 'x']], 'x'],
        ['x', [[], 'x']],
        [[[], 'x'], [[], 'x']],
        [[], ['x', 'x']],
        [[], [[], ['x', 'x']]],
        ['x', [[], 'x']],
        [[[], 'x'], [[], 'x']],
        [[[], [[], 'x']], [[], 'x']],
        [[], [[[], 'x'], 'x']]
    ]
    assert results == expected
@pytest.mark.skipif(sys.version_info < (3, 6),
                    reason="list comparison doesn't work "
                           "correctly in pytest 4.1")
def test_cyclic_grammar_3():
    """
    Grammar with an indirect cycle:
    r:EMPTY->A ; r:A->S; r:EMPTY->A; r:SA->S; r:EMPTY->A; r:SA->S;...
    """
    g = Grammar.from_string("""
    S: S A | A;
    A: "a" | EMPTY;
    """)

    # Deterministic LR table construction succeeds for this grammar.
    Parser(g)

    results = GLRParser(g).parse('aa')

    # Exactly two derivations of 'aa'.
    assert len(results) == 2
    expected = [
        ['a', 'a'],
        [[[], 'a'], 'a']
    ]
    assert results == expected
def test_highly_ambiguous_grammar():
    """
    Grammar with both shift/reduce and reduce/reduce conflicts.

    The prefer_shifts strategy resolves the S/R conflicts, but the R/R
    conflicts remain, so no deterministic LR parser can be constructed.
    GLR handles the grammar by exploring every derivation.
    """
    g = Grammar.from_string("""
    S: "b" | S S | S S S;
    """)

    # Without prefer_shifts, S/R conflicts abort table construction.
    with pytest.raises(SRConflicts):
        Parser(g, prefer_shifts=False)

    # With prefer_shifts the R/R conflicts still remain.
    with pytest.raises(RRConflicts):
        Parser(g, prefer_shifts=True)

    glr = GLRParser(g, build_tree=True)

    # 3 tokens -> 3 valid derivations/trees; 4 tokens -> 10 derivations.
    assert len(glr.parse("bbb")) == 3
    assert len(glr.parse("bbbb")) == 10
def test_indirect_left_recursive():
    """Grammar with indirect/hidden left recursion.

    The deterministic LR parser disambiguates implicitly by preferring
    shifts over empty reductions: it greedily shifts every "b" token,
    reduces EMPTY just before "a", and then reduces outward by the
    'B="b" B' production.
    """
    g = Grammar.from_string("""
    S: B "a";
    B: "b" B | EMPTY;
    """)

    # Deterministic parse succeeds thanks to the shift preference.
    Parser(g).parse("bbbbbbbbbbbba")

    # GLR finds the single valid derivation.
    assert len(GLRParser(g).parse("bbbbbbbbbbbba")) == 1
def test_reduce_enough_empty():
    """
    Unambiguous grammar that needs unlimited lookahead: to finish
    successfully the parser must reduce exactly as many empty A
    productions as there are "b" tokens still ahead.

    The language is: xb^n, n>=0

    References:

    Nozohoor-Farshi, Rahman: "GLR Parsing for ε-Grammers", Generalized LR
    parsing, Springer, 1991.

    Rekers, Joan Gerard: "Parser generation for interactive environments",
    phD thesis, Universiteit van Amsterdam, 1992.
    """
    g = Grammar.from_string("""
    S: A S "b";
    S: "x";
    A: EMPTY;
    """)

    # GLR finds the single valid derivation.
    assert len(GLRParser(g).parse("xbbb")) == 1
def test_reduce_enough_many_empty():
    """
    A generalization of the previous grammar where the parser must reduce
    enough A B pairs to succeed.
    The language is the same: xb^n, n>=0
    """
    grammar = """
    S: A B S "b";
    S: "x";
    A: EMPTY;
    B: EMPTY;
    """
    g = Grammar.from_string(grammar)
    # The grammar is unambiguous, so exactly one derivation is expected.
    forest = GLRParser(g).parse("xbbb")
    assert len(forest) == 1
| [
"igor.dejanovic@gmail.com"
] | igor.dejanovic@gmail.com |
861c1152d96532898f9e8f77ce6d44266c661726 | 8500de86f864e60856f4af17cfc7f620fd0e0ec9 | /test/counts_table/initialize.py | e8408b6b3e86675fc8984e895ded9060e1ef6026 | [
"MIT"
] | permissive | kaladharprajapati/singlet | c813ba7aa3d078659ef7868299b7093df207c6fc | 314ba68cbecebbda2806afdc97fdf6ac6f6c672e | refs/heads/master | 2020-05-18T06:58:04.697593 | 2019-02-15T15:50:59 | 2019-02-15T15:50:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | #!/usr/bin/env python
# vim: fdm=indent
'''
author: Fabio Zanini
date: 15/08/17
content: Test CountsTable class.
'''
# Script
if __name__ == '__main__':
    # NOTE: an env variable for the config file needs to be set when
    # calling this script
    # Smoke test: build a CountsTable from the 'example_table_tsv' entry
    # declared in the singlet configuration and report success.
    print('Instantiating CountsTable')
    from singlet.counts_table import CountsTable
    ct = CountsTable.from_tablename('example_table_tsv')
    print('Done!')
| [
"fabio.zanini@fastmail.fm"
] | fabio.zanini@fastmail.fm |
88bdbcb1399726e566d59184082756a59e84d1fb | 609d5408f302c9188b723998762c2c1f7b883af9 | /.closet/jython.configurator.efr32/1.0.0.201606231656-435/pyradioconfig/parts/jumbo/profiles/__init__.py | 7450887a42996decd085e0a7530498b61cf25be7 | [] | no_license | acvilla/Sundial-Beta | 6ea4fd44cbf7c2df8100128aff5c39b6faf24a82 | 9f84e3b5a1397998dfea5287949fa5b1f4c209a6 | refs/heads/master | 2021-01-15T15:36:19.394640 | 2016-08-31T20:15:16 | 2016-08-31T20:15:16 | 63,294,451 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,740 | py | """
Dumbo specific Profiles
How to add a new Profile:
--------------------------
* Add a new Python file in this directory (example: Profile_[Name].py)
* Inside the Py file, create a class that implements IProfile:
>>> class Profile_Base(IProfile):
* Implement/override buildProfileModel() function. This function builds the profile inputs, forced, outputs into modem model.
Example:
>>> def buildProfileModel(self, model):
>>> # Build profile
>>> profile = self._makeProfile(model)
>>>
>>> profile.inputs.append(ModelInput(model.vars.xtal_frequency_hz, "crystal", input_type=ModelInputType.REQUIRED, readable_name="Crystal Frequency", value_limit_min=38000000, value_limit_max=40000000))
>>> profile.inputs.append(ModelInput(model.vars.rx_xtal_error_ppm, "crystal", input_type=ModelInputType.REQUIRED, readable_name="RX Crystal Accuracy", value_limit_min=0, value_limit_max=200))
>>>
>>> # Intermediate values
>>> self.make_linked_input_output(profile, model.vars.timing_detection_threshold , 'Advanced', readable_name='Timing Detection Threshold', value_limit_min=0, value_limit_max=255)
>>>
>>> # Output fields
>>> profile.outputs.append(ModelOutput(model.vars.SYNTH_CTRL_PRSMUX1 , '', ModelOutputType.SVD_REG_FIELD, readable_name='SYNTH.CTRL.PRSMUX1' ))
>>>
>>> return profile
"""
import os
import glob
modules = glob.glob(os.path.dirname(__file__)+"/*.py")
if len(modules) == 0:
modules = glob.glob(os.path.dirname(__file__)+"/*.pyc")
__all__ = [ os.path.basename(f)[:-3] for f in modules] | [
"acvilla@bu.edu"
] | acvilla@bu.edu |
dbd6e34e75a7e79edfad1c60ea67094198683509 | 15e818aada2b18047fa895690bc1c2afda6d7273 | /gs/monitor2/apps/plugins/layouts/gs_layout.py | becc28275206704a061906649bfea056979cc494 | [
"Apache-2.0"
] | permissive | ghomsy/makani | 4ee34c4248fb0ac355f65aaed35718b1f5eabecf | 818ae8b7119b200a28af6b3669a3045f30e0dc64 | refs/heads/master | 2023-01-11T18:46:21.939471 | 2020-11-10T00:23:31 | 2020-11-10T00:23:31 | 301,863,147 | 0 | 0 | Apache-2.0 | 2020-11-10T00:23:32 | 2020-10-06T21:51:21 | null | UTF-8 | Python | false | false | 2,790 | py | # Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Layout to monitor ground station status."""
from makani.gs.monitor2.apps.layout import base
from makani.gs.monitor2.apps.plugins.indicators import aio_comms
from makani.gs.monitor2.apps.plugins.indicators import control
from makani.gs.monitor2.apps.plugins.indicators import gps
from makani.gs.monitor2.apps.plugins.indicators import ground_station
class GsLayout(base.BaseLayout):
  """The ground station layout."""
  # Display name of this layout.
  _NAME = 'Ground Station'
  # Desired number of view columns.
  _DESIRED_VIEW_COLS = 2
  # AIO node whose GPS data feeds the 'GPS' indicator group below.
  _GPS_NODE = 'GpsBaseStation'
  # NOTE(review): presumably controls whether views fill row-first or
  # column-first - confirm against base.BaseLayout.
  _ORDER_HORIZONTALLY = False
  def Initialize(self):
    """Populate the layout with its indicator groups."""
    # Liveness of the AIO message streams consumed by this layout.
    self._AddIndicators('AIO Update', [
        aio_comms.GsCoreSwitchAioUpdateIndicator(),
        aio_comms.GsGpsAioUpdateIndicator(),
        aio_comms.PlatformSensorsAioUpdateIndicator(),
    ])
    self._AddIndicators('Wind', [
        ground_station.WindIndicator(),
        ground_station.WindSensorSpeedIndicator(),
        ground_station.WindSensorStatusIndicator(),
        control.WindStateEstIndicator(),
        ground_station.WeatherSensorIndicator(),
        ground_station.AirDensityIndicator(),
    ])
    self._AddIndicators('GPS', [
        gps.NovAtelNumSatsIndicator(self._GPS_NODE),
        gps.NovAtelCn0Indicator(self._GPS_NODE),
        gps.NovAtelSigmasIndicator(self._GPS_NODE),
        gps.CompassHeadingIndicator(self._GPS_NODE),
    ])
    self._AddIndicators('Winch PLC', [
        ground_station.PerchAzimuthIndicator(),
        ground_station.GsgAzimuthIndicator(['A']),
        ground_station.GsgElevationIndicator(),
        ground_station.PlcStatusIndicator(),
        # TODO: The following indicators will be removed in the future
        # when we test the top head for China Lake.
        ground_station.LevelwindElevationIndicator(),
        ground_station.WinchArmedIndicator(),
        ground_station.DrumStateIndicator(),
        ground_station.WinchProximityIndicator(),
    ])
    # Start a new column for the remaining groups.
    self._AddBreak()
    self._AddIndicators('PLC', [
        ground_station.DetwistArmedIndicator(),
        ground_station.DetwistStatusIndicator(),
        ground_station.DetwistTemperatureIndicator(),
        ground_station.DetwistStatusInfoIndicator(),
        ground_station.Ground480VIndicator(),
    ])
| [
"luislarco@google.com"
] | luislarco@google.com |
911088a4b9e8ea6455ba828f81d588cfeeea396f | 4054fde482f06ba5566ff88ff7c65b7221f4fd91 | /forml/io/asset/_directory/level/minor.py | c29d58e4031c348c2c45e3b39619ef444c9fcc2b | [
"Apache-2.0"
] | permissive | formlio/forml | e54278c2cc76cdfaf9d4feb405bd1a98c6dcd3e6 | 373bf4329338a9056e43966b8cfa458529ed2817 | refs/heads/main | 2023-06-07T21:38:34.952453 | 2023-05-28T21:53:47 | 2023-05-28T21:53:47 | 310,066,051 | 108 | 15 | Apache-2.0 | 2023-05-28T19:38:16 | 2020-11-04T17:04:13 | Python | UTF-8 | Python | false | false | 9,144 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Generic assets directory.
"""
import collections
import datetime
import logging
import operator
import types
import typing
import uuid
import toml
from forml.io import dsl
from ... import _directory, _persistent
if typing.TYPE_CHECKING:
from . import case as prjmod
from . import major as lngmod
LOGGER = logging.getLogger(__name__)
class Tag(collections.namedtuple('Tag', 'training, tuning, states')):
    """Generation metadata.
    Args:
        training: Generation training information.
        tuning: Generation tuning information.
        states: Sequence of state asset IDs.
    """
    class Mode(types.SimpleNamespace):
        """Mode metadata."""
        class Proxy(tuple):
            """Mode attributes proxy."""
            # A Proxy is a 2-tuple of (owning Tag, wrapped Mode); the two
            # properties below simply name those tuple slots.
            _tag = property(operator.itemgetter(0))
            _mode = property(operator.itemgetter(1))
            def __new__(cls, tag: 'Tag', mode: 'Tag.Mode'):
                return super().__new__(cls, (tag, mode))
            def __repr__(self):
                # Represent as the wrapped mode rather than as a raw tuple.
                return f'Mode{repr(self._mode)}'
            def __bool__(self):
                # Truthiness delegates to the wrapped mode.
                return bool(self._mode)
            def __getattr__(self, item):
                # Unknown attribute access falls through to the wrapped mode.
                return getattr(self._mode, item)
            def __eq__(self, other) -> bool:
                # pylint: disable=protected-access
                # NOTE(review): defining __eq__ implicitly sets __hash__ to
                # None, making Proxy unhashable - confirm this is intended.
                return isinstance(other, self.__class__) and self._mode == other._mode
            def replace(self, **kwargs) -> 'Tag':
                """Mode attributes setter.
                Args:
                    **kwargs: Keyword parameters to be set on given mode attributes.
                Returns:
                    New tag instance with new values.
                """
                # Dict union (PEP 584) requires Python 3.9+.
                mode = self._mode.__class__(**self._mode.__dict__ | kwargs)
                # Rebuild the owning Tag with the updated mode swapped in.
                return Tag(**{k: mode if v is self._mode else v for k, v in self._tag._asdict().items()})
            def trigger(self, timestamp: typing.Optional[datetime.datetime] = None) -> 'Tag':
                """Create new tag with given mode triggered (all attributes reset and timestamp set to now).
                Returns:
                    New tag.
                """
                return self.replace(timestamp=(timestamp or datetime.datetime.utcnow()))
        def __init__(self, timestamp: typing.Optional[datetime.datetime], **kwargs: typing.Any):
            super().__init__(timestamp=timestamp, **kwargs)
        def __bool__(self):
            # A mode counts as "set" once it carries a timestamp.
            return bool(self.timestamp)
    class Training(Mode):
        """Training mode attributes."""
        def __init__(
            self, timestamp: typing.Optional[datetime.datetime] = None, ordinal: typing.Optional[dsl.Native] = None
        ):
            super().__init__(timestamp, ordinal=ordinal)
    class Tuning(Mode):
        """Tuning mode attributes."""
        def __init__(self, timestamp: typing.Optional[datetime.datetime] = None, score: typing.Optional[float] = None):
            super().__init__(timestamp, score=score)
    def __new__(
        cls,
        training: typing.Optional[Training] = None,
        tuning: typing.Optional[Tuning] = None,
        states: typing.Optional[typing.Sequence[uuid.UUID]] = None,
    ):
        # Missing modes default to empty (timestamp-less) instances.
        return super().__new__(cls, training or cls.Training(), tuning or cls.Tuning(), tuple(states or []))
    def __bool__(self):
        # A tag is truthy once it has been either trained or tuned.
        return bool(self.training or self.tuning)
    def __getattribute__(self, name: str) -> typing.Any:
        # Wrap Mode attributes in a Proxy so that mode-level replace()/
        # trigger() can produce a whole new Tag.
        attribute = super().__getattribute__(name)
        if isinstance(attribute, Tag.Mode):
            attribute = self.Mode.Proxy(self, attribute)
        return attribute
    def replace(self, **kwargs) -> 'Tag':
        """Replace the given non-mode attributes.
        Args:
            **kwargs: Non-mode attributes to be replaced.
        Returns:
            New tag instance.
        """
        # Only non-mode fields (i.e. 'states') may be replaced here; modes
        # must go through their Proxy.replace().
        if not {k for k, v in self._asdict().items() if not isinstance(v, Tag.Mode)}.issuperset(kwargs.keys()):
            raise ValueError('Invalid replacement')
        return self._replace(**kwargs)
    def dumps(self) -> bytes:
        """Dump the tag into a string of bytes.
        Returns:
            String of bytes representation.
        """
        # NOTE(review): None-valued fields (e.g. an untrained tag) may not
        # round-trip through toml - confirm against loads() expectations.
        return toml.dumps(
            {
                'training': {'timestamp': self.training.timestamp, 'ordinal': self.training.ordinal},
                'tuning': {'timestamp': self.tuning.timestamp, 'score': self.tuning.score},
                'states': [str(s) for s in self.states],
            },
        ).encode('utf-8')
    @classmethod
    def loads(cls, raw: bytes) -> 'Tag':
        """Load the previously dumped tag.
        Args:
            raw: Serialized tag representation to be loaded.
        Returns:
            Tag instance.
        """
        meta = toml.loads(raw.decode('utf-8'))
        # Optional keys absent from the document fall back to None via .get().
        return cls(
            training=cls.Training(timestamp=meta['training']['timestamp'], ordinal=meta['training'].get('ordinal')),
            tuning=cls.Tuning(timestamp=meta['tuning'].get('timestamp'), score=meta['tuning'].get('score')),
            states=(uuid.UUID(s) for s in meta['states']),
        )
# Sentinel "null" tag returned for generations with no persisted metadata.
NOTAG = Tag()
# Cache of generation tags, backed by _persistent.Registry.open.
TAGS = _directory.Cache(_persistent.Registry.open)
# Cache of serialized state payloads, backed by _persistent.Registry.read.
STATES = _directory.Cache(_persistent.Registry.read)
# pylint: disable=unsubscriptable-object; https://github.com/PyCQA/pylint/issues/2822
class Generation(_directory.Level):
    """Snapshot of project states in its particular training iteration."""
    class Key(_directory.Level.Key, int):
        """Project model generation key - i.e. generation *sequence number*.
        This must be a natural integer starting from 1.
        """
        MIN = 1
        def __new__(cls, key: typing.Optional[typing.Union[str, int, 'Generation.Key']] = MIN):
            try:
                # Go through str() so both numeric and string inputs parse.
                instance = super().__new__(cls, str(key))
            except ValueError as err:
                raise cls.Invalid(f'Invalid key {key} (not an integer)') from err
            if instance < cls.MIN:
                raise cls.Invalid(f'Invalid key {key} (not natural)')
            return instance
        @property
        def next(self) -> 'Generation.Key':
            # Key of the generation immediately following this one.
            return self.__class__(self + 1)
    def __init__(
        self, release: 'lngmod.Release', key: typing.Optional[typing.Union[str, int, 'Generation.Key']] = None
    ):
        super().__init__(key, parent=release)
    @property
    def project(self) -> 'prjmod.Project':
        """Get the project of this generation.
        Returns:
            Project of this generation.
        """
        return self.release.project
    @property
    def release(self) -> 'lngmod.Release':
        """Get the release key of this generation.
        Returns:
            Release key of this generation.
        """
        return self._parent
    @property
    def tag(self) -> 'Tag':
        """Generation metadata. In case of implicit generation and empty release this returns a "null" tag (a Tag object
        with all fields empty).
        Returns:
            Generation tag (metadata) object.
        """
        # project/release must exist so let's fetch it outside of try-except
        project = self.project.key
        release = self.release.key
        try:
            generation = self.key
        except self.Listing.Empty: # generation doesn't exist
            LOGGER.debug('No previous generations found - using a null tag')
            return NOTAG
        return TAGS(self.registry, project, release, generation)
    def list(self) -> _directory.Level.Listing:
        """Return the listing of this level.
        Returns:
            Level listing.
        """
        # A generation's listing is the sequence of its state asset IDs.
        return self.Listing(self.tag.states)
    def get(self, key: typing.Union[uuid.UUID, int]) -> bytes:
        """Load the state based on provided id or positional index.
        Args:
            key: Index or absolute id of the state object to be loaded.
        Returns:
            Serialized state.
        """
        if not self.tag.training:
            # An untrained generation has no states to offer.
            return b''
        if isinstance(key, int):
            # Positional index - translate into the absolute state ID.
            key = self.tag.states[key]
        if key not in self.tag.states:
            raise Generation.Invalid(f'Unknown state reference for {self}: {key}')
        LOGGER.debug('%s: Getting state %s', self, key)
        return STATES(self.registry, self.project.key, self.release.key, self.key, key)
| [
"antonymayi@yahoo.com"
] | antonymayi@yahoo.com |
28f1a3340bb89c7960f2c5837f353de0bef683ad | 6a087c6fb00ba91f815f997450306a3fac020a8b | /ALL/test用车管理.py | 0f11649d1543fe37e76cb14152b7d9d7eb3c6ee9 | [] | no_license | anghu3/xizangbianfang | cd2037af5e06cc558bf3ef9ff145e0c33495139b | f4f35195963017c22bd4875853ef31f280f4b6a8 | refs/heads/master | 2020-03-28T22:31:06.591507 | 2018-12-14T07:36:47 | 2018-12-14T07:36:47 | 149,238,712 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,159 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 21 14:11:17 2018
@author: PCCC
"""
import unittest
from selenium import webdriver
import selenium.webdriver.support.ui as ui
from selenium.webdriver.support.ui import Select
import time
import os
import re
from public_package.pubilc_package import url,login_name,login_name_test,login_password,login_password_test
from public_package.pubilc_package import TESTCASE
import HTMLTestRunner
'''
用例名称:
用例编号:
用例场景:
用例作者:
'''
class TESTCAST_YONGCHEGUANLI(TESTCASE):
    """UI regression tests for the duty-management vehicle-usage (用车管理) page: add, search, edit and delete flows."""
    def setUp(self):
        # Each test runs in its own maximized Chrome session.
        self.dr = webdriver.Chrome()
        self.dr.maximize_window()
    def tearDown(self):
        # print("脚本执行完成")
        self.dr.quit()
    def login(self, username, password):
        """Open the site and log in with the given credentials."""
        self.dr.get(url)
        self.dr.find_element_by_id('vv').send_keys(username)
        self.dr.find_element_by_xpath('//*[@id="login_ff"]/div[2]/input').send_keys(password)
        self.dr.find_element_by_xpath('//*[@id="login_ff"]/a').click()
    def yongcheguanli_search(self):
        """Log in and navigate to the vehicle-usage list inside frame 'iframeb'."""
        self.login(login_name, login_password)
        self.dr.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div/div/div/div/div/a[2]/div[2]/img[2]').click()
        time.sleep(5)
        self.assertEqual('勤务管理',self.dr.find_element_by_xpath('//*[@id="currMenu"]').text, '勤务管理')
        self.dr.find_element_by_xpath('/html/body/div[1]/div/div[3]/div[2]/div/ul/li/p[2]').click()
        self.dr.find_element_by_xpath('//*[@id="585"]').click()
        self.dr.switch_to.frame('iframeb')
        time.sleep(5)
        self.assertEqual('用车管理列表', self.dr.find_element_by_xpath('/html/body/div[1]/div').text,
                         '用车管理')
    def test1_yongcheguanli_add(self):
        """Create a vehicle-usage record and verify it appears first in the list."""
        self.yongcheguanli_search()
        add_value_place='拉萨贡嘎国际机场'
        self.dr.find_element_by_xpath('/html/body/div[3]/div[1]/div[2]/a[2]').click()
        self.dr.find_element_by_xpath('//*[@id="useCarDate"]').click()
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[1]/div/div[2]/ul/li[2]/table/tbody/tr/td[2]/a/span').click()
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[2]/div/input').send_keys(add_value_place)
        Select(self.dr.find_element_by_xpath('//*[@id="carNo"]')).select_by_value('藏A02453')
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[5]/div/input').click()
        time.sleep(1)
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[5]/div/input').clear()
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[5]/div/input').send_keys('2018-09-11 08:30:00')
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[5]/div/div[2]/ul/li[2]/table/tbody/tr/td[2]/a/span').click()
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[6]/div/input').click()
        time.sleep(1)
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[6]/div/input').clear()
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[6]/div/input').send_keys('2018-09-11 18:00:00')
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[6]/div/div[2]/ul/li[2]/table/tbody/tr/td[2]/a/span').click()
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[2]/div[1]/div/input').send_keys('四名武警')
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[2]/div[2]/div/textarea').send_keys('城关区武警中队-山南市贡嘎县甲竹林镇。')
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[3]/div[1]/div/input').send_keys('马汉')
        Select(self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[3]/div[2]/div/select')).select_by_value('包拯')
        self.dr.find_element_by_xpath('//*[@id="save"]').click()
        self.dr.implicitly_wait(5)
        self.dr.find_element_by_xpath('/html/body/a').click()
        self.dr.implicitly_wait(2)
        self.assertEqual('藏A02453',self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[5]').text,'校验新增、返回和默认排序')
        print('勤务管理-用车管理:新增功能正常')
    def test2_yongcheguanli_search_place(self):
        """Filter by trip place; verify every page, the detail form and back navigation."""
        self.yongcheguanli_search()
        search_value_place='拉萨贡嘎国际机场'
        self.dr.find_element_by_xpath('//*[@id="form"]/div[1]/div/input').send_keys(search_value_place)
        self.dr.find_element_by_xpath('//*[@id="search"]').click()
        self.dr.switch_to.default_content()
        time.sleep(3)
        self.dr.switch_to.frame('iframeb')
        paginal_number = self.dr.find_element_by_xpath('/html/body/div[3]/div[2]/div/div[4]/div[1]/span[1]').text
        column = 4
        # pagination_num is inherited from TESTCASE; presumably it walks all
        # result pages asserting the given column's value - TODO confirm.
        self.pagination_num(paginal_number, search_value_place, column)
        self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr/td[11]/a').click()
        self.assertEqual(search_value_place,self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[2]/div/input').get_attribute('value'),'校验详情页面出行地点')
        self.dr.implicitly_wait(5)
        self.dr.find_element_by_xpath('/html/body/a').click()
        self.dr.implicitly_wait(2)
        self.assertEqual(search_value_place,self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[4]').text,'校验返回和默认排序')
        print('勤务管理-用车管理:出行地点条件查询功能正常')
    def test3_yongcheguanli_search_carNo(self):
        """Filter by police-car number; verify list column 5 and the detail form."""
        self.yongcheguanli_search()
        search_value_carNo='藏A02453'
        self.dr.find_element_by_xpath('//*[@id="form"]/div[2]/div/input').send_keys(search_value_carNo)
        self.dr.find_element_by_xpath('//*[@id="search"]').click()
        self.dr.switch_to.default_content()
        time.sleep(3)
        self.dr.switch_to.frame('iframeb')
        paginal_number = self.dr.find_element_by_xpath('/html/body/div[3]/div[2]/div/div[4]/div[1]/span[1]').text
        column = 5
        self.pagination_num(paginal_number, search_value_carNo, column)
        self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr/td[11]/a').click()
        self.assertEqual(search_value_carNo,self.dr.find_element_by_xpath('//*[@id="carNo"]').get_attribute('value'),'校验详情页面警车编号')
        self.dr.implicitly_wait(5)
        self.dr.find_element_by_xpath('/html/body/a').click()
        self.dr.implicitly_wait(2)
        self.assertEqual(search_value_carNo,self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[5]').text,'校验返回和默认排序')
        print('勤务管理-用车管理:警车编号条件查询功能正常')
    def test4_yongcheguanli_search_carType(self):
        """Filter by car model/type; verify list column 6 and the detail form."""
        self.yongcheguanli_search()
        search_value_carType='巡逻车'
        self.dr.find_element_by_xpath('//*[@id="form"]/div[3]/div/input').send_keys(search_value_carType)
        self.dr.find_element_by_xpath('//*[@id="search"]').click()
        self.dr.switch_to.default_content()
        time.sleep(3)
        self.dr.switch_to.frame('iframeb')
        paginal_number = self.dr.find_element_by_xpath('/html/body/div[3]/div[2]/div/div[4]/div[1]/span[1]').text
        column = 6
        self.pagination_num(paginal_number, search_value_carType, column)
        self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr/td[11]/a').click()
        self.assertEqual(search_value_carType,self.dr.find_element_by_xpath('//*[@id="carType"]').get_attribute('value'),'校验详情页面警车型号')
        self.dr.implicitly_wait(5)
        self.dr.find_element_by_xpath('/html/body/a').click()
        self.dr.implicitly_wait(2)
        self.assertEqual(search_value_carType,self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[6]').text,'校验返回和默认排序')
        print('勤务管理-用车管理:警车型号条件查询功能正常')
    def test5_yongcheguanli_edit(self):
        """Edit a record's trip place and car number; verify the changes persist."""
        self.yongcheguanli_search()
        search_value_carNo='藏A02453'
        self.dr.find_element_by_xpath('//*[@id="form"]/div[2]/div/input').send_keys(search_value_carNo)
        self.dr.find_element_by_xpath('//*[@id="search"]').click()
        self.dr.switch_to.default_content()
        time.sleep(3)
        self.dr.switch_to.frame('iframeb')
        paginal_number = self.dr.find_element_by_xpath('/html/body/div[3]/div[2]/div/div[4]/div[1]/span[1]').text
        column = 5
        self.pagination_num(paginal_number, search_value_carNo, column)
        self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr/td[11]/a').click()
        edit_value_place='贡嘎国际机场'
        edit_value_carNo='藏A24567'
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[2]/div/input').clear()
        self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[2]/div/input').send_keys(edit_value_place)
        Select(self.dr.find_element_by_xpath('//*[@id="carNo"]')).select_by_value('藏A24567')
        self.dr.find_element_by_xpath('//*[@id="save"]').click()
        time.sleep(3)
        self.assertEqual(edit_value_place,self.dr.find_element_by_xpath('//*[@id="usecarForm"]/div[1]/div[2]/div/input').get_attribute('value'),'校验编辑功能')
        self.dr.implicitly_wait(5)
        self.dr.find_element_by_xpath('/html/body/a').click()
        self.dr.implicitly_wait(2)
        self.assertEqual(edit_value_carNo,self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[5]').text,'校验编辑返回和默认排序')
        self.assertEqual(edit_value_place,self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[4]').text,'校验编辑返回和默认排序')
        print('勤务管理-用车管理:编辑功能正常')
    def test6_yongcheguanli_delete(self):
        """Select all records matching a place filter, delete them and verify the empty list."""
        self.yongcheguanli_search()
        search_value_place='贡嘎国际机场'
        self.dr.find_element_by_xpath('//*[@id="form"]/div[1]/div/input').send_keys(search_value_place)
        self.dr.find_element_by_xpath('//*[@id="search"]').click()
        self.dr.switch_to.default_content()
        time.sleep(3)
        self.dr.switch_to.frame('iframeb')
        paginal_number = self.dr.find_element_by_xpath('/html/body/div[3]/div[2]/div/div[4]/div[1]/span[1]').text
        column = 4
        self.pagination_num(paginal_number, search_value_place, column)
        self.dr.find_element_by_xpath('//*[@id="list"]/thead/tr/th[1]/div[1]/input').click()
        self.dr.find_element_by_xpath('/html/body/div[3]/div[1]/div[2]/a[1]').click()
        # The delete-confirmation dialog lives in the top-level document.
        self.dr.switch_to.default_content()
        time.sleep(2)
        self.dr.find_element_by_xpath('/html/body/div[3]/div[3]/div/button[2]/span').click()
        time.sleep(3)
        self.dr.switch_to.frame('iframeb')
        self.assertEqual('没有找到匹配的记录', self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr/td').text, '校验删除是否成功')
        print('勤务管理-用车管理:删除功能正常')
if __name__=='__main__':
unittest.main() | [
"935331858@qq.com"
] | 935331858@qq.com |
b38ea4d4abae4e15f95f4f20b561b5ece41b2d6a | 108034973f9046a7603d5fe3f26c59b20a7e68da | /lab/lab05/lab05.py | d7a06219e929cbbf620509d6269d6ca247e50d07 | [] | no_license | paulhzq/cs61a | b1b1387cefbaaf1823c02d535891db7d085f3b04 | 9eee13df9ad113591dc55d106561951cea34abc5 | refs/heads/master | 2020-05-23T08:16:14.193086 | 2017-01-15T02:06:18 | 2017-01-15T02:06:18 | 70,255,875 | 8 | 8 | null | null | null | null | UTF-8 | Python | false | false | 3,704 | py | ## Lab 5: Mutable Sequences and Trees ##
# Sequences
def map(fn, seq):
    """Applies fn onto each element in seq and returns a list.
    >>> map(lambda x: x*x, [1, 2, 3])
    [1, 4, 9]
    """
    # Accumulate the transformed elements with an explicit loop.
    mapped = []
    for elem in seq:
        mapped.append(fn(elem))
    return mapped
def filter(pred, seq):
    """Keeps elements in seq only if they satisfy pred.
    >>> filter(lambda x: x % 2 == 0, [1, 2, 3, 4])
    [2, 4]
    """
    # Collect only the elements the predicate accepts.
    kept = []
    for elem in seq:
        if pred(elem):
            kept.append(elem)
    return kept
def reduce(combiner, seq):
    """Combines elements in seq using combiner.
    >>> reduce(lambda x, y: x + y, [1, 2, 3, 4])
    10
    >>> reduce(lambda x, y: x * y, [1, 2, 3, 4])
    24
    >>> reduce(lambda x, y: x * y, [4])
    4
    """
    # Seed the accumulator with the first element, then fold in the rest
    # left-to-right. Requires a non-empty seq, as in the doctests.
    result = seq[0]
    for item in seq[1:]:
        result = combiner(result, item)
    return result
# pyTunes
def make_pytunes(username):
    """Return a pyTunes tree as shown in the diagram with USERNAME as the value
    of the root.
    >>> pytunes = make_pytunes('i_love_music')
    >>> print_tree(pytunes)
    i_love_music
      pop
        justin bieber
          single
            what do you mean?
        2015 pop mashup
      trance
        darude
          sandstorm
    """
    # Assemble the catalog bottom-up from named subtrees for readability.
    single = tree('single', [tree('what do you mean?')])
    pop = tree('pop', [tree('justin bieber', [single]), tree('2015 pop mashup')])
    trance = tree('trance', [tree('darude', [tree('sandstorm')])])
    return tree(username, [pop, trance])
def num_songs(t):
    """Return the number of songs in the pyTunes tree, t.
    >>> pytunes = make_pytunes('i_love_music')
    >>> num_songs(pytunes)
    3
    """
    # Leaves are songs; internal nodes are categories, so count leaves.
    if is_leaf(t):
        return 1
    total = 0
    for subtree in branches(t):
        total += num_songs(subtree)
    return total
def add_song(t, song, category):
    """Returns a new tree with SONG added to CATEGORY. Assume the CATEGORY
    already exists.
    >>> indie_tunes = tree('indie_tunes',
    ...                  [tree('indie',
    ...                    [tree('vance joy',
    ...                      [tree('riptide')])])])
    >>> new_indie = add_song(indie_tunes, 'georgia', 'vance joy')
    >>> print_tree(new_indie)
    indie_tunes
      indie
        vance joy
          riptide
          georgia
    """
    if root(t) == category:
        # Found the category: append the new song as an extra leaf.
        return tree(root(t), branches(t) + [tree(song)])
    # Otherwise rebuild this node, inserting the song somewhere below.
    rebuilt = []
    for subtree in branches(t):
        rebuilt.append(add_song(subtree, song, category))
    return tree(root(t), rebuilt)
# Tree ADT
def tree(root, branches=[]):
    """Construct a tree from a root label and a list of branch trees."""
    # Validate that every branch is itself a well-formed tree.
    for b in branches:
        assert is_tree(b), 'branches must be trees'
    return [root] + list(branches)
def root(tree):
    """Return the root label of ``tree`` (index 0 of the list form)."""
    return tree[0]
def branches(tree):
    """Return the list of branch subtrees (everything after the root label)."""
    return tree[1:]
def is_tree(tree):
    """Return True if ``tree`` is a non-empty list whose tail is all trees."""
    if type(tree) != list or len(tree) < 1:
        return False
    # Every branch must recursively satisfy the tree invariant.
    return all(is_tree(branch) for branch in branches(tree))
def is_leaf(tree):
    """Return True if ``tree`` has no branches."""
    return branches(tree) == []
def print_tree(t, indent=0):
    """Print a representation of this tree in which each node is
    indented by two spaces times its depth from the entry.
    >>> print_tree(tree(1))
    1
    >>> print_tree(tree(1, [tree(2)]))
    1
      2
    >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])])
    >>> print_tree(numbers)
    1
      2
      3
        4
        5
      6
        7
    """
    print(' ' * indent + str(root(t)))
    # Children are printed one level deeper than their parent.
    for subtree in branches(t):
        print_tree(subtree, indent + 1)
def copy_tree(t):
    """Returns a copy of t. Only for testing purposes.
    >>> t = tree(5)
    >>> copy = copy_tree(t)
    >>> t = tree(6)
    >>> print_tree(copy)
    5
    """
    copied_branches = [copy_tree(subtree) for subtree in branches(t)]
    return tree(root(t), copied_branches)
"paul_hzq@hotmail.com"
] | paul_hzq@hotmail.com |
58feed308c72cb6c93a22077050cffeedb86fe77 | ee5196e60f2aff4c5756ad3afd641969ce6e75a1 | /swagger_client/api/mailboxes_api.py | ab271c60a6a70c66b3c628ef236fa2e2492a5d94 | [] | no_license | pc-m/asyncio-ari-ast-id | e52d9729db2d9759f53a4b50e4b8aa407740a634 | 198ff286ca9cebeebc1c28592dfad4882e3bbdac | refs/heads/master | 2021-02-18T12:45:31.651885 | 2020-01-08T12:21:29 | 2020-01-08T12:21:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,040 | py | # coding: utf-8
"""
localhost:8088
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 4.0.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class MailboxesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def mailboxes_get(self, **kwargs): # noqa: E501
"""List all mailboxes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.mailboxes_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
:return: list[Mailbox]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.mailboxes_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.mailboxes_get_with_http_info(**kwargs) # noqa: E501
return data
    def mailboxes_get_with_http_info(self, **kwargs):  # noqa: E501
        """List all mailboxes.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.mailboxes_get_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
        :return: list[Mailbox]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['x_asterisk_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument that is not a declared API or transport
        # parameter before issuing the request.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method mailboxes_get" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        # The optional Asterisk ID travels as an HTTP header.
        if 'x_asterisk_id' in params:
            header_params['X-Asterisk-ID'] = params['x_asterisk_id']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501
        # Delegate the actual HTTP call (and optional async dispatch) to the
        # generated ApiClient.
        return self.api_client.call_api(
            '/mailboxes', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Mailbox]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def mailboxes_mailbox_name_delete(self, mailbox_name, **kwargs): # noqa: E501
"""Destroy a mailbox. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.mailboxes_mailbox_name_delete(mailbox_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str mailbox_name: Name of the mailbox (required)
:param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.mailboxes_mailbox_name_delete_with_http_info(mailbox_name, **kwargs) # noqa: E501
else:
(data) = self.mailboxes_mailbox_name_delete_with_http_info(mailbox_name, **kwargs) # noqa: E501
return data
    def mailboxes_mailbox_name_delete_with_http_info(self, mailbox_name, **kwargs):  # noqa: E501
        """Destroy a mailbox.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.mailboxes_mailbox_name_delete_with_http_info(mailbox_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str mailbox_name: Name of the mailbox (required)
        :param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint-specific arguments plus the generic request-control options
        # accepted by every generated method.
        all_params = ['mailbox_name', 'x_asterisk_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot locals so named and **kwargs arguments can be validated and
        # consumed uniformly below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method mailboxes_mailbox_name_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'mailbox_name' is set
        if ('mailbox_name' not in params or
                params['mailbox_name'] is None):
            raise ValueError("Missing the required parameter `mailbox_name` when calling `mailboxes_mailbox_name_delete`")  # noqa: E501

        collection_formats = {}

        # The mailbox name is interpolated into the URL path template.
        path_params = {}
        if 'mailbox_name' in params:
            path_params['mailboxName'] = params['mailbox_name']  # noqa: E501

        query_params = []

        # Optional routing header for multi-Asterisk deployments.
        header_params = {}
        if 'x_asterisk_id' in params:
            header_params['X-Asterisk-ID'] = params['x_asterisk_id']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        # DELETE has no response body, hence response_type=None.
        return self.api_client.call_api(
            '/mailboxes/{mailboxName}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def mailboxes_mailbox_name_get(self, mailbox_name, **kwargs): # noqa: E501
"""Retrieve the current state of a mailbox. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.mailboxes_mailbox_name_get(mailbox_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str mailbox_name: Name of the mailbox (required)
:param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
:return: Mailbox
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.mailboxes_mailbox_name_get_with_http_info(mailbox_name, **kwargs) # noqa: E501
else:
(data) = self.mailboxes_mailbox_name_get_with_http_info(mailbox_name, **kwargs) # noqa: E501
return data
    def mailboxes_mailbox_name_get_with_http_info(self, mailbox_name, **kwargs):  # noqa: E501
        """Retrieve the current state of a mailbox.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.mailboxes_mailbox_name_get_with_http_info(mailbox_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str mailbox_name: Name of the mailbox (required)
        :param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
        :return: Mailbox
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint-specific arguments plus the generic request-control options
        # accepted by every generated method.
        all_params = ['mailbox_name', 'x_asterisk_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot locals so named and **kwargs arguments can be validated and
        # consumed uniformly below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method mailboxes_mailbox_name_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'mailbox_name' is set
        if ('mailbox_name' not in params or
                params['mailbox_name'] is None):
            raise ValueError("Missing the required parameter `mailbox_name` when calling `mailboxes_mailbox_name_get`")  # noqa: E501

        collection_formats = {}

        # The mailbox name is interpolated into the URL path template.
        path_params = {}
        if 'mailbox_name' in params:
            path_params['mailboxName'] = params['mailbox_name']  # noqa: E501

        query_params = []

        # Optional routing header for multi-Asterisk deployments.
        header_params = {}
        if 'x_asterisk_id' in params:
            header_params['X-Asterisk-ID'] = params['x_asterisk_id']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        # Response body is deserialized into a Mailbox model object.
        return self.api_client.call_api(
            '/mailboxes/{mailboxName}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Mailbox',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def mailboxes_mailbox_name_put(self, mailbox_name, old_messages, new_messages, **kwargs): # noqa: E501
"""Change the state of a mailbox. (Note - implicitly creates the mailbox). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.mailboxes_mailbox_name_put(mailbox_name, old_messages, new_messages, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str mailbox_name: Name of the mailbox (required)
:param int old_messages: Count of old messages in the mailbox (required)
:param int new_messages: Count of new messages in the mailbox (required)
:param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.mailboxes_mailbox_name_put_with_http_info(mailbox_name, old_messages, new_messages, **kwargs) # noqa: E501
else:
(data) = self.mailboxes_mailbox_name_put_with_http_info(mailbox_name, old_messages, new_messages, **kwargs) # noqa: E501
return data
    def mailboxes_mailbox_name_put_with_http_info(self, mailbox_name, old_messages, new_messages, **kwargs):  # noqa: E501
        """Change the state of a mailbox. (Note - implicitly creates the mailbox).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.mailboxes_mailbox_name_put_with_http_info(mailbox_name, old_messages, new_messages, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str mailbox_name: Name of the mailbox (required)
        :param int old_messages: Count of old messages in the mailbox (required)
        :param int new_messages: Count of new messages in the mailbox (required)
        :param str x_asterisk_id: Asterisk ID used to route the request through the API Gateway
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint-specific arguments plus the generic request-control options
        # accepted by every generated method.
        all_params = ['mailbox_name', 'old_messages', 'new_messages', 'x_asterisk_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot locals so named and **kwargs arguments can be validated and
        # consumed uniformly below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method mailboxes_mailbox_name_put" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'mailbox_name' is set
        if ('mailbox_name' not in params or
                params['mailbox_name'] is None):
            raise ValueError("Missing the required parameter `mailbox_name` when calling `mailboxes_mailbox_name_put`")  # noqa: E501
        # verify the required parameter 'old_messages' is set
        if ('old_messages' not in params or
                params['old_messages'] is None):
            raise ValueError("Missing the required parameter `old_messages` when calling `mailboxes_mailbox_name_put`")  # noqa: E501
        # verify the required parameter 'new_messages' is set
        if ('new_messages' not in params or
                params['new_messages'] is None):
            raise ValueError("Missing the required parameter `new_messages` when calling `mailboxes_mailbox_name_put`")  # noqa: E501

        collection_formats = {}

        # The mailbox name is interpolated into the URL path template.
        path_params = {}
        if 'mailbox_name' in params:
            path_params['mailboxName'] = params['mailbox_name']  # noqa: E501

        # Message counts are sent as query-string parameters.
        query_params = []
        if 'old_messages' in params:
            query_params.append(('oldMessages', params['old_messages']))  # noqa: E501
        if 'new_messages' in params:
            query_params.append(('newMessages', params['new_messages']))  # noqa: E501

        # Optional routing header for multi-Asterisk deployments.
        header_params = {}
        if 'x_asterisk_id' in params:
            header_params['X-Asterisk-ID'] = params['x_asterisk_id']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        # PUT has no response body, hence response_type=None.
        return self.api_client.call_api(
            '/mailboxes/{mailboxName}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| [
"safchain@gmail.com"
] | safchain@gmail.com |
c14629b1a6e08472ce43288bee91fa595f6c20f5 | 32735726b6b6416f66e8f6d719382f3ffe3d1177 | /or2yw_examples/NYPL/script/ConfigTool.py | 0457fd07bb42522d033fa75db24e08329696f2b1 | [] | no_license | yixuan21/OR2YWTool | 5489d25a2a15d6d2513f909508fd5df97ee6d69b | eada19dccc6d03e80f0795efef949bc19f757fa0 | refs/heads/master | 2023-02-12T17:27:58.034974 | 2021-01-07T06:56:16 | 2021-01-07T06:56:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 494 | py | import argparse
import LinearOriginalOR2YW as Linear
import SPOriginalOR2YW as SP
def main():
parser=argparse.ArgumentParser()
parser.add_argument('-L','--Linear',help="Generate Linear YW model",action="store_true")
parser.add_argument('-SP','--SerialParallel',help="Generate Serial-Parallel YW model",action="store_true")
args=parser.parse_args()
if args.Linear:
Linear.main()
elif args.SerialParallel:
SP.main()
if __name__=='__main__':
main()
| [
"lilan.scut@gmail.com"
] | lilan.scut@gmail.com |
0b2d82c28a801219156d6a83facb2da9e5069808 | d47956c25f4f7ce0ae4a39cde40ee8f958f6b8bd | /products/migrations/0009_auto_20161018_0947.py | 79ad2a25f0bc70671a7e4a9b1e670c066e1ab9dc | [] | no_license | vaerjngiar/eShopper | de6e6bc4aece91fd766fed35a0654feaf0285163 | 5c43bc8867e484e03cd18e1a7ed4faf64cad4b7a | refs/heads/master | 2020-06-14T09:41:58.021570 | 2016-12-31T13:25:47 | 2016-12-31T13:25:47 | 77,740,671 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-10-18 06:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0008_product_size'),
]
operations = [
migrations.AlterModelOptions(
name='catalogcategory',
options={'ordering': ('name',), 'verbose_name': 'category', 'verbose_name_plural': 'categories'},
),
migrations.RenameField(
model_name='product',
old_name='size',
new_name='code',
),
migrations.AddField(
model_name='product',
name='available',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='product',
name='name',
field=models.CharField(db_index=True, max_length=255),
),
migrations.AlterField(
model_name='product',
name='slug',
field=models.SlugField(max_length=255),
),
]
| [
"vaerjngiar@gmail.com"
] | vaerjngiar@gmail.com |
3663318075fd771c2027ed016c8bd9ecab76c0e4 | 3996539eae965e8e3cf9bd194123989741825525 | /RecoEgamma/EgammaHLTProducers/hltEgammaHLTNxNClusterProducer_cfi.py | 57de83bc38ca0be104c1d2bdd00e2e8a2c4304b3 | [] | no_license | cms-sw/cmssw-cfipython | 01990ea8fcb97a57f0b0cc44a8bf5cde59af2d98 | 25ee4c810103c4a507ca1b949109399a23a524c5 | refs/heads/CMSSW_11_2_X | 2023-09-01T16:56:00.658845 | 2022-06-20T22:49:19 | 2022-06-20T22:49:19 | 136,184,115 | 1 | 0 | null | 2022-10-19T14:04:01 | 2018-06-05T13:47:28 | Python | UTF-8 | Python | false | false | 1,133 | py | import FWCore.ParameterSet.Config as cms
# Auto-generated CMSSW configuration fragment; parameter names must match the
# EgammaHLTNxNClusterProducer C++ plugin.
hltEgammaHLTNxNClusterProducer = cms.EDProducer('EgammaHLTNxNClusterProducer',
    # Run clustering in both ECAL barrel and endcap regions.
    doBarrel = cms.bool(True),
    doEndcaps = cms.bool(True),
    # Input rechit collections from the regional pi0/eta reconstruction.
    barrelHitProducer = cms.InputTag('hltEcalRegionalPi0EtaRecHit', 'EcalRecHitsEB'),
    endcapHitProducer = cms.InputTag('hltEcalRegionalPi0EtaRecHit', 'EcalRecHitsEE'),
    # Fixed cluster window size in crystals (3x3).
    clusEtaSize = cms.int32(3),
    clusPhiSize = cms.int32(3),
    # Output cluster collection labels.
    barrelClusterCollection = cms.string('Simple3x3ClustersBarrel'),
    endcapClusterCollection = cms.string('Simple3x3ClustersEndcap'),
    # Seed energy thresholds (presumably GeV - confirm in the producer code).
    clusSeedThr = cms.double(0.5),
    clusSeedThrEndCap = cms.double(1),
    # Rechit quality selection: either reco flags or DB channel status.
    useRecoFlag = cms.bool(False),
    flagLevelRecHitsToUse = cms.int32(1),
    useDBStatus = cms.bool(True),
    statusLevelRecHitsToUse = cms.int32(1),
    # Log-weighted position calculation parameters.
    posCalcParameters = cms.PSet(
        T0_barl = cms.double(7.4),
        T0_endc = cms.double(3.1),
        T0_endcPresh = cms.double(1.2),
        W0 = cms.double(4.2),
        X0 = cms.double(0.89),
        LogWeighted = cms.bool(True)
    ),
    # Safety caps on per-event seed/cluster multiplicity.
    maxNumberofSeeds = cms.int32(1000),
    maxNumberofClusters = cms.int32(200),
    debugLevel = cms.int32(0),
    mightGet = cms.optional.untracked.vstring
)
| [
"cmsbuild@cern.ch"
] | cmsbuild@cern.ch |
88c2969816720474e934b4cd7f2263875bbd96ae | 5935025f9c6e83aa006cd64755f71644009dedb5 | /core/utils.py | 75e2c316b8726d87611f4ba2bf628ecd43c882d9 | [] | no_license | zdYng/MyQuantification | 24bd546ce1b7ed94115a1eb94b2e11598df95f6b | 2fa874be4c8707e9c10bd7620cec2796946badcc | refs/heads/master | 2020-09-14T02:10:59.194409 | 2019-11-21T03:17:18 | 2019-11-21T03:17:18 | 222,980,387 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | import datetime as dt
class Timer():
def __init__(self):
self.start_dt = None
def start(self):
self.start_dt = dt.datetime.now()
def stop(self):
end_dt = dt.datetime.now()
print('Time token: %s'%(end_dt-self.start_dt))
| [
"qianzhongdao@163.com"
] | qianzhongdao@163.com |
14ec0e21f7e22754fe439ab3ac53c8b332534207 | d308fffe3db53b034132fb1ea6242a509f966630 | /pirates/world/ModularAreaBuilder.py | 7a52e5645c421e21103107fa08fc0966c217c0e6 | [
"BSD-3-Clause"
] | permissive | rasheelprogrammer/pirates | 83caac204965b77a1b9c630426588faa01a13391 | 6ca1e7d571c670b0d976f65e608235707b5737e3 | refs/heads/master | 2020-03-18T20:03:28.687123 | 2018-05-28T18:05:25 | 2018-05-28T18:05:25 | 135,193,362 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,335 | py | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.world.ModularAreaBuilder
from pandac.PandaModules import *
from otp.otpbase import OTPRender
from pirates.world.SectionAreaBuilder import SectionAreaBuilder
from pirates.leveleditor import EditorGlobals
class ModularAreaBuilder(SectionAreaBuilder):
    """Area builder for modular (piece-assembled) interiors such as caves.

    Extends SectionAreaBuilder with per-section sub-lighting: lights tagged
    'SubLight' are grouped by section, expanded across adjacent sections, and
    baked into per-zone LightAttrib sets applied to geometry and objects.
    Decompiled source - structure reflects the original bytecode.
    """
    __module__ = __name__

    def __init__(self, master):
        SectionAreaBuilder.__init__(self, master)
        # NOTE(review): subLights is assigned twice below - harmless
        # decompilation artifact, kept as-is.
        self.subLights = {}
        # zone -> list of adjacent zones (see generateAdjLightSets).
        self.adjTable = {}
        self.subLights = {}
        # zone/uid -> combined LightAttrib for that area.
        self.areaLights = {}

    def _postLoadStep(self):
        """After geometry load: collect tagged sub-lights, then light objects."""
        SectionAreaBuilder._postLoadStep(self)
        # NOTE(review): this local is never used afterwards; the decompiler
        # may have dropped an assignment to self.adjTable - confirm.
        adjTable = base.worldCreator.uidAdjTables.get(self.master.uniqueId, {})
        for light in self.areaGeometry.findAllMatches('**/=SubLight;+s'):
            # Map the light's tagged section to its parent zone, if known.
            zone = self.sectionsToParent.get(light.getTag('SubLight'))
            if zone:
                self.addSubLight(zone, light.find('**/+Light').node())
        self.lightObjects()

    def addChildObj(self, levelObj):
        """Add a child level object, tagging modular pieces and fog overrides."""
        root = SectionAreaBuilder.addChildObj(self, levelObj)
        if levelObj['Type'] == 'Cave_Pieces':
            root.setTag('modular', '1')
            if levelObj.get('OverrideFog', False):
                # Stash fog parameters as tags; read back in triggerEffects.
                root.setTag('fog-onset', str(levelObj.get('FogOnSet', 0)))
                root.setTag('fog-peak', str(levelObj.get('FogPeak', 100)))

    def lightObjects(self):
        """Apply the per-zone light sets to sections, objects and portals."""
        self.generateAdjLightSets()
        # Light each section with its parent zone's combined attrib.
        for zone in self.sections:
            parent = self.sectionsToParent[zone]
            lightAttrib = self.areaLights.get(parent)
            if not lightAttrib:
                continue
            self.sections[zone].setAttrib(lightAttrib)
        # Light large objects: modular pieces use their own uid's set,
        # otherwise the set of the zone they are tagged with.
        for uid, obj in self.largeObjects.iteritems():
            visZone = obj.getTag('visZone')
            modular = obj.getTag('modular')
            if modular:
                self.largeObjects[uid].setAttrib(self.areaLights[uid])
            elif visZone:
                self.largeObjects[uid].setAttrib(self.areaLights[self.sectionsToParent[visZone]])
        # Portal-visibility nodes get the light set of their tagged zone.
        for node in self.areaGeometry.findAllMatches('**/=PortalVis'):
            visZone = node.getTag('PortalVis')
            node.setAttrib(self.areaLights[self.sectionsToParent[visZone]])

    def generateAdjLightSets(self):
        """Combine each zone's lights with its adjacent zones' lights."""
        for zone in self.adjTable:
            lightAttrib = LightAttrib.make()
            group = self.subLights.get(zone, [])
            for light in group:
                lightAttrib = lightAttrib.addLight(light)
            # Also include the lights of every adjacent zone so lighting does
            # not pop when crossing a section boundary.
            for adjZone in self.adjTable[zone]:
                adjGroup = self.subLights.get(adjZone, [])
                for light in adjGroup:
                    lightAttrib = lightAttrib.addLight(light)
            self.areaLights[zone] = lightAttrib

    def addSubLight(self, zone, light):
        """Register a light node under the given zone's sub-light group."""
        subLightGroup = self.subLights.get(zone)
        if not subLightGroup:
            subLightGroup = self.subLights[zone] = []
        subLightGroup.append(light)

    def makeLight(self, levelObj):
        """Create a modular light; tag it with its VisZone when one is set."""
        light = EditorGlobals.LightModular(levelObj, self.areaGeometry, drawIcon=False)
        if levelObj.get('VisZone'):
            if light:
                light.setTag('SubLight', levelObj.get('VisZone'))
        # Lights never contribute to water reflections.
        OTPRender.renderReflection(False, light, 'p_light', None)
        return light

    def handleLighting(self, obj, visZone):
        """Apply the zone's combined light set to obj, then defer to base."""
        parent = self.sectionsToParent.get(visZone)
        if parent and self.areaLights.has_key(parent):
            obj.setAttrib(self.areaLights[parent])
        SectionAreaBuilder.handleLighting(self, obj, visZone)

    def localAvLeaving(self):
        """Strip any area lighting from the local avatar on exit."""
        localAvatar.clearAttrib(LightAttrib.getClassType())

    def disableDynamicLights(self):
        # Intentional no-op: modular areas use baked per-zone light sets.
        pass

    def addSectionObj(self, obj, visZone, logError=0):
        """Add an object to a section and light it; log/trap missing zones."""
        SectionAreaBuilder.addSectionObj(self, obj, visZone)
        parent = self.sectionsToParent.get(visZone)
        if parent:
            # Decompiler artifact: 'and' used as a conditional setAttrib.
            self.areaLights.has_key(parent) and obj.setAttrib(self.areaLights[parent])
        else:
            if logError:
                errorMessage = 'Chest missing parent visZone %s location %s position %s' % (visZone, localAvatar.getLocation(), localAvatar.getPos())
                localAvatar.sendAILog(errorMessage)
            else:
                if __dev__:
                    # NOTE(review): set_trace is never imported in this module
                    # (pdb.set_trace presumably intended) - this raises
                    # NameError if ever reached in dev builds.
                    set_trace()

    def arrived(self):
        """Enable aggressive far-plane culling while inside the area."""
        render.setClipPlane(base.farCull)

    def left(self):
        """Restore the normal clip plane when leaving the area."""
        render.clearClipPlane()

    def triggerEffects(self, visZone):
        """Apply the fog override of the entered zone's modular piece."""
        SectionAreaBuilder.triggerEffects(self, visZone)
        parent = self.sectionsToParent.get(visZone)
        if parent:
            module = self.largeObjects.get(parent)
            if module:
                if module.getTag('modular'):
                    onset = module.getTag('fog-onset')
                    peak = module.getTag('fog-peak')
                    # Empty tag -> '' stays falsy; otherwise parse as float.
                    onset = onset and float(onset)
                    # NOTE(review): float(peak) raises ValueError if the
                    # 'fog-peak' tag was never set ('') - confirm all modular
                    # pieces with the 'modular' tag also carry fog tags.
                    peak = float(peak)
                    base.cr.timeOfDayManager.lerpLinearFog(onset, peak)
                else:
                    base.cr.timeOfDayManager.restoreLinearFog()

    def unloadObjects(self):
        """Drop all lighting bookkeeping, then defer to the base cleanup."""
        self.areaLights = {}
        self.subLights = {}
        self.adjTable = {}
        SectionAreaBuilder.unloadObjects(self)
"33942724+itsyaboyrocket@users.noreply.github.com"
] | 33942724+itsyaboyrocket@users.noreply.github.com |
3be9d317ad09dd674355187358e491d131b899a5 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/AlipayAcquireCreateandpayResponse.py | c14ee8bde981d6a5568c67d3c189308b17bf169e | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 2,910 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayAcquireCreateandpayResponse(AlipayResponse):
    """Response object for the ``alipay.acquire.createandpay`` API call.

    Each gateway response field is exposed as a read/write property; the raw
    payload is mapped onto those properties by
    :meth:`parse_response_content`.
    """

    # Names of the response fields this class mirrors. Keep in sync with the
    # properties below and with the gateway's JSON keys; the parse loop in
    # parse_response_content iterates this tuple instead of the original
    # eight copy-pasted 'if key in response' branches.
    _RESPONSE_FIELDS = (
        'buyer_logon_id',
        'buyer_user_id',
        'detail_error_code',
        'detail_error_des',
        'extend_info',
        'out_trade_no',
        'result_code',
        'trade_no',
    )

    def __init__(self):
        super(AlipayAcquireCreateandpayResponse, self).__init__()
        self._buyer_logon_id = None
        self._buyer_user_id = None
        self._detail_error_code = None
        self._detail_error_des = None
        self._extend_info = None
        self._out_trade_no = None
        self._result_code = None
        self._trade_no = None

    @property
    def buyer_logon_id(self):
        return self._buyer_logon_id

    @buyer_logon_id.setter
    def buyer_logon_id(self, value):
        self._buyer_logon_id = value

    @property
    def buyer_user_id(self):
        return self._buyer_user_id

    @buyer_user_id.setter
    def buyer_user_id(self, value):
        self._buyer_user_id = value

    @property
    def detail_error_code(self):
        return self._detail_error_code

    @detail_error_code.setter
    def detail_error_code(self, value):
        self._detail_error_code = value

    @property
    def detail_error_des(self):
        return self._detail_error_des

    @detail_error_des.setter
    def detail_error_des(self, value):
        self._detail_error_des = value

    @property
    def extend_info(self):
        return self._extend_info

    @extend_info.setter
    def extend_info(self, value):
        self._extend_info = value

    @property
    def out_trade_no(self):
        return self._out_trade_no

    @out_trade_no.setter
    def out_trade_no(self, value):
        self._out_trade_no = value

    @property
    def result_code(self):
        return self._result_code

    @result_code.setter
    def result_code(self, value):
        self._result_code = value

    @property
    def trade_no(self):
        return self._trade_no

    @trade_no.setter
    def trade_no(self, value):
        self._trade_no = value

    def parse_response_content(self, response_content):
        """Populate this object's fields from the decoded gateway response.

        :param response_content: raw payload handed to the base-class parser.
        """
        response = super(AlipayAcquireCreateandpayResponse, self).parse_response_content(response_content)
        # Copy every known field that is present. Assignment goes through the
        # property setters, keeping the backing attributes consistent.
        for field in self._RESPONSE_FIELDS:
            if field in response:
                setattr(self, field, response[field])
| [
"liuqun.lq@alibaba-inc.com"
] | liuqun.lq@alibaba-inc.com |
ab60bdb394634c3f7bf7a2949170c5796910f7d6 | ed06ef44c944707276a2fca16d61e7820596f51c | /Python/create-target-array-in-the-given-order.py | 79a41b9196316ba78a9aa7642dbd5d9bc4c51130 | [] | no_license | sm2774us/leetcode_interview_prep_2021 | 15842bef80637c6ff43542ed7988ec4b2d03e82c | 33b41bea66c266b733372d9a8b9d2965cd88bf8c | refs/heads/master | 2023-05-29T14:14:49.074939 | 2021-06-12T19:52:07 | 2021-06-12T19:52:07 | 374,725,760 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | # Time: O(n^2)
# Space: O(1)
# Time:  O(n^2)
# Space: O(n)
class Solution(object):
    def createTargetArray(self, nums, index):
        """Build the target array implied by sequential inserts.

        :type nums: List[int]
        :type index: List[int]
        :rtype: List[int]
        """
        # Fix: work on a copy so the caller's ``index`` list is no longer
        # mutated in place (the original clobbered it as a side effect).
        positions = list(index)
        # Inserting nums[i] at positions[i] shifts every earlier element that
        # landed at or after that slot one place to the right.
        for i in range(len(positions)):
            for j in range(i):
                if positions[j] >= positions[i]:
                    positions[j] += 1
        result = [0] * len(nums)
        for i, pos in enumerate(positions):
            result[pos] = nums[i]
        return result
# Time: O(n^2)
# Space: O(1)
import itertools
class Solution2(object):
    def createTargetArray(self, nums, index):
        """Build the target array via repeated list.insert.

        :type nums: List[int]
        :type index: List[int]
        :rtype: List[int]
        """
        # Bug fix: the original called itertools.zip, which exists in neither
        # Python 2 (izip) nor Python 3 (builtin zip) and raised
        # AttributeError at runtime. The builtin zip works on both.
        result = []
        for pos, value in zip(index, nums):
            result.insert(pos, value)
        return result
| [
"sm2774us@gmail.com"
] | sm2774us@gmail.com |
7d944e3db94712dc9662468f900911a794ff785a | 27e890f900bd4bfb2e66f4eab85bc381cf4d5d3f | /plugins/module_utils/network/slxos/slxos.py | 32f64d815194641c0d66b6c029f0e8f8720df836 | [] | no_license | coll-test/notstdlib.moveitallout | eb33a560070bbded5032385d0aea2f3cf60e690b | 0987f099b783c6cf977db9233e1c3d9efcbcb3c7 | refs/heads/master | 2020-12-19T22:28:33.369557 | 2020-01-23T18:51:26 | 2020-01-23T18:51:26 | 235,865,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,455 | py | #
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
from ansible_collections.notstdlib.moveitallout.plugins.module_utils._text import to_text
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.network.common.utils import to_list, ComplexList
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.connection import Connection
def get_connection(module):
    """Get switch connection

    Creates a reusable SSH connection to the switch described by *module*,
    caching it on the module instance so repeated calls share one session.

    Args:
        module: A valid AnsibleModule instance.

    Returns:
        An instance of `ansible.module_utils.connection.Connection` with a
        connection to the switch described in the provided module.

    Raises:
        AnsibleConnectionFailure: An error occurred connecting to the device
    """
    # Return the previously established connection when one exists.
    if hasattr(module, 'slxos_connection'):
        return module.slxos_connection

    network_api = get_capabilities(module).get('network_api')
    if network_api == 'cliconf':
        module.slxos_connection = Connection(module._socket_path)
    else:
        module.fail_json(msg='Invalid connection type %s' % network_api)

    return module.slxos_connection
def get_capabilities(module):
    """Get switch capabilities

    Collects and returns a python object with the switch capabilities,
    caching the decoded result on the module instance.

    Args:
        module: A valid AnsibleModule instance.

    Returns:
        A dictionary containing the switch capabilities.
    """
    # Fetch and decode on first use; subsequent calls hit the cache.
    if not hasattr(module, 'slxos_capabilities'):
        raw_capabilities = Connection(module._socket_path).get_capabilities()
        module.slxos_capabilities = json.loads(raw_capabilities)
    return module.slxos_capabilities
def run_commands(module, commands):
    """Run command list against connection.

    Sends each command to the (new or cached) device connection one at a
    time and collects the decoded responses.

    Args:
        module: A valid AnsibleModule instance.
        commands: Iterable of command strings.

    Returns:
        A list of output strings.
    """
    connection = get_connection(module)
    outputs = list()

    for cmd in to_list(commands):
        # Dict-style commands carry an interactive prompt/answer pair;
        # plain strings are sent without interaction.
        if isinstance(cmd, dict):
            command, prompt, answer = cmd['command'], cmd['prompt'], cmd['answer']
        else:
            command, prompt, answer = cmd, None, None

        out = connection.get(command, prompt, answer)
        try:
            out = to_text(out, errors='surrogate_or_strict')
        except UnicodeError:
            module.fail_json(msg=u'Failed to decode output from %s: %s' % (cmd, to_text(out)))
        outputs.append(out)

    return outputs
def get_config(module):
    """Get switch configuration

    Gets the described device's current configuration. If a configuration
    has already been retrieved it returns the previously obtained text.

    Args:
        module: A valid AnsibleModule instance.

    Returns:
        A string containing the configuration.
    """
    # First call: create the cache slot; later calls: serve from the cache.
    if not hasattr(module, 'device_configs'):
        module.device_configs = {}
    elif module.device_configs != {}:
        return module.device_configs

    raw_config = get_connection(module).get_config()
    module.device_configs = to_text(raw_config, errors='surrogate_then_replace').strip()
    return module.device_configs
def load_config(module, commands):
    """Apply a list of commands to a device.

    Given a list of commands, apply them to the device to modify the
    configuration in bulk.

    Args:
        module: A valid AnsibleModule instance.
        commands: Iterable of command strings.

    Returns:
        None
    """
    get_connection(module).edit_config(commands)
| [
"wk@sydorenko.org.ua"
] | wk@sydorenko.org.ua |
e8495641c46462cc9829d844091a011b0a87c217 | 2ed86a79d0fcd299ad4a01310954c5eddcf01edf | /homeassistant/components/zwave_js/update.py | 70d12b22dedc1ecb6f6f210e8af6fc7989b19981 | [
"Apache-2.0"
] | permissive | konnected-io/home-assistant | 037f12c87bb79e19220192eb918e49db1b1a8b3e | 2e65b77b2b5c17919939481f327963abdfdc53f0 | refs/heads/dev | 2023-05-11T08:57:41.891518 | 2023-05-07T20:03:37 | 2023-05-07T20:03:37 | 109,931,626 | 24 | 10 | Apache-2.0 | 2023-02-22T06:24:01 | 2017-11-08T05:27:21 | Python | UTF-8 | Python | false | false | 13,810 | py | """Representation of Z-Wave updates."""
from __future__ import annotations
import asyncio
from collections import Counter
from collections.abc import Callable
from dataclasses import asdict, dataclass
from datetime import datetime, timedelta
from typing import Any, Final
from awesomeversion import AwesomeVersion
from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import NodeStatus
from zwave_js_server.exceptions import BaseZwaveJSServerError, FailedZWaveCommand
from zwave_js_server.model.driver import Driver
from zwave_js_server.model.node import Node as ZwaveNode
from zwave_js_server.model.node.firmware import (
NodeFirmwareUpdateInfo,
NodeFirmwareUpdateProgress,
NodeFirmwareUpdateResult,
)
from homeassistant.components.update import (
ATTR_LATEST_VERSION,
UpdateDeviceClass,
UpdateEntity,
UpdateEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import CoreState, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.restore_state import ExtraStoredData
from .const import API_KEY_FIRMWARE_UPDATE_SERVICE, DATA_CLIENT, DOMAIN, LOGGER
from .helpers import get_device_info, get_valueless_base_unique_id
# Allow only one in-flight update operation at a time for this platform.
PARALLEL_UPDATES = 1

# Counter key used to stagger each entity's first latest-firmware check.
UPDATE_DELAY_STRING = "delay"
UPDATE_DELAY_INTERVAL = 5  # In minutes
@dataclass
class ZWaveNodeFirmwareUpdateExtraStoredData(ExtraStoredData):
    """Extra stored data for Z-Wave node firmware update entity."""

    latest_version_firmware: NodeFirmwareUpdateInfo | None

    def as_dict(self) -> dict[str, Any]:
        """Return a dict representation of the extra data."""
        firmware = self.latest_version_firmware
        serialized = asdict(firmware) if firmware else None
        return {"latest_version_firmware": serialized}

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> ZWaveNodeFirmwareUpdateExtraStoredData:
        """Initialize the extra data from a dict."""
        firmware_dict = data["latest_version_firmware"]
        if firmware_dict:
            return cls(NodeFirmwareUpdateInfo.from_dict(firmware_dict))
        return cls(None)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Z-Wave update entity from config entry."""
    client: ZwaveClient = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT]
    # Shared counter: each new entity bumps it so first polls are staggered.
    cnt: Counter = Counter()

    @callback
    def async_add_firmware_update_entity(node: ZwaveNode) -> None:
        """Add firmware update entity."""
        # We need to delay the first update of each entity to avoid flooding the network
        # so we maintain a counter to schedule first update in UPDATE_DELAY_INTERVAL
        # minute increments.
        cnt[UPDATE_DELAY_STRING] += 1
        delay = timedelta(minutes=(cnt[UPDATE_DELAY_STRING] * UPDATE_DELAY_INTERVAL))
        driver = client.driver
        assert driver is not None  # Driver is ready before platforms are loaded.
        async_add_entities([ZWaveNodeFirmwareUpdate(driver, node, delay)])

    # Create one update entity per discovered node; the dispatcher
    # subscription is torn down when the config entry unloads.
    config_entry.async_on_unload(
        async_dispatcher_connect(
            hass,
            f"{DOMAIN}_{config_entry.entry_id}_add_firmware_update_entity",
            async_add_firmware_update_entity,
        )
    )
class ZWaveNodeFirmwareUpdate(UpdateEntity):
"""Representation of a firmware update entity."""
_attr_entity_category = EntityCategory.CONFIG
_attr_device_class = UpdateDeviceClass.FIRMWARE
_attr_supported_features = (
UpdateEntityFeature.INSTALL
| UpdateEntityFeature.RELEASE_NOTES
| UpdateEntityFeature.PROGRESS
)
_attr_has_entity_name = True
_attr_should_poll = False
def __init__(self, driver: Driver, node: ZwaveNode, delay: timedelta) -> None:
"""Initialize a Z-Wave device firmware update entity."""
self.driver = driver
self.node = node
self._latest_version_firmware: NodeFirmwareUpdateInfo | None = None
self._status_unsub: Callable[[], None] | None = None
self._poll_unsub: Callable[[], None] | None = None
self._progress_unsub: Callable[[], None] | None = None
self._finished_unsub: Callable[[], None] | None = None
self._finished_event = asyncio.Event()
self._result: NodeFirmwareUpdateResult | None = None
self._delay: Final[timedelta] = delay
# Entity class attributes
self._attr_name = "Firmware"
self._base_unique_id = get_valueless_base_unique_id(driver, node)
self._attr_unique_id = f"{self._base_unique_id}.firmware_update"
self._attr_installed_version = node.firmware_version
# device may not be precreated in main handler yet
self._attr_device_info = get_device_info(driver, node)
@property
def extra_restore_state_data(self) -> ZWaveNodeFirmwareUpdateExtraStoredData:
"""Return ZWave Node Firmware Update specific state data to be restored."""
return ZWaveNodeFirmwareUpdateExtraStoredData(self._latest_version_firmware)
@callback
def _update_on_status_change(self, _: dict[str, Any]) -> None:
"""Update the entity when node is awake."""
self._status_unsub = None
self.hass.async_create_task(self._async_update())
@callback
def _update_progress(self, event: dict[str, Any]) -> None:
"""Update install progress on event."""
progress: NodeFirmwareUpdateProgress = event["firmware_update_progress"]
if not self._latest_version_firmware:
return
self._attr_in_progress = int(progress.progress)
self.async_write_ha_state()
@callback
def _update_finished(self, event: dict[str, Any]) -> None:
"""Update install progress on event."""
result: NodeFirmwareUpdateResult = event["firmware_update_finished"]
self._result = result
self._finished_event.set()
@callback
def _unsub_firmware_events_and_reset_progress(
self, write_state: bool = True
) -> None:
"""Unsubscribe from firmware events and reset update install progress."""
if self._progress_unsub:
self._progress_unsub()
self._progress_unsub = None
if self._finished_unsub:
self._finished_unsub()
self._finished_unsub = None
self._result = None
self._finished_event.clear()
self._attr_in_progress = False
if write_state:
self.async_write_ha_state()
async def _async_update(self, _: HomeAssistant | datetime | None = None) -> None:
"""Update the entity."""
if self._poll_unsub:
self._poll_unsub()
self._poll_unsub = None
# If hass hasn't started yet, push the next update to the next day so that we
# can preserve the offsets we've created between each node
if self.hass.state != CoreState.running:
self._poll_unsub = async_call_later(
self.hass, timedelta(days=1), self._async_update
)
return
# If device is asleep/dead, wait for it to wake up/become alive before
# attempting an update
for status, event_name in (
(NodeStatus.ASLEEP, "wake up"),
(NodeStatus.DEAD, "alive"),
):
if self.node.status == status:
if not self._status_unsub:
self._status_unsub = self.node.once(
event_name, self._update_on_status_change
)
return
try:
available_firmware_updates = (
await self.driver.controller.async_get_available_firmware_updates(
self.node, API_KEY_FIRMWARE_UPDATE_SERVICE
)
)
except FailedZWaveCommand as err:
LOGGER.debug(
"Failed to get firmware updates for node %s: %s",
self.node.node_id,
err,
)
else:
# If we have an available firmware update that is a higher version than
# what's on the node, we should advertise it, otherwise the installed
# version is the latest.
if (
available_firmware_updates
and (
latest_firmware := max(
available_firmware_updates,
key=lambda x: AwesomeVersion(x.version),
)
)
and AwesomeVersion(latest_firmware.version)
> AwesomeVersion(self.node.firmware_version)
):
self._latest_version_firmware = latest_firmware
self._attr_latest_version = latest_firmware.version
self.async_write_ha_state()
elif self._attr_latest_version != self._attr_installed_version:
self._attr_latest_version = self._attr_installed_version
self.async_write_ha_state()
finally:
self._poll_unsub = async_call_later(
self.hass, timedelta(days=1), self._async_update
)
async def async_release_notes(self) -> str | None:
"""Get release notes."""
if self._latest_version_firmware is None:
return None
return self._latest_version_firmware.changelog
async def async_install(
self, version: str | None, backup: bool, **kwargs: Any
) -> None:
"""Install an update."""
firmware = self._latest_version_firmware
assert firmware
self._unsub_firmware_events_and_reset_progress(False)
self._attr_in_progress = True
self.async_write_ha_state()
self._progress_unsub = self.node.on(
"firmware update progress", self._update_progress
)
self._finished_unsub = self.node.on(
"firmware update finished", self._update_finished
)
try:
await self.driver.controller.async_firmware_update_ota(
self.node, firmware.files
)
except BaseZwaveJSServerError as err:
self._unsub_firmware_events_and_reset_progress()
raise HomeAssistantError(err) from err
# We need to block until we receive the `firmware update finished` event
await self._finished_event.wait()
assert self._result is not None
# If the update was not successful, we should throw an error
# to let the user know
if not self._result.success:
error_msg = self._result.status.name.replace("_", " ").title()
self._unsub_firmware_events_and_reset_progress()
raise HomeAssistantError(error_msg)
# If we get here, all files were installed successfully
self._attr_installed_version = self._attr_latest_version = firmware.version
self._latest_version_firmware = None
self._unsub_firmware_events_and_reset_progress()
async def async_poll_value(self, _: bool) -> None:
"""Poll a value."""
LOGGER.error(
"There is no value to refresh for this entity so the zwave_js.refresh_value"
" service won't work for it"
)
async def async_added_to_hass(self) -> None:
"""Call when entity is added."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self.unique_id}_poll_value",
self.async_poll_value,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._base_unique_id}_remove_entity",
self.async_remove,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._base_unique_id}_remove_entity_on_ready_node",
self.async_remove,
)
)
# If we have a complete previous state, use that to set the latest version
if (state := await self.async_get_last_state()) and (
extra_data := await self.async_get_last_extra_data()
):
self._attr_latest_version = state.attributes[ATTR_LATEST_VERSION]
self._latest_version_firmware = (
ZWaveNodeFirmwareUpdateExtraStoredData.from_dict(
extra_data.as_dict()
).latest_version_firmware
)
# If we have no state to restore, we can set the latest version to installed
# so that the entity starts as off. If we have partial restore data due to an
# upgrade to an HA version where this feature is released from one that is not
# the entity will start in an unknown state until we can correct on next update
elif not state:
self._attr_latest_version = self._attr_installed_version
# Spread updates out in 5 minute increments to avoid flooding the network
self.async_on_remove(
async_call_later(self.hass, self._delay, self._async_update)
)
async def async_will_remove_from_hass(self) -> None:
"""Call when entity will be removed."""
if self._status_unsub:
self._status_unsub()
self._status_unsub = None
if self._poll_unsub:
self._poll_unsub()
self._poll_unsub = None
self._unsub_firmware_events_and_reset_progress(False)
| [
"noreply@github.com"
] | konnected-io.noreply@github.com |
e4ebbe330afb1e6f903c4911cb59cd16a3843dbd | f62fd455e593a7ad203a5c268e23129473d968b6 | /senlin-3.0.1/senlin/tests/unit/policies/test_region_placement.py | 3939e9d93d1a7316471447103097329d9aaf624f | [
"Apache-2.0"
] | permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 16,780 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
from senlin.common import consts
from senlin.common import exception as exc
from senlin.common import scaleutils as su
from senlin.engine import cluster as cm
from senlin.objects import cluster as co
from senlin.objects import node as no
from senlin.policies import base as pb
from senlin.policies import region_placement as rp
from senlin.tests.unit.common import base
from senlin.tests.unit.common import utils
class TestRegionPlacementPolicy(base.SenlinTestCase):
def setUp(self):
super(TestRegionPlacementPolicy, self).setUp()
self.context = utils.dummy_context()
self.spec = {
'type': 'senlin.policy.region_placement',
'version': '1.0',
'properties': {
'regions': [
{'name': 'R1', 'weight': 100, 'cap': 50},
{'name': 'R2', 'weight': 50, 'cap': 50},
{'name': 'R3', 'weight': 30, 'cap': -1},
{'name': 'R4', 'weight': 20, 'cap': -1}
]
}
}
def test_policy_init(self):
policy = rp.RegionPlacementPolicy('test-policy', self.spec)
self.assertIsNone(policy.id)
self.assertIsNone(policy. _keystoneclient)
self.assertEqual('test-policy', policy.name)
self.assertEqual('senlin.policy.region_placement-1.0', policy.type)
expected = {
'R1': {
'weight': 100,
'cap': 50
},
'R2': {
'weight': 50,
'cap': 50,
},
'R3': {
'weight': 30,
'cap': -1,
},
'R4': {
'weight': 20,
'cap': -1,
}
}
self.assertEqual(expected, policy.regions)
@mock.patch.object(pb.Policy, 'validate')
def test_validate_okay(self, mock_base_validate):
policy = rp.RegionPlacementPolicy('test-policy', self.spec)
kc = mock.Mock()
kc.validate_regions.return_value = ['R1', 'R2', 'R3', 'R4']
policy._keystoneclient = kc
ctx = mock.Mock(user='U1', project='P1')
res = policy.validate(ctx, True)
self.assertTrue(res)
mock_base_validate.assert_called_once_with(ctx, True)
kc.validate_regions.assert_called_once_with(['R1', 'R2', 'R3', 'R4'])
@mock.patch.object(pb.Policy, 'validate')
def test_validate_no_validate_props(self, mock_base_validate):
policy = rp.RegionPlacementPolicy('test-policy', self.spec)
ctx = mock.Mock(user='U1', project='P1')
res = policy.validate(ctx, False)
self.assertTrue(res)
mock_base_validate.assert_called_once_with(ctx, False)
@mock.patch.object(pb.Policy, 'validate')
def test_validate_region_not_found(self, mock_base_validate):
policy = rp.RegionPlacementPolicy('test-policy', self.spec)
kc = mock.Mock()
kc.validate_regions.return_value = ['R2', 'R4']
policy._keystoneclient = kc
ctx = mock.Mock(user='U1', project='P1')
ex = self.assertRaises(exc.InvalidSpec,
policy.validate,
ctx, True)
mock_base_validate.assert_called_once_with(ctx, True)
kc.validate_regions.assert_called_once_with(['R1', 'R2', 'R3', 'R4'])
self.assertEqual("The specified regions '['R1', 'R3']' could not "
"be found.", six.text_type(ex))
def test__create_plan(self):
policy = rp.RegionPlacementPolicy('p1', self.spec)
regions = policy.regions
current = {'R1': 2, 'R2': 2, 'R3': 2, 'R4': 1}
result = policy._create_plan(current, regions, 5, True)
expected = {'R1': 4, 'R2': 1}
self.assertEqual(expected, result)
current = {'R1': 2, 'R2': 2, 'R3': 0, 'R4': 1}
plan = policy._create_plan(current, regions, 5, True)
answer = {'R1': 3, 'R2': 1, 'R3': 1}
self.assertEqual(answer, plan)
current = {'R1': 2, 'R2': 2, 'R3': 0, 'R4': 1}
plan = policy._create_plan(current, regions, 3, False)
answer = {'R2': 2, 'R4': 1}
self.assertEqual(answer, plan)
current = {'R1': 4, 'R2': 2, 'R3': 1, 'R4': 1}
plan = policy._create_plan(current, regions, 3, False)
answer = {'R2': 1, 'R3': 1, 'R4': 1}
self.assertEqual(answer, plan)
def test__get_count_node_create_no_region(self):
x_profile = mock.Mock(CONTEXT='context', properties={'context': {}})
x_node = mock.Mock(rt={'profile': x_profile})
action = mock.Mock(action=consts.NODE_CREATE, node=x_node)
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(1, res)
def test__get_count_node_create_region_specified(self):
x_profile = mock.Mock(CONTEXT='context',
properties={'context': {'region_name': 'foo'}})
x_node = mock.Mock(rt={'profile': x_profile})
action = mock.Mock(action=consts.NODE_CREATE, node=x_node)
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(0, res)
def test__get_count_resize_deletion(self):
action = mock.Mock(action=consts.CLUSTER_RESIZE,
data={'deletion': {'count': 3}})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(-3, res)
def test__get_count_resize_creation(self):
action = mock.Mock(action=consts.CLUSTER_RESIZE,
data={'creation': {'count': 3}})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(3, res)
@mock.patch.object(no.Node, 'count_by_cluster')
@mock.patch.object(su, 'parse_resize_params')
@mock.patch.object(co.Cluster, 'get')
def test__get_count_resize_parse_error(self, mock_cluster, mock_parse,
mock_count):
x_cluster = mock.Mock()
mock_cluster.return_value = x_cluster
mock_count.return_value = 10
mock_parse.return_value = (pb.CHECK_ERROR, 'Something wrong.')
action = mock.Mock(action=consts.CLUSTER_RESIZE, data={})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(0, res)
self.assertEqual(pb.CHECK_ERROR, action.data['status'])
mock_count.assert_called_once_with(action.context, 'FOO')
mock_parse.assert_called_once_with(action, x_cluster, 10)
self.assertEqual('Something wrong.', action.data['reason'])
@mock.patch.object(no.Node, 'count_by_cluster')
@mock.patch.object(su, 'parse_resize_params')
@mock.patch.object(co.Cluster, 'get')
def test__get_count_resize_parse_creation(self, mock_cluster, mock_parse,
mock_count):
def fake_parse(action, cluster, current):
action.data = {'creation': {'count': 3}}
return pb.CHECK_OK, ''
x_cluster = mock.Mock()
mock_cluster.return_value = x_cluster
mock_count.return_value = 0
mock_parse.side_effect = fake_parse
action = mock.Mock(action=consts.CLUSTER_RESIZE, data={})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(3, res)
mock_count.assert_called_once_with(action.context, 'FOO')
mock_parse.assert_called_once_with(action, x_cluster, 0)
mock_cluster.assert_called_once_with(action.context, 'FOO')
@mock.patch.object(no.Node, 'count_by_cluster')
@mock.patch.object(su, 'parse_resize_params')
@mock.patch.object(co.Cluster, 'get')
def test__get_count_resize_parse_deletion(self, mock_cluster, mock_parse,
mock_count):
def fake_parse(action, cluster, current):
action.data = {'deletion': {'count': 3}}
return pb.CHECK_OK, ''
x_cluster = mock.Mock()
mock_cluster.return_value = x_cluster
mock_count.return_value = 6
mock_parse.side_effect = fake_parse
action = mock.Mock(action=consts.CLUSTER_RESIZE, data={})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(-3, res)
mock_count.assert_called_once_with(action.context, 'FOO')
mock_parse.assert_called_once_with(action, x_cluster, 6)
mock_cluster.assert_called_once_with(action.context, 'FOO')
def test__get_count_scale_in_with_data(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_IN,
data={'deletion': {'count': 3}})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(-3, res)
def test__get_count_scale_in_with_no_data(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_IN,
data={'deletion': {'num': 3}})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(-1, res)
def test__get_count_scale_in_with_inputs(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_IN, data={},
inputs={'count': 3})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(-3, res)
def test__get_count_scale_in_with_incorrect_inputs(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_IN, data={},
inputs={'num': 3})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(-1, res)
def test__get_count_scale_out_with_data(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_OUT,
data={'creation': {'count': 3}})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(3, res)
def test__get_count_scale_out_with_no_data(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_OUT,
data={'creation': {'num': 3}})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(1, res)
def test__get_count_scale_out_with_inputs(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_OUT, data={},
inputs={'count': 3})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(3, res)
def test__get_count_scale_out_with_incorrect_inputs(self):
action = mock.Mock(action=consts.CLUSTER_SCALE_OUT, data={},
inputs={'num': 3})
policy = rp.RegionPlacementPolicy('p1', self.spec)
res = policy._get_count('FOO', action)
self.assertEqual(1, res)
@mock.patch.object(cm.Cluster, 'load')
def test_pre_op(self, mock_load):
# test pre_op method whether returns the correct action.data
policy = rp.RegionPlacementPolicy('p1', self.spec)
regions = policy.regions
kc = mock.Mock()
kc.validate_regions.return_value = regions.keys()
policy._keystoneclient = kc
plan = {'R1': 1, 'R3': 2}
self.patchobject(policy, '_create_plan', return_value=plan)
action = mock.Mock()
action.context = self.context
action.action = 'CLUSTER_SCALE_OUT'
action.inputs = {}
action.data = {
'creation': {
'count': 3,
}
}
cluster = mock.Mock()
current_dist = {'R1': 0, 'R2': 0, 'R3': 0, 'R4': 0}
cluster.get_region_distribution.return_value = current_dist
mock_load.return_value = cluster
res = policy.pre_op('FAKE_CLUSTER', action)
self.assertIsNone(res)
self.assertEqual(3, action.data['creation']['count'])
dist = action.data['creation']['regions']
self.assertEqual(2, len(dist))
self.assertEqual(1, dist['R1'])
self.assertEqual(2, dist['R3'])
mock_load.assert_called_once_with(action.context, 'FAKE_CLUSTER')
kc.validate_regions.assert_called_once_with(regions.keys())
cluster.get_region_distribution.assert_called_once_with(regions.keys())
policy._create_plan.assert_called_once_with(
current_dist, regions, 3, True)
@mock.patch.object(cm.Cluster, 'load')
def test_pre_op_count_from_inputs(self, mock_load):
# test pre_op method whether returns the correct action.data
policy = rp.RegionPlacementPolicy('p1', self.spec)
regions = policy.regions
kc = mock.Mock()
kc.validate_regions.return_value = regions.keys()
policy._keystoneclient = kc
cluster = mock.Mock()
current_dist = {'R1': 0, 'R2': 0, 'R3': 0, 'R4': 0}
cluster.get_region_distribution.return_value = current_dist
mock_load.return_value = cluster
plan = {'R1': 1, 'R3': 2}
self.patchobject(policy, '_create_plan', return_value=plan)
action = mock.Mock()
action.context = self.context
action.action = 'CLUSTER_SCALE_OUT'
action.inputs = {'count': 3}
action.data = {}
res = policy.pre_op('FAKE_CLUSTER', action)
self.assertIsNone(res)
self.assertEqual(3, action.data['creation']['count'])
dist = action.data['creation']['regions']
self.assertEqual(2, len(dist))
self.assertEqual(1, dist['R1'])
self.assertEqual(2, dist['R3'])
@mock.patch.object(cm.Cluster, 'load')
def test_pre_op_no_regions(self, mock_load):
# test pre_op method whether returns the correct action.data
policy = rp.RegionPlacementPolicy('p1', self.spec)
kc = mock.Mock()
kc.validate_regions.return_value = []
policy._keystoneclient = kc
action = mock.Mock()
action.action = 'CLUSTER_SCALE_OUT'
action.context = self.context
action.data = {'creation': {'count': 3}}
cluster = mock.Mock()
mock_load.return_value = cluster
res = policy.pre_op('FAKE_CLUSTER', action)
self.assertIsNone(res)
self.assertEqual('ERROR', action.data['status'])
self.assertEqual('No region is found usable.', action.data['reason'])
@mock.patch.object(cm.Cluster, 'load')
def test_pre_op_no_feasible_plan(self, mock_load):
# test pre_op method whether returns the correct action.data
policy = rp.RegionPlacementPolicy('p1', self.spec)
regions = policy.regions
kc = mock.Mock()
kc.validate_regions.return_value = regions.keys()
policy._keystoneclient = kc
self.patchobject(policy, '_create_plan', return_value=None)
action = mock.Mock()
action.action = 'CLUSTER_SCALE_OUT'
action.context = self.context
action.inputs = {}
action.data = {'creation': {'count': 3}}
cluster = mock.Mock()
current_dist = {'R1': 0, 'R2': 0, 'R3': 0, 'R4': 0}
cluster.get_region_distribution.return_value = current_dist
mock_load.return_value = cluster
res = policy.pre_op('FAKE_CLUSTER', action)
self.assertIsNone(res)
self.assertEqual('ERROR', action.data['status'])
self.assertEqual('There is no feasible plan to handle all nodes.',
action.data['reason'])
mock_load.assert_called_once_with(action.context, 'FAKE_CLUSTER')
kc.validate_regions.assert_called_once_with(regions.keys())
cluster.get_region_distribution.assert_called_once_with(regions.keys())
policy._create_plan.assert_called_once_with(
current_dist, regions, 3, True)
| [
"gongwayne@hotmail.com"
] | gongwayne@hotmail.com |
e3a6d3a7c141c5ff1c7cc9d862462fb3c5c9136b | c8a6246f1695521c9acb0eb1ba8552c7f1917ce7 | /provider/manga.py | 84d1514624cb497c704deed06f0e64e1137d7b1a | [
"Apache-2.0",
"MIT",
"CC-BY-4.0"
] | permissive | wafle/ebedke | 5a391c78e94f056a7aa8f7dda40a5a1bbad3c96b | 94d6a3431b674aafc00d88826307dcb74bde3943 | refs/heads/master | 2020-04-06T11:17:44.297396 | 2019-01-27T21:08:42 | 2019-01-27T21:08:42 | 157,411,406 | 0 | 0 | NOASSERTION | 2018-11-13T16:31:05 | 2018-11-13T16:31:05 | null | UTF-8 | Python | false | false | 644 | py | from datetime import datetime as dt, timedelta
from provider.utils import get_dom, on_workdays
URL = "http://mangacowboy.hu/"
@on_workdays
def getMenu(today):
dom = get_dom(URL)
date = today.strftime("%Y. %m. %d.")
menu = dom.xpath(f'//section[@id="weekly_menu"]/ul/li[.//time[contains(text(), "{ date }")]]'
'//div[@class="weeklyMenuPreview-content"]')
if menu:
menu = list(menu[0].xpath("./p/text()"))
else:
menu = []
return menu
menu = {
'name': 'Manga',
'id': 'mc',
'url': URL,
'get': getMenu,
'ttl': timedelta(hours=23),
'cards': ['szep', 'erzs']
}
| [
"ijanos@gmail.com"
] | ijanos@gmail.com |
a55e1d3310d8edf5e2ef9a5d7119e535b1785777 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_bookstores.py | c045d55bc6dc153405204c67df564094fb1eb46b | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py |
#calss header
class _BOOKSTORES():
def __init__(self,):
self.name = "BOOKSTORES"
self.definitions = bookstore
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['bookstore']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
9a1ab659558877fc943ccdb9a204d0b3ea6f68aa | 683417cb26b5a4d5b17b437eb49aa4fdbc77de72 | /src/nameless/cli.py | 591c249cbfaa60b8431e5246476eba4deed5a2fd | [
"BSD-2-Clause"
] | permissive | admdev8/python-nameless | 1c36286b0a2f308c4fdc43e05abf5ec403f91c97 | 838e1f0d90c9a33abba820ffd05beb39b2ef6763 | refs/heads/master | 2023-04-15T18:19:59.065808 | 2020-08-06T10:57:15 | 2020-08-06T10:57:15 | 292,904,530 | 0 | 0 | BSD-2-Clause | 2023-04-04T01:54:02 | 2020-09-04T17:10:41 | null | UTF-8 | Python | false | false | 917 | py | """
Module that contains the command line app.
Why does this file exist, and why not put this in __main__?
You might be tempted to import things from __main__ later, but that will cause
problems: the code will get executed twice:
- When you run `python -mnameless` python will execute
``__main__.py`` as a script. That means there won't be any
``nameless.__main__`` in ``sys.modules``.
- When you import __main__ it will get executed again (as a module) because
there's no ``nameless.__main__`` in ``sys.modules``.
Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration
"""
import argparse
parser = argparse.ArgumentParser(description='Command description.')
parser.add_argument('names', metavar='NAME', nargs=argparse.ZERO_OR_MORE,
help="A name of something.")
def main(args=None):
args = parser.parse_args(args=args)
print(args.names)
| [
"contact@ionelmc.ro"
] | contact@ionelmc.ro |
ce1fbf14e7b412cb90f2965c86787eac5d2a1735 | 4b37380d52f64d121d4401b2175ed7c8ad0f9fb1 | /BertForMultipleChoice/predict.py | 98665484c548dced72dfba88cd1013d790f35ddc | [] | no_license | DMStudent/Nlp | 09e41b87cd57dbb3a6b3b1b4a4fc211440769c9f | 4ff21634b6bdf238e54568828b4a85f5189d6997 | refs/heads/master | 2020-06-10T17:07:05.591023 | 2019-06-25T11:37:13 | 2019-06-25T11:37:13 | 193,686,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,412 | py | # -*- coding: utf-8 -*-
# File : predict.py
# Author: wangyuan
# mail: wyxidian@gmail.com
# Created Time: 2019/1/29
#!/bin/bash
"""BERT finetuning runner."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
import sys
import csv
import modeling
import optimization
import tokenization
import tensorflow as tf
import numpy as np
flags = tf.flags
FLAGS = flags.FLAGS
## Required parameters
flags.DEFINE_string(
"bert_config_file", "/search/wangyuan/bert/chinese_L-12_H-768_A-12/bert_config.json",
"The config json file corresponding to the pre-trained BERT model. "
"This specifies the model architecture.")
flags.DEFINE_string("vocab_file", "/search/wangyuan/bert/chinese_L-12_H-768_A-12/vocab.txt",
"The vocabulary file that the BERT model was trained on.")
## Other parameters
flags.DEFINE_string(
"init_checkpoint", "/search/wangyuan/BertForMultipleChoice/output/model.ckpt-115000",
"Initial checkpoint (usually from a pre-trained BERT model).")
flags.DEFINE_bool(
"do_lower_case", True,
"Whether to lower case the input text. Should be True for uncased "
"models and False for cased models.")
flags.DEFINE_integer(
"max_seq_length", 30,
"The maximum total input sequence length after WordPiece tokenization. "
"Sequences longer than this will be truncated, and sequences shorter "
"than this will be padded.")
flags.DEFINE_bool("do_train", False, "Whether to run training.")
flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")
flags.DEFINE_bool(
"do_predict", True,
"Whether to run the model in inference mode on the test set.")
flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.")
flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.")
flags.DEFINE_integer("predict_batch_size", 2000, "Total batch size for predict.")
flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.")
flags.DEFINE_float("num_train_epochs", 3.0,
"Total number of training epochs to perform.")
flags.DEFINE_float(
"warmup_proportion", 0.1,
"Proportion of training to perform linear learning rate warmup for. "
"E.g., 0.1 = 10% of training.")
flags.DEFINE_integer("save_checkpoints_steps", 1000,
"How often to save the model checkpoint.")
flags.DEFINE_integer("iterations_per_loop", 1000,
"How many steps to make in each estimator call.")
flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.")
tf.flags.DEFINE_string(
"tpu_name", None,
"The Cloud TPU to use for training. This should be either the name "
"used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 "
"url.")
tf.flags.DEFINE_string(
"tpu_zone", None,
"[Optional] GCE zone where the Cloud TPU is located in. If not "
"specified, we will attempt to automatically detect the GCE project from "
"metadata.")
tf.flags.DEFINE_string(
"gcp_project", None,
"[Optional] Project name for the Cloud TPU-enabled project. If not "
"specified, we will attempt to automatically detect the GCE project from "
"metadata.")
tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.")
class SentenceExample(object):
"""A single training/test example for the Sentence dataset."""
def __init__(self,
swag_id,
context_sentence,
ending_0,
ending_1,
label = None):
self.swag_id = swag_id
self.context_sentence = context_sentence
self.endings = [
ending_0,
ending_1,
]
self.label = label
def __str__(self):
return self.__repr__()
def __repr__(self):
l = [
"swag_id: {}".format(self.swag_id),
"context_sentence: {}".format(self.context_sentence),
"ending_0: {}".format(self.endings[0]),
"ending_1: {}".format(self.endings[1]),
]
if self.label is not None:
l.append("label: {}".format(self.label))
return ", ".join(l)
class PaddingInputExample(object):
"""Fake example so the num input examples is a multiple of the batch size.
When running eval/predict on the TPU, we need to pad the number of examples
to be a multiple of the batch size, because the TPU requires a fixed batch
size. The alternative is to drop the last batch, which is bad because it means
the entire output data won't be generated.
We use this class instead of `None` because treating `None` as padding
battches could cause silent errors.
"""
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self,
input_ids,
input_mask,
segment_ids,
label_id,
is_real_example=True):
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.label_id = label_id
self.is_real_example = is_real_example
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def get_train_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_test_examples(self, data_dir):
"""Gets a collection of `InputExample`s for prediction."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
@classmethod
def _read_tsv(cls, input_file, quotechar=None):
"""Reads a tab separated value file."""
with tf.gfile.Open(input_file, "r") as f:
reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
lines = []
for line in reader:
lines.append(line)
return lines
class MultipleChoiceProcessor(DataProcessor):
"""Processor for the Incest data set (GLUE version)."""
def __init__(self):
self.labels = ["0", "1"]
def get_train_examples(self, data_dir):
"""See base class."""
# logger.info("LOOKING AT {}".format(os.path.join(data_dir, "train.tsv")))
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.csv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.csv")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.csv")), "test")
def get_labels(self):
"""See base class."""
return self.labels
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
examples.append(
SentenceExample(
swag_id=guid,
context_sentence=line[0],
ending_0=line[1],
ending_1=line[2],
label=int(line[3])))
return examples
def convert_single_example(ex_index, example, label_list, max_seq_length,
                           tokenizer):
    """Converts a single multiple-choice example into one `InputFeatures` per ending.

    For each candidate ending, the pair (context, ending) is packed as
    "[CLS] context [SEP] ending [SEP]", truncated to `max_seq_length` and
    zero-padded.  Returns a list with one `InputFeatures` per ending, all
    carrying the example-level label.

    Note: `ex_index` and `label_list` are unused but kept for interface
    compatibility with the standard BERT `run_classifier` call sites.
    """
    context_tokens = tokenizer.tokenize(example.context_sentence)
    features = []
    for ending in example.endings:
        # Copy so truncation for one choice does not affect the next.
        context_tokens_choice = context_tokens[:]
        ending_tokens = tokenizer.tokenize(ending)
        # Reserve 3 positions for [CLS] and the two [SEP] markers.
        _truncate_seq_pair(context_tokens_choice, ending_tokens, max_seq_length - 3)
        tokens = ["[CLS]"] + context_tokens_choice + ["[SEP]"] + ending_tokens + ["[SEP]"]
        # Segment 0 covers [CLS] + context + first [SEP]; segment 1 the ending + final [SEP].
        segment_ids = [0] * (len(context_tokens_choice) + 2) + [1] * (len(ending_tokens) + 1)
        input_ids = tokenizer.convert_tokens_to_ids(tokens)
        input_mask = [1] * len(input_ids)
        # Zero-pad up to the sequence length.
        padding = [0] * (max_seq_length - len(input_ids))
        input_ids += padding
        input_mask += padding
        segment_ids += padding
        assert len(input_ids) == max_seq_length
        assert len(input_mask) == max_seq_length
        assert len(segment_ids) == max_seq_length
        # (Removed a dead `create_int_feature` helper that was re-defined on
        # every iteration and never called.)
        features.append(InputFeatures(
            input_ids=input_ids,
            input_mask=input_mask,
            segment_ids=segment_ids,
            label_id=example.label,
            is_real_example=True))
    return features
def _truncate_seq_pair(tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
# This is a simple heuristic which will always truncate the longer sequence
# one token at a time. This makes more sense than truncating an equal percent
# of tokens from each, since if one sequence is very short then each token
# that's truncated likely contains more information than a longer sequence.
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop()
def create_model(bert_config, is_training, input_ids, input_mask, segment_ids,
                 labels, num_labels, use_one_hot_embeddings):
    """Creates a classification model.

    Builds a BERT encoder over each packed (context, ending) sequence and a
    single-logit linear head on the pooled [CLS] output; the logits are then
    reshaped to [-1, num_labels] so the choices of one example form one
    softmax row.
    """
    model = modeling.BertModel(
        config=bert_config,
        is_training=is_training,
        input_ids=input_ids,
        input_mask=input_mask,
        token_type_ids=segment_ids,
        use_one_hot_embeddings=use_one_hot_embeddings)
    # In the demo, we are doing a simple classification task on the entire
    # segment.
    #
    # If you want to use the token-level output, use model.get_sequence_output()
    # instead.
    output_layer = model.get_pooled_output()
    hidden_size = output_layer.shape[-1].value
    # A single output unit: each (context, ending) row yields one score.
    output_weights = tf.get_variable(
        "output_weights", [1, hidden_size],
        initializer=tf.truncated_normal_initializer(stddev=0.02))
    output_bias = tf.get_variable(
        "output_bias", [1], initializer=tf.zeros_initializer())
    with tf.variable_scope("loss"):
        if is_training:
            # I.e., 0.1 dropout
            output_layer = tf.nn.dropout(output_layer, keep_prob=0.9)
        logits = tf.matmul(output_layer, output_weights, transpose_b=True)
        logits = tf.nn.bias_add(logits, output_bias)
        # Group num_labels consecutive rows (the choices of one example)
        # into a single row.
        reshaped_logits = tf.reshape(logits, [-1, num_labels])
        labels = tf.reshape(labels, [-1, num_labels])
        # NOTE(review): this assumes the choices of one example all carry the
        # same label, so the row mean equals that label.  tf.one_hot below
        # expects integer indices — confirm the float mean is safe here.
        mean_labels = tf.reduce_mean(labels, axis=1)
        # probabilities = tf.nn.softmax(reshaped_logits, axis=-1)
        # log_probs = tf.log(probabilities)
        # Per-choice sigmoid scores; this is what the PREDICT path exports.
        probabilities = tf.math.sigmoid(reshaped_logits)
        # loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=reshaped_logits)
        # loss = tf.reduce_mean(loss)
        # Cross-entropy of the softmax across the choices of each example.
        log_probs = tf.nn.log_softmax(reshaped_logits, axis=-1)
        one_hot_labels = tf.one_hot(mean_labels, depth=num_labels, dtype=tf.float32)
        per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
        loss = tf.reduce_mean(per_example_loss)
    return (per_example_loss, input_ids, one_hot_labels, log_probs, loss, mean_labels, logits, probabilities)
def model_fn_builder(bert_config, num_labels, init_checkpoint, learning_rate,
                     num_train_steps, num_warmup_steps, use_tpu,
                     use_one_hot_embeddings):
    """Returns `model_fn` closure for TPUEstimator.

    The closure captures the BERT config, checkpoint and optimizer settings;
    the returned `model_fn` builds the TRAIN / EVAL / PREDICT specs on demand.
    """

    def model_fn(features, labels, mode, params):  # pylint: disable=unused-argument
        """The `model_fn` for TPUEstimator."""
        tf.logging.info("*** Features ***")
        for name in sorted(features.keys()):
            tf.logging.info("  name = %s, shape = %s" % (name, features[name].shape))
        input_ids = features["input_ids"]
        input_mask = features["input_mask"]
        segment_ids = features["segment_ids"]
        label_ids = features["label_ids"]
        is_real_example = None
        if "is_real_example" in features:
            is_real_example = tf.cast(features["is_real_example"], dtype=tf.float32)
        else:
            # No padding marker supplied: treat every row as a real example.
            is_real_example = tf.ones(tf.shape(label_ids), dtype=tf.float32)
        is_training = (mode == tf.estimator.ModeKeys.TRAIN)
        (per_example_loss, input_ids, one_hot_labels, log_probs, total_loss, labels, logits, probabilities) = create_model(
            bert_config, is_training, input_ids, input_mask, segment_ids, label_ids,
            num_labels, use_one_hot_embeddings)
        tvars = tf.trainable_variables()
        initialized_variable_names = {}
        scaffold_fn = None
        if init_checkpoint:
            # Warm-start from the pretrained BERT checkpoint.
            (assignment_map, initialized_variable_names
             ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
            if use_tpu:
                def tpu_scaffold():
                    tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
                    return tf.train.Scaffold()
                # NOTE(review): scaffold_fn is assigned but never attached to
                # an output spec below — confirm the TPU path is exercised.
                scaffold_fn = tpu_scaffold
            else:
                tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
        tf.logging.info("**** Trainable Variables ****")
        for var in tvars:
            init_string = ""
            if var.name in initialized_variable_names:
                init_string = ", *INIT_FROM_CKPT*"
            tf.logging.info("  name = %s, shape = %s%s", var.name, var.shape,
                            init_string)
        output_spec = None
        if mode == tf.estimator.ModeKeys.TRAIN:
            train_op = optimization.create_optimizer(
                total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu)
            # Log the scalar loss every 10 steps.
            logging_hook = tf.train.LoggingTensorHook({"loss": total_loss}, every_n_iter=10)
            output_spec = tf.estimator.EstimatorSpec(
                mode=mode,
                loss=total_loss,
                train_op=train_op,
                training_hooks=[logging_hook])
        elif mode == tf.estimator.ModeKeys.EVAL:
            def metric_fn(per_example_loss, label_ids, logits, is_real_example):
                predictions = tf.argmax(logits, axis=-1, output_type=tf.int32)
                accuracy = tf.metrics.accuracy(
                    labels=label_ids, predictions=predictions, weights=is_real_example)
                loss = tf.metrics.mean(values=per_example_loss, weights=is_real_example)
                return {
                    "eval_accuracy": accuracy,
                    "eval_loss": loss,
                }
            # NOTE(review): this (fn, args) tuple is the TPUEstimator
            # `eval_metrics` convention, but it is passed to a plain
            # EstimatorSpec's `eval_metric_ops`, which expects a dict of
            # metric ops — confirm the EVAL path is actually used.
            eval_metrics = (metric_fn,
                            [per_example_loss, label_ids, logits, is_real_example])
            output_spec = tf.estimator.EstimatorSpec(
                mode=mode,
                loss=total_loss,
                eval_metric_ops=eval_metrics)
        else:
            # PREDICT: emit the per-choice sigmoid probabilities.
            output_spec = tf.estimator.EstimatorSpec(
                mode=mode,
                predictions={"probabilities": probabilities})
        return output_spec
    return model_fn
# This function is not used by this file but is still used by the Colab and
# people who depend on it.
def input_fn_builder(features, batch_size, seq_length, is_training, drop_remainder):
    """Creates an `input_fn` closure to be passed to TPUEstimator.

    All features are materialized as in-memory constants, so this only
    scales to small prediction batches.

    NOTE(review): `is_training` is accepted but never used (no shuffle or
    repeat), and the Estimator-supplied `params` are ignored in favor of the
    captured `batch_size`.
    """
    # Transpose the list of InputFeatures into parallel per-field lists.
    all_input_ids = []
    all_input_mask = []
    all_segment_ids = []
    all_label_ids = []
    for feature in features:
        all_input_ids.append(feature.input_ids)
        all_input_mask.append(feature.input_mask)
        all_segment_ids.append(feature.segment_ids)
        all_label_ids.append(feature.label_id)

    def input_fn(params):
        """The actual input function."""
        num_examples = len(features)
        # This is for demo purposes and does NOT scale to large data sets. We do
        # not use Dataset.from_generator() because that uses tf.py_func which is
        # not TPU compatible. The right way to load data is with TFRecordReader.
        d = tf.data.Dataset.from_tensor_slices({
            "input_ids":
                tf.constant(
                    all_input_ids, shape=[num_examples, seq_length],
                    dtype=tf.int32),
            "input_mask":
                tf.constant(
                    all_input_mask,
                    shape=[num_examples, seq_length],
                    dtype=tf.int32),
            "segment_ids":
                tf.constant(
                    all_segment_ids,
                    shape=[num_examples, seq_length],
                    dtype=tf.int32),
            "label_ids":
                tf.constant(all_label_ids, shape=[num_examples], dtype=tf.int32),
        })
        d = d.batch(batch_size=batch_size, drop_remainder=drop_remainder)
        return d
    return input_fn
# This function is not used by this file but is still used by the Colab and
# people who depend on it.
def convert_examples_to_features(examples, label_list, max_seq_length,
                                 tokenizer):
    """Convert a set of `InputExample`s to a flat list of `InputFeatures`.

    Each multiple-choice example expands to one feature per ending, so the
    returned list is longer than `examples`.
    """
    features = []
    total = len(examples)
    for ex_index, example in enumerate(examples):
        if ex_index % 10000 == 0:
            # Progress log for large inputs.
            tf.logging.info("Reading example %d of %d" % (ex_index, total))
        features.extend(convert_single_example(
            ex_index, example, label_list, max_seq_length, tokenizer))
    return features
def main(_):
    """Stream tab-separated (context, ending_0, ending_1) rows from stdin and
    print one sigmoid score per (context, ending) pair, using a fine-tuned
    BERT multiple-choice checkpoint."""
    # Silence everything below FATAL so stdout stays machine-parseable.
    tf.logging.set_verbosity(tf.logging.FATAL)
    tokenization.validate_case_matches_checkpoint(FLAGS.do_lower_case, FLAGS.init_checkpoint)
    bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)
    processor = MultipleChoiceProcessor()
    label_list = processor.get_labels()
    tokenizer = tokenization.FullTokenizer(
        vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case)
    # NOTE(review): session_config is built but never passed to the RunConfig
    # below, so these CPU/thread limits have no effect — confirm intent.
    session_config = tf.ConfigProto(device_count={"CPU": 8}, inter_op_parallelism_threads=8,
                                    intra_op_parallelism_threads=8)
    run_config = tf.estimator.RunConfig(
        model_dir=FLAGS.output_dir,
        save_checkpoints_steps=FLAGS.save_checkpoints_steps,
        keep_checkpoint_max=FLAGS.keep_checkpoint_max)
    model_fn = model_fn_builder(
        bert_config=bert_config,
        num_labels=len(label_list),
        init_checkpoint=FLAGS.init_checkpoint,
        learning_rate=FLAGS.learning_rate,
        num_train_steps=None,
        num_warmup_steps=None,
        use_tpu=FLAGS.use_tpu,
        use_one_hot_embeddings=FLAGS.use_tpu)
    # If TPU is not available, this will fall back to normal Estimator on CPU
    # or GPU.
    estimator = tf.estimator.Estimator(
        model_fn=model_fn,
        config=run_config)
    predict_examples = []
    idx = 0
    # NOTE(review): `texts` is reset below but never appended to or read — dead.
    texts = []
    for line in sys.stdin:
        # Expected columns: context \t ending_0 \t ending_1
        line_lst = line.split("\t")
        guid = "%s-%s" % ("test", idx)
        predict_examples.append(SentenceExample(
            swag_id=guid,
            context_sentence=line_lst[0],
            ending_0=line_lst[1],
            ending_1=line_lst[2],
            label=0))  # dummy label; prediction only
        idx += 1
        if idx%FLAGS.predict_batch_size == 0:
            # A full batch has accumulated: predict and emit scored lines.
            num_actual_predict_examples = len(predict_examples)
            features = convert_examples_to_features(predict_examples, label_list, FLAGS.max_seq_length, tokenizer)
            predict_input_fn = input_fn_builder(
                features=features,
                batch_size=FLAGS.predict_batch_size,
                seq_length=FLAGS.max_seq_length,
                is_training=True,
                drop_remainder=True)
            result = estimator.predict(input_fn=predict_input_fn)
            # NOTE(review): `result` yields one row per *feature* (two per
            # example, since each example expands to two endings) while
            # `predict_examples[i]` indexes per *example* — verify alignment.
            for (i, prediction) in enumerate(result):
                probabilities = prediction["probabilities"]
                # Inner loop rebinds `prediction` (shadows the outer variable).
                for (j, prediction) in enumerate(probabilities):
                    output_line = predict_examples[i].context_sentence + "\t" \
                                  + predict_examples[i].endings[j] + "\t" \
                                  + str(prediction)
                    print(output_line)
            predict_examples = []
            texts = []
    # Flush the final partial batch, if any.
    num_actual_predict_examples = len(predict_examples)
    if num_actual_predict_examples<1:
        return
    features = convert_examples_to_features(predict_examples, label_list, FLAGS.max_seq_length, tokenizer)
    predict_input_fn = input_fn_builder(
        features=features,
        # Two features per example, so this makes the remainder one full batch.
        batch_size=num_actual_predict_examples*2,
        seq_length=FLAGS.max_seq_length,
        is_training=True,
        drop_remainder=True)
    result = estimator.predict(input_fn=predict_input_fn)
    for (i, prediction) in enumerate(result):
        probabilities = prediction["probabilities"]
        for (j, prediction) in enumerate(probabilities):
            output_line = predict_examples[i].context_sentence + "\t" \
                          + predict_examples[i].endings[j] + "\t" \
                          + str(prediction)
            print(output_line)
if __name__ == "__main__":
    tf.app.run()
| [
"wyxidian@gmail.com"
] | wyxidian@gmail.com |
aa3a9c4250b0fe2f225eebda9b637ae200a75b8d | 707afa88f15e7bbf7d0d1c89b5076a9d6d17547e | /Python-Selenium unittest/VisionImpairedTC_BigType.py | bb3350c01d25f5f84e9b5988a730a28dc1acbc58 | [] | no_license | Longmann94/DFI_testing | 769f7211313b6303271b556b97e1038992d927a9 | a42430a25bdea98c5551f51003078a547335f7c6 | refs/heads/master | 2020-03-15T19:11:51.965865 | 2018-06-07T15:40:27 | 2018-06-07T15:40:27 | 132,302,477 | 3 | 0 | null | 2018-05-27T15:04:44 | 2018-05-06T03:11:25 | Python | UTF-8 | Python | false | false | 11,466 | py | import unittest
import time
import HtmlTestRunner
import os
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
cwd = os.getcwd()+'/results'
print("Please do not use the computer while the tests are running to ensure correct results")
print("Your test results will be created inside: " + cwd)
print("This test will also create screenshots of each test ran inside the current folder: " + os.getcwd())
url = input("Please enter website version to test when you are ready: ")
class Test_setUp_menu_buttons(unittest.TestCase):
    """UI regression tests for the Vision-Impaired "I need Bigger Type" option.

    Every test drives a fresh Chrome session to the "Seeing clearly" setup
    screen and then toggles the Big Type option through a different UI path
    (icon, checkbox, label text, row area, press-and-hold, drag, ...).  The
    previously copy-pasted 8-step navigation is factored into
    `_goto_seeing_clearly`; the repeated assert+screenshot tail into
    `_check_bigtype`.
    """

    # XPaths shared by all tests.
    BIG_TYPE_ICON = "//img[@alt='I need Bigger Type']"
    OPTION_LABEL = "//div[@id='setup1']/ul/div[2]/li/label"
    OPTION_SPAN = "//div[@id='setup1']/ul/div[2]/li/label/span"
    OPTION_ROW = "//div[@id='setup1']/ul/div[2]/li"

    def setUp(self):
        self.driver = webdriver.Chrome()
        self.driver.maximize_window()

    def tearDown(self):
        self.driver.close()

    def _goto_seeing_clearly(self, settle=False):
        """Open the site and navigate Setup -> Vision Impaired -> Seeing clearly.

        settle: sleep briefly after the page load (some tests did this).
        Returns the driver for convenience.
        """
        driver = self.driver
        driver.get(url)
        if settle:
            time.sleep(2)
        driver.find_element_by_xpath("//div[@id='firsttime']/div[2]/button/div[2]").click()
        driver.find_element_by_xpath("//div[@id='wrapper']/div[8]/div/button/div[2]").click()
        driver.find_element_by_xpath("//div[@id='setupContainer']/div[2]/select").click()
        Select(driver.find_element_by_xpath("//div[@id='setupContainer']/div[2]/select")).select_by_visible_text("Vision Impaired")
        driver.find_element_by_xpath("//img[@alt='setup icon']").click()
        driver.find_element_by_xpath("//img[@alt='Seeing clearly']").click()
        return driver

    def _check_bigtype(self, selected, screenshot):
        """Assert the Big Type checkbox state and keep a screenshot as evidence."""
        time.sleep(2)
        if selected:
            self.assertTrue(self.driver.find_element_by_id("bigtype").is_selected())
        else:
            self.assertFalse(self.driver.find_element_by_id("bigtype").is_selected())
        self.driver.save_screenshot(screenshot)

    def test_TC_BigType_01_iconClick(self):
        driver = self._goto_seeing_clearly(settle=True)
        driver.find_element_by_xpath(self.BIG_TYPE_ICON).click()
        self._check_bigtype(True, 'SC_TC_BigType_01_iconClick.png')

    def test_TC_BigType_01_checkboxClick(self):
        driver = self._goto_seeing_clearly(settle=True)
        driver.find_element_by_xpath(self.OPTION_SPAN).click()
        self._check_bigtype(True, 'SC_TC_BigType_01_checkboxClick.png')

    def test_TC_BigType_01_textClick(self):
        driver = self._goto_seeing_clearly(settle=True)
        driver.find_element_by_xpath(self.OPTION_LABEL).click()
        self._check_bigtype(True, 'SC_TC_BigType_01_textClick.png')

    def test_TC_BigType_01_boxAreaClick(self):
        driver = self._goto_seeing_clearly()
        driver.find_element_by_xpath(self.OPTION_ROW).click()
        self._check_bigtype(True, 'SC_TC_BigType_01_boxAreaClick.png')

    def test_TC_BigType_02a(self):
        # Five toggles followed by an icon click must leave it unselected.
        driver = self._goto_seeing_clearly(settle=True)
        for xpath in (self.OPTION_LABEL, self.OPTION_ROW, self.OPTION_LABEL,
                      self.OPTION_SPAN, self.OPTION_LABEL):
            driver.find_element_by_xpath(xpath).click()
        driver.find_element_by_xpath(self.BIG_TYPE_ICON).click()
        self._check_bigtype(False, 'SC_TC_BigType_02a_unselect.png')

    def test_TC_BigType_02b(self):
        # Four toggles followed by an icon click must leave it selected.
        driver = self._goto_seeing_clearly()
        for xpath in (self.OPTION_LABEL, self.OPTION_ROW,
                      self.OPTION_SPAN, self.OPTION_LABEL):
            driver.find_element_by_xpath(xpath).click()
        driver.find_element_by_xpath(self.BIG_TYPE_ICON).click()
        self._check_bigtype(True, 'SC_TC_BigType_02b_select.png')

    def test_TC_BigType_03(self):
        # Press-and-hold on the icon for 6 seconds must still select.
        driver = self._goto_seeing_clearly()
        button = driver.find_element_by_xpath(self.BIG_TYPE_ICON)
        ActionChains(driver).move_to_element(button).click_and_hold(button).pause(6).release().perform()
        self._check_bigtype(True, 'SC_TC_BigType_03.png')

    def test_TC_BigType_04(self):
        # A short drag starting on the label must still toggle the option.
        driver = self._goto_seeing_clearly()
        driver.find_element_by_xpath(self.BIG_TYPE_ICON)
        button = driver.find_element_by_xpath(self.OPTION_LABEL)
        ActionChains(driver).drag_and_drop_by_offset(button, 30, 10).perform()
        self._check_bigtype(True, 'SC_TC_BigType_04.png')

    def test_TC_BigType_05(self):
        # Select Big Type, continue to the next screen and submit the
        # name/suburb form; screenshot only (the original had no assertion).
        driver = self._goto_seeing_clearly(settle=True)
        driver.find_element_by_xpath(self.BIG_TYPE_ICON).click()
        driver.find_element_by_xpath("//div[@id='setupContainer']/div/button/div[2]").click()
        time.sleep(2)
        name_field = driver.find_element_by_id("inputname")
        name_field.click()
        name_field.clear()
        name_field.send_keys("TEST")
        suburb_field = driver.find_element_by_id("inputsub")
        suburb_field.click()
        suburb_field.clear()
        suburb_field.send_keys("Mornington")
        suburb_field.send_keys(Keys.ENTER)
        time.sleep(2)
        driver.save_screenshot('SC_TC_BigType_05.png')


if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testDFI_Search']
    unittest.main(testRunner=HtmlTestRunner.HTMLTestRunner(output=cwd))
| [
"noreply@github.com"
] | Longmann94.noreply@github.com |
8d9ac36f24336db0e63b921c37369c5a633ef374 | 31e53fb09646ad38a780abd94ecdf2ac95f42a2c | /mietrechtspraxis/mietrechtspraxis/doctype/mietrechtspraxis_api/mietrechtspraxis_api.py | 6cc2b1b7829925fb9cd5ca5b38d7a1291038dcd6 | [
"MIT"
] | permissive | alkuhlani/mietrechtspraxis | 9521a1bee47b7195bc0e2bdc45969e71af220cbe | d46b404af4833c374fc1a66878f84d18a4842a08 | refs/heads/master | 2023-06-29T22:14:42.230038 | 2021-08-03T14:01:11 | 2021-08-03T14:01:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,256 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2021, libracore AG and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
import requests
import json
import hashlib
from cryptography.fernet import Fernet, InvalidToken
from frappe.utils import cstr, encode
from mietrechtspraxis.mietrechtspraxis.doctype.mp_abo.mp_abo import create_random_pw
class mietrechtspraxisAPI(Document):
    # DocType controller stub: the single "mietrechtspraxis API" document only
    # stores settings (API token, secret key); the endpoint logic lives in the
    # module-level functions below.
    pass
def _request(**kwargs):
    '''
    call on [IP]/api/method/mietrechtspraxis.api.request

    Mandatory Parameter:
        - token
        - method
        - username
    '''
    # The token must be present and match the configured API token.
    if 'token' not in kwargs:
        # 400 Bad Request (Missing Token)
        return raise_4xx(400, 'Bad Request', 'Token Required')
    if kwargs['token'] != frappe.db.get_single_value('mietrechtspraxis API', 'token'):
        # 401 Unauthorized (Invalid Token)
        return raise_4xx(401, 'Unauthorized', 'Invalid Token')
    if 'method' not in kwargs:
        # 400 Bad Request (Missing Method)
        return raise_4xx(400, 'Bad Request', 'Missing Method')
    # Dispatch table instead of an if/elif chain.
    handlers = {
        'login': check_login,
        'update': update,
        'reset': pw_reset,
        'reset_mail': pw_reset_mail,
        'newsletter': newsletter,
    }
    handler = handlers.get(kwargs['method'])
    if handler is None:
        # 404 Not Found (unknown method)
        return raise_4xx(404, 'Not Found', 'Unknown Method')
    return handler(**kwargs)
def check_login(**kwargs):
    """Validate username/password; the password arrives as a sha256 hex digest
    of the stored credential."""
    if 'username' not in kwargs:
        # 400 Bad Request (Username Missing)
        return raise_4xx(400, 'Bad Request', 'Username Missing')
    mp_user = find_user(kwargs['username'])
    if not mp_user:
        # 404 Not Found (No User-Credentials found)
        return raise_4xx(404, 'Not Found', 'No User-Credentials found')
    if 'password' not in kwargs:
        # 400 Bad Request (Password Missing)
        return raise_4xx(400, 'Bad Request', 'Password Missing')
    # Hash the stored credential and compare against the submitted digest.
    digest = hashlib.sha256()
    digest.update(encode("{pwd}".format(pwd=mp_user.mp_password)))
    if digest.hexdigest() == kwargs['password']:
        return raise_200()
    # 401 Unauthorized (Invalid Password)
    return raise_4xx(401, 'Unauthorized', 'Invalid Password')
def update(**kwargs):
    """Update profile data (salutation, name, e-mail, newsletter flags,
    password) of the contact identified by `username`.

    `kwargs['user']` must be a JSON object; only the keys present in it are
    applied.  Returns the old and the new values on success.
    """
    # check username
    try:
        username = kwargs['username']
    except:
        # 400 Bad Request (Username Missing)
        return raise_4xx(400, 'Bad Request', 'Username Missing')
    # check user data for update
    try:
        user_data = kwargs['user']
    except:
        # 400 Bad Request (User-Data Missing)
        return raise_4xx(400, 'Bad Request', 'User-Data Missing')
    mp_user = find_user(username)
    if mp_user:
        # old_data records the previous value of every field we touch so the
        # caller gets a before/after diff in the response.
        old_data = {}
        user_data = json.loads(kwargs['user'])
        if 'salutation' in user_data:
            old_data['salutation'] = mp_user.salutation
            mp_user.salutation = user_data['salutation']
        if 'firstname' in user_data:
            old_data['firstname'] = mp_user.first_name
            mp_user.first_name = user_data['firstname']
        if 'lastname' in user_data:
            old_data['lastname'] = mp_user.last_name
            mp_user.last_name = user_data['lastname']
        if 'email' in user_data:
            # Replace the primary e-mail entry and the denormalized copy.
            for email in mp_user.email_ids:
                if email.is_primary:
                    old_data['email'] = email.email_id
                    email.email_id = user_data['email']
            mp_user.email_id = user_data['email']
        if 'newsletters' in user_data:
            old_data['newsletters'] = {}
            old_data['newsletters']['1'] = mp_user.nl_1
            old_data['newsletters']['2'] = mp_user.nl_2
            old_data['newsletters']['3'] = mp_user.nl_3
            old_data['newsletters']['4'] = mp_user.nl_4
            mp_user.nl_1 = user_data['newsletters']['1']
            mp_user.nl_2 = user_data['newsletters']['2']
            mp_user.nl_3 = user_data['newsletters']['3']
            mp_user.nl_4 = user_data['newsletters']['4']
        if 'new_password' in user_data:
            # TODO: encrypt(mp_user.mp_password) first!!!
            old_data['password'] = mp_user.mp_password
            # TODO: decrypt(user_data['new_password']) first!!!
            mp_user.mp_password = user_data['new_password']
        mp_user.save(ignore_permissions=True)
        frappe.db.commit()
        return raise_200({'old_data': old_data, 'updated_data': user_data})
    else:
        # 404 Not Found (No User found)
        return raise_4xx(404, 'Not Found', 'No User found')
def pw_reset(**kwargs):
    """Set the user's password to the supplied `new_password` value."""
    if 'username' not in kwargs:
        # 400 Bad Request (Username Missing)
        return raise_4xx(400, 'Bad Request', 'Username Missing')
    mp_user = find_user(kwargs['username'])
    if not mp_user:
        # 404 Not Found (No User found)
        return raise_4xx(404, 'Not Found', 'No User found')
    if 'new_password' not in kwargs:
        # 400 Bad Request (New Password Missing)
        return raise_4xx(400, 'Bad Request', 'New Password Missing')
    # TODO (pre-existing): the password is stored as received, without encrypt().
    mp_user.mp_password = kwargs['new_password']
    mp_user.save(ignore_permissions=True)
    frappe.db.commit()
    return raise_200()
def pw_reset_mail(**kwargs):
    """Generate a fresh random password for the user and send it by e-mail."""
    if 'username' not in kwargs:
        # 400 Bad Request (Username Missing)
        return raise_4xx(400, 'Bad Request', 'Username Missing')
    mp_user = find_user(kwargs['username'])
    if not mp_user:
        # 404 Not Found (No User found)
        return raise_4xx(404, 'Not Found', 'No User found')
    if not mp_user.email_id:
        # 400 Bad Request (User has no E-Mail)
        return raise_4xx(400, 'Bad Request', 'User has no E-Mail')
    new_password = create_random_pw()
    mp_user.mp_password = new_password
    mp_user.save(ignore_permissions=True)
    frappe.db.commit()
    frappe.sendmail(recipients=mp_user.email_id, message="Ihr neues Passwort lautet: {pwd}".format(pwd=new_password))
    return raise_200()
def newsletter(**kwargs):
    """Update the four newsletter subscription flags of the contact
    identified by `email`.

    Returns the old and the new flag sets on success.
    """
    # check email
    if 'email' not in kwargs:
        # 400 Bad Request (E-Mail Missing)
        return raise_4xx(400, 'Bad Request', 'E-Mail Missing')
    email = kwargs['email']
    # check newsletter data
    try:
        newsletter = json.loads(kwargs['newsletters'])
    except Exception:
        # 400 Bad Request (Newsletters Missing)
        return raise_4xx(400, 'Bad Request', 'Newsletters Missing')
    # BUGFIX: this previously called find_user(username), but `username` is
    # never defined in this function (NameError at runtime) — this endpoint
    # identifies the contact by e-mail address.
    mp_user = find_user(email)
    if mp_user:
        old_data = {'newsletters': {
            '1': mp_user.nl_1,
            '2': mp_user.nl_2,
            '3': mp_user.nl_3,
            '4': mp_user.nl_4,
        }}
        mp_user.nl_1 = newsletter['1']
        mp_user.nl_2 = newsletter['2']
        mp_user.nl_3 = newsletter['3']
        mp_user.nl_4 = newsletter['4']
        mp_user.save(ignore_permissions=True)
        frappe.db.commit()
        return raise_200({'old_data': old_data, 'updated_data': newsletter})
    else:
        # 404 Not Found (No User found)
        return raise_4xx(404, 'Not Found', 'No User found')
def find_user(search_key):
    """Locate a Contact by (1) mp_username, (2) legacy abo number
    (`mp_abo_old`), or (3) primary e-mail address, in that order.

    Returns the Contact document, or False when nothing matches.
    """
    # SECURITY FIX: the previous version interpolated `search_key` directly
    # into the SQL string — an injection vector, since the key comes straight
    # from the request.  The value is now passed as a bound parameter; only
    # the column name (from a fixed tuple) is formatted into the query.
    for field in ('mp_username', 'mp_abo_old', 'email_id'):
        match = frappe.db.sql(
            """SELECT `name` FROM `tabContact` WHERE `{field}` = %s""".format(field=field),
            (search_key,), as_dict=True)
        if len(match) > 0:
            return frappe.get_doc("Contact", match[0].name)
    return False
def raise_4xx(code, title, message):
    """Build the [status-line, payload] pair for a 4xx response."""
    status_line = '{code} {title}'.format(code=code, title=title)
    payload = {
        "error": {
            "code": code,
            "message": "{message}".format(message=message)
        }
    }
    return [status_line, payload]
def raise_200(answer=False):
    """Build the [status-line, payload] pair for a 200 response.

    A falsy `answer` yields the default {"code": 200, "message": "OK"} payload.
    """
    payload = answer if answer else {"code": 200, "message": "OK"}
    return ['200 OK', payload]
def get_encryption_key(new=False):
    """Return the configured secret key, or generate a fresh Fernet key
    when `new` is True (the generated key is not persisted here)."""
    if new:
        return Fernet.generate_key().decode()
    return frappe.db.get_single_value('mietrechtspraxis API', 'secret_key')
def encrypt(pwd):
    """Fernet-encrypt `pwd` with the configured secret key and return the
    token as a string."""
    if len(pwd) > 100:
        # encrypting > 100 chars will lead to truncation
        frappe.throw(_('Password cannot be more than 100 characters long'))
    suite = Fernet(encode(get_encryption_key()))
    return cstr(suite.encrypt(encode(pwd)))
def decrypt(pwd):
    """Fernet-decrypt `pwd`; throws when the configured key cannot decode it."""
    try:
        suite = Fernet(encode(get_encryption_key()))
        return cstr(suite.decrypt(encode(pwd)))
    except InvalidToken:
        # encryption_key not valid
        frappe.throw(_('Encryption key is invalid'))
| [
"joel.mesmer@libracore.com"
] | joel.mesmer@libracore.com |
0609114ce349631e21e78619f2e13aaeae8803f9 | 06933693ff601156402dc1c0a424cf292bf0c6ed | /home/management/commands/load_initial_data.py | 9cb5d99213107dee3c487d2b72e77805191def94 | [] | no_license | crowdbotics-apps/onboard-5295 | 77f6daad336d7a14c296cf7015147d711d412505 | 45cd684a02613a2527e52faadf96a38991b76987 | refs/heads/master | 2022-12-14T23:39:31.154518 | 2019-06-27T23:42:07 | 2019-06-27T23:42:07 | 194,176,736 | 0 | 0 | null | 2022-12-08T20:50:16 | 2019-06-27T23:41:28 | Python | UTF-8 | Python | false | false | 719 | py |
from django.core.management import BaseCommand
from home.models import CustomText, HomePage
def load_initial_data():
    """Seed the database with the demo HomePage and CustomText entries."""
    body_html = """
    <h1 class="display-4 text-center">OnBoard.</h1>
    <p class="lead">
    This is the sample application created and deployed from the crowdbotics slack app. You can
    view list of packages selected for this application below
    </p>"""
    CustomText.objects.create(title='OnBoard.')
    HomePage.objects.create(body=body_html)
class Command(BaseCommand):
    """`manage.py load_initial_data` — seed the home app with demo content."""
    can_import_settings = True
    help = 'Load initial data to db'

    def handle(self, *args, **options):
        # Delegates to the module-level seeding function above.
        load_initial_data()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
ac8854b98b82c5621e509614b257be316cedf096 | 066ee4df594a5dc90335d271b9d5a1b1e2a4d34c | /src/lib/wtforms/ext/i18n/form.py | 60544824f90cb1b77f8f92b83f9b91d28f78b627 | [] | permissive | ychen820/microblog | a2d82447525325ec58285c2e5db58b79cceaca1b | d379afa2db3582d5c3be652165f0e9e2e0c154c6 | refs/heads/master | 2021-01-20T05:58:48.424357 | 2015-04-28T22:03:09 | 2015-04-28T22:03:09 | 32,948,331 | 0 | 2 | BSD-3-Clause | 2020-07-25T05:04:35 | 2015-03-26T19:45:07 | Python | UTF-8 | Python | false | false | 1,259 | py | from wtforms import form
from wtforms.ext.i18n.utils import get_translations
translations_cache = {}
class Form(form.Form):
"""
Base form for a simple localized WTForms form.
This will use the stdlib gettext library to retrieve an appropriate
translations object for the language, by default using the locale
information from the environment.
If the LANGUAGES class variable is overridden and set to a sequence of
strings, this will be a list of languages by priority to use instead, e.g::
LANGUAGES = ['en_GB', 'en']
One can also provide the languages by passing `LANGUAGES=` to the
constructor of the form.
Translations objects are cached to prevent having to get a new one for the
same languages every instantiation.
"""
LANGUAGES = None
def __init__(self, *args, **kwargs):
if 'LANGUAGES' in kwargs:
self.LANGUAGES = kwargs.pop('LANGUAGES')
super(Form, self).__init__(*args, **kwargs)
def _get_translations(self):
languages = tuple(self.LANGUAGES) if self.LANGUAGES else None
if languages not in translations_cache:
translations_cache[languages] = get_translations(languages)
return translations_cache[languages]
| [
"ychen207@binghamton.edu"
] | ychen207@binghamton.edu |
29254a283d32280f056293e7391c7261bde9d15c | 3655215852ee2fb1864dbfa1ce924290a2c4f4b9 | /Tuple_operation.py | 43fe5822ba8f491fdff046af229963119819a470 | [] | no_license | shubhamkanade/Niyander-Python | 8b318df2b8ae33b943dcf83eb01c00577914ca59 | 2b6a4780707e26852aa950d7c3e54be1f3b2080b | refs/heads/main | 2023-06-16T03:44:07.833619 | 2021-07-06T03:09:16 | 2021-07-06T03:09:16 | 383,021,140 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py |
Tuple=('a',)
Tuple1="a","b","c","d"
Tuple2=(1,2,3,4,5)
print Tuple1+Tuple2
print Tuple
del Tuple
print Tuple #name 'tuple is not defined'
| [
"shubhamkanade98@gmail.com"
] | shubhamkanade98@gmail.com |
e623bae8fc46dc8fb0e597e972905d9563a89e96 | fcd965c9333ee328ec51bc41f5bc0300cc06dc33 | /LeetCode/Blind 75/Dynamic Programming/1D Dynamic/91_decode_ways.py | d0549868e32f709b896aa8002f90fd411e175c0e | [] | no_license | henrylin2008/Coding_Problems | 699bb345481c14dc3faa8bab439776c7070a1cb0 | 281067e872f73a27f76ae10ab0f1564916bddd28 | refs/heads/master | 2023-01-11T11:55:47.936163 | 2022-12-24T07:50:17 | 2022-12-24T07:50:17 | 170,151,972 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,074 | py | # 91. Decode Ways
# Link: https://leetcode.com/problems/decode-ways/
# Medium
# A message containing letters from A-Z can be encoded into numbers using the following mapping:
#
# 'A' -> "1"
# 'B' -> "2"
# ...
# 'Z' -> "26"
# To decode an encoded message, all the digits must be grouped then mapped back into letters using the reverse of the
# mapping above (there may be multiple ways). For example, "11106" can be mapped into:
#
# "AAJF" with the grouping (1 1 10 6)
# "KJF" with the grouping (11 10 6)
# Note that the grouping (1 11 06) is invalid because "06" cannot be mapped into 'F' since "6" is different from "06".
#
# Given a string s containing only digits, return the number of ways to decode it.
#
# The test cases are generated so that the answer fits in a 32-bit integer.
#
#
#
# Example 1:
# Input: s = "12"
# Output: 2
# Explanation: "12" could be decoded as "AB" (1 2) or "L" (12).
#
# Example 2:
# Input: s = "226"
# Output: 3
# Explanation: "226" could be decoded as "BZ" (2 26), "VF" (22 6), or "BBF" (2 2 6).
#
# Example 3:
# Input: s = "06"
# Output: 0
# Explanation: "06" cannot be mapped to "F" because of the leading zero ("6" is different from "06").
#
#
# Constraints:
#
# 1 <= s.length <= 100
# s contains only digits and may contain leading zero(s).
# Note: can cur char be decoded in one or two ways? Recursion -> cache -> iterative dp solution, a lot of edge cases
# to determine, 52, 31, 29, 10, 20 only decoded one way, 11, 26 decoded two ways
class Solution:
# recursive + cache solution
# Time: O(n)
# Space: O(n)
def numDecodings(self, s: str) -> int:
dp = {len(s): 1} # cache; empty string = 1
def dfs(i): # recursive func, i: current position
if i in dp: # i is already cached or i is the last position of s
return dp[i]
if s[i] == "0": # base case, if char starts with "0", return 0 (no way to decode it)
return 0
res = dfs(i + 1) # sub-problem: next position
# condition for following char: i+1 is inbound, and next char is 1 or 2 and the following char in "0123456"
# if next char inbound, and the following 2 strs/digs is between 10 and 26:
if i + 1 < len(s) and (s[i] == "1" or s[i] == "2" and s[i + 1] in "0123456"):
res += dfs(i + 2) # add the following str
dp[i] = res # cached it; dp[i] = dp[i+1] + dp[i+2]
return res
return dfs(0)
# Dynamic: bottom-up solution
# Time: O(n)
# Space: O(n)
# def numDecodings(self, s: str) -> int:
# dp = {len(s): 1} # cache; base case: len(s): 1 if it's an empty str
#
# for i in range(len(s) - 1, -1, -1): # iterate through in the reverse order
# if s[i] == "0": # base case
# dp[i] = 0
# else:
# dp[i] = dp[i + 1]
# if i + 1 < len(s) and (s[i] == "1" or s[i] == "2" and s[i + 1] in "0123456"):
# dp[i] += dp[i + 2] # add the following str
# return dp[0]
| [
"henrylin2008@yahoo.com"
] | henrylin2008@yahoo.com |
31e19091980ddd7815391fd7ccbb4d6062a92ac5 | 571a89f94f3ebd9ec8e6b618cddb7d05811e0d62 | /abc177/c/main.py | 0341cb88173c4b0336bda72891d1c6cad1f960a8 | [] | no_license | ryu19-1/atcoder_python | 57de9e1db8ff13a107b5861f8f6a231e40366313 | cc24b3c2895aad71d40cefbb8e2893dc397b8f4f | refs/heads/master | 2023-05-10T05:32:16.507207 | 2021-05-19T17:48:10 | 2021-05-19T17:48:10 | 368,954,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 567 | py | #!/usr/bin/env python3
import sys
from collections import deque, Counter
from heapq import heappop, heappush
from bisect import bisect_right
from itertools import accumulate
sys.setrecursionlimit(10**6)
INF = 10**12
m = 10**9 + 7
def main():
N = int(input())
A = list(map(int, input().split()))
ans = 0
for i in range(N):
ans += A[i]
ans %= m
ans = ans**2
ans %= m
for i in range(N):
ans -= pow(A[i],2,m)
ans %= m
ans = ans * pow(2,m-2,m) % m
print(ans)
if __name__ == "__main__":
main() | [
"ryu1007kami@gmail.com"
] | ryu1007kami@gmail.com |
75a8e3640a339ab44e8583e808b41f364ce368d4 | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/test/test_plat_shop_map_vo.py | 8fd6e86688a996ef5f7a879a93e7551fba632853 | [
"Apache-2.0"
] | permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 656 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import baiduads
from baiduads.subshopopenapi.model.plat_shop_map_vo import PlatShopMapVo
class TestPlatShopMapVo(unittest.TestCase):
"""PlatShopMapVo unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPlatShopMapVo(self):
"""Test PlatShopMapVo"""
# FIXME: construct object with mandatory attributes with example values
# model = PlatShopMapVo() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"yangxp@YangxpdeMacBook-Pro.local"
] | yangxp@YangxpdeMacBook-Pro.local |
4bc7a4f38cde5846e34d476537c242652d64db22 | c71fc2c91b119eeac9eafe2038b0e42ef11098f7 | /new/trucks/migrations/0005_auto_20190124_1843.py | 1af8d75d9619bbc06040522809a2a6f4308001c1 | [] | no_license | abhinavsharma629/Reminder-System | c3848fda4947506bcd35759c2966392b34e8ef6a | fb95e3b8ae8ce6eee116fb0a0aac9187189d515d | refs/heads/master | 2020-04-20T08:47:40.068485 | 2019-02-01T19:36:20 | 2019-02-01T19:36:20 | 168,749,751 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | # Generated by Django 2.1.5 on 2019-01-24 18:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('trucks', '0004_remove_notifications_boolean1'),
]
operations = [
migrations.AddField(
model_name='notifications',
name='fitness_id',
field=models.CharField(default=-1, max_length=30),
),
migrations.AddField(
model_name='notifications',
name='insurance_id',
field=models.PositiveIntegerField(default=1),
),
migrations.AlterField(
model_name='notifications',
name='truck_number',
field=models.CharField(max_length=14),
),
]
| [
"abhinavsharma629@gmail.com"
] | abhinavsharma629@gmail.com |
d753f4c532b52b9485026a9353250a6782eec47b | 1d6abe27a802d53f7fbd6eb5e59949044cbb3b98 | /tensorflow/python/keras/layers/preprocessing/normalization_distribution_test.py | 4bf15da435814ac8a2af97f42807594c54a819ee | [
"Apache-2.0"
] | permissive | STSjeerasak/tensorflow | 6bc8bf27fb74fd51a71150f25dc1127129f70222 | b57499d4ec0c24adc3a840a8e7e82bd4ce0d09ed | refs/heads/master | 2022-12-20T20:32:15.855563 | 2020-09-29T21:22:35 | 2020-09-29T21:29:31 | 299,743,927 | 5 | 1 | Apache-2.0 | 2020-09-29T21:38:19 | 2020-09-29T21:38:18 | null | UTF-8 | Python | false | false | 5,270 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for keras.layers.preprocessing.normalization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python import keras
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import combinations as ds_combinations
from tensorflow.python.distribute import strategy_combinations
from tensorflow.python.eager import context
from tensorflow.python.framework import test_combinations as combinations
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras.layers.preprocessing import normalization
from tensorflow.python.keras.layers.preprocessing import normalization_v1
from tensorflow.python.keras.layers.preprocessing import preprocessing_test_utils
from tensorflow.python.platform import test
def get_layer_class():
if context.executing_eagerly():
return normalization.Normalization
else:
return normalization_v1.Normalization
def _get_layer_computation_test_cases():
test_cases = ({
"adapt_data": np.array([[1.], [2.], [3.], [4.], [5.]], dtype=np.float32),
"axis": -1,
"test_data": np.array([[1.], [2.], [3.]], np.float32),
"expected": np.array([[-1.414214], [-.707107], [0]], np.float32),
"testcase_name": "2d_single_element"
}, {
"adapt_data": np.array([[1.], [2.], [3.], [4.], [5.]], dtype=np.float32),
"axis": None,
"test_data": np.array([[1.], [2.], [3.]], np.float32),
"expected": np.array([[-1.414214], [-.707107], [0]], np.float32),
"testcase_name": "2d_single_element_none_axis"
}, {
"adapt_data": np.array([[1., 2., 3., 4., 5.]], dtype=np.float32),
"axis": None,
"test_data": np.array([[1.], [2.], [3.]], np.float32),
"expected": np.array([[-1.414214], [-.707107], [0]], np.float32),
"testcase_name": "2d_single_element_none_axis_flat_data"
}, {
"adapt_data":
np.array([[[1., 2., 3.], [2., 3., 4.]], [[3., 4., 5.], [4., 5., 6.]]],
np.float32),
"axis":
1,
"test_data":
np.array([[[1., 2., 3.], [2., 3., 4.]], [[3., 4., 5.], [4., 5., 6.]]],
np.float32),
"expected":
np.array([[[-1.549193, -0.774597, 0.], [-1.549193, -0.774597, 0.]],
[[0., 0.774597, 1.549193], [0., 0.774597, 1.549193]]],
np.float32),
"testcase_name":
"3d_internal_axis"
}, {
"adapt_data":
np.array(
[[[1., 0., 3.], [2., 3., 4.]], [[3., -1., 5.], [4., 5., 8.]]],
np.float32),
"axis": (1, 2),
"test_data":
np.array(
[[[3., 1., -1.], [2., 5., 4.]], [[3., 0., 5.], [2., 5., 8.]]],
np.float32),
"expected":
np.array(
[[[1., 3., -5.], [-1., 1., -1.]], [[1., 1., 1.], [-1., 1., 1.]]],
np.float32),
"testcase_name":
"3d_multiple_axis"
})
crossed_test_cases = []
# Cross above test cases with use_dataset in (True, False)
for use_dataset in (True, False):
for case in test_cases:
case = case.copy()
if use_dataset:
case["testcase_name"] = case["testcase_name"] + "_with_dataset"
case["use_dataset"] = use_dataset
crossed_test_cases.append(case)
return crossed_test_cases
@ds_combinations.generate(
combinations.times(
combinations.combine(
distribution=strategy_combinations.all_strategies,
mode=["eager", "graph"]), _get_layer_computation_test_cases()))
class NormalizationTest(keras_parameterized.TestCase,
preprocessing_test_utils.PreprocessingLayerTest):
def test_layer_computation(self, distribution, adapt_data, axis, test_data,
use_dataset, expected):
input_shape = tuple([None for _ in range(test_data.ndim - 1)])
if use_dataset:
# Keras APIs expect batched datasets
adapt_data = dataset_ops.Dataset.from_tensor_slices(adapt_data).batch(
test_data.shape[0] // 2)
test_data = dataset_ops.Dataset.from_tensor_slices(test_data).batch(
test_data.shape[0] // 2)
with distribution.scope():
input_data = keras.Input(shape=input_shape)
layer = get_layer_class()(axis=axis)
layer.adapt(adapt_data)
output = layer(input_data)
model = keras.Model(input_data, output)
output_data = model.predict(test_data)
self.assertAllClose(expected, output_data)
if __name__ == "__main__":
test.main()
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
ef358f189d6d0f154253c9a6be092c1bde2adf0f | 4085874e861c40f94b0ba82fdbff273762c26eb2 | /test/functional/p2p_permissions.py | 058d216bc61d9623ec3c1c5d214f10d8cf5c2b01 | [
"MIT"
] | permissive | zortcoin/zortcoin | 82632c9ae46d57ef77ea7e79ab000f2e002dfaf3 | 379a1d01cc40c5c8ea50bdc41eded0dfbd1724f1 | refs/heads/master | 2022-07-27T20:33:34.543723 | 2021-08-25T16:25:17 | 2021-08-25T16:25:17 | 380,851,757 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,160 | py | #!/usr/bin/env python3
# Copyright (c) 2015-2020 The Zortcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test p2p permission message.
Test that permissions are correctly calculated and applied
"""
from test_framework.address import ADDRESS_BCRT1_P2WSH_OP_TRUE
from test_framework.messages import (
CTransaction,
CTxInWitness,
FromHex,
)
from test_framework.p2p import P2PDataStore
from test_framework.script import (
CScript,
OP_TRUE,
)
from test_framework.test_node import ErrorMatch
from test_framework.test_framework import ZortcoinTestFramework
from test_framework.util import (
assert_equal,
p2p_port,
)
class P2PPermissionsTests(ZortcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def run_test(self):
self.check_tx_relay()
self.checkpermission(
# default permissions (no specific permissions)
["-whitelist=127.0.0.1"],
# Make sure the default values in the command line documentation match the ones here
["relay", "noban", "mempool", "download"],
True)
self.checkpermission(
# check without deprecatedrpc=whitelisted
["-whitelist=127.0.0.1"],
# Make sure the default values in the command line documentation match the ones here
["relay", "noban", "mempool", "download"],
None)
self.checkpermission(
# no permission (even with forcerelay)
["-whitelist=@127.0.0.1", "-whitelistforcerelay=1"],
[],
False)
self.checkpermission(
# relay permission removed (no specific permissions)
["-whitelist=127.0.0.1", "-whitelistrelay=0"],
["noban", "mempool", "download"],
True)
self.checkpermission(
# forcerelay and relay permission added
# Legacy parameter interaction which set whitelistrelay to true
# if whitelistforcerelay is true
["-whitelist=127.0.0.1", "-whitelistforcerelay"],
["forcerelay", "relay", "noban", "mempool", "download"],
True)
# Let's make sure permissions are merged correctly
# For this, we need to use whitebind instead of bind
# by modifying the configuration file.
ip_port = "127.0.0.1:{}".format(p2p_port(1))
self.replaceinconfig(1, "bind=127.0.0.1", "whitebind=bloomfilter,forcerelay@" + ip_port)
self.checkpermission(
["-whitelist=noban@127.0.0.1"],
# Check parameter interaction forcerelay should activate relay
["noban", "bloomfilter", "forcerelay", "relay", "download"],
False)
self.replaceinconfig(1, "whitebind=bloomfilter,forcerelay@" + ip_port, "bind=127.0.0.1")
self.checkpermission(
# legacy whitelistrelay should be ignored
["-whitelist=noban,mempool@127.0.0.1", "-whitelistrelay"],
["noban", "mempool", "download"],
False)
self.checkpermission(
# check without deprecatedrpc=whitelisted
["-whitelist=noban,mempool@127.0.0.1", "-whitelistrelay"],
["noban", "mempool", "download"],
None)
self.checkpermission(
# legacy whitelistforcerelay should be ignored
["-whitelist=noban,mempool@127.0.0.1", "-whitelistforcerelay"],
["noban", "mempool", "download"],
False)
self.checkpermission(
# missing mempool permission to be considered legacy whitelisted
["-whitelist=noban@127.0.0.1"],
["noban", "download"],
False)
self.checkpermission(
# all permission added
["-whitelist=all@127.0.0.1"],
["forcerelay", "noban", "mempool", "bloomfilter", "relay", "download", "addr"],
False)
self.stop_node(1)
self.nodes[1].assert_start_raises_init_error(["-whitelist=oopsie@127.0.0.1"], "Invalid P2P permission", match=ErrorMatch.PARTIAL_REGEX)
self.nodes[1].assert_start_raises_init_error(["-whitelist=noban@127.0.0.1:230"], "Invalid netmask specified in", match=ErrorMatch.PARTIAL_REGEX)
self.nodes[1].assert_start_raises_init_error(["-whitebind=noban@127.0.0.1/10"], "Cannot resolve -whitebind address", match=ErrorMatch.PARTIAL_REGEX)
def check_tx_relay(self):
block_op_true = self.nodes[0].getblock(self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_P2WSH_OP_TRUE)[0])
self.sync_all()
self.log.debug("Create a connection from a forcerelay peer that rebroadcasts raw txs")
# A test framework p2p connection is needed to send the raw transaction directly. If a full node was used, it could only
# rebroadcast via the inv-getdata mechanism. However, even for forcerelay connections, a full node would
# currently not request a txid that is already in the mempool.
self.restart_node(1, extra_args=["-whitelist=forcerelay@127.0.0.1"])
p2p_rebroadcast_wallet = self.nodes[1].add_p2p_connection(P2PDataStore())
self.log.debug("Send a tx from the wallet initially")
tx = FromHex(
CTransaction(),
self.nodes[0].createrawtransaction(
inputs=[{
'txid': block_op_true['tx'][0],
'vout': 0,
}], outputs=[{
ADDRESS_BCRT1_P2WSH_OP_TRUE: 5,
}]),
)
tx.wit.vtxinwit = [CTxInWitness()]
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
txid = tx.rehash()
self.log.debug("Wait until tx is in node[1]'s mempool")
p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
self.log.debug("Check that node[1] will send the tx to node[0] even though it is already in the mempool")
self.connect_nodes(1, 0)
with self.nodes[1].assert_debug_log(["Force relaying tx {} from peer=0".format(txid)]):
p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
self.wait_until(lambda: txid in self.nodes[0].getrawmempool())
self.log.debug("Check that node[1] will not send an invalid tx to node[0]")
tx.vout[0].nValue += 1
txid = tx.rehash()
# Send the transaction twice. The first time, it'll be rejected by ATMP because it conflicts
# with a mempool transaction. The second time, it'll be in the recentRejects filter.
p2p_rebroadcast_wallet.send_txs_and_test(
[tx],
self.nodes[1],
success=False,
reject_reason='{} from peer=0 was not accepted: txn-mempool-conflict'.format(txid)
)
p2p_rebroadcast_wallet.send_txs_and_test(
[tx],
self.nodes[1],
success=False,
reject_reason='Not relaying non-mempool transaction {} from forcerelay peer=0'.format(txid)
)
def checkpermission(self, args, expectedPermissions, whitelisted):
if whitelisted is not None:
args = [*args, '-deprecatedrpc=whitelisted']
self.restart_node(1, args)
self.connect_nodes(0, 1)
peerinfo = self.nodes[1].getpeerinfo()[0]
if whitelisted is None:
assert 'whitelisted' not in peerinfo
else:
assert_equal(peerinfo['whitelisted'], whitelisted)
assert_equal(len(expectedPermissions), len(peerinfo['permissions']))
for p in expectedPermissions:
if not p in peerinfo['permissions']:
raise AssertionError("Expected permissions %r is not granted." % p)
def replaceinconfig(self, nodeid, old, new):
with open(self.nodes[nodeid].zortcoinconf, encoding="utf8") as f:
newText = f.read().replace(old, new)
with open(self.nodes[nodeid].zortcoinconf, 'w', encoding="utf8") as f:
f.write(newText)
if __name__ == '__main__':
P2PPermissionsTests().main()
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
f1d0ba5bd2156ebedefeb9b9ed9f945bc33c6384 | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-config/aliyunsdkconfig/request/v20200907/UpdateConfigRuleRequest.py | 675e72a0b4aec9655860d7318e001d677ada76f5 | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 4,698 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkconfig.endpoint import endpoint_data
class UpdateConfigRuleRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Config', '2020-09-07', 'UpdateConfigRule')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ConfigRuleId(self): # String
return self.get_body_params().get('ConfigRuleId')
def set_ConfigRuleId(self, ConfigRuleId): # String
self.add_body_params('ConfigRuleId', ConfigRuleId)
def get_TagKeyScope(self): # String
return self.get_body_params().get('TagKeyScope')
def set_TagKeyScope(self, TagKeyScope): # String
self.add_body_params('TagKeyScope', TagKeyScope)
def get_ClientToken(self): # String
return self.get_body_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_body_params('ClientToken', ClientToken)
def get_ResourceTypesScope(self): # Array
return self.get_body_params().get('ResourceTypesScope')
def set_ResourceTypesScope(self, ResourceTypesScope): # Array
for index1, value1 in enumerate(ResourceTypesScope):
self.add_body_params('ResourceTypesScope.' + str(index1 + 1), value1)
def get_Description(self): # String
return self.get_body_params().get('Description')
def set_Description(self, Description): # String
self.add_body_params('Description', Description)
def get_ConfigRuleTriggerTypes(self): # String
return self.get_body_params().get('ConfigRuleTriggerTypes')
def set_ConfigRuleTriggerTypes(self, ConfigRuleTriggerTypes): # String
self.add_body_params('ConfigRuleTriggerTypes', ConfigRuleTriggerTypes)
def get_TagValueScope(self): # String
return self.get_body_params().get('TagValueScope')
def set_TagValueScope(self, TagValueScope): # String
self.add_body_params('TagValueScope', TagValueScope)
def get_RegionIdsScope(self): # String
return self.get_body_params().get('RegionIdsScope')
def set_RegionIdsScope(self, RegionIdsScope): # String
self.add_body_params('RegionIdsScope', RegionIdsScope)
def get_RiskLevel(self): # Integer
return self.get_body_params().get('RiskLevel')
def set_RiskLevel(self, RiskLevel): # Integer
self.add_body_params('RiskLevel', RiskLevel)
def get_ResourceGroupIdsScope(self): # String
return self.get_body_params().get('ResourceGroupIdsScope')
def set_ResourceGroupIdsScope(self, ResourceGroupIdsScope): # String
self.add_body_params('ResourceGroupIdsScope', ResourceGroupIdsScope)
def get_InputParameters(self): # String
return self.get_body_params().get('InputParameters')
def set_InputParameters(self, InputParameters): # String
self.add_body_params('InputParameters', InputParameters)
def get_ConfigRuleName(self): # String
return self.get_body_params().get('ConfigRuleName')
def set_ConfigRuleName(self, ConfigRuleName): # String
self.add_body_params('ConfigRuleName', ConfigRuleName)
def get_TagKeyLogicScope(self): # String
return self.get_body_params().get('TagKeyLogicScope')
def set_TagKeyLogicScope(self, TagKeyLogicScope): # String
self.add_body_params('TagKeyLogicScope', TagKeyLogicScope)
def get_MaximumExecutionFrequency(self): # String
return self.get_body_params().get('MaximumExecutionFrequency')
def set_MaximumExecutionFrequency(self, MaximumExecutionFrequency): # String
self.add_body_params('MaximumExecutionFrequency', MaximumExecutionFrequency)
def get_ExcludeResourceIdsScope(self): # String
return self.get_body_params().get('ExcludeResourceIdsScope')
def set_ExcludeResourceIdsScope(self, ExcludeResourceIdsScope): # String
self.add_body_params('ExcludeResourceIdsScope', ExcludeResourceIdsScope)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
511596a5455b5c79e535abba7edf200f8b190a99 | a7b175357e1ed29dc8332a950e320e64f5db6703 | /venv/Lib/site-packages/wx/grid.pyi | 26fff646ebf1389174b5c523aa73d193cfd8830b | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | saleguas/deskOrg | f65b8603464dbb0e17363ca8a724c12d45da8116 | c21d9abf56e1756fa8073ccc3547ec9a85d83e2a | refs/heads/master | 2022-12-13T18:06:33.029636 | 2020-04-05T20:19:56 | 2020-04-05T20:19:56 | 164,255,371 | 3 | 1 | MIT | 2022-12-08T01:42:51 | 2019-01-05T22:15:27 | Python | UTF-8 | Python | false | false | 105,319 | pyi | # -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# This file is generated by wxPython's PI generator. Do not edit by hand.
#
# The *.pyi files are used by PyCharm and other development tools to provide
# more information, such as PEP 484 type hints, than it is able to glean from
# introspection of extension types and methods. They are not intended to be
# imported, executed or used for any other purpose other than providing info
# to the tools. If you don't use use a tool that makes use of .pyi files then
# you can safely ignore this file.
#
# See: https://www.python.org/dev/peps/pep-0484/
# https://www.jetbrains.com/help/pycharm/2016.1/type-hinting-in-pycharm.html
#
# Copyright: (c) 2018 by Total Control Software
# License: wxWindows License
#---------------------------------------------------------------------------
"""
The Grid and related classes in this module provide functionality similar to a
spreadsheet, where the application can display rows and columns of data of
various types, which the user can edit and otherwise interact with.
"""
#-- begin-_grid --#
import wx
#-- end-_grid --#
#-- begin-grid --#
GridNameStr = ""
GRID_AUTOSIZE = 0
GRID_COLUMN = 0
GRID_ROW = 0
GRID_FLOAT_FORMAT_FIXED = 0
GRID_FLOAT_FORMAT_SCIENTIFIC = 0
GRID_FLOAT_FORMAT_COMPACT = 0
GRID_FLOAT_FORMAT_UPPER = 0
GRID_FLOAT_FORMAT_DEFAULT = 0
GRIDTABLE_REQUEST_VIEW_GET_VALUES = 0
GRIDTABLE_REQUEST_VIEW_SEND_VALUES = 0
GRIDTABLE_NOTIFY_ROWS_INSERTED = 0
GRIDTABLE_NOTIFY_ROWS_APPENDED = 0
GRIDTABLE_NOTIFY_ROWS_DELETED = 0
GRIDTABLE_NOTIFY_COLS_INSERTED = 0
GRIDTABLE_NOTIFY_COLS_APPENDED = 0
GRIDTABLE_NOTIFY_COLS_DELETED = 0
GRID_DRAW_ROWS_HEADER = 0
GRID_DRAW_COLS_HEADER = 0
GRID_DRAW_CELL_LINES = 0
GRID_DRAW_BOX_RECT = 0
GRID_DRAW_SELECTION = 0
GRID_DRAW_DEFAULT = 0
wxEVT_GRID_CELL_LEFT_CLICK = 0
wxEVT_GRID_CELL_RIGHT_CLICK = 0
wxEVT_GRID_CELL_LEFT_DCLICK = 0
wxEVT_GRID_CELL_RIGHT_DCLICK = 0
wxEVT_GRID_LABEL_LEFT_CLICK = 0
wxEVT_GRID_LABEL_RIGHT_CLICK = 0
wxEVT_GRID_LABEL_LEFT_DCLICK = 0
wxEVT_GRID_LABEL_RIGHT_DCLICK = 0
wxEVT_GRID_ROW_SIZE = 0
wxEVT_GRID_COL_SIZE = 0
wxEVT_GRID_COL_AUTO_SIZE = 0
wxEVT_GRID_RANGE_SELECT = 0
wxEVT_GRID_CELL_CHANGING = 0
wxEVT_GRID_CELL_CHANGED = 0
wxEVT_GRID_SELECT_CELL = 0
wxEVT_GRID_EDITOR_SHOWN = 0
wxEVT_GRID_EDITOR_HIDDEN = 0
wxEVT_GRID_EDITOR_CREATED = 0
wxEVT_GRID_CELL_BEGIN_DRAG = 0
wxEVT_GRID_COL_MOVE = 0
wxEVT_GRID_COL_SORT = 0
wxEVT_GRID_TABBING = 0
class GridCellCoords(object):
"""
GridCellCoords()
GridCellCoords(row, col)
Represents coordinates of a grid cell.
"""
def __init__(self, *args, **kw):
"""
GridCellCoords()
GridCellCoords(row, col)
Represents coordinates of a grid cell.
"""
def GetRow(self):
"""
GetRow() -> int
Return the row of the coordinate.
"""
def SetRow(self, n):
"""
SetRow(n)
Set the row of the coordinate.
"""
def GetCol(self):
"""
GetCol() -> int
Return the column of the coordinate.
"""
def SetCol(self, n):
"""
SetCol(n)
Set the column of the coordinate.
"""
def Set(self, row, col):
"""
Set(row, col)
Set the row and column of the coordinate.
"""
def __eq__(self):
"""
"""
def __ne__(self):
"""
"""
Col = property(None, None)
Row = property(None, None)
def Get(self):
"""
Get() -> (row,col)
Return the row and col properties as a tuple.
"""
def GetIM(self):
"""
Returns an immutable representation of the ``wx.GridCellCoords`` object, based on ``namedtuple``.
This new object is hashable and can be used as a dictionary key,
be added to sets, etc. It can be converted back into a real ``wx.GridCellCoords``
with a simple statement like this: ``obj = wx.GridCellCoords(imObj)``.
"""
def __str__(self):
"""
"""
def __repr__(self):
"""
"""
def __len__(self):
"""
"""
def __nonzero__(self):
"""
"""
def __bool__(self):
"""
"""
def __reduce__(self):
"""
"""
def __getitem__(self, idx):
"""
"""
def __setitem__(self, idx, val):
"""
"""
__safe_for_unpickling__ = True
# end of class GridCellCoords
class GridCellRenderer(wx.ClientDataContainer, wx.RefCounter):
"""
GridCellRenderer()
This class is responsible for actually drawing the cell in the grid.
"""
def __init__(self):
"""
GridCellRenderer()
This class is responsible for actually drawing the cell in the grid.
"""
def Clone(self):
"""
Clone() -> GridCellRenderer
This function must be implemented in derived classes to return a copy
of itself.
"""
def Draw(self, grid, attr, dc, rect, row, col, isSelected):
"""
Draw(grid, attr, dc, rect, row, col, isSelected)
Draw the given cell on the provided DC inside the given rectangle
using the style specified by the attribute and the default or selected
state corresponding to the isSelected value.
"""
def GetBestSize(self, grid, attr, dc, row, col):
"""
GetBestSize(grid, attr, dc, row, col) -> wx.Size
Get the preferred size of the cell for its contents.
"""
# end of class GridCellRenderer
class GridCellStringRenderer(GridCellRenderer):
"""
GridCellStringRenderer()
This class may be used to format string data in a cell; it is the
default for string cells.
"""
def __init__(self):
"""
GridCellStringRenderer()
This class may be used to format string data in a cell; it is the
default for string cells.
"""
# end of class GridCellStringRenderer
class GridCellAutoWrapStringRenderer(GridCellStringRenderer):
    """
    GridCellAutoWrapStringRenderer()
    This class may be used to format string data in a cell.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridCellAutoWrapStringRenderer()
        This class may be used to format string data in a cell.
        """
# end of class GridCellAutoWrapStringRenderer
class GridCellBoolRenderer(GridCellRenderer):
    """
    GridCellBoolRenderer()
    This class may be used to format boolean data in a cell.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridCellBoolRenderer()
        This class may be used to format boolean data in a cell.
        """
# end of class GridCellBoolRenderer
class GridCellDateTimeRenderer(GridCellStringRenderer):
    """
    GridCellDateTimeRenderer(outformat=wx.DefaultDateTimeFormat, informat=wx.DefaultDateTimeFormat)
    This class may be used to format a date/time data in a cell.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, outformat=wx.DefaultDateTimeFormat, informat=wx.DefaultDateTimeFormat):
        """
        GridCellDateTimeRenderer(outformat=wx.DefaultDateTimeFormat, informat=wx.DefaultDateTimeFormat)
        This class may be used to format a date/time data in a cell.
        """
    def SetParameters(self, params):
        """
        SetParameters(params)
        Sets the strptime()-like format string which will be used to parse the
        date/time.
        """
# end of class GridCellDateTimeRenderer
class GridCellEnumRenderer(GridCellStringRenderer):
    """
    GridCellEnumRenderer(choices=wx.EmptyString)
    This class may be used to render in a cell a number as a textual
    equivalent.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, choices=wx.EmptyString):
        """
        GridCellEnumRenderer(choices=wx.EmptyString)
        This class may be used to render in a cell a number as a textual
        equivalent.
        """
    def SetParameters(self, params):
        """
        SetParameters(params)
        Sets the comma separated string content of the enum.
        """
# end of class GridCellEnumRenderer
class GridCellFloatRenderer(GridCellStringRenderer):
    """
    GridCellFloatRenderer(width=-1, precision=-1, format=GRID_FLOAT_FORMAT_DEFAULT)
    This class may be used to format floating point data in a cell.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, width=-1, precision=-1, format=GRID_FLOAT_FORMAT_DEFAULT):
        """
        GridCellFloatRenderer(width=-1, precision=-1, format=GRID_FLOAT_FORMAT_DEFAULT)
        This class may be used to format floating point data in a cell.
        """
    def GetFormat(self):
        """
        GetFormat() -> int
        Returns the specifier used to format the data to string.
        """
    def GetPrecision(self):
        """
        GetPrecision() -> int
        Returns the precision.
        """
    def GetWidth(self):
        """
        GetWidth() -> int
        Returns the width.
        """
    def SetFormat(self, format):
        """
        SetFormat(format)
        Set the format to use for display the number.
        """
    def SetParameters(self, params):
        """
        SetParameters(params)
        The parameters string format is "width[,precision[,format]]" where
        format should be chosen between f|e|g|E|G (f is used by default)
        """
    def SetPrecision(self, precision):
        """
        SetPrecision(precision)
        Sets the precision.
        """
    def SetWidth(self, width):
        """
        SetWidth(width)
        Sets the width.
        """
    # Property stubs: no getter/setter is wired in this file.
    Format = property(None, None)
    Precision = property(None, None)
    Width = property(None, None)
# end of class GridCellFloatRenderer
class GridCellNumberRenderer(GridCellStringRenderer):
    """
    GridCellNumberRenderer()
    This class may be used to format integer data in a cell.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridCellNumberRenderer()
        This class may be used to format integer data in a cell.
        """
# end of class GridCellNumberRenderer
class GridCellEditor(wx.ClientDataContainer, wx.RefCounter):
    """
    GridCellEditor()
    This class is responsible for providing and manipulating the in-place
    edit controls for the grid.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridCellEditor()
        This class is responsible for providing and manipulating the in-place
        edit controls for the grid.
        """
    def BeginEdit(self, row, col, grid):
        """
        BeginEdit(row, col, grid)
        Fetch the value from the table and prepare the edit control to begin
        editing.
        """
    def Clone(self):
        """
        Clone() -> GridCellEditor
        Create a new object which is the copy of this one.
        """
    def Create(self, parent, id, evtHandler):
        """
        Create(parent, id, evtHandler)
        Creates the actual edit control.
        """
    def Destroy(self):
        """
        Destroy()
        Final cleanup.
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
    def ApplyEdit(self, row, col, grid):
        """
        ApplyEdit(row, col, grid)
        Effectively save the changes in the grid.
        """
    def HandleReturn(self, event):
        """
        HandleReturn(event)
        Some types of controls on some platforms may need some help with the
        Return key.
        """
    def IsCreated(self):
        """
        IsCreated() -> bool
        Returns true if the edit control has been created.
        """
    def PaintBackground(self, dc, rectCell, attr):
        """
        PaintBackground(dc, rectCell, attr)
        Draws the part of the cell not occupied by the control: the base class
        version just fills it with background colour from the attribute.
        """
    def Reset(self):
        """
        Reset()
        Reset the value in the control back to its starting value.
        """
    def SetSize(self, rect):
        """
        SetSize(rect)
        Size and position the edit control.
        """
    def Show(self, show, attr=None):
        """
        Show(show, attr=None)
        Show or hide the edit control, use the specified attributes to set
        colours/fonts for it.
        """
    def StartingClick(self):
        """
        StartingClick()
        If the editor is enabled by clicking on the cell, this method will be
        called.
        """
    def StartingKey(self, event):
        """
        StartingKey(event)
        If the editor is enabled by pressing keys on the grid, this will be
        called to let the editor do something about that first key if desired.
        """
    def GetValue(self):
        """
        GetValue() -> String
        Returns the value currently in the editor control.
        """
    def GetControl(self):
        """
        GetControl() -> wx.Control
        Get the wxControl used by this editor.
        """
    def SetControl(self, control):
        """
        SetControl(control)
        Set the wxControl that will be used by this cell editor for editing
        the value.
        """
    # Property stubs: no getter/setter is wired in this file.
    Control = property(None, None)
    Value = property(None, None)
# end of class GridCellEditor
class GridCellTextEditor(GridCellEditor):
    """
    GridCellTextEditor(maxChars=0)
    Grid cell editor for string/text data.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, maxChars=0):
        """
        GridCellTextEditor(maxChars=0)
        Grid cell editor for string/text data.
        """
    def SetParameters(self, params):
        """
        SetParameters(params)
        The parameters string format is "n" where n is a number representing
        the maximum width.
        """
    def SetValidator(self, validator):
        """
        SetValidator(validator)
        Set validator to validate user input.
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
# end of class GridCellTextEditor
class GridCellAutoWrapStringEditor(GridCellTextEditor):
    """
    GridCellAutoWrapStringEditor()
    Grid cell editor for wrappable string/text data.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridCellAutoWrapStringEditor()
        Grid cell editor for wrappable string/text data.
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
# end of class GridCellAutoWrapStringEditor
class GridCellBoolEditor(GridCellEditor):
    """
    GridCellBoolEditor()
    Grid cell editor for boolean data.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridCellBoolEditor()
        Grid cell editor for boolean data.
        """
    @staticmethod
    def IsTrueValue(value):
        """
        IsTrueValue(value) -> bool
        Returns true if the given value is equal to the string representation
        of the truth value we currently use (see UseStringValues()).
        """
    @staticmethod
    def UseStringValues(valueTrue="1", valueFalse=wx.EmptyString):
        """
        UseStringValues(valueTrue="1", valueFalse=wx.EmptyString)
        This method allows you to customize the values returned by GetValue()
        for the cell using this editor.
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
# end of class GridCellBoolEditor
class GridCellChoiceEditor(GridCellEditor):
    """
    GridCellChoiceEditor(choices, allowOthers=False)
    Grid cell editor for string data providing the user a choice from a
    list of strings.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, choices, allowOthers=False):
        """
        GridCellChoiceEditor(choices, allowOthers=False)
        Grid cell editor for string data providing the user a choice from a
        list of strings.
        """
    def SetParameters(self, params):
        """
        SetParameters(params)
        Parameters string format is "item1[,item2[...,itemN]]".
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
# end of class GridCellChoiceEditor
class GridCellEnumEditor(GridCellChoiceEditor):
    """
    GridCellEnumEditor(choices=wx.EmptyString)
    Grid cell editor which displays an enum number as a textual equivalent
    (eg.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, choices=wx.EmptyString):
        """
        GridCellEnumEditor(choices=wx.EmptyString)
        Grid cell editor which displays an enum number as a textual equivalent
        (eg.
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
# end of class GridCellEnumEditor
class GridCellFloatEditor(GridCellTextEditor):
    """
    GridCellFloatEditor(width=-1, precision=-1, format=GRID_FLOAT_FORMAT_DEFAULT)
    The editor for floating point numbers data.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, width=-1, precision=-1, format=GRID_FLOAT_FORMAT_DEFAULT):
        """
        GridCellFloatEditor(width=-1, precision=-1, format=GRID_FLOAT_FORMAT_DEFAULT)
        The editor for floating point numbers data.
        """
    def SetParameters(self, params):
        """
        SetParameters(params)
        The parameters string format is "width[,precision[,format]]" where
        format should be chosen between f|e|g|E|G (f is used by default)
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
# end of class GridCellFloatEditor
class GridCellNumberEditor(GridCellTextEditor):
    """
    GridCellNumberEditor(min=-1, max=-1)
    Grid cell editor for numeric integer data.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, min=-1, max=-1):
        """
        GridCellNumberEditor(min=-1, max=-1)
        Grid cell editor for numeric integer data.
        """
    def SetParameters(self, params):
        """
        SetParameters(params)
        Parameters string format is "min,max".
        """
    def EndEdit(self, row, col, grid, oldval):
        """
        EndEdit(row, col, grid, oldval)
        End editing the cell.
        This function must check if the current value of the editing cell
        is valid and different from the original value in its string
        form. If not then simply return None. If it has changed then
        this method should save the new value so that ApplyEdit can
        apply it later and the string representation of the new value
        should be returned.
        Notice that this method should not modify the grid as the
        change could still be vetoed.
        """
# end of class GridCellNumberEditor
class GridCellAttr(wx.ClientDataContainer, wx.RefCounter):
    """
    GridCellAttr(attrDefault=None)
    GridCellAttr(colText, colBack, font, hAlign, vAlign)
    This class can be used to alter the cells' appearance in the grid by
    changing their attributes from the defaults.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    # AttrKind constants — all zero here; the real values come from the
    # extension module (placeholders in this stub).
    Any = 0
    Cell = 0
    Row = 0
    Col = 0
    Default = 0
    Merged = 0
    def __init__(self, *args, **kw):
        """
        GridCellAttr(attrDefault=None)
        GridCellAttr(colText, colBack, font, hAlign, vAlign)
        This class can be used to alter the cells' appearance in the grid by
        changing their attributes from the defaults.
        """
    def Clone(self):
        """
        Clone() -> GridCellAttr
        Creates a new copy of this object.
        """
    def DecRef(self):
        """
        DecRef()
        This class is reference counted: it is created with ref count of 1, so
        calling DecRef() once will delete it.
        """
    def GetAlignment(self):
        """
        GetAlignment() -> (hAlign, vAlign)
        Get the alignment to use for the cell with the given attribute.
        """
    def GetBackgroundColour(self):
        """
        GetBackgroundColour() -> wx.Colour
        Returns the background colour.
        """
    def GetEditor(self, grid, row, col):
        """
        GetEditor(grid, row, col) -> GridCellEditor
        Returns the cell editor.
        """
    def GetFont(self):
        """
        GetFont() -> wx.Font
        Returns the font.
        """
    def GetNonDefaultAlignment(self):
        """
        GetNonDefaultAlignment() -> (hAlign, vAlign)
        Get the alignment defined by this attribute.
        """
    def GetRenderer(self, grid, row, col):
        """
        GetRenderer(grid, row, col) -> GridCellRenderer
        Returns the cell renderer.
        """
    def GetTextColour(self):
        """
        GetTextColour() -> wx.Colour
        Returns the text colour.
        """
    def HasAlignment(self):
        """
        HasAlignment() -> bool
        Returns true if this attribute has a valid alignment set.
        """
    def HasBackgroundColour(self):
        """
        HasBackgroundColour() -> bool
        Returns true if this attribute has a valid background colour set.
        """
    def HasEditor(self):
        """
        HasEditor() -> bool
        Returns true if this attribute has a valid cell editor set.
        """
    def HasFont(self):
        """
        HasFont() -> bool
        Returns true if this attribute has a valid font set.
        """
    def HasRenderer(self):
        """
        HasRenderer() -> bool
        Returns true if this attribute has a valid cell renderer set.
        """
    def HasTextColour(self):
        """
        HasTextColour() -> bool
        Returns true if this attribute has a valid text colour set.
        """
    def IncRef(self):
        """
        IncRef()
        This class is reference counted: it is created with ref count of 1, so
        calling DecRef() once will delete it.
        """
    def IsReadOnly(self):
        """
        IsReadOnly() -> bool
        Returns true if this cell is set as read-only.
        """
    def SetAlignment(self, hAlign, vAlign):
        """
        SetAlignment(hAlign, vAlign)
        Sets the alignment.
        """
    def SetBackgroundColour(self, colBack):
        """
        SetBackgroundColour(colBack)
        Sets the background colour.
        """
    def SetDefAttr(self, defAttr):
        """
        SetDefAttr(defAttr)
        Sets the default attribute — presumably the fallback used for values
        this attribute does not define; confirm against the wxWidgets docs.
        """
    def SetEditor(self, editor):
        """
        SetEditor(editor)
        Sets the editor to be used with the cells with this attribute.
        """
    def SetFont(self, font):
        """
        SetFont(font)
        Sets the font.
        """
    def SetReadOnly(self, isReadOnly=True):
        """
        SetReadOnly(isReadOnly=True)
        Sets the cell as read-only.
        """
    def SetRenderer(self, renderer):
        """
        SetRenderer(renderer)
        Sets the renderer to be used for cells with this attribute.
        """
    def SetTextColour(self, colText):
        """
        SetTextColour(colText)
        Sets the text colour.
        """
    def MergeWith(self, mergefrom):
        """
        MergeWith(mergefrom)
        """
    def SetSize(self, num_rows, num_cols):
        """
        SetSize(num_rows, num_cols)
        """
    def SetOverflow(self, allow=True):
        """
        SetOverflow(allow=True)
        """
    def SetKind(self, kind):
        """
        SetKind(kind)
        """
    def HasReadWriteMode(self):
        """
        HasReadWriteMode() -> bool
        """
    def HasOverflowMode(self):
        """
        HasOverflowMode() -> bool
        """
    def HasSize(self):
        """
        HasSize() -> bool
        """
    def GetSize(self):
        """
        GetSize() -> (num_rows, num_cols)
        """
    def GetOverflow(self):
        """
        GetOverflow() -> bool
        """
    def GetKind(self):
        """
        GetKind() -> AttrKind
        """
    # Property stubs: no getter/setter is wired in this file.
    BackgroundColour = property(None, None)
    Font = property(None, None)
    Kind = property(None, None)
    Overflow = property(None, None)
    TextColour = property(None, None)
# end of class GridCellAttr
class GridCornerHeaderRenderer(object):
    """
    Base class for corner window renderer.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def DrawBorder(self, grid, dc, rect):
        """
        DrawBorder(grid, dc, rect)
        Called by the grid to draw the corner window border.
        """
# end of class GridCornerHeaderRenderer
class GridHeaderLabelsRenderer(GridCornerHeaderRenderer):
    """
    Common base class for row and column headers renderers.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def DrawLabel(self, grid, dc, value, rect, horizAlign, vertAlign, textOrientation):
        """
        DrawLabel(grid, dc, value, rect, horizAlign, vertAlign, textOrientation)
        Called by the grid to draw the specified label.
        """
# end of class GridHeaderLabelsRenderer
class GridRowHeaderRenderer(GridHeaderLabelsRenderer):
    """
    Base class for row headers renderer.
    """
    # Marker base class: adds no members beyond GridHeaderLabelsRenderer.
# end of class GridRowHeaderRenderer
class GridColumnHeaderRenderer(GridHeaderLabelsRenderer):
    """
    Base class for column headers renderer.
    """
    # Marker base class: adds no members beyond GridHeaderLabelsRenderer.
# end of class GridColumnHeaderRenderer
class GridRowHeaderRendererDefault(GridRowHeaderRenderer):
    """
    Default row header renderer.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def DrawBorder(self, grid, dc, rect):
        """
        DrawBorder(grid, dc, rect)
        Implement border drawing for the row labels.
        """
# end of class GridRowHeaderRendererDefault
class GridColumnHeaderRendererDefault(GridColumnHeaderRenderer):
    """
    Default column header renderer.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def DrawBorder(self, grid, dc, rect):
        """
        DrawBorder(grid, dc, rect)
        Implement border drawing for the column labels.
        """
# end of class GridColumnHeaderRendererDefault
class GridCornerHeaderRendererDefault(GridCornerHeaderRenderer):
    """
    Default corner window renderer.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def DrawBorder(self, grid, dc, rect):
        """
        DrawBorder(grid, dc, rect)
        Implement border drawing for the corner window.
        """
# end of class GridCornerHeaderRendererDefault
class GridCellAttrProvider(wx.ClientDataContainer):
    """
    GridCellAttrProvider()
    Class providing attributes to be used for the grid cells.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridCellAttrProvider()
        Class providing attributes to be used for the grid cells.
        """
    def SetAttr(self, attr, row, col):
        """
        SetAttr(attr, row, col)
        Set attribute for the specified cell.
        """
    def SetRowAttr(self, attr, row):
        """
        SetRowAttr(attr, row)
        Set attribute for the specified row.
        """
    def SetColAttr(self, attr, col):
        """
        SetColAttr(attr, col)
        Set attribute for the specified column.
        """
    def GetColumnHeaderRenderer(self, col):
        """
        GetColumnHeaderRenderer(col) -> GridColumnHeaderRenderer
        Return the renderer used for drawing column headers.
        """
    def GetRowHeaderRenderer(self, row):
        """
        GetRowHeaderRenderer(row) -> GridRowHeaderRenderer
        Return the renderer used for drawing row headers.
        """
    def GetCornerRenderer(self):
        """
        GetCornerRenderer() -> GridCornerHeaderRenderer
        Return the renderer used for drawing the corner window.
        """
    def GetAttr(self, row, col, kind):
        """
        GetAttr(row, col, kind) -> GridCellAttr
        Get the attribute to use for the specified cell.
        """
    # Property stub: no getter/setter is wired in this file.
    CornerRenderer = property(None, None)
# end of class GridCellAttrProvider
class GridTableBase(wx.Object):
    """
    GridTableBase()
    The almost abstract base class for grid tables.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self):
        """
        GridTableBase()
        The almost abstract base class for grid tables.
        """
    def IsEmptyCell(self, row, col):
        """
        IsEmptyCell(row, col) -> bool
        May be overridden to implement testing for empty cells.
        """
    def IsEmpty(self, coords):
        """
        IsEmpty(coords) -> bool
        Same as IsEmptyCell() but taking wxGridCellCoords.
        """
    def GetValue(self, row, col):
        """
        GetValue(row, col) -> PyObject
        Must be overridden to implement accessing the table values as text.
        """
    def SetValue(self, row, col, value):
        """
        SetValue(row, col, value)
        Must be overridden to implement setting the table values as text.
        """
    def GetTypeName(self, row, col):
        """
        GetTypeName(row, col) -> String
        Returns the type of the value in the given cell.
        """
    def CanGetValueAs(self, row, col, typeName):
        """
        CanGetValueAs(row, col, typeName) -> bool
        Returns true if the value of the given cell can be accessed as if it
        were of the specified type.
        """
    def CanSetValueAs(self, row, col, typeName):
        """
        CanSetValueAs(row, col, typeName) -> bool
        Returns true if the value of the given cell can be set as if it were
        of the specified type.
        """
    def GetValueAsLong(self, row, col):
        """
        GetValueAsLong(row, col) -> long
        Returns the value of the given cell as a long.
        """
    def GetValueAsDouble(self, row, col):
        """
        GetValueAsDouble(row, col) -> double
        Returns the value of the given cell as a double.
        """
    def GetValueAsBool(self, row, col):
        """
        GetValueAsBool(row, col) -> bool
        Returns the value of the given cell as a boolean.
        """
    def SetValueAsLong(self, row, col, value):
        """
        SetValueAsLong(row, col, value)
        Sets the value of the given cell as a long.
        """
    def SetValueAsDouble(self, row, col, value):
        """
        SetValueAsDouble(row, col, value)
        Sets the value of the given cell as a double.
        """
    def SetValueAsBool(self, row, col, value):
        """
        SetValueAsBool(row, col, value)
        Sets the value of the given cell as a boolean.
        """
    def Clear(self):
        """
        Clear()
        Clear the table contents.
        """
    def InsertRows(self, pos=0, numRows=1):
        """
        InsertRows(pos=0, numRows=1) -> bool
        Insert additional rows into the table.
        """
    def AppendRows(self, numRows=1):
        """
        AppendRows(numRows=1) -> bool
        Append additional rows at the end of the table.
        """
    def DeleteRows(self, pos=0, numRows=1):
        """
        DeleteRows(pos=0, numRows=1) -> bool
        Delete rows from the table.
        """
    def InsertCols(self, pos=0, numCols=1):
        """
        InsertCols(pos=0, numCols=1) -> bool
        Exactly the same as InsertRows() but for columns.
        """
    def AppendCols(self, numCols=1):
        """
        AppendCols(numCols=1) -> bool
        Exactly the same as AppendRows() but for columns.
        """
    def DeleteCols(self, pos=0, numCols=1):
        """
        DeleteCols(pos=0, numCols=1) -> bool
        Exactly the same as DeleteRows() but for columns.
        """
    def GetRowLabelValue(self, row):
        """
        GetRowLabelValue(row) -> String
        Return the label of the specified row.
        """
    def GetColLabelValue(self, col):
        """
        GetColLabelValue(col) -> String
        Return the label of the specified column.
        """
    def SetRowLabelValue(self, row, label):
        """
        SetRowLabelValue(row, label)
        Set the given label for the specified row.
        """
    def SetColLabelValue(self, col, label):
        """
        SetColLabelValue(col, label)
        Exactly the same as SetRowLabelValue() but for columns.
        """
    def SetAttrProvider(self, attrProvider):
        """
        SetAttrProvider(attrProvider)
        Associate this attributes provider with the table.
        """
    def GetAttrProvider(self):
        """
        GetAttrProvider() -> GridCellAttrProvider
        Returns the attribute provider currently being used.
        """
    def GetAttr(self, row, col, kind):
        """
        GetAttr(row, col, kind) -> GridCellAttr
        Return the attribute for the given cell.
        """
    def SetAttr(self, attr, row, col):
        """
        SetAttr(attr, row, col)
        Set attribute of the specified cell.
        """
    def SetRowAttr(self, attr, row):
        """
        SetRowAttr(attr, row)
        Set attribute of the specified row.
        """
    def SetColAttr(self, attr, col):
        """
        SetColAttr(attr, col)
        Set attribute of the specified column.
        """
    def GetNumberRows(self):
        """
        GetNumberRows() -> int
        Must be overridden to return the number of rows in the table.
        """
    def GetNumberCols(self):
        """
        GetNumberCols() -> int
        Must be overridden to return the number of columns in the table.
        """
    def GetRowsCount(self):
        """
        GetRowsCount() -> int
        Return the number of rows in the table.
        """
    def GetColsCount(self):
        """
        GetColsCount() -> int
        Return the number of columns in the table.
        """
    def SetView(self, grid):
        """
        SetView(grid)
        Called by the grid when the table is associated with it.
        """
    def GetView(self):
        """
        GetView() -> Grid
        Returns the last grid passed to SetView().
        """
    def CanHaveAttributes(self):
        """
        CanHaveAttributes() -> bool
        Returns true if this table supports attributes or false otherwise.
        """
    # NOTE(review): the following re-definitions shadow the documented
    # versions of the same methods earlier in this class (since later defs
    # win in Python); this looks like a stub-generator artifact — confirm
    # before removing.
    def GetValueAsLong(self, row, col):
        """
        """
    def GetValueAsDouble(self, row, col):
        """
        """
    def GetValueAsBool(self, row, col):
        """
        """
    def SetValueAsLong(self, row, col, value):
        """
        """
    def SetValueAsDouble(self, row, col, value):
        """
        """
    def SetValueAsBool(self, row, col, value):
        """
        """
    # Property stubs: no getter/setter is wired in this file.
    AttrProvider = property(None, None)
    ColsCount = property(None, None)
    NumberCols = property(None, None)
    NumberRows = property(None, None)
    RowsCount = property(None, None)
    View = property(None, None)
# end of class GridTableBase
class GridTableMessage(object):
    """
    GridTableMessage()
    GridTableMessage(table, id, comInt1=-1, comInt2=-1)
    A simple class used to pass messages from the table to the grid.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, *args, **kw):
        """
        GridTableMessage()
        GridTableMessage(table, id, comInt1=-1, comInt2=-1)
        A simple class used to pass messages from the table to the grid.
        """
    def SetTableObject(self, table):
        """
        SetTableObject(table)
        """
    def GetTableObject(self):
        """
        GetTableObject() -> GridTableBase
        """
    def SetId(self, id):
        """
        SetId(id)
        """
    def GetId(self):
        """
        GetId() -> int
        """
    def SetCommandInt(self, comInt1):
        """
        SetCommandInt(comInt1)
        """
    def GetCommandInt(self):
        """
        GetCommandInt() -> int
        """
    def SetCommandInt2(self, comInt2):
        """
        SetCommandInt2(comInt2)
        """
    def GetCommandInt2(self):
        """
        GetCommandInt2() -> int
        """
    # Property stubs: no getter/setter is wired in this file.
    CommandInt = property(None, None)
    CommandInt2 = property(None, None)
    Id = property(None, None)
    TableObject = property(None, None)
# end of class GridTableMessage
class GridStringTable(GridTableBase):
    """
    GridStringTable()
    GridStringTable(numRows, numCols)
    Simplest type of data table for a grid for small tables of strings
    that are stored in memory.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    def __init__(self, *args, **kw):
        """
        GridStringTable()
        GridStringTable(numRows, numCols)
        Simplest type of data table for a grid for small tables of strings
        that are stored in memory.
        """
    def GetNumberRows(self):
        """
        GetNumberRows() -> int
        Must be overridden to return the number of rows in the table.
        """
    def GetNumberCols(self):
        """
        GetNumberCols() -> int
        Must be overridden to return the number of columns in the table.
        """
    def GetValue(self, row, col):
        """
        GetValue(row, col) -> String
        Must be overridden to implement accessing the table values as text.
        """
    def SetValue(self, row, col, value):
        """
        SetValue(row, col, value)
        Must be overridden to implement setting the table values as text.
        """
    def Clear(self):
        """
        Clear()
        Clear the table contents.
        """
    def InsertRows(self, pos=0, numRows=1):
        """
        InsertRows(pos=0, numRows=1) -> bool
        Insert additional rows into the table.
        """
    def AppendRows(self, numRows=1):
        """
        AppendRows(numRows=1) -> bool
        Append additional rows at the end of the table.
        """
    def DeleteRows(self, pos=0, numRows=1):
        """
        DeleteRows(pos=0, numRows=1) -> bool
        Delete rows from the table.
        """
    def InsertCols(self, pos=0, numCols=1):
        """
        InsertCols(pos=0, numCols=1) -> bool
        Exactly the same as InsertRows() but for columns.
        """
    def AppendCols(self, numCols=1):
        """
        AppendCols(numCols=1) -> bool
        Exactly the same as AppendRows() but for columns.
        """
    def DeleteCols(self, pos=0, numCols=1):
        """
        DeleteCols(pos=0, numCols=1) -> bool
        Exactly the same as DeleteRows() but for columns.
        """
    def SetRowLabelValue(self, row, label):
        """
        SetRowLabelValue(row, label)
        Set the given label for the specified row.
        """
    def SetColLabelValue(self, col, label):
        """
        SetColLabelValue(col, label)
        Exactly the same as SetRowLabelValue() but for columns.
        """
    def GetRowLabelValue(self, row):
        """
        GetRowLabelValue(row) -> String
        Return the label of the specified row.
        """
    def GetColLabelValue(self, col):
        """
        GetColLabelValue(col) -> String
        Return the label of the specified column.
        """
    # Property stubs: no getter/setter is wired in this file.
    NumberCols = property(None, None)
    NumberRows = property(None, None)
# end of class GridStringTable
class GridSizesInfo(object):
    """
    GridSizesInfo()
    GridSizesInfo(defSize, allSizes)
    wxGridSizesInfo stores information about sizes of all wxGrid rows or
    columns.
    """
    # Stub definition: method bodies are intentionally empty (docstring-only).
    # Default size — presumably applied to rows/columns without an individual
    # entry; confirm against the wxWidgets wxGridSizesInfo documentation.
    m_sizeDefault = property(None, None)
    def GetSize(self, pos):
        """
        GetSize(pos) -> int
        Get the element size.
        """
# end of class GridSizesInfo
class Grid(wx.ScrolledWindow):
"""
Grid()
Grid(parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.WANTS_CHARS, name=GridNameStr)
wxGrid and its related classes are used for displaying and editing
tabular data.
"""
SelectCells = 0
SelectRows = 0
SelectColumns = 0
SelectRowsOrColumns = 0
CellSpan_Inside = 0
CellSpan_None = 0
CellSpan_Main = 0
Tab_Stop = 0
Tab_Wrap = 0
Tab_Leave = 0
def __init__(self, *args, **kw):
"""
Grid()
Grid(parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.WANTS_CHARS, name=GridNameStr)
wxGrid and its related classes are used for displaying and editing
tabular data.
"""
def Create(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.WANTS_CHARS, name=GridNameStr):
"""
Create(parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.WANTS_CHARS, name=GridNameStr) -> bool
Creates the grid window for an object initialized using the default
constructor.
"""
def CreateGrid(self, numRows, numCols, selmode=GridSelectCells):
"""
CreateGrid(numRows, numCols, selmode=GridSelectCells) -> bool
Creates a grid with the specified initial number of rows and columns.
"""
def _SetTable(self, table, takeOwnership=False, selmode=GridSelectCells):
"""
_SetTable(table, takeOwnership=False, selmode=GridSelectCells) -> bool
Passes a pointer to a custom grid table to be used by the grid.
"""
def ProcessTableMessage(self, msg):
"""
ProcessTableMessage(msg) -> bool
Receive and handle a message from the table.
"""
def EnableGridLines(self, enable=True):
"""
EnableGridLines(enable=True)
Turns the drawing of grid lines on or off.
"""
def GetColGridLinePen(self, col):
"""
GetColGridLinePen(col) -> wx.Pen
Returns the pen used for vertical grid lines.
"""
def GetDefaultGridLinePen(self):
"""
GetDefaultGridLinePen() -> wx.Pen
Returns the pen used for grid lines.
"""
def GetGridLineColour(self):
"""
GetGridLineColour() -> wx.Colour
Returns the colour used for grid lines.
"""
def GetRowGridLinePen(self, row):
"""
GetRowGridLinePen(row) -> wx.Pen
Returns the pen used for horizontal grid lines.
"""
def GridLinesEnabled(self):
"""
GridLinesEnabled() -> bool
Returns true if drawing of grid lines is turned on, false otherwise.
"""
def SetGridLineColour(self, colour):
"""
SetGridLineColour(colour)
Sets the colour used to draw grid lines.
"""
def GetColLabelAlignment(self):
"""
GetColLabelAlignment() -> (horiz, vert)
Sets the arguments to the current column label alignment values.
"""
def GetColLabelTextOrientation(self):
"""
GetColLabelTextOrientation() -> int
Returns the orientation of the column labels (either wxHORIZONTAL or
wxVERTICAL).
"""
def GetColLabelValue(self, col):
"""
GetColLabelValue(col) -> String
Returns the specified column label.
"""
def GetLabelBackgroundColour(self):
"""
GetLabelBackgroundColour() -> wx.Colour
Returns the colour used for the background of row and column labels.
"""
def GetLabelFont(self):
"""
GetLabelFont() -> wx.Font
Returns the font used for row and column labels.
"""
def GetLabelTextColour(self):
"""
GetLabelTextColour() -> wx.Colour
Returns the colour used for row and column label text.
"""
def GetRowLabelAlignment(self):
"""
GetRowLabelAlignment() -> (horiz, vert)
Returns the alignment used for row labels.
"""
def GetRowLabelValue(self, row):
"""
GetRowLabelValue(row) -> String
Returns the specified row label.
"""
def HideColLabels(self):
"""
HideColLabels()
Hides the column labels by calling SetColLabelSize() with a size of 0.
"""
def HideRowLabels(self):
"""
HideRowLabels()
Hides the row labels by calling SetRowLabelSize() with a size of 0.
"""
def SetColLabelAlignment(self, horiz, vert):
"""
SetColLabelAlignment(horiz, vert)
Sets the horizontal and vertical alignment of column label text.
"""
def SetColLabelTextOrientation(self, textOrientation):
"""
SetColLabelTextOrientation(textOrientation)
Sets the orientation of the column labels (either wxHORIZONTAL or
wxVERTICAL).
"""
def SetColLabelValue(self, col, value):
"""
SetColLabelValue(col, value)
Set the value for the given column label.
"""
def SetLabelBackgroundColour(self, colour):
"""
SetLabelBackgroundColour(colour)
Sets the background colour for row and column labels.
"""
def SetLabelFont(self, font):
"""
SetLabelFont(font)
Sets the font for row and column labels.
"""
def SetLabelTextColour(self, colour):
"""
SetLabelTextColour(colour)
Sets the colour for row and column label text.
"""
def SetRowLabelAlignment(self, horiz, vert):
"""
SetRowLabelAlignment(horiz, vert)
Sets the horizontal and vertical alignment of row label text.
"""
def SetRowLabelValue(self, row, value):
"""
SetRowLabelValue(row, value)
Sets the value for the given row label.
"""
def SetUseNativeColLabels(self, native=True):
"""
SetUseNativeColLabels(native=True)
Call this in order to make the column labels use a native look by
using wxRendererNative::DrawHeaderButton() internally.
"""
def UseNativeColHeader(self, native=True):
"""
UseNativeColHeader(native=True)
Enable the use of native header window for column labels.
"""
def GetCellAlignment(self, row, col):
"""
GetCellAlignment(row, col) -> (horiz, vert)
Sets the arguments to the horizontal and vertical text alignment
values for the grid cell at the specified location.
"""
def GetCellBackgroundColour(self, row, col):
"""
GetCellBackgroundColour(row, col) -> wx.Colour
Returns the background colour of the cell at the specified location.
"""
def GetCellFont(self, row, col):
"""
GetCellFont(row, col) -> wx.Font
Returns the font for text in the grid cell at the specified location.
"""
def GetCellTextColour(self, row, col):
"""
GetCellTextColour(row, col) -> wx.Colour
Returns the text colour for the grid cell at the specified location.
"""
def GetDefaultCellAlignment(self):
"""
GetDefaultCellAlignment() -> (horiz, vert)
Returns the default cell alignment.
"""
def GetDefaultCellBackgroundColour(self):
"""
GetDefaultCellBackgroundColour() -> wx.Colour
Returns the current default background colour for grid cells.
"""
def GetDefaultCellFont(self):
"""
GetDefaultCellFont() -> wx.Font
Returns the current default font for grid cell text.
"""
def GetDefaultCellTextColour(self):
"""
GetDefaultCellTextColour() -> wx.Colour
Returns the current default colour for grid cell text.
"""
def SetCellAlignment(self, *args, **kw):
"""
SetCellAlignment(row, col, horiz, vert)
SetCellAlignment(align, row, col)
Sets the horizontal and vertical alignment for grid cell text at the
specified location.
"""
def SetCellBackgroundColour(self, row, col, colour):
"""
SetCellBackgroundColour(row, col, colour)
Set the background colour for the given cell or all cells by default.
"""
def SetCellFont(self, row, col, font):
"""
SetCellFont(row, col, font)
Sets the font for text in the grid cell at the specified location.
"""
def SetCellTextColour(self, *args, **kw):
"""
SetCellTextColour(row, col, colour)
SetCellTextColour(val, row, col)
SetCellTextColour(colour)
Sets the text colour for the given cell.
"""
def SetDefaultCellAlignment(self, horiz, vert):
"""
SetDefaultCellAlignment(horiz, vert)
Sets the default horizontal and vertical alignment for grid cell text.
"""
def SetDefaultCellBackgroundColour(self, colour):
"""
SetDefaultCellBackgroundColour(colour)
Sets the default background colour for grid cells.
"""
def SetDefaultCellFont(self, font):
"""
SetDefaultCellFont(font)
Sets the default font to be used for grid cell text.
"""
def SetDefaultCellTextColour(self, colour):
"""
SetDefaultCellTextColour(colour)
Sets the current default colour for grid cell text.
"""
def CanEnableCellControl(self):
"""
CanEnableCellControl() -> bool
Returns true if the in-place edit control for the current grid cell
can be used and false otherwise.
"""
def DisableCellEditControl(self):
"""
DisableCellEditControl()
Disables in-place editing of grid cells.
"""
def EnableCellEditControl(self, enable=True):
"""
EnableCellEditControl(enable=True)
Enables or disables in-place editing of grid cell data.
"""
def EnableEditing(self, edit):
"""
EnableEditing(edit)
Makes the grid globally editable or read-only.
"""
def GetCellEditor(self, row, col):
"""
GetCellEditor(row, col) -> GridCellEditor
Returns a pointer to the editor for the cell at the specified
location.
"""
def GetCellRenderer(self, row, col):
"""
GetCellRenderer(row, col) -> GridCellRenderer
Returns a pointer to the renderer for the grid cell at the specified
location.
"""
def GetCellValue(self, *args, **kw):
"""
GetCellValue(row, col) -> String
GetCellValue(coords) -> String
Returns the string contained in the cell at the specified location.
"""
def GetDefaultEditor(self):
"""
GetDefaultEditor() -> GridCellEditor
Returns a pointer to the current default grid cell editor.
"""
def GetDefaultEditorForCell(self, *args, **kw):
"""
GetDefaultEditorForCell(row, col) -> GridCellEditor
GetDefaultEditorForCell(c) -> GridCellEditor
Returns the default editor for the specified cell.
"""
def GetDefaultEditorForType(self, typeName):
"""
GetDefaultEditorForType(typeName) -> GridCellEditor
Returns the default editor for the cells containing values of the
given type.
"""
def GetDefaultRenderer(self):
"""
GetDefaultRenderer() -> GridCellRenderer
Returns a pointer to the current default grid cell renderer.
"""
def GetDefaultRendererForCell(self, row, col):
"""
GetDefaultRendererForCell(row, col) -> GridCellRenderer
Returns the default renderer for the given cell.
"""
def GetDefaultRendererForType(self, typeName):
"""
GetDefaultRendererForType(typeName) -> GridCellRenderer
Returns the default renderer for the cell containing values of the
given type.
"""
def HideCellEditControl(self):
"""
HideCellEditControl()
Hides the in-place cell edit control.
"""
def IsCellEditControlEnabled(self):
"""
IsCellEditControlEnabled() -> bool
Returns true if the in-place edit control is currently enabled.
"""
def IsCellEditControlShown(self):
"""
IsCellEditControlShown() -> bool
Returns true if the in-place edit control is currently shown.
"""
def IsCurrentCellReadOnly(self):
"""
IsCurrentCellReadOnly() -> bool
Returns true if the current cell is read-only.
"""
def IsEditable(self):
"""
IsEditable() -> bool
Returns false if the whole grid has been set as read-only or true
otherwise.
"""
def IsReadOnly(self, row, col):
"""
IsReadOnly(row, col) -> bool
Returns true if the cell at the specified location can't be edited.
"""
def RegisterDataType(self, typeName, renderer, editor):
"""
RegisterDataType(typeName, renderer, editor)
Register a new data type.
"""
def SaveEditControlValue(self):
"""
SaveEditControlValue()
Sets the value of the current grid cell to the current in-place edit
control value.
"""
def SetCellEditor(self, row, col, editor):
"""
SetCellEditor(row, col, editor)
Sets the editor for the grid cell at the specified location.
"""
def SetCellRenderer(self, row, col, renderer):
"""
SetCellRenderer(row, col, renderer)
Sets the renderer for the grid cell at the specified location.
"""
def SetCellValue(self, *args, **kw):
"""
SetCellValue(row, col, s)
SetCellValue(coords, s)
Sets the string value for the cell at the specified location.
"""
def SetColFormatBool(self, col):
"""
SetColFormatBool(col)
Sets the specified column to display boolean values.
"""
def SetColFormatCustom(self, col, typeName):
"""
SetColFormatCustom(col, typeName)
Sets the specified column to display data in a custom format.
"""
def SetColFormatFloat(self, col, width=-1, precision=-1):
"""
SetColFormatFloat(col, width=-1, precision=-1)
Sets the specified column to display floating point values with the
given width and precision.
"""
def SetColFormatNumber(self, col):
"""
SetColFormatNumber(col)
Sets the specified column to display integer values.
"""
def SetDefaultEditor(self, editor):
"""
SetDefaultEditor(editor)
Sets the default editor for grid cells.
"""
def SetDefaultRenderer(self, renderer):
"""
SetDefaultRenderer(renderer)
Sets the default renderer for grid cells.
"""
def SetReadOnly(self, row, col, isReadOnly=True):
"""
SetReadOnly(row, col, isReadOnly=True)
Makes the cell at the specified location read-only or editable.
"""
def ShowCellEditControl(self):
"""
ShowCellEditControl()
Displays the in-place cell edit control for the current cell.
"""
def AutoSize(self):
"""
AutoSize()
Automatically sets the height and width of all rows and columns to fit
their contents.
"""
def AutoSizeColLabelSize(self, col):
"""
AutoSizeColLabelSize(col)
Automatically adjusts width of the column to fit its label.
"""
def AutoSizeColumn(self, col, setAsMin=True):
"""
AutoSizeColumn(col, setAsMin=True)
Automatically sizes the column to fit its contents.
"""
def AutoSizeColumns(self, setAsMin=True):
"""
AutoSizeColumns(setAsMin=True)
Automatically sizes all columns to fit their contents.
"""
def AutoSizeRow(self, row, setAsMin=True):
"""
AutoSizeRow(row, setAsMin=True)
Automatically sizes the row to fit its contents.
"""
def AutoSizeRowLabelSize(self, col):
"""
AutoSizeRowLabelSize(col)
Automatically adjusts height of the row to fit its label.
"""
def AutoSizeRows(self, setAsMin=True):
"""
AutoSizeRows(setAsMin=True)
Automatically sizes all rows to fit their contents.
"""
def GetCellOverflow(self, row, col):
"""
GetCellOverflow(row, col) -> bool
Returns true if the cell value can overflow.
"""
def GetColLabelSize(self):
"""
GetColLabelSize() -> int
Returns the current height of the column labels.
"""
def GetColMinimalAcceptableWidth(self):
"""
GetColMinimalAcceptableWidth() -> int
Returns the minimal width to which a column may be resized.
"""
def GetColSize(self, col):
"""
GetColSize(col) -> int
Returns the width of the specified column.
"""
def IsColShown(self, col):
"""
IsColShown(col) -> bool
Returns true if the specified column is not currently hidden.
"""
def GetDefaultCellOverflow(self):
"""
GetDefaultCellOverflow() -> bool
Returns true if the cells can overflow by default.
"""
def GetDefaultColLabelSize(self):
"""
GetDefaultColLabelSize() -> int
Returns the default height for column labels.
"""
def GetDefaultColSize(self):
"""
GetDefaultColSize() -> int
Returns the current default width for grid columns.
"""
def GetDefaultRowLabelSize(self):
"""
GetDefaultRowLabelSize() -> int
Returns the default width for the row labels.
"""
def GetDefaultRowSize(self):
"""
GetDefaultRowSize() -> int
Returns the current default height for grid rows.
"""
def GetRowMinimalAcceptableHeight(self):
"""
GetRowMinimalAcceptableHeight() -> int
Returns the minimal size to which rows can be resized.
"""
def GetRowLabelSize(self):
"""
GetRowLabelSize() -> int
Returns the current width of the row labels.
"""
def GetRowSize(self, row):
"""
GetRowSize(row) -> int
Returns the height of the specified row.
"""
def IsRowShown(self, row):
"""
IsRowShown(row) -> bool
Returns true if the specified row is not currently hidden.
"""
def SetCellOverflow(self, row, col, allow):
"""
SetCellOverflow(row, col, allow)
Sets the overflow permission of the cell.
"""
def SetColLabelSize(self, height):
"""
SetColLabelSize(height)
Sets the height of the column labels.
"""
def SetColMinimalAcceptableWidth(self, width):
"""
SetColMinimalAcceptableWidth(width)
Sets the minimal width to which the user can resize columns.
"""
def SetColMinimalWidth(self, col, width):
"""
SetColMinimalWidth(col, width)
Sets the minimal width for the specified column col.
"""
def SetColSize(self, col, width):
"""
SetColSize(col, width)
Sets the width of the specified column.
"""
def HideCol(self, col):
"""
HideCol(col)
Hides the specified column.
"""
def ShowCol(self, col):
"""
ShowCol(col)
Shows the previously hidden column by resizing it to non-0 size.
"""
def SetDefaultCellOverflow(self, allow):
"""
SetDefaultCellOverflow(allow)
Sets the default overflow permission of the cells.
"""
def SetDefaultColSize(self, width, resizeExistingCols=False):
"""
SetDefaultColSize(width, resizeExistingCols=False)
Sets the default width for columns in the grid.
"""
def SetDefaultRowSize(self, height, resizeExistingRows=False):
"""
SetDefaultRowSize(height, resizeExistingRows=False)
Sets the default height for rows in the grid.
"""
def SetRowLabelSize(self, width):
"""
SetRowLabelSize(width)
Sets the width of the row labels.
"""
def SetRowMinimalAcceptableHeight(self, height):
"""
SetRowMinimalAcceptableHeight(height)
Sets the minimal row height used by default.
"""
def SetRowMinimalHeight(self, row, height):
"""
SetRowMinimalHeight(row, height)
Sets the minimal height for the specified row.
"""
def SetRowSize(self, row, height):
"""
SetRowSize(row, height)
Sets the height of the specified row.
"""
def HideRow(self, col):
"""
HideRow(col)
Hides the specified row.
"""
def ShowRow(self, col):
"""
ShowRow(col)
Shows the previously hidden row.
"""
def GetColSizes(self):
"""
GetColSizes() -> GridSizesInfo
Get size information for all columns at once.
"""
def GetRowSizes(self):
"""
GetRowSizes() -> GridSizesInfo
Get size information for all row at once.
"""
def SetColSizes(self, sizeInfo):
"""
SetColSizes(sizeInfo)
Restore all columns sizes.
"""
def SetRowSizes(self, sizeInfo):
"""
SetRowSizes(sizeInfo)
Restore all rows sizes.
"""
def SetCellSize(self, row, col, num_rows, num_cols):
"""
SetCellSize(row, col, num_rows, num_cols)
Set the size of the cell.
"""
def GetCellSize(self, *args, **kw):
"""
GetCellSize(row, col, num_rows, num_cols) -> CellSpan
GetCellSize(coords) -> wx.Size
Get the size of the cell in number of cells covered by it.
"""
def CanDragCell(self):
"""
CanDragCell() -> bool
Return true if the dragging of cells is enabled or false otherwise.
"""
def CanDragColMove(self):
"""
CanDragColMove() -> bool
Returns true if columns can be moved by dragging with the mouse.
"""
def CanDragColSize(self, col):
"""
CanDragColSize(col) -> bool
Returns true if the given column can be resized by dragging with the
mouse.
"""
def CanDragGridSize(self):
"""
CanDragGridSize() -> bool
Return true if the dragging of grid lines to resize rows and columns
is enabled or false otherwise.
"""
def CanDragRowSize(self, row):
"""
CanDragRowSize(row) -> bool
Returns true if the given row can be resized by dragging with the
mouse.
"""
def DisableColResize(self, col):
"""
DisableColResize(col)
Disable interactive resizing of the specified column.
"""
def DisableRowResize(self, row):
"""
DisableRowResize(row)
Disable interactive resizing of the specified row.
"""
def DisableDragColMove(self):
"""
DisableDragColMove()
Disables column moving by dragging with the mouse.
"""
def DisableDragColSize(self):
"""
DisableDragColSize()
Disables column sizing by dragging with the mouse.
"""
def DisableDragGridSize(self):
"""
DisableDragGridSize()
Disable mouse dragging of grid lines to resize rows and columns.
"""
def DisableDragRowSize(self):
"""
DisableDragRowSize()
Disables row sizing by dragging with the mouse.
"""
def EnableDragCell(self, enable=True):
"""
EnableDragCell(enable=True)
Enables or disables cell dragging with the mouse.
"""
def EnableDragColMove(self, enable=True):
"""
EnableDragColMove(enable=True)
Enables or disables column moving by dragging with the mouse.
"""
def EnableDragColSize(self, enable=True):
"""
EnableDragColSize(enable=True)
Enables or disables column sizing by dragging with the mouse.
"""
def EnableDragGridSize(self, enable=True):
"""
EnableDragGridSize(enable=True)
Enables or disables row and column resizing by dragging gridlines with
the mouse.
"""
def EnableDragRowSize(self, enable=True):
"""
EnableDragRowSize(enable=True)
Enables or disables row sizing by dragging with the mouse.
"""
def GetColAt(self, colPos):
"""
GetColAt(colPos) -> int
Returns the column ID of the specified column position.
"""
def GetColPos(self, colID):
"""
GetColPos(colID) -> int
Returns the position of the specified column.
"""
def SetColPos(self, colID, newPos):
"""
SetColPos(colID, newPos)
Sets the position of the specified column.
"""
def SetColumnsOrder(self, order):
"""
SetColumnsOrder(order)
Sets the positions of all columns at once.
"""
def ResetColPos(self):
"""
ResetColPos()
Resets the position of the columns to the default.
"""
def GetGridCursorCol(self):
"""
GetGridCursorCol() -> int
Returns the current grid cell column position.
"""
def GetGridCursorRow(self):
"""
GetGridCursorRow() -> int
Returns the current grid cell row position.
"""
def GoToCell(self, *args, **kw):
"""
GoToCell(row, col)
GoToCell(coords)
Make the given cell current and ensure it is visible.
"""
def MoveCursorDown(self, expandSelection):
"""
MoveCursorDown(expandSelection) -> bool
Moves the grid cursor down by one row.
"""
def MoveCursorDownBlock(self, expandSelection):
"""
MoveCursorDownBlock(expandSelection) -> bool
Moves the grid cursor down in the current column such that it skips to
the beginning or end of a block of non-empty cells.
"""
def MoveCursorLeft(self, expandSelection):
"""
MoveCursorLeft(expandSelection) -> bool
Moves the grid cursor left by one column.
"""
def MoveCursorLeftBlock(self, expandSelection):
"""
MoveCursorLeftBlock(expandSelection) -> bool
Moves the grid cursor left in the current row such that it skips to
the beginning or end of a block of non-empty cells.
"""
def MoveCursorRight(self, expandSelection):
"""
MoveCursorRight(expandSelection) -> bool
Moves the grid cursor right by one column.
"""
def MoveCursorRightBlock(self, expandSelection):
"""
MoveCursorRightBlock(expandSelection) -> bool
Moves the grid cursor right in the current row such that it skips to
the beginning or end of a block of non-empty cells.
"""
def MoveCursorUp(self, expandSelection):
"""
MoveCursorUp(expandSelection) -> bool
Moves the grid cursor up by one row.
"""
def MoveCursorUpBlock(self, expandSelection):
"""
MoveCursorUpBlock(expandSelection) -> bool
Moves the grid cursor up in the current column such that it skips to
the beginning or end of a block of non-empty cells.
"""
def MovePageDown(self):
"""
MovePageDown() -> bool
Moves the grid cursor down by some number of rows so that the previous
bottom visible row becomes the top visible row.
"""
def MovePageUp(self):
"""
MovePageUp() -> bool
Moves the grid cursor up by some number of rows so that the previous
top visible row becomes the bottom visible row.
"""
def SetGridCursor(self, *args, **kw):
"""
SetGridCursor(row, col)
SetGridCursor(coords)
Set the grid cursor to the specified cell.
"""
def SetTabBehaviour(self, behaviour):
"""
SetTabBehaviour(behaviour)
Set the grid's behaviour when the user presses the TAB key.
"""
def ClearSelection(self):
"""
ClearSelection()
Deselects all cells that are currently selected.
"""
def DeselectRow(self, row):
"""
DeselectRow(row)
Deselects a row of cells.
"""
def DeselectCol(self, col):
"""
DeselectCol(col)
Deselects a column of cells.
"""
def DeselectCell(self, row, col):
"""
DeselectCell(row, col)
Deselects a cell.
"""
def GetSelectedCells(self):
"""
GetSelectedCells() -> GridCellCoordsArray
Returns an array of individually selected cells.
"""
def GetSelectedCols(self):
"""
GetSelectedCols() -> ArrayInt
Returns an array of selected columns.
"""
def GetSelectedRows(self):
"""
GetSelectedRows() -> ArrayInt
Returns an array of selected rows.
"""
def GetSelectionBackground(self):
"""
GetSelectionBackground() -> wx.Colour
Returns the colour used for drawing the selection background.
"""
def GetSelectionBlockBottomRight(self):
"""
GetSelectionBlockBottomRight() -> GridCellCoordsArray
Returns an array of the bottom right corners of blocks of selected
cells.
"""
def GetSelectionBlockTopLeft(self):
"""
GetSelectionBlockTopLeft() -> GridCellCoordsArray
Returns an array of the top left corners of blocks of selected cells.
"""
def GetSelectionForeground(self):
"""
GetSelectionForeground() -> wx.Colour
Returns the colour used for drawing the selection foreground.
"""
def GetSelectionMode(self):
"""
GetSelectionMode() -> GridSelectionModes
Returns the current selection mode.
"""
def IsInSelection(self, *args, **kw):
"""
IsInSelection(row, col) -> bool
IsInSelection(coords) -> bool
Returns true if the given cell is selected.
"""
def IsSelection(self):
"""
IsSelection() -> bool
Returns true if there are currently any selected cells, rows, columns
or blocks.
"""
def SelectAll(self):
"""
SelectAll()
Selects all cells in the grid.
"""
def SelectBlock(self, *args, **kw):
"""
SelectBlock(topRow, leftCol, bottomRow, rightCol, addToSelected=False)
SelectBlock(topLeft, bottomRight, addToSelected=False)
Selects a rectangular block of cells.
"""
def SelectCol(self, col, addToSelected=False):
"""
SelectCol(col, addToSelected=False)
Selects the specified column.
"""
def SelectRow(self, row, addToSelected=False):
"""
SelectRow(row, addToSelected=False)
Selects the specified row.
"""
def SetSelectionBackground(self, c):
"""
SetSelectionBackground(c)
Set the colour to be used for drawing the selection background.
"""
def SetSelectionForeground(self, c):
"""
SetSelectionForeground(c)
Set the colour to be used for drawing the selection foreground.
"""
def SetSelectionMode(self, selmode):
"""
SetSelectionMode(selmode)
Set the selection behaviour of the grid.
"""
def GetScrollLineX(self):
"""
GetScrollLineX() -> int
Returns the number of pixels per horizontal scroll increment.
"""
def GetScrollLineY(self):
"""
GetScrollLineY() -> int
Returns the number of pixels per vertical scroll increment.
"""
def IsVisible(self, *args, **kw):
"""
IsVisible(row, col, wholeCellVisible=True) -> bool
IsVisible(coords, wholeCellVisible=True) -> bool
Returns true if a cell is either entirely or at least partially
visible in the grid window.
"""
def MakeCellVisible(self, *args, **kw):
"""
MakeCellVisible(row, col)
MakeCellVisible(coords)
Brings the specified cell into the visible grid cell area with minimal
scrolling.
"""
def SetScrollLineX(self, x):
"""
SetScrollLineX(x)
Sets the number of pixels per horizontal scroll increment.
"""
def SetScrollLineY(self, y):
"""
SetScrollLineY(y)
Sets the number of pixels per vertical scroll increment.
"""
def BlockToDeviceRect(self, topLeft, bottomRight):
"""
BlockToDeviceRect(topLeft, bottomRight) -> wx.Rect
Convert grid cell coordinates to grid window pixel coordinates.
"""
def CellToRect(self, *args, **kw):
"""
CellToRect(row, col) -> wx.Rect
CellToRect(coords) -> wx.Rect
Return the rectangle corresponding to the grid cell's size and
position in logical coordinates.
"""
def XToCol(self, x, clipToMinMax=False):
"""
XToCol(x, clipToMinMax=False) -> int
Returns the column at the given pixel position.
"""
def XToEdgeOfCol(self, x):
"""
XToEdgeOfCol(x) -> int
Returns the column whose right hand edge is close to the given logical
x position.
"""
def XYToCell(self, *args, **kw):
"""
XYToCell(x, y) -> GridCellCoords
XYToCell(pos) -> GridCellCoords
Translates logical pixel coordinates to the grid cell coordinates.
"""
def YToEdgeOfRow(self, y):
"""
YToEdgeOfRow(y) -> int
Returns the row whose bottom edge is close to the given logical y
position.
"""
def YToRow(self, y, clipToMinMax=False):
"""
YToRow(y, clipToMinMax=False) -> int
Returns the grid row that corresponds to the logical y coordinate.
"""
def AppendCols(self, numCols=1, updateLabels=True):
"""
AppendCols(numCols=1, updateLabels=True) -> bool
Appends one or more new columns to the right of the grid.
"""
def AppendRows(self, numRows=1, updateLabels=True):
"""
AppendRows(numRows=1, updateLabels=True) -> bool
Appends one or more new rows to the bottom of the grid.
"""
def AreHorzGridLinesClipped(self):
"""
AreHorzGridLinesClipped() -> bool
Return true if the horizontal grid lines stop at the last column
boundary or false if they continue to the end of the window.
"""
def AreVertGridLinesClipped(self):
"""
AreVertGridLinesClipped() -> bool
Return true if the vertical grid lines stop at the last row boundary
or false if they continue to the end of the window.
"""
def BeginBatch(self):
"""
BeginBatch()
Increments the grid's batch count.
"""
def ClearGrid(self):
"""
ClearGrid()
Clears all data in the underlying grid table and repaints the grid.
"""
def ClipHorzGridLines(self, clip):
"""
ClipHorzGridLines(clip)
Change whether the horizontal grid lines are clipped by the end of the
last column.
"""
def ClipVertGridLines(self, clip):
"""
ClipVertGridLines(clip)
Change whether the vertical grid lines are clipped by the end of the
last row.
"""
def DeleteCols(self, pos=0, numCols=1, updateLabels=True):
"""
DeleteCols(pos=0, numCols=1, updateLabels=True) -> bool
Deletes one or more columns from a grid starting at the specified
position.
"""
def DeleteRows(self, pos=0, numRows=1, updateLabels=True):
"""
DeleteRows(pos=0, numRows=1, updateLabels=True) -> bool
Deletes one or more rows from a grid starting at the specified
position.
"""
def EndBatch(self):
"""
EndBatch()
Decrements the grid's batch count.
"""
def Fit(self):
"""
Fit()
Overridden wxWindow method.
"""
def ForceRefresh(self):
"""
ForceRefresh()
Causes immediate repainting of the grid.
"""
def GetBatchCount(self):
"""
GetBatchCount() -> int
Returns the number of times that BeginBatch() has been called without
(yet) matching calls to EndBatch().
"""
def GetNumberCols(self):
"""
GetNumberCols() -> int
Returns the total number of grid columns.
"""
def GetNumberRows(self):
"""
GetNumberRows() -> int
Returns the total number of grid rows.
"""
def GetOrCreateCellAttr(self, row, col):
"""
GetOrCreateCellAttr(row, col) -> GridCellAttr
Returns the attribute for the given cell creating one if necessary.
"""
def GetTable(self):
"""
GetTable() -> GridTableBase
Returns a base pointer to the current table object.
"""
def InsertCols(self, pos=0, numCols=1, updateLabels=True):
"""
InsertCols(pos=0, numCols=1, updateLabels=True) -> bool
Inserts one or more new columns into a grid with the first new column
at the specified position.
"""
def InsertRows(self, pos=0, numRows=1, updateLabels=True):
"""
InsertRows(pos=0, numRows=1, updateLabels=True) -> bool
Inserts one or more new rows into a grid with the first new row at the
specified position.
"""
def RefreshAttr(self, row, col):
"""
RefreshAttr(row, col)
Invalidates the cached attribute for the given cell.
"""
def Render(self, dc, pos=wx.DefaultPosition, size=wx.DefaultSize, topLeft=GridCellCoords(-1,-1), bottomRight=GridCellCoords(-1,-1), style=GRID_DRAW_DEFAULT):
"""
Render(dc, pos=wx.DefaultPosition, size=wx.DefaultSize, topLeft=GridCellCoords(-1,-1), bottomRight=GridCellCoords(-1,-1), style=GRID_DRAW_DEFAULT)
Draws part or all of a wxGrid on a wxDC for printing or display.
"""
def SetAttr(self, row, col, attr):
"""
SetAttr(row, col, attr)
Sets the cell attributes for the specified cell.
"""
def SetColAttr(self, col, attr):
"""
SetColAttr(col, attr)
Sets the cell attributes for all cells in the specified column.
"""
def SetMargins(self, extraWidth, extraHeight):
"""
SetMargins(extraWidth, extraHeight)
Sets the extra margins used around the grid area.
"""
def SetRowAttr(self, row, attr):
"""
SetRowAttr(row, attr)
Sets the cell attributes for all cells in the specified row.
"""
def CalcRowLabelsExposed(self, reg):
"""
CalcRowLabelsExposed(reg) -> ArrayInt
Appends one or more new columns to the right of the grid.
"""
def CalcColLabelsExposed(self, reg):
"""
CalcColLabelsExposed(reg) -> ArrayInt
Appends one or more new columns to the right of the grid.
"""
def CalcCellsExposed(self, reg):
"""
CalcCellsExposed(reg) -> GridCellCoordsArray
Appends one or more new columns to the right of the grid.
"""
def GetSortingColumn(self):
"""
GetSortingColumn() -> int
Return the column in which the sorting indicator is currently
displayed.
"""
def IsSortingBy(self, col):
"""
IsSortingBy(col) -> bool
Return true if this column is currently used for sorting.
"""
def IsSortOrderAscending(self):
"""
IsSortOrderAscending() -> bool
Return true if the current sorting order is ascending or false if it
is descending.
"""
def SetSortingColumn(self, col, ascending=True):
"""
SetSortingColumn(col, ascending=True)
Set the column to display the sorting indicator in and its direction.
"""
def UnsetSortingColumn(self):
"""
UnsetSortingColumn()
Remove any currently shown sorting indicator.
"""
def GetGridWindow(self):
"""
GetGridWindow() -> wx.Window
Return the main grid window containing the grid cells.
"""
def GetGridRowLabelWindow(self):
"""
GetGridRowLabelWindow() -> wx.Window
Return the row labels window.
"""
def GetGridColLabelWindow(self):
"""
GetGridColLabelWindow() -> wx.Window
Return the column labels window.
"""
def GetGridCornerLabelWindow(self):
"""
GetGridCornerLabelWindow() -> wx.Window
Return the window in the top left grid corner.
"""
def GetGridColHeader(self):
"""
GetGridColHeader() -> wx.HeaderCtrl
Return the header control used for column labels display.
"""
def DrawCellHighlight(self, dc, attr):
"""
DrawCellHighlight(dc, attr)
"""
def DrawRowLabels(self, dc, rows):
"""
DrawRowLabels(dc, rows)
"""
def DrawRowLabel(self, dc, row):
"""
DrawRowLabel(dc, row)
"""
def DrawColLabels(self, dc, cols):
"""
DrawColLabels(dc, cols)
"""
def DrawColLabel(self, dc, col):
"""
DrawColLabel(dc, col)
"""
def DrawCornerLabel(self, dc):
"""
DrawCornerLabel(dc)
"""
def DrawTextRectangle(self, *args, **kw):
"""
DrawTextRectangle(dc, text, rect, horizontalAlignment=wx.ALIGN_LEFT, verticalAlignment=wx.ALIGN_TOP, textOrientation=wx.HORIZONTAL)
DrawTextRectangle(dc, lines, rect, horizontalAlignment=wx.ALIGN_LEFT, verticalAlignment=wx.ALIGN_TOP, textOrientation=wx.HORIZONTAL)
"""
def GetCellHighlightColour(self):
"""
GetCellHighlightColour() -> wx.Colour
"""
def GetCellHighlightPenWidth(self):
"""
GetCellHighlightPenWidth() -> int
"""
def GetCellHighlightROPenWidth(self):
"""
GetCellHighlightROPenWidth() -> int
"""
def SetCellHighlightColour(self):
"""
SetCellHighlightColour()
"""
def SetCellHighlightPenWidth(self, width):
"""
SetCellHighlightPenWidth(width)
"""
def SetCellHighlightROPenWidth(self, width):
"""
SetCellHighlightROPenWidth(width)
"""
def SetTable(self, table, takeOwnership=False, selmode=SelectCells):
"""
Set the Grid Table to be used by this grid.
"""
wxGridSelectCells = SelectCells
wxGridSelectRows = SelectRows
wxGridSelectColumns = SelectColumns
wxGridSelectRowsOrColumns = SelectRowsOrColumns
BatchCount = property(None, None)
CellHighlightColour = property(None, None)
CellHighlightPenWidth = property(None, None)
CellHighlightROPenWidth = property(None, None)
ColLabelSize = property(None, None)
ColLabelTextOrientation = property(None, None)
ColMinimalAcceptableWidth = property(None, None)
ColSizes = property(None, None)
DefaultCellBackgroundColour = property(None, None)
DefaultCellFont = property(None, None)
DefaultCellOverflow = property(None, None)
DefaultCellTextColour = property(None, None)
DefaultColLabelSize = property(None, None)
DefaultColSize = property(None, None)
DefaultEditor = property(None, None)
DefaultGridLinePen = property(None, None)
DefaultRenderer = property(None, None)
DefaultRowLabelSize = property(None, None)
DefaultRowSize = property(None, None)
GridColHeader = property(None, None)
GridColLabelWindow = property(None, None)
GridCornerLabelWindow = property(None, None)
GridCursorCol = property(None, None)
GridCursorRow = property(None, None)
GridLineColour = property(None, None)
GridRowLabelWindow = property(None, None)
GridWindow = property(None, None)
LabelBackgroundColour = property(None, None)
LabelFont = property(None, None)
LabelTextColour = property(None, None)
NumberCols = property(None, None)
NumberRows = property(None, None)
RowLabelSize = property(None, None)
RowMinimalAcceptableHeight = property(None, None)
RowSizes = property(None, None)
ScrollLineX = property(None, None)
ScrollLineY = property(None, None)
SelectedCells = property(None, None)
SelectedCols = property(None, None)
SelectedRows = property(None, None)
SelectionBackground = property(None, None)
SelectionBlockBottomRight = property(None, None)
SelectionBlockTopLeft = property(None, None)
SelectionForeground = property(None, None)
SelectionMode = property(None, None)
SortingColumn = property(None, None)
Table = property(None, None)
def CanHaveAttributes(self):
"""
CanHaveAttributes() -> bool
Returns true if this grid has support for cell attributes.
"""
def GetColMinimalWidth(self, col):
"""
GetColMinimalWidth(col) -> int
Get the minimal width of the given column/row.
"""
def GetColRight(self, col):
"""
GetColRight(col) -> int
Returns the coordinate of the right border specified column.
"""
def GetColLeft(self, col):
"""
GetColLeft(col) -> int
Returns the coordinate of the left border specified column.
"""
def GetRowMinimalHeight(self, col):
"""
GetRowMinimalHeight(col) -> int
Returns the minimal size for the given column.
"""
# end of class Grid
class GridUpdateLocker(object):
"""
GridUpdateLocker(grid=None)
This small class can be used to prevent wxGrid from redrawing during
its lifetime by calling wxGrid::BeginBatch() in its constructor and
wxGrid::EndBatch() in its destructor.
"""
def __init__(self, grid=None):
"""
GridUpdateLocker(grid=None)
This small class can be used to prevent wxGrid from redrawing during
its lifetime by calling wxGrid::BeginBatch() in its constructor and
wxGrid::EndBatch() in its destructor.
"""
def Create(self, grid):
"""
Create(grid)
This method can be called if the object had been constructed using the
default constructor.
"""
def __enter__(self):
"""
"""
def __exit__(self, exc_type, exc_val, exc_tb):
"""
"""
# end of class GridUpdateLocker
class GridEvent(wx.NotifyEvent):
"""
GridEvent()
GridEvent(id, type, obj, row=-1, col=-1, x=-1, y=-1, sel=True, kbd=wx.KeyboardState())
This event class contains information about various grid events.
"""
def __init__(self, *args, **kw):
"""
GridEvent()
GridEvent(id, type, obj, row=-1, col=-1, x=-1, y=-1, sel=True, kbd=wx.KeyboardState())
This event class contains information about various grid events.
"""
def AltDown(self):
"""
AltDown() -> bool
Returns true if the Alt key was down at the time of the event.
"""
def ControlDown(self):
"""
ControlDown() -> bool
Returns true if the Control key was down at the time of the event.
"""
def GetCol(self):
"""
GetCol() -> int
Column at which the event occurred.
"""
def GetPosition(self):
"""
GetPosition() -> wx.Point
Position in pixels at which the event occurred.
"""
def GetRow(self):
"""
GetRow() -> int
Row at which the event occurred.
"""
def MetaDown(self):
"""
MetaDown() -> bool
Returns true if the Meta key was down at the time of the event.
"""
def Selecting(self):
"""
Selecting() -> bool
Returns true if the user is selecting grid cells, or false if
deselecting.
"""
def ShiftDown(self):
"""
ShiftDown() -> bool
Returns true if the Shift key was down at the time of the event.
"""
Col = property(None, None)
Position = property(None, None)
Row = property(None, None)
# end of class GridEvent
class GridSizeEvent(wx.NotifyEvent):
"""
GridSizeEvent()
GridSizeEvent(id, type, obj, rowOrCol=-1, x=-1, y=-1, kbd=wx.KeyboardState())
This event class contains information about a row/column resize event.
"""
def __init__(self, *args, **kw):
"""
GridSizeEvent()
GridSizeEvent(id, type, obj, rowOrCol=-1, x=-1, y=-1, kbd=wx.KeyboardState())
This event class contains information about a row/column resize event.
"""
def AltDown(self):
"""
AltDown() -> bool
Returns true if the Alt key was down at the time of the event.
"""
def ControlDown(self):
"""
ControlDown() -> bool
Returns true if the Control key was down at the time of the event.
"""
def GetPosition(self):
"""
GetPosition() -> wx.Point
Position in pixels at which the event occurred.
"""
def GetRowOrCol(self):
"""
GetRowOrCol() -> int
Row or column at that was resized.
"""
def MetaDown(self):
"""
MetaDown() -> bool
Returns true if the Meta key was down at the time of the event.
"""
def ShiftDown(self):
"""
ShiftDown() -> bool
Returns true if the Shift key was down at the time of the event.
"""
Position = property(None, None)
RowOrCol = property(None, None)
# end of class GridSizeEvent
class GridRangeSelectEvent(wx.NotifyEvent):
"""
GridRangeSelectEvent()
GridRangeSelectEvent(id, type, obj, topLeft, bottomRight, sel=True, kbd=wx.KeyboardState())
"""
def __init__(self, *args, **kw):
"""
GridRangeSelectEvent()
GridRangeSelectEvent(id, type, obj, topLeft, bottomRight, sel=True, kbd=wx.KeyboardState())
"""
def AltDown(self):
"""
AltDown() -> bool
Returns true if the Alt key was down at the time of the event.
"""
def ControlDown(self):
"""
ControlDown() -> bool
Returns true if the Control key was down at the time of the event.
"""
def GetBottomRightCoords(self):
"""
GetBottomRightCoords() -> GridCellCoords
Top left corner of the rectangular area that was (de)selected.
"""
def GetBottomRow(self):
"""
GetBottomRow() -> int
Bottom row of the rectangular area that was (de)selected.
"""
def GetLeftCol(self):
"""
GetLeftCol() -> int
Left column of the rectangular area that was (de)selected.
"""
def GetRightCol(self):
"""
GetRightCol() -> int
Right column of the rectangular area that was (de)selected.
"""
def GetTopLeftCoords(self):
"""
GetTopLeftCoords() -> GridCellCoords
Top left corner of the rectangular area that was (de)selected.
"""
def GetTopRow(self):
"""
GetTopRow() -> int
Top row of the rectangular area that was (de)selected.
"""
def MetaDown(self):
"""
MetaDown() -> bool
Returns true if the Meta key was down at the time of the event.
"""
def Selecting(self):
"""
Selecting() -> bool
Returns true if the area was selected, false otherwise.
"""
def ShiftDown(self):
"""
ShiftDown() -> bool
Returns true if the Shift key was down at the time of the event.
"""
BottomRightCoords = property(None, None)
BottomRow = property(None, None)
LeftCol = property(None, None)
RightCol = property(None, None)
TopLeftCoords = property(None, None)
TopRow = property(None, None)
# end of class GridRangeSelectEvent
class GridEditorCreatedEvent(wx.CommandEvent):
"""
GridEditorCreatedEvent()
GridEditorCreatedEvent(id, type, obj, row, col, ctrl)
"""
def __init__(self, *args, **kw):
"""
GridEditorCreatedEvent()
GridEditorCreatedEvent(id, type, obj, row, col, ctrl)
"""
def GetCol(self):
"""
GetCol() -> int
Returns the column at which the event occurred.
"""
def GetControl(self):
"""
GetControl() -> wx.Control
Returns the edit control.
"""
def GetRow(self):
"""
GetRow() -> int
Returns the row at which the event occurred.
"""
def SetCol(self, col):
"""
SetCol(col)
Sets the column at which the event occurred.
"""
def SetControl(self, ctrl):
"""
SetControl(ctrl)
Sets the edit control.
"""
def SetRow(self, row):
"""
SetRow(row)
Sets the row at which the event occurred.
"""
Col = property(None, None)
Control = property(None, None)
Row = property(None, None)
# end of class GridEditorCreatedEvent
GRID_VALUE_STRING = "string"
GRID_VALUE_BOOL = "bool"
GRID_VALUE_NUMBER = "long"
GRID_VALUE_FLOAT = "double"
GRID_VALUE_CHOICE = "choice"
GRID_VALUE_TEXT = "string"
GRID_VALUE_LONG = "long"
GRID_VALUE_CHOICEINT = "choiceint"
GRID_VALUE_DATETIME = "datetime"
from collections import namedtuple
_im_GridCellCoords = namedtuple('_im_GridCellCoords', ['Row', 'Col'])
del namedtuple
PyGridCellRenderer = wx.deprecated(GridCellRenderer, 'Use GridCellRenderer instead.')
PyGridCellEditor = wx.deprecated(GridCellEditor, 'Use GridCellEditor instead.')
PyGridCellAttrProvider = wx.deprecated(GridCellAttrProvider, 'Use GridCellAttrProvider instead.')
PyGridTableBase = wx.deprecated(GridTableBase, 'Use GridTableBase instead.')
EVT_GRID_CELL_LEFT_CLICK = wx.PyEventBinder( wxEVT_GRID_CELL_LEFT_CLICK )
EVT_GRID_CELL_RIGHT_CLICK = wx.PyEventBinder( wxEVT_GRID_CELL_RIGHT_CLICK )
EVT_GRID_CELL_LEFT_DCLICK = wx.PyEventBinder( wxEVT_GRID_CELL_LEFT_DCLICK )
EVT_GRID_CELL_RIGHT_DCLICK = wx.PyEventBinder( wxEVT_GRID_CELL_RIGHT_DCLICK )
EVT_GRID_LABEL_LEFT_CLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_LEFT_CLICK )
EVT_GRID_LABEL_RIGHT_CLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_RIGHT_CLICK )
EVT_GRID_LABEL_LEFT_DCLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_LEFT_DCLICK )
EVT_GRID_LABEL_RIGHT_DCLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_RIGHT_DCLICK )
EVT_GRID_ROW_SIZE = wx.PyEventBinder( wxEVT_GRID_ROW_SIZE )
EVT_GRID_COL_SIZE = wx.PyEventBinder( wxEVT_GRID_COL_SIZE )
EVT_GRID_RANGE_SELECT = wx.PyEventBinder( wxEVT_GRID_RANGE_SELECT )
EVT_GRID_CELL_CHANGING = wx.PyEventBinder( wxEVT_GRID_CELL_CHANGING )
EVT_GRID_CELL_CHANGED = wx.PyEventBinder( wxEVT_GRID_CELL_CHANGED )
EVT_GRID_SELECT_CELL = wx.PyEventBinder( wxEVT_GRID_SELECT_CELL )
EVT_GRID_EDITOR_SHOWN = wx.PyEventBinder( wxEVT_GRID_EDITOR_SHOWN )
EVT_GRID_EDITOR_HIDDEN = wx.PyEventBinder( wxEVT_GRID_EDITOR_HIDDEN )
EVT_GRID_EDITOR_CREATED = wx.PyEventBinder( wxEVT_GRID_EDITOR_CREATED )
EVT_GRID_CELL_BEGIN_DRAG = wx.PyEventBinder( wxEVT_GRID_CELL_BEGIN_DRAG )
EVT_GRID_COL_MOVE = wx.PyEventBinder( wxEVT_GRID_COL_MOVE )
EVT_GRID_COL_SORT = wx.PyEventBinder( wxEVT_GRID_COL_SORT )
EVT_GRID_TABBING = wx.PyEventBinder( wxEVT_GRID_TABBING )
# The same as above but with the ability to specify an identifier
EVT_GRID_CMD_CELL_LEFT_CLICK = wx.PyEventBinder( wxEVT_GRID_CELL_LEFT_CLICK, 1 )
EVT_GRID_CMD_CELL_RIGHT_CLICK = wx.PyEventBinder( wxEVT_GRID_CELL_RIGHT_CLICK, 1 )
EVT_GRID_CMD_CELL_LEFT_DCLICK = wx.PyEventBinder( wxEVT_GRID_CELL_LEFT_DCLICK, 1 )
EVT_GRID_CMD_CELL_RIGHT_DCLICK = wx.PyEventBinder( wxEVT_GRID_CELL_RIGHT_DCLICK, 1 )
EVT_GRID_CMD_LABEL_LEFT_CLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_LEFT_CLICK, 1 )
EVT_GRID_CMD_LABEL_RIGHT_CLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_RIGHT_CLICK, 1 )
EVT_GRID_CMD_LABEL_LEFT_DCLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_LEFT_DCLICK, 1 )
EVT_GRID_CMD_LABEL_RIGHT_DCLICK = wx.PyEventBinder( wxEVT_GRID_LABEL_RIGHT_DCLICK, 1 )
EVT_GRID_CMD_ROW_SIZE = wx.PyEventBinder( wxEVT_GRID_ROW_SIZE, 1 )
EVT_GRID_CMD_COL_SIZE = wx.PyEventBinder( wxEVT_GRID_COL_SIZE, 1 )
EVT_GRID_CMD_RANGE_SELECT = wx.PyEventBinder( wxEVT_GRID_RANGE_SELECT, 1 )
EVT_GRID_CMD_CELL_CHANGING = wx.PyEventBinder( wxEVT_GRID_CELL_CHANGING, 1 )
EVT_GRID_CMD_CELL_CHANGED = wx.PyEventBinder( wxEVT_GRID_CELL_CHANGED, 1 )
EVT_GRID_CMD_SELECT_CELL = wx.PyEventBinder( wxEVT_GRID_SELECT_CELL, 1 )
EVT_GRID_CMD_EDITOR_SHOWN = wx.PyEventBinder( wxEVT_GRID_EDITOR_SHOWN, 1 )
EVT_GRID_CMD_EDITOR_HIDDEN = wx.PyEventBinder( wxEVT_GRID_EDITOR_HIDDEN, 1 )
EVT_GRID_CMD_EDITOR_CREATED = wx.PyEventBinder( wxEVT_GRID_EDITOR_CREATED, 1 )
EVT_GRID_CMD_CELL_BEGIN_DRAG = wx.PyEventBinder( wxEVT_GRID_CELL_BEGIN_DRAG, 1 )
EVT_GRID_CMD_COL_MOVE = wx.PyEventBinder( wxEVT_GRID_COL_MOVE, 1 )
EVT_GRID_CMD_COL_SORT = wx.PyEventBinder( wxEVT_GRID_COL_SORT, 1 )
EVT_GRID_CMD_TABBING = wx.PyEventBinder( wxEVT_GRID_TABBING, 1 )
#-- end-grid --#
| [
"salvadoraleguas@gmail.com"
] | salvadoraleguas@gmail.com |
b859d46b4f18efbfc0b5398705cd8d08f1294d73 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03738/s549313659.py | 12a3cd23e0f77bb5e3c9bf8518cad064d24634fd | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | a = int(input())
b = int(input())
ret = ''
if( a > b):
ret = 'GREATER'
elif( a < b):
ret = 'LESS'
else:
ret = 'EQUAL'
print(ret) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
728ca7506be2deabb34816b5fe10cfbb388e53ee | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_thoughts.py | ce645c20272a682102eb7f3134c14f62de160bfa | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
#calss header
class _THOUGHTS():
def __init__(self,):
self.name = "THOUGHTS"
self.definitions = thought
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['thought']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
f344e97b63a1313d7160350354d22691bb915352 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03407/s365812995.py | 9924c2f8b0a742afb50aef0a55fd4b35d69a8361 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90 | py | A,B,C = map(int, input().split())
if C>(A+B):
ans = 'No'
else:
ans = 'Yes'
print(ans) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
2c716b8c2b1ae34ef0a61b9c3da1bd67c44ace48 | de27e6d143f40d5948244597b861d522a9a272f6 | /fjord/heartbeat/migrations/0002_auto_20150213_0947.py | a4f74d469a7dc0c38c5f0fcaed78e8aba1a95b7d | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mozilla/fjord | 7f31af6dd80869ca856f8a02ff10e72c81685368 | 0fcb81e6a5edaf42c00c64faf001fc43b24e11c0 | refs/heads/master | 2023-07-03T18:20:01.651759 | 2017-01-10T20:12:33 | 2017-01-10T20:12:33 | 5,197,539 | 18 | 22 | null | 2016-08-22T14:56:11 | 2012-07-26T21:25:00 | Python | UTF-8 | Python | false | false | 784 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('heartbeat', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='survey',
name='description',
field=models.TextField(default=b'', help_text='Informal description of the survey so we can tell them apart', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='survey',
name='name',
field=models.CharField(help_text='Unique name for the survey. e.g. heartbeat-question-1', unique=True, max_length=100),
preserve_default=True,
),
]
| [
"willkg@mozilla.com"
] | willkg@mozilla.com |
08ca57f7bfb8b2b56205a27f3306087d7ff3dfbb | 63bf6161532eefa72aa3be8b01cde601b08507dc | /python-mapping-example/fhir_model_generator/model/namingsystem.py | e7edf545c950a359c609ec28b6f86a71fd632773 | [
"Apache-2.0"
] | permissive | Healthedata1/mFHIR | 4ef370b87e03e973918e5683977d32fe262655bc | 1b4ea441cfa08b661416a3badedf7e90f2809163 | refs/heads/master | 2022-12-10T21:07:03.948406 | 2021-06-18T01:58:23 | 2021-06-18T01:58:23 | 129,964,251 | 9 | 5 | null | 2022-12-09T05:23:54 | 2018-04-17T20:57:15 | HTML | UTF-8 | Python | false | false | 1,832 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.1-9346c8cc45 (http://hl7.org/fhir/StructureDefinition/NamingSystem) on 2020-02-10.
# 2020, SMART Health IT.
import sys
from dataclasses import dataclass, field
from typing import ClassVar, Optional, List
from .backboneelement import BackboneElement
from .codeableconcept import CodeableConcept
from .contactdetail import ContactDetail
from .domainresource import DomainResource
from .fhirdate import FHIRDate
from .period import Period
from .usagecontext import UsageContext
@dataclass
class NamingSystemUniqueId(BackboneElement):
""" Unique identifiers used for system.
Indicates how the system may be identified when referenced in electronic
exchange.
"""
resource_type: ClassVar[str] = "NamingSystemUniqueId"
type: str = None
value: str = None
preferred: Optional[bool] = None
comment: Optional[str] = None
period: Optional[Period] = None
@dataclass
class NamingSystem(DomainResource):
""" System of unique identification.
A curated namespace that issues unique symbols within that namespace for
the identification of concepts, people, devices, etc. Represents a
"System" used within the Identifier and Coding data types.
"""
resource_type: ClassVar[str] = "NamingSystem"
name: str = None
status: str = None
kind: str = None
date: FHIRDate = None
publisher: Optional[str] = None
contact: Optional[List[ContactDetail]] = None
responsible: Optional[str] = None
type: Optional[CodeableConcept] = None
description: Optional[str] = None
useContext: Optional[List[UsageContext]] = None
jurisdiction: Optional[List[CodeableConcept]] = None
usage: Optional[str] = None
uniqueId: List[NamingSystemUniqueId] = field(default_factory=list) | [
"ehaas@healthedatainc.com"
] | ehaas@healthedatainc.com |
fe6c635055b56eee3c9d2f141ce391f88f17af89 | 87119ec9cea61be175f2a1f16f0e37d060cde9af | /django/myproject/guestbook/views.py | 10b4c58f64dd2648adb10abbea42488375fd7b7c | [] | no_license | atkins126/sample_nullpobug | bce9c1bf2a31921ac665a18dc2a62be3bdef493e | b2ba65f42f717f0ceb2cf14fe28e90c460bfde87 | refs/heads/master | 2023-02-16T11:37:05.290069 | 2021-01-18T14:43:40 | 2021-01-18T14:43:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | # coding: utf-8
from django.views.generic import CreateView
from django.core.urlresolvers import reverse
from guestbook.models import Greeting
from guestbook.forms import GreetingForm
class IndexView(CreateView):
u"""
テンプレートによるフォーム表示と
送信されたフォーム内容をモデルに保存する
クラスベース汎用ビューを使用
"""
model = Greeting
form_class = GreetingForm
def get_success_url(self):
return reverse('guestbook:index')
def get_context_data(self, **kwargs):
context = kwargs
context['greeting_list'] = Greeting.objects.all()
return context
| [
"tokibito@gmail.com"
] | tokibito@gmail.com |
e795fe5a43ef8191430bc02b8bfe80d6b2af1e33 | d60ee49abaee6c74c5b777f8f112a7f75f71f029 | /transcriptome/variants/combine_indels/merge_callers.py | 4ad773516e591cc4ea2af75aac5c24231071fc5b | [] | no_license | ak352/melanomics | 41530f623b4bfdbd5c7b952debcb47622d1a8e88 | fc5e6fdb1499616fb25a8dc05259add8a65aeca0 | refs/heads/master | 2020-12-24T16:14:42.271416 | 2015-08-06T12:48:52 | 2015-08-06T12:48:52 | 18,439,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,585 | py | import sys
for line in open(sys.argv[1]):
if line.startswith("##"):
print line.rstrip("\n")
elif line.startswith("#CHROM"):
line = line.rstrip("\n").split("\t")
line = line[0:9]
line.append(sys.argv[2])
print "\t".join(line)
else:
line = line.rstrip("\n").split("\t")
attribs = line[8].split(":")
num_callers_has_indel = 0
max_alternate_alleles = 0
for x in range(len(attribs)):
if attribs[x]=="GT":
#Since all positions are covered by some reads, all ./. are homozygous reference (0/0)
for sample in line[9:]:
sample = sample.split(":")
if sample[x]!="./.":
num_callers_has_indel += 1
alternate_alleles = set(sample[x].split("/"))
if "0" in alternate_alleles:
alternate_alleles.remove("0")
#Choose the merged genotype to be the one with the maximum different types of alleles - a sensitive strategy
if len(alternate_alleles) > max_alternate_alleles:
max_alternate_alleles = len(alternate_alleles)
consensus = ":".join(sample)
break
newline = line[0:9]
if num_callers_has_indel >= 2:
newline[6]="PASS"
else:
newline[6]="FAIL"
newline.append(consensus)
if num_callers_has_indel >= 2:
print "\t".join(newline)
| [
"ak@uni.fake"
] | ak@uni.fake |
8bc514d81e1bc5d9600495d88105cfc2b69996ab | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/scatter3d/error_y/_arrayminussrc.py | 3ac5aee82f37d414b78135b25b6c1c4ccf604ac0 | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 435 | py | import _plotly_utils.basevalidators
class ArrayminussrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="arrayminussrc", parent_name="scatter3d.error_y", **kwargs
):
super(ArrayminussrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
| [
"noreply@github.com"
] | hugovk.noreply@github.com |
40439daa08c438b8507f12a556ecfd72a95a7444 | 284f2bfaabf91899211e56063026857c496965cf | /tuites/migrations/0001_initial.py | 8de3c24cc1b399b272364d1abd936290637e1a55 | [] | no_license | vanessa/building-tuirer | 7b56bb9791659fcd04942d2c84a393c3c226f8c4 | 61d85df7d120387700b2e449a6fde5fb9ca7cfaa | refs/heads/master | 2022-12-11T07:25:14.174448 | 2018-08-07T05:18:29 | 2018-08-07T05:18:29 | 142,210,249 | 18 | 0 | null | 2022-12-08T02:19:48 | 2018-07-24T20:35:34 | Python | UTF-8 | Python | false | false | 840 | py | # Generated by Django 2.0.7 on 2018-07-26 19:50
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Tuite',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.CharField(max_length=280)),
('date_created', models.DateTimeField(auto_now_add=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tuites', to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"vanessa@vinta.com.br"
] | vanessa@vinta.com.br |
66f994c8d72a1dd8f06737363ada41762e6d30ec | 7ae0efc9798b7c9fa720022ed5d763d6ab27cd13 | /python/paddle/fluid/tests/unittests/mlu/test_batch_norm_op_mlu_v2.py | f608344f6e0363864a76a23f8d8c10dace130149 | [
"Apache-2.0"
] | permissive | ceci3/Paddle | e1d0b56a1bb1de9a0d26977868795f86e2c0580b | e4d475eabd83e7a6fa1e88c64c28747450f87d66 | refs/heads/develop | 2023-08-03T03:43:35.139011 | 2022-02-08T11:36:07 | 2022-02-08T11:36:07 | 171,274,803 | 0 | 3 | Apache-2.0 | 2021-08-24T07:14:24 | 2019-02-18T11:49:16 | C++ | UTF-8 | Python | false | false | 11,789 | py | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
import numpy as np
import paddle.fluid.core as core
from paddle.fluid.op import Operator
import paddle.fluid as fluid
import sys
sys.path.append("..")
from op_test import OpTest, _set_use_system_allocator
from paddle.fluid.framework import grad_var_name
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
import paddle
class TestBatchNorm(unittest.TestCase):
def test_name(self):
places = [fluid.CPUPlace()]
if core.is_compiled_with_mlu():
places.append(fluid.MLUPlace(0))
for p in places:
with fluid.dygraph.guard(p):
batch_norm1d = paddle.nn.BatchNorm1D(1, name="test")
def test_error(self):
places = [fluid.CPUPlace()]
if core.is_compiled_with_mlu():
places.append(fluid.MLUPlace(0))
for p in places:
#paddle.disable_static()
x_data_4 = np.random.random(size=(2, 1, 3, 3)).astype('float32')
x_data_3 = np.random.random(size=(2, 1, 3)).astype('float32')
def error1d_dataformat():
x_data_4 = np.random.random(size=(2, 1, 3, 3)).astype('float32')
batch_norm1d = paddle.nn.BatchNorm1D(1, data_format='NCDHW')
batch_norm1d(fluid.dygraph.to_variable(x_data_4))
def error2d_dataformat():
x_data_3 = np.random.random(size=(2, 1, 3)).astype('float32')
batch_norm2d = paddle.nn.BatchNorm2D(1, data_format='NCDHW')
batch_norm2d(fluid.dygraph.to_variable(x_data_3))
def error3d_dataformat():
x_data_4 = np.random.random(size=(2, 1, 3, 3)).astype('float32')
batch_norm3d = paddle.nn.BatchNorm3D(1, data_format='NCL')
batch_norm3d(fluid.dygraph.to_variable(x_data_4))
def error1d():
x_data_4 = np.random.random(size=(2, 1, 3, 3)).astype('float32')
batch_norm1d = paddle.nn.BatchNorm1D(1)
batch_norm1d(fluid.dygraph.to_variable(x_data_4))
def error2d():
x_data_3 = np.random.random(size=(2, 1, 3)).astype('float32')
batch_norm2d = paddle.nn.BatchNorm2D(1)
batch_norm2d(fluid.dygraph.to_variable(x_data_3))
def error3d():
x_data_4 = np.random.random(size=(2, 1, 3, 3)).astype('float32')
batch_norm3d = paddle.nn.BatchNorm3D(1)
batch_norm3d(fluid.dygraph.to_variable(x_data_4))
with fluid.dygraph.guard(p):
self.assertRaises(ValueError, error1d)
self.assertRaises(ValueError, error2d)
self.assertRaises(ValueError, error3d)
self.assertRaises(ValueError, error1d_dataformat)
self.assertRaises(ValueError, error2d_dataformat)
self.assertRaises(ValueError, error3d_dataformat)
def test_dygraph(self):
places = [fluid.CPUPlace()]
if core.is_compiled_with_mlu():
places.append(fluid.MLUPlace(0))
for p in places:
shape = [4, 10, 4, 4]
def compute_v1(x, is_test, trainable_statistics):
with fluid.dygraph.guard(p):
bn = fluid.dygraph.BatchNorm(
shape[1],
is_test=is_test,
trainable_statistics=trainable_statistics)
y = bn(fluid.dygraph.to_variable(x))
return y.numpy()
def compute_v2(x):
with fluid.dygraph.guard(p):
bn = paddle.nn.BatchNorm2D(shape[1])
y = bn(fluid.dygraph.to_variable(x))
return y.numpy()
def compute_v3(x, is_test, trainable_statistics):
with fluid.dygraph.guard(p):
bn = fluid.dygraph.BatchNorm(
shape[1],
is_test=is_test,
param_attr=fluid.ParamAttr(
initializer=fluid.initializer.Constant(1.0),
trainable=False),
bias_attr=fluid.ParamAttr(
initializer=fluid.initializer.Constant(0.0),
trainable=False),
trainable_statistics=trainable_statistics)
y = bn(fluid.dygraph.to_variable(x))
return y.numpy()
def compute_v4(x):
with fluid.dygraph.guard(p):
bn = paddle.nn.BatchNorm2D(
shape[1], weight_attr=False, bias_attr=False)
y = bn(fluid.dygraph.to_variable(x))
return y.numpy()
x = np.random.randn(*shape).astype("float32")
y1 = compute_v1(x, False, False)
y2 = compute_v2(x)
y3 = compute_v3(x, False, False)
y4 = compute_v4(x)
self.assertTrue(np.allclose(y1, y2))
self.assertTrue(np.allclose(y3, y4))
def test_static(self):
places = [fluid.CPUPlace()]
if core.is_compiled_with_mlu():
places.append(fluid.MLUPlace(0))
for p in places:
exe = fluid.Executor(p)
shape = [4, 10, 16, 16]
def compute_v1(x_np, is_test, trainable_statistics):
with program_guard(Program(), Program()):
bn = fluid.dygraph.BatchNorm(
shape[1],
is_test=is_test,
trainable_statistics=trainable_statistics)
x = fluid.data(name='x', shape=x_np.shape, dtype=x_np.dtype)
y = bn(x)
exe.run(fluid.default_startup_program())
r = exe.run(feed={'x': x_np}, fetch_list=[y])[0]
return r
def compute_v2(x_np):
with program_guard(Program(), Program()):
bn = paddle.nn.BatchNorm2D(shape[1])
x = fluid.data(name='x', shape=x_np.shape, dtype=x_np.dtype)
y = bn(x)
exe.run(fluid.default_startup_program())
r = exe.run(feed={'x': x_np}, fetch_list=[y])[0]
return r
x = np.random.randn(*shape).astype("float32")
y1 = compute_v1(x, False, False)
y2 = compute_v2(x)
self.assertTrue(np.allclose(y1, y2))
class TestBatchNormChannelLast(unittest.TestCase):
def setUp(self):
self.original_dtyep = paddle.get_default_dtype()
paddle.set_default_dtype("float32")
self.places = [fluid.CPUPlace()]
if core.is_compiled_with_mlu():
self.places.append(fluid.MLUPlace(0))
def tearDown(self):
paddle.set_default_dtype(self.original_dtyep)
def test_1d(self):
for p in self.places:
with fluid.dygraph.guard(p):
x = paddle.randn([2, 6, 4])
net1 = paddle.nn.BatchNorm1D(4, data_format="NLC")
net2 = paddle.nn.BatchNorm1D(4)
net2.weight = net1.weight
net2.bias = net1.bias
y1 = net1(x)
channel_first_x = paddle.transpose(x, [0, 2, 1])
y2 = net2(channel_first_x)
y2 = paddle.transpose(y2, [0, 2, 1])
self.assertEqual(
np.allclose(
y1.numpy(), y2.numpy(), atol=1e-07), True)
def test_2d(self):
for p in self.places:
with fluid.dygraph.guard(p):
x = paddle.randn([2, 6, 6, 4])
net1 = paddle.nn.BatchNorm2D(4, data_format="NHWC")
net2 = paddle.nn.BatchNorm2D(4)
net2.weight = net1.weight
net2.bias = net1.bias
y1 = net1(x)
channel_first_x = paddle.transpose(x, [0, 3, 1, 2])
y2 = net2(channel_first_x)
y2 = paddle.transpose(y2, [0, 2, 3, 1])
self.assertEqual(
np.allclose(
y1.numpy(), y2.numpy(), atol=1e-07), True)
def test_3d(self):
for p in self.places:
with fluid.dygraph.guard(p):
x = paddle.randn([2, 6, 6, 6, 4])
net1 = paddle.nn.BatchNorm3D(4, data_format="NDHWC")
net2 = paddle.nn.BatchNorm3D(4)
net2.weight = net1.weight
net2.bias = net1.bias
y1 = net1(x)
channel_first_x = paddle.transpose(x, [0, 4, 1, 2, 3])
y2 = net2(channel_first_x)
y2 = paddle.transpose(y2, [0, 2, 3, 4, 1])
self.assertEqual(
np.allclose(
y1.numpy(), y2.numpy(), atol=1e-07), True)
# res = np.allclose(y1.numpy(), y2.numpy())
# if res == False:
# np.savetxt("./y1.txt", y1.numpy().flatten(), fmt='%.10f', delimiter='\n')
# np.savetxt("./y2.txt", y2.numpy().flatten(), fmt='%.10f', delimiter='\n')
# self.assertEqual(res, True)
class TestBatchNormUseGlobalStats(unittest.TestCase):
def setUp(self):
self.places = [fluid.CPUPlace()]
if core.is_compiled_with_mlu():
self.places.append(fluid.MLUPlace(0))
self.init_test()
### train mode
def init_test(self):
self.use_global_stats = True
self.trainable_statistics = False
def test_global_stats(self):
for p in self.places:
with fluid.dygraph.guard(p):
x = paddle.randn([2, 6, 6, 4])
net1 = paddle.fluid.dygraph.BatchNorm(
6,
param_attr=fluid.ParamAttr(
initializer=fluid.initializer.Constant(1.0)),
use_global_stats=self.use_global_stats,
trainable_statistics=self.trainable_statistics)
net2 = paddle.nn.BatchNorm2D(
6, use_global_stats=self.use_global_stats)
net2.weight = net1.weight
net2.bias = net1.bias
if self.trainable_statistics == True:
net1.training = False
net2.training = False
y1 = net1(x)
y2 = net2(x)
self.assertEqual(np.allclose(y1.numpy(), y2.numpy()), True)
class TestBatchNormUseGlobalStatsCase1(TestBatchNormUseGlobalStats):
### test mode
def init_test(self):
self.use_global_stats = False
self.trainable_statistics = True
class TestBatchNormUseGlobalStatsCase2(TestBatchNormUseGlobalStats):
### train mode
def init_test(self):
self.use_global_stats = False
self.trainable_statistics = False
class TestBatchNormUseGlobalStatsCase3(TestBatchNormUseGlobalStats):
### test mode
def init_test(self):
self.use_global_stats = True
self.trainable_statistics = True
if __name__ == '__main__':
paddle.enable_static()
unittest.main()
| [
"noreply@github.com"
] | ceci3.noreply@github.com |
9f8f7a635445ea0cbc31c0a85db8d79724111967 | d1b44d58d4eaa845e1b460f338e61857ac00cd6f | /ch08/misung/ch8_2_misung.py | 656f723a7c00f993ec717b0705ca0a4f183727ba | [] | no_license | hyo-eun-kim/algorithm-study | 549ffe1d453ceede9075c1a8df55a67cf76bde00 | 1ca8298361b6a030d2569c06a34d955cc5e4b1bb | refs/heads/main | 2023-03-26T14:32:49.597667 | 2021-03-25T23:29:00 | 2021-03-25T23:29:00 | 301,707,534 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | # 두 정렬 리스트의 병합
# 정렬되어있는 두 리스트를 연결하라!
class Node:
def __init__(self, val, next=None):
self.val = val
self.next = next
node1 = Node(1)
node2 = Node(2)
node3 = Node(4)
node1.next = node2
node2.next = node3
node4 = Node(1)
node5 = Node(3)
node6 = Node(4)
node4.next = node5
node5.next = node6
def mergeTwoLists(l1, l2):
if l1 is None :
return l2
if l2 is None :
return l1
if l1.val <= l2.val :
head = l1
head.next = mergeTwoLists(l1.next, l2)
else :
head =l2
head.next = mergeTwoLists(l1,l2.next)
return head
mergeTwoLists(node1, node4) | [
"noreply@github.com"
] | hyo-eun-kim.noreply@github.com |
c743c8ff68fe3bd53074f445c816621b0e9b75b1 | 700f9f9e319ebd26d2557d64ea3827808dfad2f5 | /tests/fixtures/test_contributors/content_04_expected.py | 0572936d0420acaa17e064c4a1f555556f4a9eb7 | [
"MIT"
] | permissive | elifesciences/elife-tools | 1b44e660e916a82ef8ff64dd5a6ee5506e517359 | bc16e7dd5d6245077e39f8561b99c9acd510ddf7 | refs/heads/develop | 2023-03-06T08:37:47.424282 | 2023-02-20T20:40:49 | 2023-02-20T20:40:49 | 30,274,058 | 13 | 11 | MIT | 2023-02-20T20:40:50 | 2015-02-04T01:14:41 | Python | UTF-8 | Python | false | false | 1,436 | py | # based on elife article 75374 which has a collab inside a collab
expected = [
{
"type": "author",
"group-author-key": "group-author-id1",
"collab": "the PRACTICAL consortium",
},
{
"type": "author non-byline",
"group-author-key": "group-author-id1",
"surname": "Eeles",
"given-names": "Rosalind A",
"affiliations": [
{
"institution": "The Institute of Cancer Research",
"country": "United Kingdom",
"city": "London",
"ror": "https://ror.org/043jzw605",
}
],
},
{
"type": "author non-byline",
"group-author-key": "group-author-id1",
"collab": "APCB BioResource (Australian Prostate Cancer BioResource)",
"affiliations": [
{
"institution": "Translational Research Institute",
"country": "Australia",
"city": "Brisbane",
}
],
},
{
"type": "author non-byline",
"group-author-key": "group-author-id1",
"surname": "Grönberg",
"given-names": "Henrik",
"affiliations": [
{
"institution": "Department of Medical Epidemiology and Biostatistics, Karolinska Institute",
"country": "Sweden",
"city": "Stockholm",
}
],
},
]
| [
"gnott@starglobal.ca"
] | gnott@starglobal.ca |
1d9bf8c0601b435f06ae15415a41d8a5a8c7e83d | 5e255ad1360c90478393744586663741a9569c21 | /linebot/v3/insight/api/insight.py | 7cb1f70fee34cb4a8a7f6d0d6c1891e351a40fa2 | [
"Apache-2.0"
] | permissive | line/line-bot-sdk-python | d76268e8b542060d6eccbacc5dbfab16960ecc35 | cffd35948238ae24982173e30b1ea1e595bbefd9 | refs/heads/master | 2023-08-31T22:12:31.698183 | 2023-08-28T01:10:09 | 2023-08-28T01:10:09 | 70,553,423 | 1,898 | 1,181 | Apache-2.0 | 2023-09-11T05:14:07 | 2016-10-11T03:42:26 | Python | UTF-8 | Python | false | false | 37,184 | py | # coding: utf-8
"""
LINE Messaging API(Insight)
This document describes LINE Messaging API(Insight). # noqa: E501
The version of the OpenAPI document: 0.0.1
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import re # noqa: F401
import io
from pydantic.v1 import validate_arguments, ValidationError
from typing_extensions import Annotated
from pydantic.v1 import Field, constr, validator
from typing import Optional
from linebot.v3.insight.models.get_friends_demographics_response import GetFriendsDemographicsResponse
from linebot.v3.insight.models.get_message_event_response import GetMessageEventResponse
from linebot.v3.insight.models.get_number_of_followers_response import GetNumberOfFollowersResponse
from linebot.v3.insight.models.get_number_of_message_deliveries_response import GetNumberOfMessageDeliveriesResponse
from linebot.v3.insight.models.get_statistics_per_unit_response import GetStatisticsPerUnitResponse
from linebot.v3.insight.api_client import ApiClient
from linebot.v3.insight.api_response import ApiResponse
from linebot.v3.insight.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class Insight(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient.get_default()
self.api_client = api_client
@validate_arguments
def get_friends_demographics(self, **kwargs) -> GetFriendsDemographicsResponse: # noqa: E501
"""get_friends_demographics # noqa: E501
Retrieves the demographic attributes for a LINE Official Account's friends.You can only retrieve information about friends for LINE Official Accounts created by users in Japan (JP), Thailand (TH), Taiwan (TW) and Indonesia (ID). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_friends_demographics(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: GetFriendsDemographicsResponse
"""
kwargs['_return_http_data_only'] = True
if '_preload_content' in kwargs:
raise ValueError("Error! Please call the get_friends_demographics_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data")
return self.get_friends_demographics_with_http_info(**kwargs) # noqa: E501
@validate_arguments
def get_friends_demographics_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501
"""get_friends_demographics # noqa: E501
Retrieves the demographic attributes for a LINE Official Account's friends.You can only retrieve information about friends for LINE Official Accounts created by users in Japan (JP), Thailand (TH), Taiwan (TW) and Indonesia (ID). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_friends_demographics_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the ApiResponse.data will
be set to none and raw_data will store the
HTTP response body without reading/decoding.
Default is True.
:type _preload_content: bool, optional
:param _return_http_data_only: response data instead of ApiResponse
object with status code, headers, etc
:type _return_http_data_only: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:type _content_type: string, optional: force content-type for the request
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(GetFriendsDemographicsResponse, status_code(int), headers(HTTPHeaderDict))
"""
_params = locals()
_all_params = [
]
_all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'_content_type',
'_headers'
]
)
# validate the arguments
for _key, _val in _params['kwargs'].items():
if _key not in _all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_friends_demographics" % _key
)
_params[_key] = _val
del _params['kwargs']
_collection_formats = {}
# process the path parameters
_path_params = {}
# process the query parameters
_query_params = []
# process the header parameters
_header_params = dict(_params.get('_headers', {}))
# process the form parameters
_form_params = []
_files = {}
# process the body parameter
_body_params = None
# set the HTTP header `Accept`
_header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# authentication setting
_auth_settings = ['Bearer'] # noqa: E501
_response_types_map = {
'200': "GetFriendsDemographicsResponse",
}
return self.api_client.call_api(
'/v2/bot/insight/demographic', 'GET',
_path_params,
_query_params,
_header_params,
body=_body_params,
post_params=_form_params,
files=_files,
response_types_map=_response_types_map,
auth_settings=_auth_settings,
async_req=_params.get('async_req'),
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501
_preload_content=_params.get('_preload_content', True),
_request_timeout=_params.get('_request_timeout'),
collection_formats=_collection_formats,
_request_auth=_params.get('_request_auth'))
@validate_arguments
def get_message_event(self, request_id : Annotated[constr(strict=True, min_length=1), Field(..., description="Request ID of a narrowcast message or broadcast message. Each Messaging API request has a request ID. ")], **kwargs) -> GetMessageEventResponse: # noqa: E501
"""Get user interaction statistics # noqa: E501
Returns statistics about how users interact with narrowcast messages or broadcast messages sent from your LINE Official Account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_message_event(request_id, async_req=True)
>>> result = thread.get()
:param request_id: Request ID of a narrowcast message or broadcast message. Each Messaging API request has a request ID. (required)
:type request_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: GetMessageEventResponse
"""
kwargs['_return_http_data_only'] = True
if '_preload_content' in kwargs:
raise ValueError("Error! Please call the get_message_event_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data")
return self.get_message_event_with_http_info(request_id, **kwargs) # noqa: E501
@validate_arguments
def get_message_event_with_http_info(self, request_id : Annotated[constr(strict=True, min_length=1), Field(..., description="Request ID of a narrowcast message or broadcast message. Each Messaging API request has a request ID. ")], **kwargs) -> ApiResponse: # noqa: E501
"""Get user interaction statistics # noqa: E501
Returns statistics about how users interact with narrowcast messages or broadcast messages sent from your LINE Official Account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_message_event_with_http_info(request_id, async_req=True)
>>> result = thread.get()
:param request_id: Request ID of a narrowcast message or broadcast message. Each Messaging API request has a request ID. (required)
:type request_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the ApiResponse.data will
be set to none and raw_data will store the
HTTP response body without reading/decoding.
Default is True.
:type _preload_content: bool, optional
:param _return_http_data_only: response data instead of ApiResponse
object with status code, headers, etc
:type _return_http_data_only: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:type _content_type: string, optional: force content-type for the request
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(GetMessageEventResponse, status_code(int), headers(HTTPHeaderDict))
"""
_params = locals()
_all_params = [
'request_id'
]
_all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'_content_type',
'_headers'
]
)
# validate the arguments
for _key, _val in _params['kwargs'].items():
if _key not in _all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_message_event" % _key
)
_params[_key] = _val
del _params['kwargs']
_collection_formats = {}
# process the path parameters
_path_params = {}
# process the query parameters
_query_params = []
if _params.get('request_id') is not None: # noqa: E501
_query_params.append(('requestId', _params['request_id']))
# process the header parameters
_header_params = dict(_params.get('_headers', {}))
# process the form parameters
_form_params = []
_files = {}
# process the body parameter
_body_params = None
# set the HTTP header `Accept`
_header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# authentication setting
_auth_settings = ['Bearer'] # noqa: E501
_response_types_map = {
'200': "GetMessageEventResponse",
}
return self.api_client.call_api(
'/v2/bot/insight/message/event', 'GET',
_path_params,
_query_params,
_header_params,
body=_body_params,
post_params=_form_params,
files=_files,
response_types_map=_response_types_map,
auth_settings=_auth_settings,
async_req=_params.get('async_req'),
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501
_preload_content=_params.get('_preload_content', True),
_request_timeout=_params.get('_request_timeout'),
collection_formats=_collection_formats,
_request_auth=_params.get('_request_auth'))
@validate_arguments
def get_number_of_followers(self, var_date : Annotated[Optional[constr(strict=True, max_length=8, min_length=8)], Field(description="Date for which to retrieve the number of followers. Format: yyyyMMdd (e.g. 20191231) Timezone: UTC+9 ")] = None, **kwargs) -> GetNumberOfFollowersResponse: # noqa: E501
"""Get number of followers # noqa: E501
Returns the number of users who have added the LINE Official Account on or before a specified date. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_number_of_followers(var_date, async_req=True)
>>> result = thread.get()
:param var_date: Date for which to retrieve the number of followers. Format: yyyyMMdd (e.g. 20191231) Timezone: UTC+9
:type var_date: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: GetNumberOfFollowersResponse
"""
kwargs['_return_http_data_only'] = True
if '_preload_content' in kwargs:
raise ValueError("Error! Please call the get_number_of_followers_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data")
return self.get_number_of_followers_with_http_info(var_date, **kwargs) # noqa: E501
@validate_arguments
def get_number_of_followers_with_http_info(self, var_date : Annotated[Optional[constr(strict=True, max_length=8, min_length=8)], Field(description="Date for which to retrieve the number of followers. Format: yyyyMMdd (e.g. 20191231) Timezone: UTC+9 ")] = None, **kwargs) -> ApiResponse: # noqa: E501
"""Get number of followers # noqa: E501
Returns the number of users who have added the LINE Official Account on or before a specified date. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_number_of_followers_with_http_info(var_date, async_req=True)
>>> result = thread.get()
:param var_date: Date for which to retrieve the number of followers. Format: yyyyMMdd (e.g. 20191231) Timezone: UTC+9
:type var_date: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the ApiResponse.data will
be set to none and raw_data will store the
HTTP response body without reading/decoding.
Default is True.
:type _preload_content: bool, optional
:param _return_http_data_only: response data instead of ApiResponse
object with status code, headers, etc
:type _return_http_data_only: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:type _content_type: string, optional: force content-type for the request
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(GetNumberOfFollowersResponse, status_code(int), headers(HTTPHeaderDict))
"""
_params = locals()
_all_params = [
'var_date'
]
_all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'_content_type',
'_headers'
]
)
# validate the arguments
for _key, _val in _params['kwargs'].items():
if _key not in _all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_number_of_followers" % _key
)
_params[_key] = _val
del _params['kwargs']
_collection_formats = {}
# process the path parameters
_path_params = {}
# process the query parameters
_query_params = []
if _params.get('var_date') is not None: # noqa: E501
_query_params.append(('date', _params['var_date']))
# process the header parameters
_header_params = dict(_params.get('_headers', {}))
# process the form parameters
_form_params = []
_files = {}
# process the body parameter
_body_params = None
# set the HTTP header `Accept`
_header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# authentication setting
_auth_settings = ['Bearer'] # noqa: E501
_response_types_map = {
'200': "GetNumberOfFollowersResponse",
}
return self.api_client.call_api(
'/v2/bot/insight/followers', 'GET',
_path_params,
_query_params,
_header_params,
body=_body_params,
post_params=_form_params,
files=_files,
response_types_map=_response_types_map,
auth_settings=_auth_settings,
async_req=_params.get('async_req'),
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501
_preload_content=_params.get('_preload_content', True),
_request_timeout=_params.get('_request_timeout'),
collection_formats=_collection_formats,
_request_auth=_params.get('_request_auth'))
@validate_arguments
def get_number_of_message_deliveries(self, var_date : Annotated[constr(strict=True, max_length=8, min_length=8), Field(..., description="Date for which to retrieve number of sent messages. - Format: yyyyMMdd (e.g. 20191231) - Timezone: UTC+9 ")], **kwargs) -> GetNumberOfMessageDeliveriesResponse: # noqa: E501
"""Get number of message deliveries # noqa: E501
Returns the number of messages sent from LINE Official Account on a specified day. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_number_of_message_deliveries(var_date, async_req=True)
>>> result = thread.get()
:param var_date: Date for which to retrieve number of sent messages. - Format: yyyyMMdd (e.g. 20191231) - Timezone: UTC+9 (required)
:type var_date: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: GetNumberOfMessageDeliveriesResponse
"""
kwargs['_return_http_data_only'] = True
if '_preload_content' in kwargs:
raise ValueError("Error! Please call the get_number_of_message_deliveries_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data")
return self.get_number_of_message_deliveries_with_http_info(var_date, **kwargs) # noqa: E501
@validate_arguments
def get_number_of_message_deliveries_with_http_info(self, var_date : Annotated[constr(strict=True, max_length=8, min_length=8), Field(..., description="Date for which to retrieve number of sent messages. - Format: yyyyMMdd (e.g. 20191231) - Timezone: UTC+9 ")], **kwargs) -> ApiResponse: # noqa: E501
"""Get number of message deliveries # noqa: E501
Returns the number of messages sent from LINE Official Account on a specified day. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_number_of_message_deliveries_with_http_info(var_date, async_req=True)
>>> result = thread.get()
:param var_date: Date for which to retrieve number of sent messages. - Format: yyyyMMdd (e.g. 20191231) - Timezone: UTC+9 (required)
:type var_date: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the ApiResponse.data will
be set to none and raw_data will store the
HTTP response body without reading/decoding.
Default is True.
:type _preload_content: bool, optional
:param _return_http_data_only: response data instead of ApiResponse
object with status code, headers, etc
:type _return_http_data_only: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:type _content_type: string, optional: force content-type for the request
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(GetNumberOfMessageDeliveriesResponse, status_code(int), headers(HTTPHeaderDict))
"""
_params = locals()
_all_params = [
'var_date'
]
_all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'_content_type',
'_headers'
]
)
# validate the arguments
for _key, _val in _params['kwargs'].items():
if _key not in _all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_number_of_message_deliveries" % _key
)
_params[_key] = _val
del _params['kwargs']
_collection_formats = {}
# process the path parameters
_path_params = {}
# process the query parameters
_query_params = []
if _params.get('var_date') is not None: # noqa: E501
_query_params.append(('date', _params['var_date']))
# process the header parameters
_header_params = dict(_params.get('_headers', {}))
# process the form parameters
_form_params = []
_files = {}
# process the body parameter
_body_params = None
# set the HTTP header `Accept`
_header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# authentication setting
_auth_settings = ['Bearer'] # noqa: E501
_response_types_map = {
'200': "GetNumberOfMessageDeliveriesResponse",
}
return self.api_client.call_api(
'/v2/bot/insight/message/delivery', 'GET',
_path_params,
_query_params,
_header_params,
body=_body_params,
post_params=_form_params,
files=_files,
response_types_map=_response_types_map,
auth_settings=_auth_settings,
async_req=_params.get('async_req'),
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501
_preload_content=_params.get('_preload_content', True),
_request_timeout=_params.get('_request_timeout'),
collection_formats=_collection_formats,
_request_auth=_params.get('_request_auth'))
@validate_arguments
def get_statistics_per_unit(self, custom_aggregation_unit : Annotated[constr(strict=True, max_length=30, min_length=1), Field(..., description="Name of aggregation unit specified when sending the message. Case-sensitive. For example, `Promotion_a` and `Promotion_A` are regarded as different unit names. ")], var_from : Annotated[constr(strict=True, max_length=8, min_length=8), Field(..., description="Start date of aggregation period. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 ")], to : Annotated[constr(strict=True, max_length=8, min_length=8), Field(..., description="End date of aggregation period. The end date can be specified for up to 30 days later. For example, if the start date is 20210301, the latest end date is 20210331. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 ")], **kwargs) -> GetStatisticsPerUnitResponse: # noqa: E501
"""get_statistics_per_unit # noqa: E501
You can check the per-unit statistics of how users interact with push messages and multicast messages sent from your LINE Official Account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_statistics_per_unit(custom_aggregation_unit, var_from, to, async_req=True)
>>> result = thread.get()
:param custom_aggregation_unit: Name of aggregation unit specified when sending the message. Case-sensitive. For example, `Promotion_a` and `Promotion_A` are regarded as different unit names. (required)
:type custom_aggregation_unit: str
:param var_from: Start date of aggregation period. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 (required)
:type var_from: str
:param to: End date of aggregation period. The end date can be specified for up to 30 days later. For example, if the start date is 20210301, the latest end date is 20210331. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 (required)
:type to: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: GetStatisticsPerUnitResponse
"""
kwargs['_return_http_data_only'] = True
if '_preload_content' in kwargs:
raise ValueError("Error! Please call the get_statistics_per_unit_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data")
return self.get_statistics_per_unit_with_http_info(custom_aggregation_unit, var_from, to, **kwargs) # noqa: E501
@validate_arguments
def get_statistics_per_unit_with_http_info(self, custom_aggregation_unit : Annotated[constr(strict=True, max_length=30, min_length=1), Field(..., description="Name of aggregation unit specified when sending the message. Case-sensitive. For example, `Promotion_a` and `Promotion_A` are regarded as different unit names. ")], var_from : Annotated[constr(strict=True, max_length=8, min_length=8), Field(..., description="Start date of aggregation period. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 ")], to : Annotated[constr(strict=True, max_length=8, min_length=8), Field(..., description="End date of aggregation period. The end date can be specified for up to 30 days later. For example, if the start date is 20210301, the latest end date is 20210331. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 ")], **kwargs) -> ApiResponse: # noqa: E501
"""get_statistics_per_unit # noqa: E501
You can check the per-unit statistics of how users interact with push messages and multicast messages sent from your LINE Official Account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_statistics_per_unit_with_http_info(custom_aggregation_unit, var_from, to, async_req=True)
>>> result = thread.get()
:param custom_aggregation_unit: Name of aggregation unit specified when sending the message. Case-sensitive. For example, `Promotion_a` and `Promotion_A` are regarded as different unit names. (required)
:type custom_aggregation_unit: str
:param var_from: Start date of aggregation period. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 (required)
:type var_from: str
:param to: End date of aggregation period. The end date can be specified for up to 30 days later. For example, if the start date is 20210301, the latest end date is 20210331. Format: yyyyMMdd (e.g. 20210301) Time zone: UTC+9 (required)
:type to: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the ApiResponse.data will
be set to none and raw_data will store the
HTTP response body without reading/decoding.
Default is True.
:type _preload_content: bool, optional
:param _return_http_data_only: response data instead of ApiResponse
object with status code, headers, etc
:type _return_http_data_only: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:type _content_type: string, optional: force content-type for the request
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(GetStatisticsPerUnitResponse, status_code(int), headers(HTTPHeaderDict))
"""
_params = locals()
_all_params = [
'custom_aggregation_unit',
'var_from',
'to'
]
_all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'_content_type',
'_headers'
]
)
# validate the arguments
for _key, _val in _params['kwargs'].items():
if _key not in _all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_statistics_per_unit" % _key
)
_params[_key] = _val
del _params['kwargs']
_collection_formats = {}
# process the path parameters
_path_params = {}
# process the query parameters
_query_params = []
if _params.get('custom_aggregation_unit') is not None: # noqa: E501
_query_params.append(('customAggregationUnit', _params['custom_aggregation_unit']))
if _params.get('var_from') is not None: # noqa: E501
_query_params.append(('from', _params['var_from']))
if _params.get('to') is not None: # noqa: E501
_query_params.append(('to', _params['to']))
# process the header parameters
_header_params = dict(_params.get('_headers', {}))
# process the form parameters
_form_params = []
_files = {}
# process the body parameter
_body_params = None
# set the HTTP header `Accept`
_header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# authentication setting
_auth_settings = ['Bearer'] # noqa: E501
_response_types_map = {
'200': "GetStatisticsPerUnitResponse",
}
return self.api_client.call_api(
'/v2/bot/insight/message/event/aggregation', 'GET',
_path_params,
_query_params,
_header_params,
body=_body_params,
post_params=_form_params,
files=_files,
response_types_map=_response_types_map,
auth_settings=_auth_settings,
async_req=_params.get('async_req'),
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501
_preload_content=_params.get('_preload_content', True),
_request_timeout=_params.get('_request_timeout'),
collection_formats=_collection_formats,
_request_auth=_params.get('_request_auth'))
| [
"noreply@github.com"
] | line.noreply@github.com |
64cf23257d7a451e2b536d2a5acb97fafdb2eed9 | b2f256c584aa071a58d8905c3628d4a4df25a506 | /utils/python/python/oj/lonlat.py | c267b99d4fcd491d6c2252c584e21e195d26b8f2 | [
"MIT"
] | permissive | maximebenoitgagne/wintertime | 6865fa4aff5fb51d50ea883b2e4aa883366227d8 | 129758e29cb3b85635c878dadf95e8d0b55ffce7 | refs/heads/main | 2023-04-16T22:08:24.256745 | 2023-01-10T20:15:33 | 2023-01-10T20:15:33 | 585,290,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py | from pylab import *
from scipy.special import ellipeinc
def lonlatdist(lonv, latv):
    """(Spherical) distance along piece-wise linear paths in lon-lat space.

    lonv, latv : equal-length sequences of longitudes/latitudes in degrees.

    Returns a list of cumulative path lengths, one per vertex (first entry
    is 0), expressed in degrees of arc on the sphere.

    Each straight segment in lon-lat space is integrated exactly: along the
    line lon = lon0 + slope*(lat - lat0), the arc-length element
    sqrt(cos(lat)^2 dlon^2 + dlat^2) reduces to an incomplete elliptic
    integral of the second kind with parameter k = slope^2/(1+slope^2).
    """
    total = 0.
    res = [total]
    for i in range(len(lonv) - 1):
        lon0, lon1 = lonv[i], lonv[i+1]
        lat0, lat1 = latv[i], latv[i+1]
        if lat1 - lat0 == 0.:
            # Constant latitude: arc length is cos(lat) * |dlon|.
            # abs() keeps the cumulative distance monotonic for westward
            # segments, consistent with the elliptic branch below (the
            # original omitted it here and could return negative lengths).
            dd = cos(lat0*pi/180.) * abs(lon1 - lon0)
        else:
            slope = (lon1 - lon0) / (lat1 - lat0)
            slope2 = slope * slope
            k = slope2 / (1. + slope2)
            dd = 180./pi * sqrt(1 + slope2) * abs(ellipeinc(lat1*pi/180., k)
                                                  - ellipeinc(lat0*pi/180., k))
        total += dd
        res.append(total)
    return res
| [
"maxime.benoit-gagne@takuvik.ulaval.ca"
] | maxime.benoit-gagne@takuvik.ulaval.ca |
643dcab14fae0011f2251e73bc8a67b69fe53737 | 1b2ff7633c5c412afcd830a1ad47ed91e41dc603 | /backend/settings/asgi.py | 258cd8bdcc592f753899d56ebc8618e3d7bced9e | [] | no_license | taivy/objects_test_assignment | 931a1ed42c43eaae09ce6b34dc9ad26ca3590e6e | 7cd9d040d54f9643447eb3a0c9b1d41315643c52 | refs/heads/main | 2023-01-07T19:28:15.556318 | 2020-10-23T14:21:31 | 2020-10-23T14:21:31 | 305,916,842 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397 | py | """
ASGI config for objects_test project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Select the Django settings module before the application object is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.settings')
# Module-level ASGI callable that ASGI servers (uvicorn, daphne, ...) import.
application = get_asgi_application()
| [
"aie803ef4g@gmail.com"
] | aie803ef4g@gmail.com |
cead2db18a4505b8b8747c0f7dab990b7e7895db | af4ad182e46d032ddff504196be7d529b7c82078 | /overlap/vcfFindDifferentButSame.py | 34e3bb052a6ddc355640b30a0b942a42e92271c8 | [] | no_license | BD2KGenomics/brca-pipeline | a8423bf3d651ed395c16aa6b45add78436e870bb | 3df911a6a922338422ce17e8cedba9480d6977f2 | refs/heads/master | 2021-01-18T12:50:14.194467 | 2016-08-10T00:11:36 | 2016-08-10T00:11:36 | 34,352,507 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,474 | py | import pysam
import glob, gzip
from itertools import combinations
from os.path import basename
import logging, sys, optparse
from collections import defaultdict
from os.path import join, basename, dirname, isfile
# maximum distance between two variants to get compared
MAXDIST=50
# do we check if the refAllele sequences are really correct?
CHECKREF=False
#CHECKREF=True
# === COMMAND LINE INTERFACE, OPTIONS AND HELP ===
parser = optparse.OptionParser("usage: %prog [options] filenames - find variants in VCF that have a different position but lead to the same sequence. Can process many files at a time.")
parser.add_option("-d", "--debug", dest="debug", action="store_true", help="show debug messages")
#parser.add_option("-f", "--file", dest="file", action="store", help="run on file")
#parser.add_option("", "--test", dest="test", action="store_true", help="do something")
(options, args) = parser.parse_args()
if options.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
PATH = "/hive/groups/cgl/brca/phase1/data/cutoff_vcf/"  # NOTE(review): appears unused in this script; verify before removing
chr13 = open("brca2.txt", "r")
# extracted reference sequence around BRCA2 (chr13), read once at import time
BRCA2 = chr13.read()
chr17 = open("brca1.txt", "r")
# extracted reference sequence around BRCA1 (chr17)
BRCA1 = chr17.read()
# genomic coordinates where the extracted sequences start; used in
# variant_seqs() to lift 1-based VCF positions to 0-based string offsets
BRCA2_START = 32800000
BRCA1_START = 41100000
class FastaReader:
    """Minimal FASTA parser.

    The constructor accepts an already-open file-like object (anything with
    a .read attribute), the literal string "stdin", a ".gz" path (opened
    with gzip), or a plain file path.

    Example:
        fr = FastaReader(filename)
        for (id, seq) in fr.parse():
            print(id, seq)
    """
    def __init__(self, fname):
        # Resolve the input source; file-like objects are used as-is.
        if hasattr(fname, 'read'):
            self.f = fname
        elif fname=="stdin":
            self.f=sys.stdin
        elif fname.endswith(".gz"):
            self.f=gzip.open(fname)
        else:
            self.f=open(fname)
        # id of the most recently seen ">" header line (None before the first)
        self.lastId=None
    def parse(self):
        """ Generator: returns sequences as tuple (id, sequence).

        Blank lines and "#" comment lines are skipped; spaces inside
        sequence lines are removed.  If the input contains no sequence
        data at all, a single (None, None) tuple is yielded instead.
        """
        lines = []  # accumulated sequence lines for the current record
        for line in self.f:
            if line.startswith("\n") or line.startswith("#"):
                continue
            elif not line.startswith(">"):
                lines.append(line.replace(" ","").strip())
                continue
            else:
                if len(lines)!=0: # on first >, seq is empty
                    # a new header: emit the completed previous record
                    faseq = (self.lastId, "".join(lines))
                    self.lastId=line.strip(">").strip()
                    lines = []
                    yield faseq
                else:
                    # two consecutive headers: previous record had no sequence
                    if self.lastId!=None:
                        sys.stderr.write("warning: when reading fasta file: empty sequence, id: %s\n" % line)
                    self.lastId=line.strip(">").strip()
                    lines=[]
        # if it's the last sequence in a file, loop will end on the last line
        if len(lines)!=0:
            faseq = (self.lastId, "".join(lines))
            yield faseq
        else:
            # empty input (or a trailing header with no sequence lines)
            yield (None, None)
def main(args, options):
    """Load every VCF named in args and compare all pairs of them.

    args: list of VCF file names (optionally gzipped), from optparse.
    options: parsed optparse options (not used in this function).
    Prints the unique-variant count per file, then reports same-sequence
    variant pairs for every pair of databases.
    """
    fnames = args
    dbs = []
    for fname in fnames:
        dbName, vars = readDb(fname)
        dbs.append( (dbName, vars) )
        print "Unique variants in %s:%d" %(dbName, len(vars))
    # compare every unordered pair of input databases exactly once
    for db1, db2 in combinations(dbs, 2):
        get_overlap(db1, db2)
def readDb(fname):
    """ return vcf as (dbName, dict (chrom, pos, ref, alt) -> desc )

    fname: path to a VCF file; ".gz" paths are opened with gzip.
    The db name is the file's basename up to the first dot.
    Header lines ("#...") are skipped.  Multi-allelic records
    (comma-separated ALT) are split into one entry per alternate allele;
    records where REF equals ALT are ignored.  Each key maps to the raw
    (chrom, pos, varId, ref, alt) string tuple from the file (pos is an
    int in the key but a string in the value, as before).
    """
    db_name = basename(fname).split(".")[0]
    if fname.endswith(".gz"):
        varFile = gzip.open(fname, "r")
    else:
        varFile = open(fname, "r")
    # plain dict: keys are unique tuples, no default values are ever needed
    # (the original used defaultdict(list), which was misleading)
    variants = {}
    try:
        for line in varFile:
            if line.startswith("#"):
                continue
            chrom, pos, varId, ref, alt = line.strip().split("\t")[:5]
            # skip variants that don't lead to change
            if ref==alt:
                continue
            alts = alt.split(",")
            for alt in alts:
                variants[ (chrom, int(pos), ref, alt) ] = (chrom, pos, varId, ref, alt)
    finally:
        # the original leaked the file handle; always close it
        varFile.close()
    return db_name, variants
def get_overlap(db1, db2):
    """ print variants that are different but lead to same sequence

    db1, db2: (name, variants-dict) tuples as returned by readDb().
    For every cross-database pair of distinct variants on the same
    chromosome within MAXDIST bp of each other, applies both edits to the
    reference sequence and prints the pair when the edited sequences are
    identical.  Note: quadratic over the two variant sets.
    """
    db1Name, db1Vars = db1
    db2Name, db2Vars = db2
    for var1, desc1 in db1Vars.iteritems():
        for var2, desc2 in db2Vars.iteritems():
            # don't compare if diff chromosome or start position too far away
            if var1[0]!=var2[0] or abs(var1[1]-var2[1]) > MAXDIST :
                continue
            if var1!=var2:
                seq1, seq2, fullSeq = variant_seqs(var1, var2)
                # variant_seqs returns (None, None, None) for REF alleles >100bp
                if seq1 is None:
                    continue
                if seq1==seq2:
                    # same edited genome from two different VCF records: report
                    chr1, pos1, id1, from1, to1 = desc1
                    chr2, pos2, id2, from2, to2 = desc2
                    pretty1 = "%s:%s->%s (%s)" % (int(pos1), from1, to1, id1)
                    pretty2 = "%s:%s->%s (%s)" % (int(pos2), from2, to2, id2)
                    print "%s-%s:" % (db1Name, db2Name), pretty1, "/", pretty2, fullSeq
                    #print "overlap between the %s and %s: %d" %(name_db1, name_db2, num_overlap)
def variant_seqs(v1, v2):
    """ return (edited1, edited2, fullSeq)

    v1, v2: (chrom, pos, ref, alt) tuples with 1-based VCF positions;
    only chromosomes "13" (BRCA2) and "17" (BRCA1) are supported.
    edited1/edited2 are the full extracted gene-region sequences with the
    respective edit applied; fullSeq is the reference slice spanning both
    variants.  Returns (None, None, None) when either REF allele is
    longer than 100 bp.
    """
    chr1, pos1, ref1, alt1 = v1
    chr2, pos2, ref2, alt2 = v2
    pos1 = int(pos1)
    pos2 = int(pos2)
    # make sure that v1 is upstream of v2
    if pos1 > pos2:
        #(chr1, pos1, ref1, alt1 ), (chr2, pos2, ref2, alt2 ) = (chr2, pos2, ref2, alt2), (chr1, pos1, ref1, alt1)
        return variant_seqs(v2, v1)
    # lift coordinates and make everything 0-based
    # (positions become string offsets into the extracted BRCA sequences)
    if chr1 == "13":
        seq = BRCA2
        pos1 = pos1 -1 - BRCA2_START
        pos2 = pos2 -1 - BRCA2_START
    elif chr1 == "17":
        seq = BRCA1
        pos1 = pos1 - 1 - BRCA1_START
        pos2 = pos2 - 1 - BRCA1_START
    else:
        # only chromosomes 13 and 17 are handled
        assert(False)
    # sanity checks: lifted offsets must fall inside the extracted region
    assert(pos1>0)
    assert(pos2>0)
    assert(pos1 < 200000)
    assert(pos2 < 200000)
    assert(len(ref1)!=0)
    assert(len(ref2)!=0)
    if len(ref2)>100 or len(ref1)>100:
        return None, None, None
    # replace vcf ref string with alt string
    if CHECKREF:
        # optional consistency check: REF alleles must match the genome text
        genomeRef1 = seq[pos1:pos1+len(ref1)].upper()
        genomeRef2 = seq[pos2:pos2+len(ref2)].upper()
        if (genomeRef1!=ref1):
            print "ref1 is not in genome", genomeRef1, ref1
        if (genomeRef2!=ref2):
            print "ref2 is not in genome", genomeRef2, ref2
        assert(genomeRef1==ref1)
        assert(genomeRef2==ref2)
    edited_v1 = seq[0:pos1]+alt1+seq[pos1+len(ref1):]
    edited_v2 = seq[0:pos2]+alt2+seq[pos2+len(ref2):]
    # reference window covering both variants (used for the report output)
    fullSeq = seq[min(pos1,pos2):max(pos1+len(ref1),pos1+len(alt1),pos2+len(alt2),pos2+len(ref2))]
    return edited_v1, edited_v2, fullSeq
if __name__ == "__main__":
    # args and options were produced by the module-level optparse call above
    main(args, options)
| [
"max@soe.ucsc.edu"
] | max@soe.ucsc.edu |
b4ebc885ac2131d59ae87a4e0685e7737963e6e7 | 530b180c3aade8e67cc61ad2baddff018f7d59a8 | /robocorp-code/src/robocorp_code/_language_server_login.py | 3ff66378a390392099399481b4633b3dec39aa58 | [
"Apache-2.0"
] | permissive | robocorp/robotframework-lsp | 67a1f35b9268d349045eb8fe930ea381c2d94cae | d72e5310ed4a8165d7ee516d79e0accccaf7748c | refs/heads/master | 2023-08-17T05:12:43.598270 | 2023-08-12T12:11:22 | 2023-08-12T12:13:21 | 235,202,865 | 167 | 72 | Apache-2.0 | 2023-09-13T22:39:09 | 2020-01-20T21:31:20 | Python | UTF-8 | Python | false | false | 3,071 | py | from robocorp_ls_core.protocols import IEndPoint, ActionResultDict
from robocorp_ls_core.robotframework_log import get_logger
from robocorp_code import commands
from robocorp_ls_core.command_dispatcher import _SubCommandDispatcher
from robocorp_code.protocols import IRcc, CloudLoginParamsDict
log = get_logger(__name__)
# Sub-dispatcher holding this module's command handlers; its registration
# with the base dispatcher happens when a _Login instance is constructed.
login_command_dispatcher = _SubCommandDispatcher("_login")
class _Login(object):
    """Implements the Control Room (cloud) credential commands.

    The handlers below are attached to login_command_dispatcher via
    decorator; __init__ registers that dispatcher with the base command
    dispatcher so the commands become reachable.
    """
    def __init__(
        self,
        dir_cache,
        endpoint: IEndPoint,
        base_command_dispatcher,
        rcc: IRcc,
        feedback,
        clear_caches_on_login_change,
    ):
        # local imports, presumably to avoid import cycles -- TODO confirm
        from robocorp_ls_core.cache import DirCache
        from robocorp_code._language_server_feedback import _Feedback
        self._dir_cache: DirCache = dir_cache
        self._endpoint = endpoint
        self._rcc = rcc
        self._feedback: _Feedback = feedback
        self._clear_caches_on_login_change = clear_caches_on_login_change
        base_command_dispatcher.register_sub_command_dispatcher(
            login_command_dispatcher
        )
    @login_command_dispatcher(commands.ROBOCORP_IS_LOGIN_NEEDED_INTERNAL)
    def _is_login_needed_internal(self) -> ActionResultDict:
        """Checks whether a login is needed.

        Returns an ActionResultDict whose result is True when the stored
        rcc credentials are invalid or absent (i.e.: a login is needed).
        """
        from robocorp_ls_core.progress_report import progress_context
        with progress_context(
            self._endpoint, "Validating Control Room credentials", self._dir_cache
        ):
            login_needed = not self._rcc.credentials_valid()
        return {"success": login_needed, "message": None, "result": login_needed}
    @login_command_dispatcher(commands.ROBOCORP_CLOUD_LOGIN_INTERNAL)
    def _cloud_login(self, params: CloudLoginParamsDict) -> ActionResultDict:
        """Stores new Control Room credentials and validates them.

        params must contain a "credentials" entry.  Notifies the client
        via $/linkedAccountChanged.  Returns an ActionResultDict whose
        result is True when the new credentials validate.
        """
        from robocorp_ls_core.progress_report import progress_context
        self._feedback.metric("vscode.cloud.login")
        # When new credentials are added we need to remove existing caches.
        self._clear_caches_on_login_change()
        credentials = params["credentials"]
        with progress_context(
            self._endpoint, "Adding Control Room credentials", self._dir_cache
        ):
            action_result = self._rcc.add_credentials(credentials)
        self._endpoint.notify("$/linkedAccountChanged")
        if not action_result.success:
            return action_result.as_dict()
        result = self._rcc.credentials_valid()
        return {"success": result, "message": None, "result": result}
    @login_command_dispatcher(commands.ROBOCORP_CLOUD_LOGOUT_INTERNAL)
    def _cloud_logout(self) -> ActionResultDict:
        """Removes the current Control Room credentials.

        Notifies the client via $/linkedAccountChanged and returns the
        rcc removal result as an ActionResultDict.
        """
        from robocorp_ls_core.progress_report import progress_context
        self._feedback.metric("vscode.cloud.logout")
        # When credentials are removed we need to remove existing caches.
        self._clear_caches_on_login_change()
        with progress_context(
            self._endpoint, "Removing Control Room credentials", self._dir_cache
        ):
            ret = self._rcc.remove_current_credentials().as_dict()
        self._endpoint.notify("$/linkedAccountChanged")
        return ret
| [
"fabiofz@gmail.com"
] | fabiofz@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.