blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3bcd9aaedf53c7af0773cdd3d311d0726810e8d5 | 238e46a903cf7fac4f83fa8681094bf3c417d22d | /VTK/vtk_7.1.1_x64_Release/lib/python2.7/site-packages/twisted/runner/topfiles/setup.py | a518a5d15ca8bcb5df74efde6fc8793442fb2a1f | [
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"BSD-3-Clause"
] | permissive | baojunli/FastCAE | da1277f90e584084d461590a3699b941d8c4030b | a3f99f6402da564df87fcef30674ce5f44379962 | refs/heads/master | 2023-02-25T20:25:31.815729 | 2021-02-01T03:17:33 | 2021-02-01T03:17:33 | 268,390,180 | 1 | 0 | BSD-3-Clause | 2020-06-01T00:39:31 | 2020-06-01T00:39:31 | null | UTF-8 | Python | false | false | 1,327 | py | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
# Import Twisted's custom distutils helpers; bail out with a clear message
# when Twisted core is not installed, since nothing below can work without it.
try:
    from twisted.python.dist import setup, ConditionalExtension as Extension
except ImportError:
    raise SystemExit("twisted.python.dist module not found. Make sure you "
                     "have installed the Twisted core package before "
                     "attempting to install any other Twisted projects.")

# The portmap C extension is only compiled when the SunRPC headers are
# present on the build host (checked at build time by the condition below).
extensions = [
    Extension("twisted.runner.portmap",
              ["twisted/runner/portmap.c"],
              condition=lambda builder: builder._check_header("rpc/rpc.h")),
]

if __name__ == '__main__':
    setup(
        twisted_subproject="runner",
        # metadata
        name="Twisted Runner",
        description="Twisted Runner is a process management library and inetd "
                    "replacement.",
        author="Twisted Matrix Laboratories",
        author_email="twisted-python@twistedmatrix.com",
        maintainer="Andrew Bennetts",
        url="http://twistedmatrix.com/trac/wiki/TwistedRunner",
        license="MIT",
        long_description="""\
Twisted Runner contains code useful for persistent process management
with Python and Twisted, and has an almost full replacement for inetd.
""",
        # build stuff
        conditionalExtensions=extensions,
    )
| [
"l”ibaojunqd@foxmail.com“"
] | l”ibaojunqd@foxmail.com“ |
4cecb71e22c01f0e61c998a992a6d5fcbfbc0541 | eed7b5aa4861086d34e539e7bbfeff4286506692 | /src/Game/Results/game_results.py | 5ff645ba5e36aadc25cb7288b60a7dc942190796 | [] | no_license | dfwarden/DeckBuilding | 0be2ccb68fc9a69c8eaa1d8acedeaa7cebef1a31 | 0b5a7573a3cf33430fe61e4ff8a8a7a0ae20b258 | refs/heads/master | 2021-01-18T09:52:51.880892 | 2015-02-03T03:21:17 | 2015-02-03T03:21:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 617 | py |
class GameResults:
    """Tracks which results class each player uses and builds the final results."""

    def __init__(self, players, game, defaultResultsClass):
        """Map every player to the default results class and remember the game."""
        self.playerToResultClass = dict.fromkeys(players, defaultResultsClass)
        self.game = game
        self.playerResults = []
        # Expose dict.update directly so callers can override result classes.
        self.update = self.playerToResultClass.update

    def createPlayerResults(self):
        """Instantiate one result object per player and store them sorted."""
        self.playerResults = sorted(
            resultClass(player, self.game)
            for player, resultClass in self.playerToResultClass.items()
        )
"cloew123@gmail.com"
] | cloew123@gmail.com |
8919747792cd04daf9b547144dc5d7dc485fc54d | 44064ed79f173ddca96174913910c1610992b7cb | /Second_Processing_app/temboo/Library/Facebook/Actions/Video/WantsToWatch/ReadWantsToWatch.py | 13506f3389f7e11ebe6dd0d0b9c0afde812792c2 | [] | no_license | dattasaurabh82/Final_thesis | 440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5 | 8edaea62f5987db026adfffb6b52b59b119f6375 | refs/heads/master | 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,656 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# ReadWantsToWatch
# Retrieves one or more video wants_to_watch actions.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ReadWantsToWatch(Choreography):
    """Choreo that retrieves one or more video wants_to_watch actions."""

    def __init__(self, temboo_session):
        """Bind this Choreo to *temboo_session*, a TembooSession holding valid credentials."""
        Choreography.__init__(self, temboo_session, '/Library/Facebook/Actions/Video/WantsToWatch/ReadWantsToWatch')

    def _make_result_set(self, result, path):
        return ReadWantsToWatchResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return ReadWantsToWatchChoreographyExecution(session, exec_id, path)

    def new_input_set(self):
        return ReadWantsToWatchInputSet()
class ReadWantsToWatchInputSet(InputSet):
    """
    Input parameters for the ReadWantsToWatch Choreo: one setter per
    documented input of the underlying API call.
    """

    def set_AccessToken(self, value):
        """(required, string) The access token retrieved from the final step of the OAuth process."""
        self._set_input('AccessToken', value)

    def set_ActionID(self, value):
        """(optional, string) The id of an action to retrieve. Without an id, all video wants_to_watch actions are listed."""
        self._set_input('ActionID', value)

    def set_Fields(self, value):
        """(optional, string) A comma separated list of fields to return (i.e. id,name)."""
        self._set_input('Fields', value)

    def set_Limit(self, value):
        """(optional, integer) Paging control: limits the number of records returned in the response."""
        self._set_input('Limit', value)

    def set_Offset(self, value):
        """(optional, integer) Paging control: returns results starting from the specified number."""
        self._set_input('Offset', value)

    def set_ProfileID(self, value):
        """(optional, string) The id of the user's profile. Defaults to "me", the authenticated user."""
        self._set_input('ProfileID', value)

    def set_ResponseFormat(self, value):
        """(optional, string) Response format, xml or json. Defaults to json."""
        self._set_input('ResponseFormat', value)
class ReadWantsToWatchResultSet(ResultSet):
    """
    Results of a ReadWantsToWatch Choreo execution: one getter per
    documented output value.
    """

    def getJSONFromString(self, str):
        return json.loads(str)

    def get_HasNext(self):
        """(boolean) True when a next page of results exists."""
        return self._output.get('HasNext', None)

    def get_HasPrevious(self):
        """(boolean) True when a previous page of results exists."""
        return self._output.get('HasPrevious', None)

    def get_Response(self):
        """The response from Facebook, in the format selected by ResponseFormat (JSON by default)."""
        return self._output.get('Response', None)
class ReadWantsToWatchChoreographyExecution(ChoreographyExecution):
    """Execution handle for a ReadWantsToWatch Choreo run; wraps raw responses in the matching result set."""
    def _make_result_set(self, response, path):
        return ReadWantsToWatchResultSet(response, path)
| [
"dattasaurabh82@gmail.com"
] | dattasaurabh82@gmail.com |
def area(A, B):
    """Print and return the area (in m²) of an A-by-B rectangular plot.

    A and B are the two side lengths in metres.
    """
    p = A * B
    print(f"A área de um terreno de {A}x{B} é de {p}m².")
    return p


if __name__ == "__main__":
    print(f"{'CONTROLE DE TERRENO':^5}")
    print("-" * 30)
    # float() instead of eval(): eval() would execute arbitrary user input.
    a = float(input(" Qual Altura (m): "))
    l = float(input(" Qual Largura (m): "))
    area(a, l)
| [
"leonardof.graciano@gmail.com"
] | leonardof.graciano@gmail.com |
ddf498c24b175949c03e84b4d71872d9fec3d4b6 | caa72788fdae6b05c5ce4c132b45fc00d55bb607 | /47Tkinter/选择框/5-CheckButton实现选择.py | a71d9f0175c111c1a8aef35537f25a8e81fa7e71 | [] | no_license | DamonZCR/PythonStu | dcc2ba49195f5859fd63227fe0f8f78b36ed46df | 88fec97e3bccff47ba1c5f521f53a69af6ca2b2d | refs/heads/master | 2023-07-05T06:29:53.300920 | 2021-08-13T12:22:30 | 2021-08-13T12:22:30 | 302,256,563 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | from tkinter import *
# Minimal window demonstrating Checkbutton state tracking via a Tk variable.
root = Tk()
# IntVar holds the checkbox state: 1 when checked, 0 when unchecked.
v = IntVar()
c = Checkbutton(root, text='测试选中', variable=v)
c.pack()
# The label mirrors the same IntVar, so it updates live as the box toggles.
l = Label(root, textvariable=v)
l.pack(anchor='se')
mainloop()
"137593938@qq.com"
] | 137593938@qq.com |
2fa98fcda422f0478d9bb5fd56f27aca56ee3d68 | d8734b5bb65e43dfd5c9af80ffbed7d87803517d | /HW_1/test_main_unittest.py | 324b73eaff5bc17d302ea17823b16db0ccee910f | [] | no_license | DariaMikhailovna/technosphere | f012b59c20cd9455e874b09c0debbe4c1ccf3608 | f755edee26a3c86b5325a0f90e544a04d138c6ab | refs/heads/main | 2023-04-16T03:41:19.958214 | 2021-05-04T13:29:12 | 2021-05-04T13:29:12 | 348,362,972 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,274 | py | import unittest
from HW_1.main import Board
class BoardTestCase(unittest.TestCase):
    """Unit tests for the tic-tac-toe Board: cell state, insertion and win checks.

    Fixture tables pair an input grid/move with the expected result so each
    test is a simple data-driven loop.
    """

    def setUp(self):
        # full_board: every cell occupied; empty_board: freshly constructed.
        board = Board()
        board.grid = [['0', '0', '0'],
                      ['0', '0', '0'],
                      ['0', '0', '0']]
        self.full_board = board
        self.empty_board = Board()
        # (row, column, symbol) moves applied in order.
        self.set_for_insert = [
            (1, 2, 'X'),
            (2, 2, '0'),
            (1, 1, 'X'),
            (0, 1, '0'),
            (2, 0, 'X'),
        ]
        # (grid, symbol, column index, expected check_column result).
        self.set_for_column = [
            ([['X', '.', '.'],
              ['X', '.', '.'],
              ['X', '.', 'X']], 'X', 0, True),
            ([['X', 'X', '.'],
              ['.', 'X', '.'],
              ['.', '0', '.']], 'X', 1, False),
            ([['0', '.', '.'],
              ['.', 'X', '.'],
              ['.', '.', '0']], '0', 2, False),
            ([['0', '.', '0'],
              ['.', '.', '0'],
              ['0', '.', '0']], '0', 2, True),
            ([['.', '0', '0'],
              ['.', '0', '.'],
              ['0', '0', '.']], 'X', 1, False),
        ]
        # (grid, symbol, row index, expected check_row result).
        self.set_for_row = [
            ([['X', '.', '.'],
              ['.', 'X', '.'],
              ['.', '.', 'X']], 'X', 0, False),
            ([['X', '.', '.'],
              ['0', 'X', '0'],
              ['.', '.', 'X']], '0', 1, False),
            ([['0', '.', '.'],
              ['.', 'X', '.'],
              ['0', '0', '0']], '0', 2, True),
            ([['0', '0', '0'],
              ['.', '.', '.'],
              ['0', '.', '.']], '0', 0, True),
            ([['X', 'X', 'X'],
              ['.', '0', '.'],
              ['0', '0', '0']], 'X', 2, False),
            ([['0', '.', 'X'],
              ['.', '0', '.'],
              ['0', '.', '.']], '0', 0, False),
        ]
        # (grid, symbol, expected check_diagonals result).
        self.set_for_diagonal = [
            ([['X', '.', '.'],
              ['.', 'X', '.'],
              ['.', '.', 'X']], 'X', True),
            ([['X', '.', '.'],
              ['.', 'X', '.'],
              ['.', '.', 'X']], '0', False),
            ([['0', '.', '.'],
              ['.', 'X', '.'],
              ['.', '.', '0']], '0', False),
            ([['.', '.', '0'],
              ['.', '0', '.'],
              ['0', '.', '.']], '0', True),
            ([['.', '.', '0'],
              ['.', '0', '.'],
              ['0', '.', '.']], 'X', False),
            ([['.', '.', 'X'],
              ['.', '0', '.'],
              ['0', '.', '.']], '0', False),
        ]

    def test_is_empty(self):
        """is_empty is True for every cell of a new board, False once filled."""
        for i in range(3):
            for j in range(3):
                self.assertTrue(self.empty_board.is_empty(i, j))
        for i in range(3):
            for j in range(3):
                self.assertFalse(self.full_board.is_empty(i, j))

    def test_insert(self):
        """insert writes the given symbol into a previously empty cell."""
        for row, col, symbol in self.set_for_insert:
            self.assertEqual(self.empty_board.grid[row][col], '.')
            self.empty_board.insert(row, col, symbol)
            self.assertEqual(self.empty_board.grid[row][col], symbol)

    def test_is_full(self):
        """is_full is True only while every cell holds a symbol."""
        self.assertFalse(self.empty_board.is_full())
        self.assertTrue(self.full_board.is_full())
        self.full_board.insert(0, 0, '.')
        self.assertFalse(self.full_board.is_full())

    def test_check_column(self):
        """check_column detects a column filled with a single symbol."""
        for grid, symbol, column, expected in self.set_for_column:
            self.empty_board.grid = grid
            self.assertEqual(self.empty_board.check_column(column, symbol), expected)

    def test_check_row(self):
        """check_row detects a row filled with a single symbol."""
        for grid, symbol, row, expected in self.set_for_row:
            self.empty_board.grid = grid
            self.assertEqual(self.empty_board.check_row(row, symbol), expected)

    def test_check_diagonals(self):
        """check_diagonals detects a diagonal filled with a single symbol."""
        # Iterate the diagonal fixtures directly: the original indexed them
        # with range(len(self.set_for_row)), silently relying on both fixture
        # lists having the same length.
        for grid, symbol, expected in self.set_for_diagonal:
            self.empty_board.grid = grid
            self.assertEqual(self.empty_board.check_diagonals(symbol), expected)
| [
"0610-1994@mail.ru"
] | 0610-1994@mail.ru |
ff2abfac46a5644809736691859dae9baa1bc63d | 7a97d08146dad2120f8364e392c36d20c2487853 | /python/k.py | c93422045eec60fc31fcb60d9c8e3a1c9bb69676 | [] | no_license | livelikeabel/abel-algorithm | 80adee03d45c6143b613fab0c9aa432084e07c62 | 8582e633aa316abb43fe070610f65d1a06dc07a9 | refs/heads/master | 2021-01-01T00:50:35.925100 | 2020-04-28T05:01:47 | 2020-04-28T05:01:47 | 239,104,846 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | import sys
import sys


def kth_divisor(n, k):
    """Return the k-th smallest divisor of n, or -1 when n has fewer than k divisors."""
    count = 0
    for candidate in range(1, n + 1):
        if n % candidate == 0:
            count += 1
            if count == k:
                return candidate
    return -1


if __name__ == "__main__":
    # Original behaviour preserved: read "n k" from input.txt, print the answer.
    sys.stdin = open("input.txt", "rt")
    n, k = map(int, input().split())
    print(kth_divisor(n, k))
| [
"esung1129@gmail.com"
] | esung1129@gmail.com |
f0639033a33226b06bdee86293166e7906a87b4d | 362224f8a23387e8b369b02a6ff8690c200a2bce | /django/django_extra/formFun/formApp/views.py | 8094c9700194019cb9af4780cdab17cc2734dc1b | [] | no_license | Helenyixuanwang/python_stack | ac94c7c532655bf47592a8453738daac10f220ad | 97fbc77e3971b5df1fe3e79652b294facf8d6cee | refs/heads/main | 2023-06-11T02:17:27.277551 | 2021-06-21T17:01:09 | 2021-06-21T17:01:09 | 364,336,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | from django.shortcuts import redirect, render
from .forms import *
from django.contrib.auth.forms import AuthenticationForm
# Create your views here.
def index(request):
    """Render the landing page with an unbound authentication (login) form."""
    auth_form = AuthenticationForm()
    return render(request, 'index.html', {'myform': auth_form})
def register(request):
    """Handle a registration form POST and bounce back to the index page.

    NOTE(review): the bound form is instantiated but never validated or
    saved, and nothing is persisted before redirecting -- presumably this
    endpoint is a work in progress; confirm intended behaviour.
    """
    print("Inside Register")
    bound_form = RegisterForm(request.POST)
    # NOTE(review): raises KeyError (-> HTTP 500) if 'first_name' is absent
    # from the POST payload.
    print(request.POST['first_name'])
    return redirect("/")
| [
"wangyixuan@msn.com"
] | wangyixuan@msn.com |
13ab09f1b696555138f142ef55a217ed3ef643a3 | 3cd4e2aae2a3ee3f9002fea903a6695f9fd5d373 | /bigml/api_handlers/pcahandler.py | 2166746021326663e5ebe0e0dc57d391a17c0012 | [
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] | permissive | jaykamau7/python | 1c2daf7222f12909563005701b02308b8b80c732 | faf718173e4a108ae8d500e82a6b4197fabbecb4 | refs/heads/master | 2023-02-28T13:29:59.759663 | 2021-02-07T14:10:20 | 2021-02-07T14:10:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,702 | py | # -*- coding: utf-8 -*-
#
# Copyright 2018-2020 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for PCA' REST calls
https://bigml.com/api/pcas
"""
try:
import simplejson as json
except ImportError:
import json
from bigml.api_handlers.resourcehandler import ResourceHandlerMixin
from bigml.api_handlers.resourcehandler import check_resource_type, \
resource_is_ready
from bigml.constants import PCA_PATH
class PCAHandlerMixin(ResourceHandlerMixin):
    """This class is used by the BigML class as
    a mixin that provides the REST calls models. It should not
    be instantiated independently.

    """

    def __init__(self):
        """Initializes the PCAHandler. This class is intended
        to be used as a mixin on ResourceHandler, that inherits its
        attributes and basic method from BigMLConnection, and must not be
        instantiated independently.

        """
        self.pca_url = self.url + PCA_PATH

    @staticmethod
    def _check_pca_id(pca):
        """Raises unless *pca* looks like a PCA id or resource dict.

        Factored out so every id-checked entry point validates identically.
        """
        check_resource_type(pca, PCA_PATH,
                            message="A PCA id is needed.")

    def create_pca(self, datasets, args=None, wait_time=3, retries=10):
        """Creates a PCA from a `dataset`
        or a list of `datasets`.

        """
        create_args = self._set_create_from_datasets_args(
            datasets, args=args, wait_time=wait_time, retries=retries)
        body = json.dumps(create_args)
        return self._create(self.pca_url, body)

    def get_pca(self, pca, query_string='',
                shared_username=None, shared_api_key=None):
        """Retrieves a PCA.

        The pca parameter should be a string containing the PCA id or the
        dict returned by create_pca. As a PCA is an evolving object that is
        processed until it reaches the FINISHED or FAULTY state, this returns
        a dict with the values and state info available when called.
        If this is a shared PCA, the username and sharing api key must also
        be provided.

        """
        self._check_pca_id(pca)
        return self.get_resource(pca,
                                 query_string=query_string,
                                 shared_username=shared_username,
                                 shared_api_key=shared_api_key)

    def pca_is_ready(self, pca, **kwargs):
        """Checks whether a pca's status is FINISHED.

        """
        self._check_pca_id(pca)
        resource = self.get_pca(pca, **kwargs)
        return resource_is_ready(resource)

    def list_pcas(self, query_string=''):
        """Lists all your PCAs.

        """
        return self._list(self.pca_url, query_string)

    def update_pca(self, pca, changes):
        """Updates a PCA.

        """
        self._check_pca_id(pca)
        return self.update_resource(pca, changes)

    def delete_pca(self, pca):
        """Deletes a PCA.

        """
        self._check_pca_id(pca)
        return self.delete_resource(pca)
| [
"merce@bigml.com"
] | merce@bigml.com |
4522ee6c5e468713cd2a8f56df01b575f0b21404 | 277f976227c7590f6de5e7991d8fbed23b6646fe | /euler/cleaned_solutions/p56.py | a55092ed385e8799552c3231d7e3455d59a91071 | [] | no_license | domspad/euler | ca19aae72165eb4d08104ef7a2757115cfdb9a18 | a4901403e442b376c2edd987a1571ab962dadab2 | refs/heads/master | 2021-01-17T14:04:39.198658 | 2016-07-25T23:40:10 | 2016-07-25T23:40:10 | 54,561,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py |
# 2 mins
# In [7]: time %run p56.py
# 972
# CPU times: user 662 ms, sys: 12 ms, total: 674 ms
# Wall time: 674 ms
def digit_sum(n):
    """Return the sum of the decimal digits of the non-negative integer n."""
    return sum(int(d) for d in str(n))


if __name__ == '__main__':
    # Project Euler 56: maximal digital sum of a**b for a, b < 100.
    # Guarded so importing this module no longer runs the ~0.7 s search.
    print(max(digit_sum(a ** b) for a in range(100) for b in range(100)))
"domspad@umich.edu"
] | domspad@umich.edu |
69919a3651703f97d5f3cf9c1b16bf6160fd8d00 | e9538b7ad6d0ce0ccfbb8e10c458f9e0b73926f6 | /tests/unit/modules/network/fortimanager/test_fmgr_device_config.py | e0be3934096ea08f7a097be0ccf7fa36a1d9310f | [] | no_license | ansible-collection-migration/misc.not_a_real_collection | b3ef8090c59de9ac30aca083c746ec3595d7f5f5 | 7ab1af924a3db4ada2f714b09bb392614344cb1e | refs/heads/master | 2020-12-18T13:48:51.849567 | 2020-01-22T17:39:18 | 2020-01-22T17:39:18 | 235,400,821 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,313 | py | # Copyright 2018 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortimanager.fortimanager import FortiManagerHandler
import pytest
try:
from ansible_collections.misc.not_a_real_collection.plugins.modules import fmgr_device_config
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
def load_fixtures():
    """Load the JSON fixture file named after this test module.

    Returns a single-element list wrapping the parsed fixture data, or an
    empty list when the fixture file does not exist (so parametrized
    fixtures simply produce no params).
    """
    module_name = os.path.splitext(os.path.basename(__file__))[0]
    # Fix: the original concatenated a literal with no "{filename}"
    # placeholder, so .format() never substituted the module name.
    fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures',
                                "{filename}.json".format(filename=module_name))
    try:
        with open(fixture_path, "r") as fixture_file:
            fixture_data = json.load(fixture_file)
    except IOError:
        return []
    return [fixture_data]
@pytest.fixture(autouse=True)
def module_mock(mocker):
    """Autouse fixture: patch AnsibleModule for every test in this module."""
    connection_class_mock = mocker.patch('ansible.module_utils.basic.AnsibleModule')
    return connection_class_mock
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Autouse fixture: patch the module's Connection so no device is contacted."""
    connection_class_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.fmgr_device_config.Connection')
    return connection_class_mock
@pytest.fixture(scope="function", params=load_fixtures())
def fixture_data(request):
    """Return the fixture entry matching the requesting test function's name."""
    # Strip only the leading "test_" prefix; the original str.replace call
    # would also drop any "test_" occurring later in the function name.
    func_name = request.function.__name__
    if func_name.startswith("test_"):
        func_name = func_name[len("test_"):]
    return request.param.get(func_name, None)
# Handler under test, wired to the (mocked) Connection and AnsibleModule above.
fmg_instance = FortiManagerHandler(connection_mock, module_mock)
def test_update_device_hostname(fixture_data, mocker):
    """Each recorded run of update_device_hostname must report success (code 0).

    Fixture paramgrams (adom: ansible, install_config: disable, mode: update):
      FGT1 -> ansible-fgt01, FGT2 -> ansible-fgt02, FGT3 -> ansible-fgt03
    """
    mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortimanager.fortimanager.FortiManagerHandler.process_request',
                 side_effect=fixture_data)
    # One mocked process_request response is consumed per call, in fixture order.
    for fixture in fixture_data[:3]:
        output = fmgr_device_config.update_device_hostname(fmg_instance, fixture['paramgram_used'])
        assert output['raw_response']['status']['code'] == 0
def test_update_device_interface(fixture_data, mocker):
    """Each recorded run of update_device_interface must report success (code 0).

    Fixture paramgrams (adom: ansible, interface: port2, mode: update,
    interface_allow_access: ping, telnet, https, http):
      FGT1 -> 10.1.1.1/24, FGT2 -> 10.1.2.1/24, FGT3 -> 10.1.3.1/24
    """
    mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortimanager.fortimanager.FortiManagerHandler.process_request',
                 side_effect=fixture_data)
    # One mocked process_request response is consumed per call, in fixture order.
    for fixture in fixture_data[:3]:
        output = fmgr_device_config.update_device_interface(fmg_instance, fixture['paramgram_used'])
        assert output['raw_response']['status']['code'] == 0
def test_exec_config(fixture_data, mocker):
    """exec_config must return a dict raw_response for each recorded fixture.

    Fixture paramgrams (adom: ansible, install_config: enable, mode: exec):
      fixture 1 -> FGT1 alone; fixture 2 -> "FGT2, FGT3"
    """
    mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortimanager.fortimanager.FortiManagerHandler.process_request',
                 side_effect=fixture_data)
    # One mocked process_request response is consumed per call, in fixture order.
    for fixture in fixture_data[:2]:
        output = fmgr_device_config.exec_config(fmg_instance, fixture['paramgram_used'])
        assert isinstance(output['raw_response'], dict) is True
| [
"ansible_migration@example.com"
] | ansible_migration@example.com |
52c627ca53565f30d86878726ff73d5d52a24d28 | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/python/autograph/converters/call_trees 2.py | 0563589169aa076e608dbb30c6236cc9a05e18da | [] | no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:9da573568f59bd33ef92c84d65c3c4bddedbcee5e4f54d70a2fc920ccd9bffeb
size 7435
| [
"business030301@gmail.com"
] | business030301@gmail.com |
b85f62bda9d07d71763ee046dd7411731a9ea547 | fd86752c58059da70f9ea56233a02d9b5bdae445 | /src/tests/test_checks.py | 705469a29f80b71d647b73bdba8dbcd4be95085a | [] | no_license | SUNET/scriptherder | 4eab03f77df152148ccd565fb333145f85bebe73 | ccb788212cf3471f1d38af316a1e6af6f7c41846 | refs/heads/master | 2023-06-16T00:08:12.984584 | 2023-03-01T10:12:57 | 2023-03-01T10:12:57 | 19,273,787 | 1 | 1 | null | 2015-09-30T11:16:07 | 2014-04-29T11:16:25 | Python | UTF-8 | Python | false | false | 6,344 | py | import sys
import logging
import unittest
from scriptherder import Job, Check
logging.basicConfig(level = logging.DEBUG, stream = sys.stderr,
format = '%(asctime)s: %(threadName)s %(levelname)s %(message)s')
logger = logging.getLogger('unittest')
class TestChecks(unittest.TestCase):
def _run(self, cmd, ok='', warn='', run=True, runtime_mode = True):
check = Check(ok, warn, 'unit_testing', logger, runtime_mode=True)
self.job = Job('unittest_job', cmd)
if run:
self.job.run()
self.job.check(check, logger)
# Call status summary for all the tests to make sure it works in all
# possible states
logger.debug('Job status summary: {}'.format(self.job.status_summary()))
if not runtime_mode:
logger.info('Unit test evaluating checks again, post-execution')
check = Check(ok, warn, 'unit_testing', logger, runtime_mode=False)
self.job.check(check, logger)
logger.debug('Job status summary: {}'.format(self.job.status_summary()))
def test_exit_status_ok(self):
""" Test exit status matching OK criteria """
self._run(['/bin/echo', 'test'],
ok = 'exit_status=0')
self.assertTrue(self.job.is_ok())
def test_exit_status_warning(self):
""" Test exit status matching WARN criteria """
self._run(['/bin/echo', 'test'],
ok = 'exit_status=1', warn = 'exit_status=0')
self.assertFalse(self.job.is_ok())
self.assertTrue(self.job.is_warning())
def test_exit_status_critical(self):
""" Test exit status matching neither OK nor WARN criteria """
self._run(['/bin/true', 'test'],
ok = 'exit_status=1', warn = 'exit_status=2')
self.assertFalse(self.job.is_ok())
self.assertFalse(self.job.is_warning())
self.assertEqual(self.job.check_status, 'CRITICAL')
def test_exit_status_negated1(self):
""" Test exit status matching OK criteria (negated) """
self._run(['/bin/false'],
ok = '!exit_status=0')
self.assertTrue(self.job.is_ok())
self.assertFalse(self.job.is_warning())
def test_max_age(self):
""" Test max_age criteria """
self._run(['/bin/echo', 'test'],
ok = 'exit_status=0, max_age=10s', warn = 'exit_status=0, max_age=3h',
runtime_mode = False)
self.assertTrue(self.job.is_ok())
self.assertFalse(self.job.is_warning())
def test_max_age_negated(self):
""" Test max_age criteria (negated) """
self._run(['/bin/echo', 'test'],
ok = 'exit_status=0, !max_age=10s', warn = 'exit_status=0, max_age=3h',
runtime_mode = False)
self.assertFalse(self.job.is_ok())
self.assertTrue(self.job.is_warning())
def test_file_exists(self):
""" Test file_exists criteria """
self._run(['/bin/echo', 'test'],
ok = 'exit_status=1', warn = 'exit_status=1,OR_file_exists=/etc/services',
runtime_mode = False)
self.assertFalse(self.job.is_ok())
self.assertTrue(self.job.is_warning())
def test_file_exists_negated(self):
""" Test file_exists criteria (negated) """
self._run(['/bin/false'],
ok = 'exit_status=0,!OR_file_exists=/this_could_be_a_FAIL_file',
runtime_mode = False)
self.assertTrue(self.job.is_ok())
def test_file_exists_fail(self):
""" Test file_exists criteria failure """
self._run(['/bin/false'],
ok = 'exit_status=0,OR_file_exists=/this_file_should_not_exist',
runtime_mode = False)
self.assertFalse(self.job.is_ok())
self.assertEqual(self.job.check_status, 'CRITICAL')
self.assertEqual(self.job.check_reason,
'file_does_not_exist=/this_file_should_not_exist, stored_status=OK==False')
def test_OR_running(self):
""" Test OR_running criteria """
self._run(['/bin/echo', 'test'],
ok = 'exit_status=1,OR_running', warn = 'exit_status=0')
self.assertTrue(self.job.is_ok())
self.assertFalse(self.job.is_warning())
    def test_OR_running_negated(self):
        """ Test OR_running criteria (negated) """
        # With run=False the job never starts, so !OR_running holds and the
        # job reports a warning instead of OK.
        self._run(['/bin/echo', 'test'],
                  ok = 'exit_status=1,OR_running', warn = '!OR_running',
                  run = False)
        self.assertFalse(self.job.is_ok())
        self.assertTrue(self.job.is_warning())
    def test_output_contains(self):
        """ Test output_contains criteria """
        self._run(['/bin/echo', 'STATUS_TESTING_OK'],
                  ok = 'exit_status=0,output_contains=TESTING')
        self.assertTrue(self.job.is_ok())
        # check_reason records how each criterion evaluated.
        self.assertEqual(self.job.check_reason, 'exit=0, output_contains=TESTING==True')
    def test_output_contains_negated(self):
        """ Test output_contains criteria (negated) """
        # Output does not contain ERROR, so the negated criterion holds.
        self._run(['/bin/echo', 'STATUS_TESTING_OK'],
                  ok = 'exit_status=0,!output_contains=ERROR')
        self.assertTrue(self.job.is_ok())
        self.assertEqual(self.job.check_reason, 'exit=0, !output_contains=ERROR==True')
    def test_obsolete_output_not_contains(self):
        """ Test obsolete option output_not_contains """
        # The legacy spelling output_not_contains= must behave exactly like
        # the modern !output_contains= form (note the normalized reason).
        self._run(['/bin/echo', 'STATUS_TESTING_OK'],
                  ok = 'exit_status=0,output_not_contains=ERROR')
        self.assertTrue(self.job.is_ok())
        self.assertEqual(self.job.check_reason, 'exit=0, !output_contains=ERROR==True')
    def test_output_matches(self):
        """ Test output_matches criteria (regular expression match) """
        self._run(['/bin/echo', 'STATUS_TESTING_OK'],
                  ok = 'exit_status=0,output_matches=.*TESTING.*')
        self.assertTrue(self.job.is_ok())
        self.assertEqual(self.job.check_reason, 'exit=0, output_matches=.*TESTING.*==True')
    def test_output_matches_negated(self):
        """ Test output_matches criteria (negated) """
        # The output never matches .*ERROR.*, so the negated regex
        # criterion holds and the job is OK.
        self._run(['/bin/echo', 'STATUS_TESTING_OK'],
                  ok = 'exit_status=0,!output_matches=.*ERROR.*')
        self.assertTrue(self.job.is_ok())
        self.assertEqual(self.job.check_reason, 'exit=0, !output_matches=.*ERROR.*==True')
| [
"fredrik@thulin.net"
] | fredrik@thulin.net |
1c402ab7373067bbc21a099324a4a52e008eaf33 | e2f68f7f2b96af92d0d56ef9aa3119e7909cd992 | /dataplicity/app.py | 4324d9a7ad59def5aae43dca0bbb127b31f85691 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | anuradhawick/dataplicity-agent | c89edd563103aa251f858d38aeba8ed6c605481c | 9d4c234f0d7b24aa144a079f54883d38eb8b9f40 | refs/heads/master | 2022-04-09T18:16:18.590182 | 2020-03-26T12:10:44 | 2020-03-26T12:10:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,933 | py | from __future__ import unicode_literals
from __future__ import print_function
import argparse
import logging
import logging.config
import sys
from . import __version__
from . import subcommand
from .client import Client
from .subcommands import run, version
# Module-level logger for the CLI application.
log = logging.getLogger("app")

# Map log level names on to their integer values (mirrors the stdlib
# logging levels; WARN is accepted as an alias for WARNING).
_logging_level_names = {
    "NOTSET": 0,
    "DEBUG": 10,
    "INFO": 20,
    "WARN": 30,
    "WARNING": 30,
    "ERROR": 40,
    "CRITICAL": 50,
}
class App(object):
    """Dataplicity Agent command line interface."""

    def __init__(self):
        # Instantiate one object per registered subcommand, keyed by the
        # CLI name under which it was registered.
        self.subcommands = {
            name: cls(self) for name, cls in subcommand.registry.items()
        }

    def _make_arg_parser(self):
        """Make an argument parser object for the agent CLI."""
        parser = argparse.ArgumentParser("dataplicity", description=self.__doc__)

        _version = "dataplicity agent v{}".format(__version__)

        parser.add_argument(
            "-v",
            "--version",
            action="version",
            version=_version,
            help="Display version and exit",
        )
        parser.add_argument(
            "--log-level",
            metavar="LEVEL",
            default="INFO",
            help="Set log level (INFO or WARNING or ERROR or DEBUG)",
        )
        parser.add_argument(
            "--log-file", metavar="PATH", default=None, help="Set log file"
        )
        parser.add_argument(
            "-d",
            "--debug",
            action="store_true",
            dest="debug",
            default=False,
            help="Enables debug output",
        )
        parser.add_argument(
            "-s",
            "--server-url",
            metavar="URL",
            dest="server_url",
            default=None,
            help="URL of dataplicity.com api",
        )
        parser.add_argument(
            "-m",
            "--m2m-url",
            metavar="WS URL",
            dest="m2m_url",
            default=None,
            # Fix: help text was missing its closing parenthesis.
            help="URL of m2m server (should start with ws:// or wss://)",
        )
        parser.add_argument(
            "-q", "--quiet", action="store_true", default=False, help="Hide output"
        )
        parser.add_argument(
            "--serial",
            dest="serial",
            metavar="SERIAL",
            default=None,
            help="Set Dataplicity serial",
        )
        parser.add_argument(
            "--auth",
            dest="auth_token",
            metavar="KEY",
            default=None,
            help="Set Dataplicity auth token",
        )

        subparsers = parser.add_subparsers(
            title="available sub-commands", dest="subcommand", help="sub-command help"
        )

        # Let every registered subcommand contribute its own sub-parser
        # and extra arguments.
        for name, _subcommand in self.subcommands.items():
            subparser = subparsers.add_parser(
                name,
                help=_subcommand.help,
                description=getattr(_subcommand, "__doc__", None),
            )
            _subcommand.add_arguments(subparser)

        return parser

    def _init_logging(self):
        """Initialise logging from the parsed command line arguments."""
        log_format = "%(asctime)s %(name)s\t: %(message)s"
        log_level = "CRITICAL" if self.args.quiet else self.args.log_level.upper()
        try:
            log_level_no = _logging_level_names[log_level]
        except KeyError:
            # BUG FIX: a dict lookup raises KeyError, not IndexError, so the
            # original handler never fired and an unknown --log-level
            # escaped as a raw traceback instead of this friendly error.
            self.error("invalid log level")

        if self.args.log_file:
            # Log to a rotating file (5 MiB per file, 5 backups kept).
            log_config = {
                "version": 1,
                "disable_existing_loggers": False,
                "formatters": {
                    "simple": {
                        "class": "logging.Formatter",
                        "format": log_format,
                        "datefmt": "[%d/%b/%Y %H:%M:%S]",
                    }
                },
                "handlers": {
                    "file": {
                        "level": log_level,
                        "class": "logging.handlers.RotatingFileHandler",
                        "maxBytes": 5 * 1024 * 1024,
                        "backupCount": 5,
                        "filename": self.args.log_file,
                        "formatter": "simple",
                    }
                },
                "loggers": {"": {"level": log_level, "handlers": ["file"]}},
            }
            logging.config.dictConfig(log_config)
        else:
            # No log file requested; log to stderr via basicConfig.
            logging.basicConfig(
                format=log_format, datefmt="[%d/%b/%Y %H:%M:%S]", level=log_level_no
            )

    def make_client(self):
        """Make the client object from the parsed command line arguments."""
        client = Client(
            rpc_url=self.args.server_url,
            m2m_url=self.args.m2m_url,
            serial=self.args.serial,
            auth_token=self.args.auth_token,
        )
        return client

    def error(self, msg, code=-1):
        """Display error and exit app with the given exit code."""
        log.critical("app exit ({%s}) code={%s}", msg, code)
        sys.stderr.write(msg + "\n")
        sys.exit(code)

    def run(self):
        """Parse the command line, dispatch a subcommand, return its exit code."""
        parser = self._make_arg_parser()
        args = self.args = parser.parse_args(sys.argv[1:])
        self._init_logging()
        log.debug("ready")

        # No subcommand given: show the help and signal failure.
        if args.subcommand is None:
            parser.print_help()
            return 1

        subcommand = self.subcommands[args.subcommand]
        subcommand.args = args
        try:
            return subcommand.run() or 0
        except Exception as e:
            # In --debug mode surface the full traceback; otherwise print a
            # short message plus the exact command to re-run with --debug.
            if self.args.debug:
                raise
            sys.stderr.write("(dataplicity {}) {}\n".format(__version__, e))
            cmd = sys.argv[0].rsplit("/", 1)[-1]
            debug_cmd = " ".join([cmd, "--debug"] + sys.argv[1:])
            sys.stderr.write("(run '{}' for a full traceback)\n".format(debug_cmd))
            return -1
def main():
    """Dataplicity Agent entry point."""
    exit_code = App().run() or 0
    log.debug("exit with code %s", exit_code)
    sys.exit(exit_code)
| [
"willmcgugan@gmail.com"
] | willmcgugan@gmail.com |
ceaaef67a466aa7984b5aeb9fc34214bdf9d406f | 75f6bbcdf10dec884202b3136feb0317842df55f | /apps/task/migrations/0004_taskhistory_run_time.py | d4d47070a627c7c134373d9e2215331eb3433b6a | [] | no_license | qt-pay/python-devops | bafa305fbcd7bef4498857ab75be7447bc1e0a42 | 60e9481ab84628cf817fde1c52f4a15d5085e503 | refs/heads/main | 2023-03-15T12:39:45.813287 | 2021-01-24T18:40:38 | 2021-01-24T18:40:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | py | # Generated by Django 2.2.2 on 2021-01-06 14:23
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations: adds the optional
    # ``run_time`` CharField (script run duration) to TaskHistory.

    dependencies = [
        ('task', '0003_auto_20210105_1927'),
    ]

    operations = [
        migrations.AddField(
            model_name='taskhistory',
            name='run_time',
            field=models.CharField(blank=True, max_length=20, null=True, verbose_name='脚本运行时长'),
        ),
    ]
| [
"yans121@sina.com"
] | yans121@sina.com |
afe99c5dbd056a118f4b8016b5e82309d514bfcb | cec68acfc0187b7d92fb7d6e5107058e3f8269ea | /Degiskenler/sozluk.py | d095580f7fa0dd6618f37d68ac9c10ee1cf7d9b9 | [] | no_license | vektorelpython/Python8 | 441575224100a687467c4934f7c741aa0c4bd087 | d135fbf1444d56a0da38c42fd2e8feda48646f49 | refs/heads/master | 2022-01-18T12:17:40.387422 | 2019-09-07T13:47:55 | 2019-09-07T13:47:55 | 205,534,765 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | sozluk = {"elma":"apple","portakal":"orange","kitap":"book"}
# Show the dictionary, read one key, then mutate it in two ways and
# finally list every translation.
print(sozluk)
print(sozluk["elma"])
sozluk.update({"elma": "alma"})
print(sozluk)
sozluk["kalem"] = "pencil"
print(sozluk)
print(sozluk.values())
"Kurs"
] | Kurs |
a1c0ae12360d7f426f7df523dba5d075c446021e | 11ca0c393c854fa7212e783a34269f9dae84e8c7 | /Python/381. O(1) 时间插入、删除和获取随机元素 - 允许重复.py | 9b0953c2ceda3750878644fdc4c09fbf6a49a38e | [] | no_license | VictoriqueCQ/LeetCode | dc84d81163eed26fa9dbc2114bba0b5c2ea881f4 | a77b3ead157f97f5d9599badb4d4c5da69de44ba | refs/heads/master | 2021-06-05T06:40:24.659909 | 2021-03-31T08:31:51 | 2021-03-31T08:31:51 | 97,978,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | import random
class RandomizedCollection:

    def __init__(self):
        """
        Initialize your data structure here.
        """
        # self.v holds every inserted value (duplicates included); self.pos
        # maps a value to the set of its indices in self.v.  This is what
        # makes insert/remove average O(1) -- the original list-based
        # `in` / `remove` / slice operations were O(n) per call, defeating
        # the point of the problem ("O(1) time ... allow duplicates").
        self.v = []
        self.pos = {}

    def insert(self, val: int) -> bool:
        """
        Inserts a value to the collection. Returns true if the collection did not already contain the specified element.
        """
        indices = self.pos.setdefault(val, set())
        was_absent = not indices
        indices.add(len(self.v))
        self.v.append(val)
        return was_absent

    def remove(self, val: int) -> bool:
        """
        Removes a value from the collection. Returns true if the collection contained the specified element.
        """
        indices = self.pos.get(val)
        if not indices:
            return False
        # Overwrite one occurrence of val with the last element, then drop
        # the tail slot: an O(1) delete from the middle of the list.
        i = indices.pop()
        last_index = len(self.v) - 1
        last_val = self.v[last_index]
        self.v[i] = last_val
        # Re-home the moved element's index (the add/discard pair is also
        # correct when i == last_index or last_val == val).
        self.pos[last_val].add(i)
        self.pos[last_val].discard(last_index)
        self.v.pop()
        return True

    def getRandom(self) -> int:
        """
        Get a random element from the collection.
        """
        # Duplicates appear in self.v multiple times, so each occurrence is
        # equally likely -- same distribution as the original.
        return random.choice(self.v)
| [
"1997Victorique0317"
] | 1997Victorique0317 |
d1f9477dfbe06bd03429351846beab4de65d55b0 | b5cc6d7b5f7ccea36fce4eab961979404414f8b0 | /fem/fenics_solvers.py | f3d97983089a684391179e43738572f19b731b8b | [] | no_license | MiroK/cutFEM-beam | adf0c925dbe64b370dab48e82335617450675f5d | 2fb3686804e836d4031fbf231a36a0f9ac8a3012 | refs/heads/master | 2021-01-21T23:54:32.868307 | 2015-02-14T13:14:59 | 2015-02-14T13:14:59 | 25,625,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,241 | py | from dolfin import *
# `parameters` is DOLFIN's global parameter database (via `from dolfin
# import *` at the top of the file).

# Optimization options for the form compiler
parameters["form_compiler"]["cpp_optimize"] = True
parameters["form_compiler"]["optimize"] = True

# Make mesh ghosted for evaluation of DG terms
parameters["ghost_mode"] = "shared_facet"
def cg_solver(mesh, problem, p=2, verbose=False):
    '''
    Solve the biharmonic problem:
        laplace^2(u) = f  in mesh
        u = 0             on mesh boundary
        laplace(u) = 0    on mesh boundary
    We use CG elements of degree p (p > 1). An interior penalty term is
    used to force continuity of laplace(u) across interior facets.

    `problem` is a dict with the exact solution 'u' and right-hand side
    'f'; `mesh` may be a Mesh object or a path to a stored mesh file.
    Returns a dict with mesh size 'h', L2 error 'L2', 'a_max' and the
    discrete solution 'uh'.
    '''
    assert p > 1
    # Accept a path to a mesh file as well as a Mesh instance.
    if isinstance(mesh, str):
        mesh = Mesh(mesh)
    u_exact = problem['u']
    f = problem['f']

    V = FunctionSpace(mesh, 'CG', p)
    u = TrialFunction(V)
    v = TestFunction(V)
    h = CellSize(mesh)
    h_avg = (h('+') + h('-'))/2.0
    n = FacetNormal(mesh)
    # Penalty parameter
    alpha = Constant(100)

    # Define bilinear form
    # Standard term
    a = inner(div(grad(u)), div(grad(v)))*dx
    # Ip stab of surface term with grad(v).n
    a += - inner(avg(div(grad(u))), jump(grad(v), n))*dS \
         - inner(jump(grad(u), n), avg(div(grad(v))))*dS \
         + alpha/h_avg**(p-1)*inner(jump(grad(u), n), jump(grad(v), n))*dS

    # Define linear form
    L = inner(f, v)*dx

    # DirichletBC
    bc = DirichletBC(V, Constant(0), DomainBoundary())

    # Solve variational problem
    A, M = PETScMatrix(), PETScMatrix()
    b = PETScVector()
    m = inner(u, v)*dx
    # NOTE(review): the mass matrix M assembled here is unused below --
    # it looks like a leftover from the commented-out eigenvalue study.
    assemble_system(m, L, bc, A_tensor=M, b_tensor=b)
    assemble_system(a, L, bc, A_tensor=A, b_tensor=b)

    # try:
    #     esolver = SLEPcEigenSolver(A)
    #     esolver.parameters['spectrum'] = 'largest magnitude'
    #     esolver.solve(1)
    #     max_r, max_c = esolver.get_eigenvalue(0)
    #     if mesh.num_cells() < 513:
    #         print '..'
    #         esolver.parameters['spectrum'] = 'smallest magnitude'
    #         esolver.solve(1)
    #         min_r, min_c = esolver.get_eigenvalue(0)
    #         print '%2E %2E %2E \t' % (max_r, min_r, max_r/min_r)
    # except:
    #     print 'Eigensolver went wrong'

    u = Function(V)
    solve(A, u.vector(), b)

    # Plot solution
    if verbose:
        plot(u, title='numeric')
        plot(u_exact, mesh=mesh, title='exact')
        interactive()

    e_L2 = errornorm(u_exact, u, 'l2')
    return {'h': mesh.hmax(), 'L2': e_L2, 'a_max': 1, 'uh': u}
def mixed_solver(mesh, problem, p, verbose=False):
    '''
    Solve the biharmonic problem:
        laplace^2(u) = f  in mesh
        u = 0             on mesh boundary
        laplace(u) = 0    on mesh boundary
    by breaking it into two Poisson problems
        -laplace(u) = sigma
        -laplace(sigma) = f  in mesh
        u = 0
        sigma = 0            on mesh boundary
    We use CG elements of degree p for both solves.

    NOTE(review): this file uses Python 2 print statements below.
    '''
    # Accept a path to a mesh file as well as a Mesh instance.
    if isinstance(mesh, str):
        mesh = Mesh(mesh)
    u_exact = problem['u']
    f = problem['f']

    # --------
    V = FunctionSpace(mesh, 'CG', p)
    u = TrialFunction(V)
    v = TestFunction(V)

    # Stiffness matrix
    a = inner(grad(u), grad(v))*dx
    m = inner(u, v)*dx
    # Define linear form
    L = inner(f, v)*dx
    # DirichletBC
    bc = DirichletBC(V, Constant(0), DomainBoundary())

    A = PETScMatrix()
    b = PETScVector()
    assemble_system(a, L, bc, A_tensor=A, b_tensor=b)
    B, _ = assemble_system(m, L)

    # The same stiffness matrix is used for both solves, so the LU
    # factorization is computed once and reused.
    solver = LUSolver(A)
    solver.parameters['reuse_factorization'] = True

    # Solve first for moment
    sigma = Function(V)
    print '.'
    solver.solve(sigma.vector(), b)
    print '..'

    # Make the rhs for displacement system
    B.mult(sigma.vector(), b)

    # Solve for u
    u = Function(V)
    solver.solve(u.vector(), b)
    print '...'

    # Plot solution
    if verbose:
        plot(u, title='numeric')
        plot(u_exact, mesh=mesh, title='exact')
        interactive()

    e_L2 = errornorm(u_exact, u, 'l2')
    return {'h': mesh.hmax(), 'L2': e_L2, 'a_max': 1}
# -----------------------------------------------------------------------------

if __name__ == '__main__':
    # Demo driver: DIM selects the experiment (2 -> 2D plate problem,
    # 1 -> 1D interval problem, anything else -> beam embedded in a plate).
    DIM = -1
    if DIM == 2:
        from problem import joris_problem
        D, Lx, Ly, = 1, 1, 1
        problem = joris_problem(D, Lx, Ly)
        mesh = RectangleMesh(0, 0, Lx, Ly, 40, 40)
        mixed_solver(mesh, problem, 2, True)
        cg_solver(mesh, problem, 2, True)
    elif DIM == 1:
        from problem import manufacture_biharmonic_1d
        import sympy as sp
        x = sp.symbols('x')
        # Manufactured 1D problem on [a, b] with a sine solution.
        a = -1
        b = 1.5
        E = 1
        u = sp.sin(sp.pi*(x-a)/(b-a))
        problem = manufacture_biharmonic_1d(u=u, a=a, b=b, E=E)
        mesh = IntervalMesh(100, a, b)
        mixed_solver(mesh, problem, 2, True)
        cg_solver(mesh, problem, 2, True)
    else:
        from problem import manufacture_biharmonic_1d
        import matplotlib.pyplot as plt
        from sympy.plotting import plot_parametric
        from beam_mesh import line_mesh
        import numpy as np
        import sympy as sp
        x = sp.symbols('x')
        E = 1
        # Beam from A to B inside the unit square; L is its length.
        A = np.array([0.25, 0])
        B = np.array([0.75, 1])
        L = np.hypot(*(A-B))
        f = 1
        problem = manufacture_biharmonic_1d(f=f, a=0, b=L, E=E)
        u = problem['u']
        plot_parametric(A[0] + (B[0]-A[0])*x, A[1] + (B[1]-A[1])*x, (x, 0, 1),
                        xlim=(0, 1), ylim=(0, 1))
        n_cells = 2**10
        line_mesh(A, B, n_cells, 'mesh.xml')
        mesh = Mesh('mesh.xml')
        ans = cg_solver(mesh, problem)
        uh = ans['uh']

        # Map arclength s on [0, L] to a point on the beam in the plane.
        def F(s):
            return A + (B-A)*s/L

        s = np.linspace(0, L, 100)
        plt.figure()
        plt.plot(s, [u(si) for si in s], label='exact')
        plt.plot(s, [uh(*F(si)) for si in s], label='numeric')
        plt.legend(loc='best')
        plt.axis('tight')
        plt.show()

        beam_mesh = mesh
        plate_mesh = UnitSquareMesh(10, 10)
        # Constraint is a problem ...
        V = FunctionSpace(plate_mesh, 'CG', 2)
        W = FunctionSpace(beam_mesh, 'CG', 2)
        P = FunctionSpace(beam_mesh, 'CG', 1)
        M = MixedFunctionSpace([V, W, P])
| [
"miroslav.kuchta@gmail.com"
] | miroslav.kuchta@gmail.com |
677089b7b1256a45b93d36d61e4e372fe35bacc7 | f0fd2b4f56b1753e47139a3557a1625abcfead9e | /django/full_stack/dojo_reads/dojo_reads/settings.py | 0b4358dfd60ebfcbe0fa20911d6c4c865e8f3ac9 | [] | no_license | nlee1229/Python-Stack | 16dd6078be98392d8a21a93965beb7d39ba4157e | 1aba5cf17f1f6c50d8fd50de031fcd6ec2bdda21 | refs/heads/master | 2023-03-26T05:12:14.264780 | 2021-03-22T01:56:22 | 2021-03-22T01:56:22 | 328,876,685 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,137 | py | """
Django settings for dojo_reads project.
Generated by 'django-admin startproject' using Django 2.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# Project root: two directories above this settings file.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and
# load it from the environment before any real deployment.
SECRET_KEY = 'h0#b4y0t5hth3zppr06er+!c76zu6!%9elk3x^7al8^2u5#0t7'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps.
    'login_reg_app',
    'main_app',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'dojo_reads.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'dojo_reads.wsgi.application'


# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'
| [
"72540269+nlee1229@users.noreply.github.com"
] | 72540269+nlee1229@users.noreply.github.com |
e82af246a4fabbf3a2ba385c7b0c50fb773c3ad8 | 628ec414b7807fc50de67345361e41cc68ba3720 | /mayan/apps/ocr/apps.py | e1d21110b14b38c12ebf713b08bd02902cf0438c | [
"Apache-2.0"
] | permissive | TestingCodeReview/Mayan-EDMS | aafe144424ffa8128a4ff7cee24d91bf1e1f2750 | d493ec34b2f93244e32e1a2a4e6cda4501d3cf4e | refs/heads/master | 2020-05-27T23:34:44.118503 | 2019-04-05T02:04:18 | 2019-04-05T02:04:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,645 | py | from __future__ import unicode_literals
from datetime import timedelta
import logging
from kombu import Exchange, Queue
from django.apps import apps
from django.db.models.signals import post_save
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from acls import ModelPermission
from common import (
MayanAppConfig, menu_facet, menu_multi_item, menu_object, menu_secondary,
menu_tools
)
from common.classes import ModelField
from common.settings import settings_db_sync_task_delay
from documents.search import document_search, document_page_search
from documents.signals import post_version_upload
from documents.widgets import document_link
from mayan.celery import app
from navigation import SourceColumn
from .events import event_ocr_document_version_submit
from .handlers import (
handler_index_document, handler_initialize_new_ocr_settings,
handler_ocr_document_version,
)
from .links import (
link_document_page_ocr_content, link_document_ocr_content,
link_document_ocr_download, link_document_ocr_errors_list,
link_document_submit, link_document_submit_multiple,
link_document_type_ocr_settings, link_document_type_submit,
link_entry_list
)
from .permissions import (
permission_document_type_ocr_setup, permission_ocr_document,
permission_ocr_content_view
)
from .queues import * # NOQA
from .signals import post_document_version_ocr
from .utils import get_document_ocr_content
# Module-level logger for the OCR app.
logger = logging.getLogger(__name__)
def document_ocr_submit(self):
    """Queue OCR for this document's latest version, if it has one."""
    version = self.latest_version
    # Don't error out if document has no version
    if version:
        version.submit_for_ocr()
def document_version_ocr_submit(self):
    """Record the submit event and queue the OCR task for this version."""
    # Imported lazily to avoid a circular import at app load time.
    from .tasks import task_do_ocr

    event_ocr_document_version_submit.commit(
        action_object=self.document, target=self
    )

    # The task start is delayed -- presumably to give the database time to
    # commit the new version row before a worker picks it up (per the name
    # of settings_db_sync_task_delay); confirm against the setting's docs.
    task_do_ocr.apply_async(
        eta=now() + timedelta(seconds=settings_db_sync_task_delay.value),
        kwargs={'document_version_pk': self.pk},
    )
class OCRApp(MayanAppConfig):
    """Mayan app config that wires OCR into the documents app: model
    patches, permissions, search fields, UI columns/menus, the Celery
    queue and the signal handlers."""
    has_rest_api = True
    has_tests = True
    name = 'ocr'
    verbose_name = _('OCR')

    def ready(self):
        super(OCRApp, self).ready()

        # Resolve model classes late (inside ready()) as required by
        # Django's app-loading machinery.
        Document = apps.get_model(
            app_label='documents', model_name='Document'
        )
        DocumentPage = apps.get_model(
            app_label='documents', model_name='DocumentPage'
        )
        DocumentType = apps.get_model(
            app_label='documents', model_name='DocumentType'
        )
        DocumentTypeSettings = self.get_model(
            model_name='DocumentTypeSettings'
        )
        DocumentVersion = apps.get_model(
            app_label='documents', model_name='DocumentVersion'
        )
        DocumentVersionOCRError = self.get_model('DocumentVersionOCRError')

        # Patch OCR helpers onto the documents models.
        Document.add_to_class('submit_for_ocr', document_ocr_submit)
        DocumentVersion.add_to_class(
            'ocr_content', get_document_ocr_content
        )
        DocumentVersion.add_to_class(
            'submit_for_ocr', document_version_ocr_submit
        )

        ModelField(
            Document, name='versions__pages__ocr_content__content'
        )

        # Permissions and ACL inheritance.
        ModelPermission.register(
            model=Document, permissions=(
                permission_ocr_document, permission_ocr_content_view
            )
        )
        ModelPermission.register(
            model=DocumentType, permissions=(
                permission_document_type_ocr_setup,
            )
        )
        ModelPermission.register_inheritance(
            model=DocumentTypeSettings, related='document_type',
        )

        # Columns for the OCR error list view.
        SourceColumn(
            source=DocumentVersionOCRError, label=_('Document'),
            func=lambda context: document_link(context['object'].document_version.document)
        )
        SourceColumn(
            source=DocumentVersionOCRError, label=_('Added'),
            attribute='datetime_submitted'
        )
        SourceColumn(
            source=DocumentVersionOCRError, label=_('Result'),
            attribute='result'
        )

        # Dedicated Celery queue and routing for the OCR task.
        app.conf.CELERY_QUEUES.append(
            Queue('ocr', Exchange('ocr'), routing_key='ocr'),
        )

        app.conf.CELERY_ROUTES.update(
            {
                'ocr.tasks.task_do_ocr': {
                    'queue': 'ocr'
                },
            }
        )

        # Expose OCR content in document and page search.
        document_search.add_model_field(
            field='versions__pages__ocr_content__content', label=_('OCR')
        )
        document_page_search.add_model_field(
            field='ocr_content__content', label=_('OCR')
        )

        # Menu wiring.
        menu_facet.bind_links(
            links=(link_document_ocr_content,), sources=(Document,)
        )
        menu_facet.bind_links(
            links=(link_document_page_ocr_content,), sources=(DocumentPage,)
        )
        menu_multi_item.bind_links(
            links=(link_document_submit_multiple,), sources=(Document,)
        )
        menu_object.bind_links(
            links=(link_document_submit,), sources=(Document,)
        )
        menu_object.bind_links(
            links=(link_document_page_ocr_content,), sources=(DocumentPage,)
        )
        menu_object.bind_links(
            links=(link_document_type_ocr_settings,), sources=(DocumentType,)
        )
        menu_secondary.bind_links(
            links=(
                link_document_ocr_content, link_document_ocr_errors_list,
                link_document_ocr_download
            ),
            sources=(
                'ocr:document_content', 'ocr:document_ocr_error_list',
                'ocr:document_ocr_download',
            )
        )
        menu_secondary.bind_links(
            links=(link_entry_list,),
            sources=(
                'ocr:entry_list', 'ocr:entry_delete_multiple',
                'ocr:entry_re_queue_multiple', DocumentVersionOCRError
            )
        )
        menu_tools.bind_links(
            links=(
                link_document_type_submit, link_entry_list
            )
        )

        # Signal handlers: index after OCR, default settings for new
        # document types, and auto-OCR on new version upload.
        post_document_version_ocr.connect(
            dispatch_uid='ocr_handler_index_document',
            receiver=handler_index_document,
            sender=DocumentVersion
        )
        post_save.connect(
            dispatch_uid='ocr_handler_initialize_new_ocr_settings',
            receiver=handler_initialize_new_ocr_settings,
            sender=DocumentType
        )
        post_version_upload.connect(
            dispatch_uid='ocr_handler_ocr_document_version',
            receiver=handler_ocr_document_version,
            sender=DocumentVersion
        )
| [
"roberto.rosario.gonzalez@gmail.com"
] | roberto.rosario.gonzalez@gmail.com |
152ffb10dc25b1ca59f9931e30c3976970cc1f2a | e754658e64e2bf6361fb01dcdf52d3f7364c2dae | /geemap/__init__.py | dbd53ffe9b416156f0132ee9ae815c0684ad3764 | [
"MIT"
] | permissive | wha7/geemap | ad4f00163d8a13aac583b32e74916ac5011877cf | 57f1cb182ac51e9560976ac1452b28beee6d5312 | refs/heads/master | 2023-01-08T08:45:24.860924 | 2020-11-06T04:03:28 | 2020-11-06T04:03:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | """Top-level package for geemap."""
# Package metadata.
__author__ = """Qiusheng Wu"""
__email__ = "giswqs@gmail.com"
__version__ = "0.8.1"

# Re-export the core module's public API at the package level.
from .geemap import *

# from .basemaps import ee_basemaps
# from .legends import builtin_legends
| [
"giswqs@gmail.com"
] | giswqs@gmail.com |
4595a184419af2980cd94de30541698d57fcb270 | 97fbcd3e36f8a4fbe02c03f3433107be597cd5db | /anvil/type_utils.py | 84f09746a037b710fd1b3ff63588ed9c69dbfca8 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jzako/anvil | 049f04eb49d13ea12bb9d65cc2f0650cef575490 | 1bbe7a5059fcaba5f6f8c84b01dbf44fcccf9d8a | refs/heads/master | 2021-01-22T14:20:27.198719 | 2015-10-20T20:24:37 | 2015-10-20T20:24:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import types
def make_bool(val):
    """Coerce *val* into a boolean.

    Booleans pass through unchanged and ``None`` maps to ``False``.
    Anything else is converted to a string and matched, after lowering
    and stripping, against common true/false spellings.

    :param val: value to convert
    :returns: the boolean interpretation of ``val``
    :raises TypeError: if ``val`` has no recognized boolean spelling
    """
    if isinstance(val, bool):
        return val
    # Bug fix: compare against None directly instead of types.NoneType,
    # which does not exist on Python 3.0-3.9 (same semantics on Python 2).
    if val is None:
        return False
    sval = str(val).lower().strip()
    if sval in ('true', '1', 'on', 'yes', 't'):
        return True
    if sval in ('0', 'false', 'off', 'no', 'f', '', 'none'):
        return False
    raise TypeError("Unable to convert %r to a boolean" % (val,))
def obj_name(obj):
    """Return the name of *obj*.

    Classes, modules, functions and lambdas yield their own ``__name__``;
    any other object yields the name of its class (via a single recursion
    on ``obj.__class__``).
    """
    # Bug fix: use the builtin `type` instead of types.TypeType, which is
    # Python 2 only (on Python 2, types.TypeType *is* `type`, so behavior
    # is unchanged there).
    if isinstance(obj, (type,
                        types.ModuleType,
                        types.FunctionType,
                        types.LambdaType)):
        return str(obj.__name__)
    return obj_name(obj.__class__)
| [
"harlowja@yahoo-inc.com"
] | harlowja@yahoo-inc.com |
8cac4526b7a7108207dd13871b04a87c0ec849b3 | 22be44dce81f6c0ac9f891e1661118299e4feaf1 | /labs/src/A.0.HelloPython/startingpoint/main.py | 807eef2c1b435052d484207f144f158a3277586d | [] | no_license | KathiW/python_foundations | 18a1b24a140e8f3e482a1581986c9bafd64565ff | 02b6d5b2532fb9c71a497ab1fe506a7d70bc13e1 | refs/heads/main | 2023-02-19T10:47:02.391016 | 2021-01-20T13:00:51 | 2021-01-20T13:00:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 182 | py | min_possible=1
max_possible = 100

# Introduce the (yet-to-be-implemented) guessing game.
for line in (
    "Think of a whole number between 1 and 100.",
    "Then I'll try to guess it.",
    "Ready?",
):
    print(line)

print('\n\nTODO:Implement this game\n\n')
| [
"you@example.com"
] | you@example.com |
40f444b118b051a4a4b714f45b5074514526826d | 7bb3b187c9cd2b5f16bd740e920eb875bccd2bbb | /Sqlite.py | 6da147a1460fc3f925efe642aa5a17333e588bdd | [] | no_license | yaowenqiang/python-security-demo | c85c3066eb944267e2693f49f3291b1459ea829b | ece715fbb824fa395710fc5e82b8731ce1d370a8 | refs/heads/master | 2020-05-29T08:47:31.487050 | 2016-10-05T19:47:30 | 2016-10-05T19:47:30 | 70,012,123 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | import sqlite3
# NOTE(review): 'dbname.db' and 'table' look like placeholders; "table"
# is an SQL reserved word, so this query fails with a syntax error unless
# the real table name is substituted (or the identifier is quoted).
conn = sqlite3.connect('dbname.db')
cur = conn.cursor()
# NOTE(review): the connection is never closed; real code should use
# contextlib.closing(conn) or call conn.close() when done.
for row in cur.execute('select * from table'):
    print(row)
| [
"yaowenqiang111@163.com"
] | yaowenqiang111@163.com |
16b0f445540dde1647ce7b4edbe3557f4e17109d | 555b9f764d9bca5232360979460bc35c2f5ad424 | /google/ads/google_ads/v1/proto/enums/price_extension_type_pb2.py | d68fc2e76ee0a79c0fb5e4882d13b5b99bbeac9c | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | juanmacugat/google-ads-python | b50256163782bc0223bcd8b29f789d74f4cfad05 | 0fc8a7dbf31d9e8e2a4364df93bec5f6b7edd50a | refs/heads/master | 2021-02-18T17:00:22.067673 | 2020-03-05T16:13:57 | 2020-03-05T16:13:57 | 245,215,877 | 1 | 0 | Apache-2.0 | 2020-03-05T16:39:34 | 2020-03-05T16:39:33 | null | UTF-8 | Python | false | true | 5,168 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v1/proto/enums/price_extension_type.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v1/proto/enums/price_extension_type.proto',
package='google.ads.googleads.v1.enums',
syntax='proto3',
serialized_options=_b('\n!com.google.ads.googleads.v1.enumsB\027PriceExtensionTypeProtoP\001ZBgoogle.golang.org/genproto/googleapis/ads/googleads/v1/enums;enums\242\002\003GAA\252\002\035Google.Ads.GoogleAds.V1.Enums\312\002\035Google\\Ads\\GoogleAds\\V1\\Enums\352\002!Google::Ads::GoogleAds::V1::Enums'),
serialized_pb=_b('\n>google/ads/googleads_v1/proto/enums/price_extension_type.proto\x12\x1dgoogle.ads.googleads.v1.enums\x1a\x1cgoogle/api/annotations.proto\"\xeb\x01\n\x16PriceExtensionTypeEnum\"\xd0\x01\n\x12PriceExtensionType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12\n\n\x06\x42RANDS\x10\x02\x12\n\n\x06\x45VENTS\x10\x03\x12\r\n\tLOCATIONS\x10\x04\x12\x11\n\rNEIGHBORHOODS\x10\x05\x12\x16\n\x12PRODUCT_CATEGORIES\x10\x06\x12\x11\n\rPRODUCT_TIERS\x10\x07\x12\x0c\n\x08SERVICES\x10\x08\x12\x16\n\x12SERVICE_CATEGORIES\x10\t\x12\x11\n\rSERVICE_TIERS\x10\nB\xec\x01\n!com.google.ads.googleads.v1.enumsB\x17PriceExtensionTypeProtoP\x01ZBgoogle.golang.org/genproto/googleapis/ads/googleads/v1/enums;enums\xa2\x02\x03GAA\xaa\x02\x1dGoogle.Ads.GoogleAds.V1.Enums\xca\x02\x1dGoogle\\Ads\\GoogleAds\\V1\\Enums\xea\x02!Google::Ads::GoogleAds::V1::Enumsb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_PRICEEXTENSIONTYPEENUM_PRICEEXTENSIONTYPE = _descriptor.EnumDescriptor(
name='PriceExtensionType',
full_name='google.ads.googleads.v1.enums.PriceExtensionTypeEnum.PriceExtensionType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BRANDS', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EVENTS', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOCATIONS', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NEIGHBORHOODS', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PRODUCT_CATEGORIES', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PRODUCT_TIERS', index=7, number=7,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SERVICES', index=8, number=8,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SERVICE_CATEGORIES', index=9, number=9,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SERVICE_TIERS', index=10, number=10,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=155,
serialized_end=363,
)
_sym_db.RegisterEnumDescriptor(_PRICEEXTENSIONTYPEENUM_PRICEEXTENSIONTYPE)
_PRICEEXTENSIONTYPEENUM = _descriptor.Descriptor(
name='PriceExtensionTypeEnum',
full_name='google.ads.googleads.v1.enums.PriceExtensionTypeEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_PRICEEXTENSIONTYPEENUM_PRICEEXTENSIONTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=128,
serialized_end=363,
)
_PRICEEXTENSIONTYPEENUM_PRICEEXTENSIONTYPE.containing_type = _PRICEEXTENSIONTYPEENUM
DESCRIPTOR.message_types_by_name['PriceExtensionTypeEnum'] = _PRICEEXTENSIONTYPEENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PriceExtensionTypeEnum = _reflection.GeneratedProtocolMessageType('PriceExtensionTypeEnum', (_message.Message,), dict(
DESCRIPTOR = _PRICEEXTENSIONTYPEENUM,
__module__ = 'google.ads.googleads_v1.proto.enums.price_extension_type_pb2'
,
__doc__ = """Container for enum describing types for a price extension.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v1.enums.PriceExtensionTypeEnum)
))
_sym_db.RegisterMessage(PriceExtensionTypeEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"noreply@github.com"
] | juanmacugat.noreply@github.com |
07be74c1ce70c9e40a4ac69d3170a13087bb46e4 | eff2fe0333955dee20091e5de3ff7beb5c49a447 | /django_project/projet1/site_info/infos/forms.py | c75ba26a9bfedd48edc57a46bc0260cc61de834e | [] | no_license | xarala221/python-pour-toure | ec8aad2100f9a157b158fe1727c3b0566f09c2da | 27d90bc580acb159182e0914dffd2e037ef6f86b | refs/heads/master | 2023-04-27T00:29:55.927458 | 2019-11-08T00:09:55 | 2019-11-08T00:09:55 | 209,387,702 | 0 | 0 | null | 2023-04-21T20:39:02 | 2019-09-18T19:26:11 | Python | UTF-8 | Python | false | false | 283 | py | from django import forms
from .models import Article
class ArticleForm(forms.ModelForm):
    """Model form for creating and editing an Article (title + body)."""

    titre = forms.CharField(max_length=150, label="Titre")
    contenu = forms.CharField(widget=forms.Textarea)

    class Meta:
        model = Article
        fields = ('titre', 'contenu',)
"xaralaxarala@gmail.com"
] | xaralaxarala@gmail.com |
189efb22bc96c8dbf39159cfa00bac64b62041ab | 384e179223c646e6390fce9e97242d34842cb192 | /tests/minmax.py | b691b3232828b0326e6803175594c6c94da6bd27 | [] | no_license | mykespb/edu | 3a36fd1981fb696b622e23d878d45a098da819e7 | 0364b261234a2488c0ad3408ad83406594a5238e | refs/heads/master | 2023-08-07T15:37:19.285125 | 2023-08-06T18:13:28 | 2023-08-06T18:13:28 | 20,329,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,589 | py | #!/usr/bin/env python3
# программа ищет максимумы и минимумы в списке
# (C) М.Колодин, 2021-06-25, 2021-10-06 1.3
# -------------------------------------- подготовка
import random
rang = range(10)
def makerand(size=10, lower=-100, upper=100):
    """Build a list of `size` random integers drawn uniformly from [lower, upper]."""
    values = []
    for _ in range(size):
        values.append(random.randint(lower, upper))
    return values
def head(title):
    """Print `title` framed above and below by a horizontal rule."""
    rule = "---------------------------------"
    print()
    print(rule)
    print(title)
    print(rule)
    print()
def test(f):
    """Run `f` on several random samples, printing each sample and its result."""
    for _ in rang:
        sample = makerand()
        print(sample)
        print(f(sample))
# -------------------------------------- запуски функций
head("найти максимум")
def getmax1(a):
    """Return the largest element of `a`, or None for an empty sequence."""
    return max(a) if a else None
print(getmax1([]))
test(getmax1)
def getmax2(a):
    """Find the largest element with a single manual scan; None if empty."""
    if not a:
        return None
    best = a[0]
    for candidate in a[1:]:
        if candidate > best:
            best = candidate
    return best
test(getmax1)
head("найти минимум, нетривиально")
def getmin2(a):
    """Find the smallest element with a single manual scan; None if empty."""
    if not a:
        return None
    best = a[0]
    for candidate in a[1:]:
        if candidate < best:
            best = candidate
    return best
test(getmin2)
head("найти минимум и максимум за 1 проход")
def getall1(a):
    """Return (minimum, maximum) of `a` in one pass, or None if empty."""
    if not a:
        return None
    lo = hi = a[0]
    for value in a[1:]:
        if value < lo:
            lo = value
        elif value > hi:
            hi = value
    return lo, hi
test(getall1)
head("найти максимум по модулю")
def maxmod1(a):
    """Return the element of `a` with the largest absolute value, or None if empty.

    Uses max() with an abs key: a single O(n) pass instead of the original
    O(n log n) full sort.  Ties in |x| resolve to the earliest element,
    matching the behaviour of the stable reverse-sorted slice it replaces.
    """
    return max(a, key=abs) if a else None
test(maxmod1)
head("найти максимум и минимум по модулю")
def minmaxmod1(a):
    """Return (smallest-|x| element, largest-|x| element) of `a`, or None if empty."""
    if not a:
        return None
    ordered = sorted(a, key=abs)
    return ordered[0], ordered[-1]
test(minmaxmod1)
def minmaxmod2(a):
    """Return [smallest-|x| element, largest-|x| element] of `a`, or None if empty.

    Fix: the original slice `[0:len(a):len(a)-1]` raised ValueError for a
    single-element list (slice step 0).  Taking the first and last elements
    of the abs-sorted list is equivalent for len >= 2 and safe for len == 1.
    """
    if not a:
        return None
    ordered = sorted(a, key=abs)
    return [ordered[0], ordered[-1]]
test(minmaxmod2)
# ================= the end.
| [
"mykespb@gmail.com"
] | mykespb@gmail.com |
d95dd338136787acdc0eb23e72b41ad2b218308d | 846a7668ac964632bdb6db639ab381be11c13b77 | /android/tools/test/connectivity/acts/framework/acts/controllers/native_android_device.py | cbe32373eacb752b1ca8049113a18167327983d1 | [] | no_license | BPI-SINOVOIP/BPI-A64-Android8 | f2900965e96fd6f2a28ced68af668a858b15ebe1 | 744c72c133b9bf5d2e9efe0ab33e01e6e51d5743 | refs/heads/master | 2023-05-21T08:02:23.364495 | 2020-07-15T11:27:51 | 2020-07-15T11:27:51 | 143,945,191 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,263 | py | #!/usr/bin/env python3.4
#
# Copyright 2016 - The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from acts.controllers.android_device import AndroidDevice
from acts.controllers.utils_lib import host_utils
import acts.controllers.native as native
from subprocess import call
import logging
import time
#TODO(tturney): Merge this into android device
ACTS_CONTROLLER_CONFIG_NAME = "NativeAndroidDevice"
ACTS_CONTROLLER_REFERENCE_NAME = "native_android_devices"
def create(configs):
    """Instantiate a NativeAndroidDevice per config and start sl4n on each.

    Failures to start sl4n on one device are logged and do not abort the
    remaining devices (deliberate best-effort behaviour).
    """
    logger = logging
    ads = get_instances(configs)
    for ad in ads:
        try:
            ad.get_droid()
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.  Keep the logging behaviour.
            logger.exception("Failed to start sl4n on %s" % ad.serial)
    return ads
def destroy(ads):
    """Controller teardown hook; these devices hold nothing to release here."""
    pass
def get_instances(serials):
    """Create NativeAndroidDevice instances from a list of serials.

    Args:
        serials: A list of android device serials.

    Returns:
        A list of NativeAndroidDevice objects.
    """
    return [NativeAndroidDevice(serial) for serial in serials]
class NativeAndroidDeviceError(Exception):
    """Raised for failures specific to native (sl4n) android devices."""
    pass
class NativeAndroidDevice(AndroidDevice):
    """AndroidDevice variant that talks to the native sl4n daemon on-device."""

    def __del__(self):
        # Best-effort removal of the adb port forward set up in get_droid().
        if self.h_port:
            self.adb.forward("--remove tcp:%d" % self.h_port)

    def get_droid(self, handle_event=True):
        """Create an sl4n connection to the device and return its handle.

        If an sl4n server is already running on the device it is killed and
        a fresh one is started, then a new session is opened over a forwarded
        tcp port.

        Args:
            handle_event: True if this droid session will need to handle
                events.  (Currently unused by the native path.)

        Returns:
            droid: object used to communicate with sl4n on the device.
        """
        if not self.h_port or not host_utils.is_port_available(self.h_port):
            self.h_port = host_utils.get_available_host_port()
            self.adb.tcp_forward(self.h_port, self.d_port)
        # Kill any stale sl4n processes before launching a fresh daemon.
        pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
        while (pid):
            self.adb.shell("kill {}".format(pid))
            pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
        call(
            ["adb -s " + self.serial + " shell sh -c \"/system/bin/sl4n\" &"],
            shell=True)
        try:
            # Give the daemon a moment to come up before connecting.
            time.sleep(3)
            droid = self.start_new_session()
        except Exception:
            # Fix: was a bare `except:`; one retry after a failed connect.
            droid = self.start_new_session()
        return droid

    def start_new_session(self):
        """Start a new session in sl4n and cache it by uid.

        Fixes vs the original: the caching line sat after an unconditional
        `return` and never executed, so sessions were never registered in
        self._droid_sessions; and the duplicate-uid branch referenced the
        undefined name `bt.SL4NException` (a guaranteed NameError) — it now
        raises this module's NativeAndroidDeviceError instead.

        Returns:
            An object used to communicate with sl4n on the android device.

        Raises:
            NativeAndroidDeviceError: if sl4n returns an existing uid for a
                new session.
        """
        droid = native.NativeAndroid(port=self.h_port)
        droid.open()
        if droid.uid in self._droid_sessions:
            raise NativeAndroidDeviceError(
                "SL4N returned an existing uid for a new session. Abort.")
        self._droid_sessions[droid.uid] = [droid]
        return droid
| [
"mingxin.android@gmail.com"
] | mingxin.android@gmail.com |
45a4c5a576c3940d7582246e107c9e4cf88223a8 | e206cc00299804ce2271eb5d1513620e44ee9a9b | /course1-algorithm-toolbox/assignments/assignment_003_binary_search/binary_search.py | 9f88ea80fac72e56ee2a8046d8fed1a86d21c756 | [] | no_license | dmitri-mamrukov/coursera-data-structures-and-algorithms | 15459cd160f7bbae5464bf53d995bca868a0b415 | 01dd6f0dadf62a520bcafafddf7bf2b79e8e2603 | refs/heads/master | 2020-05-24T18:27:00.665642 | 2019-05-21T20:45:37 | 2019-05-21T20:45:37 | 187,410,737 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 774 | py | #!/usr/bin/python3
import sys
def binary_search(data, key):
    """Return an index of `key` in the sorted sequence `data`, or -1 if absent."""
    lo, hi = 0, len(data) - 1
    while lo <= hi:
        mid = lo + (hi - lo) // 2
        value = data[mid]
        if value == key:
            return mid
        if key < value:
            hi = mid - 1
        else:
            lo = mid + 1
    return -1
def linear_search(a, x):
    """Return the first index of `x` in `a`, or -1 if absent.

    Delegates to the sequence's .index() (a C-level scan for lists) instead
    of a manual Python loop; first-occurrence semantics are identical.
    """
    try:
        return a.index(x)
    except ValueError:
        return -1
if __name__ == '__main__':
    # Input format: n, then n sorted values, then m, then m query keys.
    # NOTE(review): `input` shadows the builtin input(); harmless here.
    input = sys.stdin.read()
    data = list(map(int, input.split()))
    n = data[0]
    m = data[n + 1]  # number of queries (read but not used below)
    a = data[1 : n + 1]
    for x in data[n + 2:]:
        # Print the found index (or -1) for each query, space-separated.
        print(binary_search(a, x), end = ' ')
    print()
| [
"dmitri.mamrukov@gmail.com"
] | dmitri.mamrukov@gmail.com |
256abba517d9e7d62ae15a8a78fa14b8944a73e9 | 2e43fc58f2a70b38c8f74101d639d1ad6fffb609 | /ParadoxTrading/Indicator/General/STD.py | 83f4e6feb10387c9f7a07fe2930542fb323df01c | [
"MIT"
] | permissive | ppaanngggg/ParadoxTrading | 9cac27dee26a49739dde661c1e03d83bda09df9b | 2c4024e60b14bf630fd141ccd4c77f197b7c901a | refs/heads/master | 2021-05-11T20:13:14.871616 | 2018-07-13T05:49:15 | 2018-07-13T05:49:15 | 117,434,771 | 96 | 26 | MIT | 2018-03-21T08:47:27 | 2018-01-14T13:57:16 | Python | UTF-8 | Python | false | false | 946 | py | import statistics
from collections import deque
from ParadoxTrading.Indicator.IndicatorAbstract import IndicatorAbstract
from ParadoxTrading.Utils import DataStruct
class STD(IndicatorAbstract):
    """Rolling (population) standard deviation over the last `_period` values."""

    def __init__(
            self, _period: int, _use_key: str = 'closeprice',
            _idx_key: str = 'time', _ret_key: str = 'std'
    ):
        super().__init__()

        self.use_key = _use_key  # column read from each incoming DataStruct
        self.idx_key = _idx_key  # index column copied through to the output
        self.ret_key = _ret_key  # output column holding the std value
        self.data = DataStruct(
            [self.idx_key, self.ret_key],
            self.idx_key
        )

        self.period = _period
        # deque(maxlen=...) silently evicts the oldest value, giving a
        # fixed-size rolling window with no manual bookkeeping.
        self.buf = deque(maxlen=self.period)

    def _addOne(self, _data_struct: DataStruct):
        # Append the newest observation and emit the std of the window.
        index_value = _data_struct.index()[0]
        self.buf.append(_data_struct.getColumn(self.use_key)[0])
        self.data.addDict({
            self.idx_key: index_value,
            # pstdev = population std; until the window fills this is the
            # std of however many values have arrived so far.
            self.ret_key: statistics.pstdev(self.buf),
        })
| [
"hantian.pang@gmail.com"
] | hantian.pang@gmail.com |
68124a7b67fd1c6095d96303a8aca0f384c4afdc | 8acffb8c4ddca5bfef910e58d3faa0e4de83fce8 | /ml-flask/Lib/site-packages/transformers/benchmark/benchmark_args_tf.py | cd36a0cecd7e703e619b11f5e9e7cad729ff38b1 | [
"MIT"
] | permissive | YaminiHP/SimilitudeApp | 8cbde52caec3c19d5fa73508fc005f38f79b8418 | 005c59894d8788c97be16ec420c0a43aaec99b80 | refs/heads/master | 2023-06-27T00:03:00.404080 | 2021-07-25T17:51:27 | 2021-07-25T17:51:27 | 389,390,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:afce00f23314b1eaf01e6b9073d8607e187acc93116a0521b1dc0774527c6b52
size 4573
| [
"yamprakash130@gmail.com"
] | yamprakash130@gmail.com |
47bc42a6c5aa9ec5584da4120261c20195f8b5b2 | fdb9bdc6c4ab2f14ba71e544493706d5e275899f | /fhir/resources/R4B/coding.py | fbfc479eed95b3821cf73e2b01b529bc8a6270ca | [
"BSD-3-Clause"
] | permissive | nazrulworld/fhir.resources | 6ae8aea8180c611b0c5050759c6dcdf63e4cb061 | 1fd6ea476b27b3fcb8c4ef8f23bc51cf161e69e3 | refs/heads/main | 2023-08-30T18:27:27.277249 | 2023-07-03T19:57:06 | 2023-07-03T19:57:06 | 165,297,877 | 256 | 83 | NOASSERTION | 2023-08-24T15:34:05 | 2019-01-11T19:26:41 | Python | UTF-8 | Python | false | false | 3,954 | py | # -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Coding
Release: R4B
Version: 4.3.0
Build ID: c475c22
Last updated: 2022-05-28T12:47:40.239+10:00
"""
from pydantic import Field
from . import element, fhirtypes
class Coding(element.Element):
"""Disclaimer: Any field name ends with ``__ext`` doesn't part of
Resource StructureDefinition, instead used to enable Extensibility feature
for FHIR Primitive Data Types.
A reference to a code defined by a terminology system.
"""
resource_type = Field("Coding", const=True)
code: fhirtypes.Code = Field(
None,
alias="code",
title="Symbol in syntax defined by the system",
description=(
"A symbol in syntax defined by the system. The symbol may be a "
"predefined code or an expression in a syntax defined by the coding "
"system (e.g. post-coordination)."
),
# if property is element of this resource.
element_property=True,
)
code__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_code", title="Extension field for ``code``."
)
display: fhirtypes.String = Field(
None,
alias="display",
title="Representation defined by the system",
description=(
"A representation of the meaning of the code in the system, following "
"the rules of the system."
),
# if property is element of this resource.
element_property=True,
)
display__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_display", title="Extension field for ``display``."
)
system: fhirtypes.Uri = Field(
None,
alias="system",
title="Identity of the terminology system",
description=(
"The identification of the code system that defines the meaning of the "
"symbol in the code."
),
# if property is element of this resource.
element_property=True,
)
system__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_system", title="Extension field for ``system``."
)
userSelected: bool = Field(
None,
alias="userSelected",
title="If this coding was chosen directly by the user",
description=(
"Indicates that this coding was chosen by a user directly - e.g. off a "
"pick list of available items (codes or displays)."
),
# if property is element of this resource.
element_property=True,
)
userSelected__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_userSelected", title="Extension field for ``userSelected``."
)
version: fhirtypes.String = Field(
None,
alias="version",
title="Version of the system - if relevant",
description=(
"The version of the code system which was used when choosing this code."
" Note that a well-maintained code system does not need the version "
"reported, because the meaning of codes is consistent across versions. "
"However this cannot consistently be assured, and when the meaning is "
"not guaranteed to be consistent, the version SHOULD be exchanged."
),
# if property is element of this resource.
element_property=True,
)
version__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_version", title="Extension field for ``version``."
)
@classmethod
def elements_sequence(cls):
"""returning all elements names from
``Coding`` according specification,
with preserving original sequence order.
"""
return [
"id",
"extension",
"system",
"version",
"code",
"display",
"userSelected",
]
| [
"connect2nazrul@gmail.com"
] | connect2nazrul@gmail.com |
aabda3c3f3bf812d7795a082ede806aac3d5fa23 | 8b4516cb0b39a9cdd81656d788443992d1cdf11c | /setup.py | 2177e876e8f0ebf3db288220b09885128a7ee444 | [] | no_license | zwl-max/mmdetection_clw | 321e7c2af349bce5d54c4e2388be0833937b9645 | d6d05ce10e2cebe8fac72c06cf79a88e0b1bbefb | refs/heads/main | 2023-04-04T00:42:59.671784 | 2021-04-12T17:06:49 | 2021-04-12T17:06:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,306 | py | #!/usr/bin/env python
import os
from setuptools import find_packages, setup
import torch
from torch.utils.cpp_extension import (BuildExtension, CppExtension,
CUDAExtension)
def readme():
    """Return the project long description from README.md (UTF-8)."""
    with open('README.md', encoding='utf-8') as readme_file:
        return readme_file.read()
version_file = 'mmdet/version.py'
def get_version():
    """Extract __version__ by executing mmdet/version.py in a scratch namespace.

    Fix: open the file with a context manager so the handle is closed (the
    original leaked it), and exec into an explicit dict instead of relying
    on CPython's quirk of exec writing into a function's locals().
    """
    namespace = {}
    with open(version_file, 'r') as f:
        exec(compile(f.read(), version_file, 'exec'), namespace)
    return namespace['__version__']
def make_cuda_ext(name, module, sources, sources_cuda=[]):
    """Build a CUDAExtension when CUDA is usable (or FORCE_CUDA=1 is set),
    otherwise fall back to a CPU-only CppExtension.

    name: extension name relative to `module`; module: dotted package path
    that also serves as the source directory; sources: C++ sources;
    sources_cuda: extra .cu sources compiled only in the CUDA case.
    """
    # NOTE(review): mutable default for sources_cuda — safe only because it
    # is never mutated; a None default would be the conventional fix.
    define_macros = []
    extra_compile_args = {'cxx': []}

    if torch.cuda.is_available() or os.getenv('FORCE_CUDA', '0') == '1':
        define_macros += [('WITH_CUDA', None)]
        extension = CUDAExtension
        # Disable half-precision operator shortcuts in device code.
        extra_compile_args['nvcc'] = [
            '-D__CUDA_NO_HALF_OPERATORS__',
            '-D__CUDA_NO_HALF_CONVERSIONS__',
            '-D__CUDA_NO_HALF2_OPERATORS__',
        ]
        # NOTE(review): extends the caller's `sources` list in place.
        sources += sources_cuda
    else:
        print(f'Compiling {name} without CUDA')
        extension = CppExtension

    return extension(
        name=f'{module}.{name}',
        sources=[os.path.join(*module.split('.'), p) for p in sources],
        define_macros=define_macros,
        extra_compile_args=extra_compile_args)
def parse_requirements(fname='requirements.txt', with_version=True):
    """Parse the package dependencies listed in a requirements file.

    Args:
        fname (str): path to requirements file
        with_version (bool, default=True): if True include version specs

    Returns:
        List[str]: list of requirements items

    CommandLine:
        python -c "import setup; print(setup.parse_requirements())"
    """
    import sys
    from os.path import exists
    import re
    require_fpath = fname

    def parse_line(line):
        """Parse information from a line in a requirements text file."""
        if line.startswith('-r '):
            # Allow specifying requirements in other files
            target = line.split(' ')[1]
            for info in parse_require_file(target):
                yield info
        else:
            info = {'line': line}
            if line.startswith('-e '):
                info['package'] = line.split('#egg=')[1]
            elif '@git+' in line:
                info['package'] = line
            else:
                # Remove versioning from the package.
                # Fix: also recognize <=, <, != and ~= (PEP 508 operators);
                # longer operators come first so '>=' is not split as '>'.
                pat = '(' + '|'.join(['>=', '<=', '==', '!=', '~=', '>', '<']) + ')'
                parts = re.split(pat, line, maxsplit=1)
                parts = [p.strip() for p in parts]

                info['package'] = parts[0]
                if len(parts) > 1:
                    op, rest = parts[1:]
                    if ';' in rest:
                        # Handle platform specific dependencies
                        # http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies
                        version, platform_deps = map(str.strip,
                                                     rest.split(';'))
                        info['platform_deps'] = platform_deps
                    else:
                        version = rest  # NOQA
                    info['version'] = (op, version)
            yield info

    def parse_require_file(fpath):
        """Yield parsed entries for each non-comment line in `fpath`."""
        with open(fpath, 'r') as f:
            for line in f.readlines():
                line = line.strip()
                if line and not line.startswith('#'):
                    for info in parse_line(line):
                        yield info

    def gen_packages_items():
        """Reassemble each parsed entry into a requirement string."""
        if exists(require_fpath):
            for info in parse_require_file(require_fpath):
                parts = [info['package']]
                if with_version and 'version' in info:
                    parts.extend(info['version'])
                if not sys.version.startswith('3.4'):
                    # apparently package_deps are broken in 3.4
                    platform_deps = info.get('platform_deps')
                    if platform_deps is not None:
                        parts.append(';' + platform_deps)
                item = ''.join(parts)
                yield item

    packages = list(gen_packages_items())
    return packages
if __name__ == '__main__':
    # Package metadata and build configuration for the mmdet distribution.
    setup(
        name='mmdet',
        version=get_version(),
        description='OpenMMLab Detection Toolbox and Benchmark',
        long_description=readme(),
        long_description_content_type='text/markdown',
        author='OpenMMLab',
        author_email='openmmlab@gmail.com',
        keywords='computer vision, object detection',
        url='https://github.com/open-mmlab/mmdetection',
        packages=find_packages(exclude=('configs', 'tools', 'demo')),
        classifiers=[
            'Development Status :: 5 - Production/Stable',
            'License :: OSI Approved :: Apache Software License',
            'Operating System :: OS Independent',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.6',
            'Programming Language :: Python :: 3.7',
            'Programming Language :: Python :: 3.8',
        ],
        license='Apache License 2.0',
        # Requirement lists are parsed from the requirements/ tree.
        setup_requires=parse_requirements('requirements/build.txt'),
        tests_require=parse_requirements('requirements/tests.txt'),
        install_requires=parse_requirements('requirements/runtime.txt'),
        extras_require={
            'all': parse_requirements('requirements.txt'),
            'tests': parse_requirements('requirements/tests.txt'),
            'build': parse_requirements('requirements/build.txt'),
            'optional': parse_requirements('requirements/optional.txt'),
        },
        # Deformable-convolution CUDA/C++ ops, compiled via torch's helpers.
        ext_modules=[
            make_cuda_ext(
                name="deform_conv_cuda",
                module="mmdet.models.utils.dcn",
                sources=["src/deform_conv_cuda.cpp", "src/deform_conv_cuda_kernel.cu"],
            ),
            make_cuda_ext(
                name="deform_pool_cuda",
                module="mmdet.models.utils.dcn",
                sources=["src/deform_pool_cuda.cpp", "src/deform_pool_cuda_kernel.cu"],
            ),
        ],
        cmdclass={'build_ext': BuildExtension},
        zip_safe=False)
| [
"623497281@qq.com"
] | 623497281@qq.com |
6482dd6d3f523eae4b49c9c65952243b23ecdf56 | fb7efe44f4d9f30d623f880d0eb620f3a81f0fbd | /components/metrics/net/DEPS | df18a66dd19fb30a0ed7f84f6657cd48a48c334c | [
"BSD-3-Clause"
] | permissive | wzyy2/chromium-browser | 2644b0daf58f8b3caee8a6c09a2b448b2dfe059c | eb905f00a0f7e141e8d6c89be8fb26192a88c4b7 | refs/heads/master | 2022-11-23T20:25:08.120045 | 2018-01-16T06:41:26 | 2018-01-16T06:41:26 | 117,618,467 | 3 | 2 | BSD-3-Clause | 2022-11-20T22:03:57 | 2018-01-16T02:09:10 | null | UTF-8 | Python | false | false | 178 | include_rules = [
"+chromeos/dbus",
"+chromeos/network",
"+components/data_use_measurement/core",
"+components/variations",
"+net",
"+third_party/cros_system_api",
]
| [
"jacob-chen@iotwrt.com"
] | jacob-chen@iotwrt.com | |
ccb8c6765f17d87c44c46722046e8925fa3aacce | 40028d1859d9653386ed6c59e22fc539f17d5c64 | /ecommerseapi/api/models.py | 8a4a50e7074964385d3888b859bf4ea7504f8134 | [] | no_license | shobhit1215/E_Commerse_API | 20da72298c6ded5b4590a84443fb2f9f2dee177c | 76cd8915609f02050138c5c4d47ba13c8050223b | refs/heads/main | 2023-06-08T10:31:11.747416 | 2021-06-06T14:20:47 | 2021-06-06T14:20:47 | 373,506,156 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,931 | py | from django.db import models
from django.contrib.auth.models import User
# Create your models here
class Category(models.Model):
    """A category that groups books and products, identified by its title."""

    title = models.CharField(max_length=100)

    class Meta:
        # Django's auto-generated plural would be "categorys".
        verbose_name_plural='categories'

    def __str__(self):
        return self.title
class Book(models.Model):
    """A book for sale, belonging to a Category."""

    title = models.CharField(max_length=50)
    category = models.ForeignKey(Category,related_name='books',on_delete=models.CASCADE)
    author = models.CharField(max_length=100,default='John Doe')
    isbn = models.CharField(max_length=20)
    pages = models.IntegerField()
    price = models.IntegerField()
    stock = models.IntegerField()  # units currently available
    description = models.TextField()
    imageURL = models.URLField(max_length=500)
    # NOTE(review): presumably "available/visible" — confirm against views.
    status = models.BooleanField(default=True)
    date_created = models.DateField(auto_now_add=True)

    class Meta:
        ordering=['-date_created']  # newest first

    def __str__(self):
        return self.title
class Product(models.Model):
    """A non-book product for sale, belonging to a Category."""

    product_tag = models.CharField(max_length=10)  # short SKU-like tag
    name = models.CharField(max_length=100)
    category = models.ForeignKey(Category,related_name='products',on_delete=models.CASCADE)
    price = models.IntegerField()
    stock = models.IntegerField()  # units currently available
    imageURL = models.URLField()
    # NOTE(review): presumably "available/visible" — confirm against views.
    status = models.BooleanField(default=True)
    date_created = models.DateField(auto_now_add=True)

    class Meta:
        ordering=['-date_created']  # newest first

    def __str__(self):
        return '{} {}'.format(self.product_tag, self.name)
class Cart(models.Model):
    """A user's shopping cart; the owning user doubles as the primary key."""

    cart_id = models.OneToOneField(User,on_delete=models.CASCADE,primary_key=True)
    created_at = models.DateTimeField(auto_now_add=True)
    # NOTE(review): single ForeignKeys mean each cart references exactly one
    # book and one product — a ManyToManyField looks intended; default=1
    # assumes a row with pk=1 exists in each table.  Confirm.
    books = models.ForeignKey(Book,on_delete=models.CASCADE,default=1)
    products = models.ForeignKey(Product,on_delete=models.CASCADE,default=1)

    class Meta:
        ordering = ['cart_id','-created_at']

    def __str__(self):
        return f'{self.cart_id}'
"imshobhit.sb@gmail.com"
] | imshobhit.sb@gmail.com |
9951109087ec9d12722fc9f82d84fd9982978987 | 102a9e14dc7d86c4b397101b426c6846a6949d5d | /drdown/forum/tests/test_model_category.py | b6539b7ce9a0ccf0ea247e153b5b4927dadd1011 | [
"MIT"
] | permissive | fga-eps-mds/2018.1-Dr-Down | 2371535227aed7c09bbae9fd8871b8eac8068c05 | 3423374360105b06ac2c57a320bf2ee8deaa08a3 | refs/heads/develop | 2023-04-13T18:08:44.880516 | 2018-06-25T23:36:27 | 2018-06-25T23:36:27 | 124,143,479 | 3 | 13 | MIT | 2021-03-29T17:31:49 | 2018-03-06T21:55:37 | Python | UTF-8 | Python | false | false | 2,358 | py | from test_plus.test import TestCase
from drdown.forum.models.model_category import Category
class ModelTestCase(TestCase):
    """Tests for the forum Category model.

    Fix: assertEquals/assertNotEquals are deprecated aliases that were
    removed in Python 3.12 — replaced with assertEqual/assertNotEqual.
    """

    def setUp(self):
        """
        This method will run before any test case.
        """
        self.category1 = Category.objects.create(
            name='Medicamentos',
            description='Tipo de Medicamento',
            slug='med',
        )
        self.category2 = Category.objects.create(
            name='Eventos',
            description='Tipo de Eventos',
            slug='event',
        )

    def tearDown(self):
        """
        This method will run after any test.
        """
        self.category1.delete()
        self.category2.delete()

    def test_save_name_ok(self):
        """
        Test to verify if name of category is the correct passed
        """
        self.assertEqual(self.category1.name, 'Medicamentos')
        self.assertEqual(self.category2.name, 'Eventos')

    def test_save_description_ok(self):
        """
        Test to verify if description is the correct passed
        """
        self.assertEqual(self.category1.description, 'Tipo de Medicamento')
        self.assertEqual(self.category2.description, 'Tipo de Eventos')

    def test_save_slug_ok(self):
        """
        Test to verify if slug is the correct passed
        """
        self.assertEqual(self.category1.slug, 'med')
        self.assertEqual(self.category2.slug, 'event')

    def test_save_name_error(self):
        """
        Test to verify if name of category really fail
        """
        self.assertNotEqual(self.category1.name, '')
        self.assertNotEqual(self.category2.name, '')

    def test_save_description_error(self):
        """
        Test to verify if description really fail
        """
        self.assertNotEqual(self.category1.description, '')
        self.assertNotEqual(self.category2.description, '')

    def test_save_slug_error(self):
        """
        Test to verify if slug really fail
        """
        self.assertNotEqual(self.category1.slug, '')
        self.assertNotEqual(self.category2.slug, '')

    def test_str_is_equal_to_title(self):
        """
        Method `__str__` should be equal to field `title`
        """
        self.assertEqual(self.category1.__str__(), self.category1.name)
| [
"joberth.rogers18@gmail.com"
] | joberth.rogers18@gmail.com |
8fc7138fff62bf36605f163a89c9620cc018d8a0 | 53bab92377bf98e2c98e0d94b95c5c6f7c3aef31 | /bbs/templatetags/table.py | 49d0207fbf38a6665c638e2c78d310fdf3811a93 | [] | no_license | seiya0723/assets_manager_01 | e4a3d7b5bf00803be41261d1ac72db9f878bf58d | c421f3e728836d45077f745a7f26766361a9fe24 | refs/heads/master | 2023-05-31T14:12:25.713436 | 2021-06-19T01:04:58 | 2021-06-19T01:04:58 | 378,149,413 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,075 | py | from django import template
register = template.Library()
@register.inclusion_tag("bbs/table.html")
def generate_balance(topics):
    """Build running-balance rows for the bbs/table.html inclusion tag.

    Each row copies the topic's display fields and adds `total`, the
    cumulative income minus spending up to and including that topic.
    Empty income/spending values count as zero.
    """
    # TODO: when paginating, seed `total` with the balance of earlier pages.
    balances = []
    total = 0
    for topic in topics:
        # Fix: collapse the repetitive if/else blocks into conditional
        # expressions and drop the dead commented-out accumulation line.
        income = int(topic.income) if topic.income else 0
        spending = int(topic.spending) if topic.spending else 0
        total = total + income - spending
        balances.append({
            "id": topic.id,
            "category": topic.category,
            "title": topic.title,
            "comment": topic.comment,
            "income": topic.income,
            "spending": topic.spending,
            "dt": topic.dt,
            "pay_dt": topic.pay_dt,
            "total": total,
        })
    return {"balances": balances}
"seiya@asahina"
] | seiya@asahina |
255cd831c415c060347e3abee618ed7d545d1406 | 4cb8c8f11c2a19a75495771b1d6b53881cd67b58 | /Production/test/condorSub/dict_wjets.py | 712d8b7480e597b4af4713672f587c5df75c08b8 | [] | no_license | kpedro88/TreeMaker | d7498274106067e56aebbed7f086fbee8bf46f7e | 0260dc10392e4828452e559beb3b6dc8fa765df5 | refs/heads/upgrade2017 | 2023-07-06T13:24:04.465706 | 2018-04-03T17:56:48 | 2018-04-03T17:56:48 | 38,841,239 | 0 | 1 | null | 2021-11-11T17:20:57 | 2015-07-09T19:35:41 | Python | UTF-8 | Python | false | false | 1,367 | py | flist = {
"scenario": "Summer16",
"samples": [
['Summer16.WJetsToLNu_HT-100To200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1'],
['Summer16.WJetsToLNu_HT-200To400_TuneCUETP8M1_13TeV-madgraphMLM-pythia8'],
['Summer16.WJetsToLNu_HT-200To400_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1'],
['Summer16.WJetsToLNu_HT-400To600_TuneCUETP8M1_13TeV-madgraphMLM-pythia8'],
['Summer16.WJetsToLNu_HT-400To600_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1'],
['Summer16.WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8'],
['Summer16.WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1'],
['Summer16.WJetsToLNu_HT-800To1200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8'],
['Summer16.WJetsToLNu_HT-800To1200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1'],
['Summer16.WJetsToLNu_HT-1200To2500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8'],
['Summer16.WJetsToLNu_HT-1200To2500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1'],
['Summer16.WJetsToLNu_HT-2500ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8'],
['Summer16.WJetsToLNu_HT-2500ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1'],
['Summer16.WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8'],
['Summer16.WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext2'],
]
}
| [
"kpedro88@gmail.com"
] | kpedro88@gmail.com |
5aacee947944b556598823f2dc0174abf527df76 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_1/kochie/main.py | d0b158a2aab7b94c30ec932f4a95c829ee42d1d4 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 904 | py | __author__ = 'Robert'
def main():
    """Read A-large.in, solve each case, print it and append to A-large.out."""
    with open("A-large.in") as input_data:
        data = input_data.readlines()
    # data[0] is the case count; cases are 1-indexed from the second line.
    for i in range(1, len(data)):
        # Fix: strip() instead of [0:-1] — the old slice chopped the last
        # digit whenever the final line had no trailing newline.
        solution = count_sheep(data[i].strip())
        print("Case #{0}: {1}".format(i, solution))
        # NOTE(review): "a" appends across repeated runs — confirm intended.
        with open("A-large.out", "a") as out_data:
            out_data.write("Case #{0}: {1}\n".format(i, solution))
def count_sheep(number):
    """Count multiples of `number` until every decimal digit 0-9 has been
    seen; return the last multiple counted, or "INSOMNIA" when the digits
    can never all appear (number == 0).  Accepts int or numeric string.
    """
    step = int(number)
    if step == 0:
        return "INSOMNIA"
    unseen = set("0123456789")
    current = step
    while unseen:
        unseen -= set(str(current))
        current += step
    return current - step
if __name__ == '__main__':
    # Script entry point: process A-large.in and write A-large.out.
    main()
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
f6a6a40912984765f880af58386530c7c612fdb1 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/FrogRiver_20200723131519.py | 6effa38dcc4a1b3b5fdf8f76bfbf253c3caa9f2f | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | def Frog(X,A):
# given x where the frog wants to go
# find earliest time
# once you get the second that has that position
# return the second
for i in A:
print(Frog(5,[1,3,1,4,2,3,]))
| [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
d497ebe83fb0fc8317599a55ee0b0965f03144e7 | 795df757ef84073c3adaf552d5f4b79fcb111bad | /elliptic_integral/elliptic_pia.py | cf5577cb036a99449ebb86a48fffbc5007c68c9e | [] | no_license | tnakaicode/jburkardt-python | 02cb2f9ba817abf158fc93203eb17bf1cb3a5008 | 1a63f7664e47d6b81c07f2261b44f472adc4274d | refs/heads/master | 2022-05-21T04:41:37.611658 | 2022-04-09T03:31:00 | 2022-04-09T03:31:00 | 243,854,197 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,415 | py | #! /usr/bin/env python3
#
def elliptic_pia ( n, a ):
  """Evaluate the complete elliptic integral Pi(N,A).

  One form of the complete elliptic integral of the third kind:

      Pi(N,A) = integral ( 0 <= T <= PI/2 )
          dT / ( (1 - N sin^2(T)) sqrt(1 - sin^2(A) sin^2(T)) )

  with the angle A given in degrees.  Computed from the Carlson symmetric
  forms: with k = sin(a*pi/180),

      Pi(n,k) = RF(0, 1-k^2, 1) + (n/3) RJ(0, 1-k^2, 1, 1-n)

  Licensing: distributed under the GNU LGPL license.
  Modified: 02 June 2018.  Author: John Burkardt.

  Input: real N, A, the arguments.  Output: real VALUE.
  """
  from rf import rf
  from rj import rj
  import numpy as np

  k = np.sin ( a * np.pi / 180.0 )
  y = ( 1.0 - k ) * ( 1.0 + k )
  errtol = 1.0E-03

  first, ierr = rf ( 0.0, y, 1.0, errtol )
  second, ierr = rj ( 0.0, y, 1.0, 1.0 - n, errtol )

  return first + n * second / 3.0
def elliptic_pia_test ( ):

#*****************************************************************************80
#
## ELLIPTIC_PIA_TEST tests ELLIPTIC_PIA.
#
#  Compares ELLIPTIC_PIA against tabulated reference values.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Modified:
#
#    02 June 2018
#
#  Author:
#
#    John Burkardt
#
  from elliptic_pia_values import elliptic_pia_values

  print ( '' )
  print ( 'ELLIPTIC_PIA_TEST:' )
  print ( '  ELLIPTIC_PIA returns values of' )
  print ( '  the complete elliptic integral of the' )
  print ( '  third kind, with parameter angle A.' )
  print ( '' )
  print ( '      N             A       Pi(N,A)          Pi(N,A)' )
  print ( '                            Tabulated        Calculated' )
  print ( '' )

  n_data = 0

  while ( True ):

    n_data, n, a, pia = elliptic_pia_values ( n_data )

    # n_data == 0 signals that the tabulated data is exhausted.
    if ( n_data == 0 ):
      break

    pia2 = elliptic_pia ( n, a )

    print ( '  %14.6f  %14.6f  %24.16g  %24.16g' % ( n, a, pia, pia2 ) )

  return
if ( __name__ == '__main__' ):
  from timestamp import timestamp
  # Bracket the self-test with timestamps, per the library's convention.
  timestamp ( )
  elliptic_pia_test ( )
  timestamp ( )
| [
"tnakaicode@gmail.com"
] | tnakaicode@gmail.com |
e0b2f99cbca64ba9f6779056bc840ddd61ca06bd | bc441bb06b8948288f110af63feda4e798f30225 | /easy_flow_sdk/model/topology/property_pb2.pyi | d0dbef32cc3fe22a59c299ab9123bf7e0f71f90a | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,822 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from easy_flow_sdk.model.topology.cmdb_instance_pb2 import (
CmdbInstance as easy_flow_sdk___model___topology___cmdb_instance_pb2___CmdbInstance,
)
from easy_flow_sdk.model.topology.strategy_pb2 import (
Strategy as easy_flow_sdk___model___topology___strategy_pb2___Strategy,
)
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class Property(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
objectId = ... # type: typing___Text
instanceId = ... # type: typing___Text
@property
def strategy(self) -> easy_flow_sdk___model___topology___strategy_pb2___Strategy: ...
@property
def relateInstances(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[easy_flow_sdk___model___topology___cmdb_instance_pb2___CmdbInstance]: ...
def __init__(self,
*,
objectId : typing___Optional[typing___Text] = None,
instanceId : typing___Optional[typing___Text] = None,
strategy : typing___Optional[easy_flow_sdk___model___topology___strategy_pb2___Strategy] = None,
relateInstances : typing___Optional[typing___Iterable[easy_flow_sdk___model___topology___cmdb_instance_pb2___CmdbInstance]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Property: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Property: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"strategy",b"strategy"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"instanceId",b"instanceId",u"objectId",b"objectId",u"relateInstances",b"relateInstances",u"strategy",b"strategy"]) -> None: ...
| [
"service@easyops.cn"
] | service@easyops.cn |
bbbd2d726161d9fb00f8dc6eefd6b35ec2230fb4 | 3da6b8a0c049a403374e787149d9523012a1f0fc | /Coder_Old/pycharm_daima/爬虫大师班/代理/ip_test.py | 2f983188dcfea397ba5a11c983db0967a738a609 | [] | no_license | AndersonHJB/PyCharm_Coder | d65250d943e84b523f022f65ef74b13e7c5bc348 | 32f2866f68cc3a391795247d6aba69a7156e6196 | refs/heads/master | 2022-07-25T11:43:58.057376 | 2021-08-03T02:50:01 | 2021-08-03T02:50:01 | 348,922,058 | 3 | 3 | null | 2021-09-05T02:20:10 | 2021-03-18T02:57:16 | Python | UTF-8 | Python | false | false | 1,308 | py | # !/usr/bin/python3
# -*- coding: utf-8 -*-
# @Author:AI悦创 @DateTime :2020/2/2 15:36 @Function :功能 Development_tool :PyCharm
# code is far away from bugs with the god animal protecting
# I love animals. They taste delicious.
import requests
# url = 'https://www.kuaidaili.com/free/'
# url = 'https://www.kuaidaili.com/free/inha/2/'
# url = 'https://www.kuaidaili.com/free/inha/3/'
# url = 'https://www.kuaidaili.com/free/inha/4/'
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36',
'Referer': 'https://www.kuaidaili.com',
}
session = requests.Session()
session.headers = headers
def Crawl_Spider():
url_list = ['https://www.kuaidaili.com/free/inha/{}'.format(page) for page in range(1,3)]
for url in url_list:
print(url)
html = session.get(url, headers = headers)
# html.encoding = html.apparent_encoding
html.encoding = 'gbk'
print(html.status_code)
print(html.text)
if __name__ == '__main__':
Crawl_Spider()
# proxies = {
# 'http': 'http://10.10.1.10:3128',
# 'https': 'http://10.10.1.10:1080',
# }
# try:
# html = requests.get('http://icanhazip.com', proxies=proxies, timeout=1)
# print(html.status_code)
# except:
# pass | [
"1432803776@qq.com"
] | 1432803776@qq.com |
61027afb89c2cbe3b76699d2d494ba891d4779b8 | e514bbdf8e0abe5ef0b58b94fe5f7d2afb38ea6b | /test_suite/system_tests/scripts/frame_order/cam/iso_cone.py | 89a05903a501948e08f4b948fb8b3f2df9ede2b7 | [] | no_license | edward-dauvergne/relax | 98ad63703e68a4535bfef3d6c0529e07cc84ff29 | 9710dc0f2dfe797f413756272d4bec83cf6ca1c9 | refs/heads/master | 2020-04-07T04:25:25.382027 | 2017-01-04T15:38:09 | 2017-01-04T15:38:09 | 46,500,334 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,134 | py | ###############################################################################
# #
# Copyright (C) 2012-2014 Edward d'Auvergne #
# #
# This file is part of the program relax (http://www.nmr-relax.com). #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
# Module docstring.
"""Script for optimising the isotropic cone frame order test model of CaM."""
# relax module imports.
from base_script import Base_script
from lib.frame_order.variables import MODEL_ISO_CONE
class Analysis(Base_script):
# Set up some class variables.
DIRECTORY = 'iso_cone'
MODEL = MODEL_ISO_CONE
AXIS_THETA = 0.96007997859534299767
AXIS_PHI = 4.03227550621962294031
CONE_THETA = 0.6
CONE_SIGMA_MAX = 0.9
#LOAD_STATE = True
# Execute the analysis.
Analysis(self._execute_uf)
| [
"bugman@b7916896-f9f9-0310-9fe5-b3996d8957d5"
] | bugman@b7916896-f9f9-0310-9fe5-b3996d8957d5 |
b0baa479b033bd29837c2a9f4e3a838c7a5127db | 9140ba97a4ff6e9ef9f4e49d67ab238b669a3597 | /verify/migrations/0001_initial.py | c3671a31e432d3d8842519b23a9845a56ab6c277 | [] | no_license | poojapauskar/foodromeoproject-api | 877ada5d72db0ac364e735a1ad7af0f46ad02bcc | 2007ed7ae12a3f5d1d227faaccaf7e6bd93f760d | refs/heads/master | 2021-01-21T13:30:31.838067 | 2016-05-09T06:41:10 | 2016-05-09T06:41:10 | 54,105,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 869 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-19 12:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Verify',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('email', models.EmailField(default=b'', max_length=100)),
('password', models.CharField(default=b'', max_length=100)),
('confirmed', models.CharField(default=b'', max_length=100)),
],
options={
'ordering': ('created',),
},
),
]
| [
"git.poojapauskar@gmail.com"
] | git.poojapauskar@gmail.com |
757da35bcf9e29eb2f8637c05bb3c0f0bf47ff64 | 786232b3c9eac87728cbf2b5c5636d7b6f10f807 | /Leetcode/medium/179.py | bdfde1bbaf3d55528702f8c3f435055590278c86 | [] | no_license | luoyanhan/Algorithm-and-data-structure | c9ada2e123fae33826975665be37ca625940ddd4 | fb42c3a193f58360f6b6f3b7d5d755cd6e80ad5b | refs/heads/master | 2021-12-22T15:45:28.260386 | 2021-12-02T03:08:35 | 2021-12-02T03:08:35 | 251,007,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | class Solution:
def largestNumber(self, nums):
from functools import cmp_to_key
temp = sorted(list(map(str, nums)), key=cmp_to_key(lambda x, y: int(x+y)-int(y+x)), reverse=True)
return ''.join(temp if temp[0] != '0' else '0')
print(Solution().largestNumber([10,2])) | [
"707025023@qq.com"
] | 707025023@qq.com |
629fe2adc3713e315432d3b15fd4fba274bebdcb | ae13e905feec06f2f94245481b31fcb605e485de | /practice/algorithms/sorting/running_time_of_algorithms.py | 17ad574e552f3d72e89f7b310cc6e374b6d893e3 | [] | no_license | feadoor/hackerrank | e7a84bb20c01d420a3c37f0a7e5176ab0aac6604 | 8fa88b71d37ae83b0826a76499c9e69f947d0aeb | refs/heads/master | 2021-05-04T17:28:27.089671 | 2019-02-21T17:25:34 | 2019-02-21T17:25:34 | 120,271,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | #!/usr/local/bin/pypy3
def read_space_separated_integers():
return [int(x) for x in input().strip().split(' ')]
def do_step(values, curr_idx):
curr = values[curr_idx]
for idx in range(curr_idx - 1, -1, -1):
if values[idx] > curr:
values[idx + 1] = values[idx]
else:
values[idx + 1] = curr
return curr_idx - idx - 1
else:
values[0] = curr
return curr_idx
def insertion_sort_shifts(values):
return sum(do_step(values, idx) for idx in range(1, len(values)))
def main():
_, values = input(), read_space_separated_integers()
print(insertion_sort_shifts(values))
if __name__ == '__main__':
main()
| [
"sam.capplemanlynes@gmail.com"
] | sam.capplemanlynes@gmail.com |
5dc83ed2b8b1d5fbec5ca51398141236af407567 | aa4024b6a846d2f6032a9b79a89d2e29b67d0e49 | /GM2AUTOSAR_MM/Properties/unit_contracts/HUnitR04a_IsolatedLHS.py | 1ba6aae69ee4d2788ef8b611c16c451ffdad007c | [
"MIT"
] | permissive | levilucio/SyVOLT | 41311743d23fdb0b569300df464709c4954b8300 | 0f88827a653f2e9d3bb7b839a5253e74d48379dc | refs/heads/master | 2023-08-11T22:14:01.998341 | 2023-07-21T13:33:36 | 2023-07-21T13:33:36 | 36,246,850 | 3 | 2 | MIT | 2023-07-21T13:33:39 | 2015-05-25T18:15:26 | Python | UTF-8 | Python | false | false | 2,513 | py | from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HUnitR04a_IsolatedLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HUnitR04a_IsolatedLHS
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HUnitR04a_IsolatedLHS, self).__init__(name='HUnitR04a_IsolatedLHS', num_nodes=0, edges=[])
# Add the edges
self.add_edges([])
# Set the graph attributes
self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule']
self["MT_constraint__"] = """return True"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'HUnitR04a_IsolatedLHS')
self["equations"] = []
# Set the node attributes
# match class PhysicalNode(4.0.m.0PhysicalNode) node
self.add_node()
self.vs[0]["MT_pre__attr1"] = """return True"""
self.vs[0]["MT_label__"] = """1"""
self.vs[0]["mm__"] = """MT_pre__PhysicalNode"""
self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'4.0.m.0PhysicalNode')
# match class Partition(4.0.m.1Partition) node
self.add_node()
self.vs[1]["MT_pre__attr1"] = """return True"""
self.vs[1]["MT_label__"] = """2"""
self.vs[1]["mm__"] = """MT_pre__Partition"""
self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'4.0.m.1Partition')
# match class Module(4.0.m.2Module) node
self.add_node()
self.vs[2]["MT_pre__attr1"] = """return True"""
self.vs[2]["MT_label__"] = """3"""
self.vs[2]["mm__"] = """MT_pre__Module"""
self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'4.0.m.2Module')
# match class Scheduler(4.0.m.3Scheduler) node
self.add_node()
self.vs[3]["MT_pre__attr1"] = """return True"""
self.vs[3]["MT_label__"] = """4"""
self.vs[3]["mm__"] = """MT_pre__Scheduler"""
self.vs[3]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'4.0.m.3Scheduler')
# match class Service(4.0.m.4Service) node
self.add_node()
self.vs[4]["MT_pre__attr1"] = """return True"""
self.vs[4]["MT_label__"] = """5"""
self.vs[4]["mm__"] = """MT_pre__Service"""
self.vs[4]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'4.0.m.4Service')
# define evaluation methods for each apply class.
def eval_attr11(self, attr_value, this):
return True
def eval_attr12(self, attr_value, this):
return True
def eval_attr13(self, attr_value, this):
return True
def eval_attr14(self, attr_value, this):
return True
def eval_attr15(self, attr_value, this):
return True
def constraint(self, PreNode, graph):
return True
| [
"bentleyjoakes@gmail.com"
] | bentleyjoakes@gmail.com |
ac87d87525a2a7a63d565f016d76f063a999f440 | 2bdedcda705f6dcf45a1e9a090377f892bcb58bb | /src/main/output/DNS/car/job_group_morning_mother/history/way/information/car.py | 9a3868ea3b6b3e2c6836fc812a9b7b4faa377dcb | [] | no_license | matkosoric/GenericNameTesting | 860a22af1098dda9ea9e24a1fc681bb728aa2d69 | 03f4a38229c28bc6d83258e5a84fce4b189d5f00 | refs/heads/master | 2021-01-08T22:35:20.022350 | 2020-02-21T11:28:21 | 2020-02-21T11:28:21 | 242,123,053 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,564 | py | 'use strict';
let https = require ('https');
// **********************************************
// *** Update or verify the following values. ***
// **********************************************
// Replace the subscriptionKey string value with your valid subscription key.
let subscriptionKey = '956bf92f63606179b70abdd657524465';
let host = 'api.microsofttranslator.com';
let path = '/V2/Http.svc/TranslateArray';
let target = 'fr-fr';
let params = '';
let ns = "http://schemas.microsoft.com/2003/10/Serialization/Arrays";
let content =
'<TranslateArrayRequest>\n' +
// NOTE: AppId is required, but it can be empty because we are sending the Ocp-Apim-Subscription-Key header.
' <AppId />\n' +
' <Texts>\n' +
' <string xmlns=\"' + ns + '\">Hello</string>\n' +
' <string xmlns=\"' + ns + '\">Goodbye</string>\n' +
' </Texts>\n' +
' <To>' + target + '</To>\n' +
'</TranslateArrayRequest>\n';
let response_handler = function (response) {
let body = '';
response.on ('data', function (d) {
body += d;
});
response.on ('end', function () {
console.log (body);
});
response.on ('error', function (e) {
console.log ('Error: ' + e.message);
});
};
let TranslateArray = function () {
let request_params = {
method : 'POST',
hostname : host,
path : path + params,
headers : {
'Content-Type' : 'text/xml',
'30d5b1752ddf8cef3c90f48f26c8fbf5' : subscriptionKey,
}
};
let req = https.request (request_params, response_handler);
req.write (content);
req.end ();
}
TranslateArray ();
| [
"soric.matko@gmail.com"
] | soric.matko@gmail.com |
bf4f5a8e047458e90c20456c37a3e40e2fda51c2 | 77b16dcd465b497c22cf3c096fa5c7d887d9b0c2 | /Tan_ShinYi/Assignments/Python_Fundamentals/Selection_Sort.py | 09d9c318ca543d40256519af0d891fbfe65a5aac | [
"MIT"
] | permissive | curest0x1021/Python-Django-Web | a7cf8a45e0b924ce23791c18f6a6fb3732c36322 | 6264bc4c90ef1432ba0902c76b567cf3caaae221 | refs/heads/master | 2020-04-26T17:14:20.277967 | 2016-10-18T21:54:39 | 2016-10-18T21:54:39 | 173,706,702 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 412 | py | import random
nums = []
for i in range(100):
nums.append(int((random.random())*10000)) #fills empty nums list with 100 random int's from 0-1000
count=0
while count<(len(nums)-1):
min_place=count
for i in range(count,len(nums)):
if nums[i]<nums[min_place]:
min_place=i
nums[count], nums[min_place]=nums[min_place], nums[count]
count+=1
print nums #prints sorted list
| [
"43941751+curest0x1021@users.noreply.github.com"
] | 43941751+curest0x1021@users.noreply.github.com |
086d8ff799a7540f1931f3cd2a406d7bedad981d | 1e1e4ca56b0363d267d9cbef2ea24fdb6f52c025 | /day10/进程池.py | 2c4017405c16b3bcf4c886842c6c792d9867239e | [] | no_license | yuyuyuyushui/s14 | c478ec59f3f5e63cd0e336e30ab3e8dea92f5894 | bcc1716c2e2dab86f1fd529f654d8b34fd7efb93 | refs/heads/master | 2021-04-06T00:33:05.542098 | 2018-03-24T15:52:04 | 2018-03-24T15:52:04 | 116,809,247 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 412 | py | import multiprocessing
import os,time
def f(i):
time.sleep(2)
print(os.getpid())
return i+200
def bar(arg):
print('--exce--',arg,os.getpid())
if __name__ == '__main__':
pool = multiprocessing.Pool(processes=5)#建立进程池
print(os.getpid())
for i in range(10):
pool.apply_async(func=f,args=(i,),callback=bar)#并行调用
print('end')
pool.close()
pool.join() | [
"786313105@qq.com"
] | 786313105@qq.com |
061e1a6a8220ce1294d21994f1de664fcd50ee6d | 88b91b19a659e90aea4b2cf4da6418f2fd04b9ef | /testPractice/repository/rental_repository.py | c26675b066279181cbcef775dc627f6df79d3ce4 | [] | no_license | GeorgianBadita/Python-Problems | d759d7099486179532f9c0f456099ba780b89468 | e0324bf24bfc801cc68404f86c720926e144e5aa | refs/heads/master | 2021-09-03T01:51:49.865850 | 2018-01-04T18:03:03 | 2018-01-04T18:03:03 | 107,010,845 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,805 | py | """
@author: Badita Marin-Georgian
@email: geo.badita@gmail.com
@date: 12/10/2017 19:28
"""
from repository.client_repository import RepositoryException
class RentalRepository:
'''
Class controlling the rental data
'''
def __init__(self, rent_validator):
'''
Function that inits the RentalRepository
:param rent_validator: validator for Rental class
'''
self.__list = {}
self.__validator = rent_validator
def get_all_rents(self):
'''
Function that gets all rents as a list
:return:
'''
return list(self.__list.values())
def store_rental(self, rental):
'''
Function that stores a rental into the list
:post: if the rental doesn't exists in the list, the rental will be added
:param rental: Rental type object
:return:
'''
new_id = len(self.get_all_rents()) + 1
rental.set_rental_id(new_id)
if rental.get_rental_id() not in self.get_all_rents():
self.__list[new_id] = rental
else:
raise RepositoryException("Duplicated ID!")
def find_rental(self, rental_id):
'''
Function that finds a rental by a given id
:param rental_id:
:return:
'''
all_r = self.get_all_rents()
for rental in all_r:
if rental.get_rental_id() == rental_id:
return rental
return None
def delete_rental(self, rental_id):
'''
Functon that deletes a rental, by rental_id
:param rental_id:
:return:
'''
rent_del = self.find_rental(rental_id)
if rental_id is None:
return None
del self.__list[rental_id]
return rent_del
| [
"geo.badita@gmail.com"
] | geo.badita@gmail.com |
1b403491a8d428779b0a35f972ab5513d4e83ace | c90674d955fe1399c0e99cf34437e583d1cf9fb9 | /loop2.py | 824c0c074802ea462bb02e3e49e7750afaf7f5ae | [] | no_license | TrellixVulnTeam/My_python_code_QQZ2 | 556878cbe4f8d6d92e71f48285a6d2439b10ca81 | 8cd8b697d92e1a79cce109baf560eeff27717ce8 | refs/heads/master | 2023-03-19T15:26:35.836114 | 2018-06-29T14:09:06 | 2018-06-29T14:09:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | names = ["a","b","c"]
emaildomain = ["gmail","hotmail","yahoo"]
for i,j in zip(names,emaildomain):
text = i+"@"+j
print(text) | [
"apple@Apples-MacBook-Pro.local"
] | apple@Apples-MacBook-Pro.local |
43956a4b7bdeceabe1c6152721cde3980124cf70 | 473fc28d466ddbe9758ca49c7d4fb42e7d82586e | /app/src/main/java/com/syd/source/aosp/external/toolchain-utils/get_common_image_version.py | da36b98fcf8ba6ea1b5a51ae49d4d952fd6136e3 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | lz-purple/Source | a7788070623f2965a8caa3264778f48d17372bab | e2745b756317aac3c7a27a4c10bdfe0921a82a1c | refs/heads/master | 2020-12-23T17:03:12.412572 | 2020-01-31T01:54:37 | 2020-01-31T01:54:37 | 237,205,127 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,561 | py | #!/usr/bin/python2
#
# Copyright 2013 Google Inc. All Rights Reserved.
"""Script to find list of common images (first beta releases) in Chromeos.
Display information about stable ChromeOS/Chrome versions to be used
by the team developers. The purpose is to increase team productivity
by using stable (known and tested) ChromeOS/Chrome versions instead of
using randomly selected versions. Currently we define as a "stable"
version the first Beta release in a particular release cycle.
"""
from __future__ import print_function
__author__ = 'llozano@google.com (Luis Lozano)'
import argparse
import pickle
import re
import sys
import urllib
VERSIONS_HISTORY_URL = 'http://cros-omahaproxy.appspot.com/history'
def DisplayBetas(betas):
print('List of betas from %s' % VERSIONS_HISTORY_URL)
for beta in betas:
print(' Release', beta['chrome_major_version'], beta)
return
def FindAllBetas(all_versions):
"""Get ChromeOS first betas from History URL."""
all_betas = []
prev_beta = {}
for line in all_versions:
match_obj = re.match(
r'(?P<date>.*),(?P<chromeos_version>.*),'
r'(?P<chrome_major_version>\d*).(?P<chrome_minor_version>.*),'
r'(?P<chrome_appid>.*),beta-channel,,Samsung Chromebook Series 5 550',
line)
if match_obj:
if prev_beta:
if (prev_beta['chrome_major_version'] !=
match_obj.group('chrome_major_version')):
all_betas.append(prev_beta)
prev_beta = match_obj.groupdict()
if prev_beta:
all_betas.append(prev_beta)
return all_betas
def SerializeBetas(all_betas, serialize_file):
with open(serialize_file, 'wb') as f:
pickle.dump(all_betas, f)
print('Serialized list of betas into', serialize_file)
return
def Main(argv):
"""Get ChromeOS first betas list from history URL."""
parser = argparse.ArgumentParser()
parser.add_argument('--serialize',
dest='serialize',
default=None,
help='Save list of common images into the specified '
'file.')
options = parser.parse_args(argv)
try:
opener = urllib.URLopener()
all_versions = opener.open(VERSIONS_HISTORY_URL)
except IOError as ioe:
print('Cannot open', VERSIONS_HISTORY_URL)
print(ioe)
return 1
all_betas = FindAllBetas(all_versions)
DisplayBetas(all_betas)
if options.serialize:
SerializeBetas(all_betas, options.serialize)
all_versions.close()
return 0
if __name__ == '__main__':
retval = Main(sys.argv[1:])
sys.exit(retval)
| [
"997530783@qq.com"
] | 997530783@qq.com |
6583249fabc74884a3eef7897d442bc0e7b7e6c4 | d6f84bfc45de7e0e2cb9d058549642b4a50bb23c | /draw2image.py | 989cc8209b80108b46ef6ea9512390f77600b50e | [] | no_license | MadhuV99/pypilzet | f007de7213b1cc190f379d9f98ad977d9566e83e | 7583b2480fbf32ec8ef8bb62ae685a9c9fafb4cc | refs/heads/main | 2023-02-08T11:07:16.855357 | 2020-12-27T03:36:50 | 2020-12-27T03:36:50 | 324,604,495 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 554 | py | # draw2image.py
#!/usr/bin/python
from PIL import Image, ImageDraw
import sys
my_img_rect = Image.new('RGBA', (200, 200), 'white')
idraw = ImageDraw.Draw(my_img_rect)
idraw.rectangle((10, 10, 100, 100), fill='blue')
my_img_rect.show()
my_img_fldr = r".\my_imgs\\"
my_img_pic = r"rectangle"
my_img_format = r".png"
my_out_pic = my_img_pic+"_draw"
my_out_file = my_img_fldr+my_out_pic+my_img_format
try:
# img.save('rectangle.png')
my_img_rect.save(my_out_file, 'png')
except IOError:
print("Unable to save image")
sys.exit(1) | [
"madhuvasudevan@yahoo.com"
] | madhuvasudevan@yahoo.com |
e8be57f2d75bd38180bc6f61eeb02cdb845ad0e4 | 1adb3f388f06e11d9f85ba00be61931b16304146 | /brc_python/reviews/admin.py | d3ad9f6c08e08a7eefd873863a93d89397b79c2a | [] | no_license | jeanruggiero/brc-web | b315e2670a6251a4768b01fe6663c213e5c9d63f | 7ac2301a8019da4ffa8e16ea2face94b75dadfa7 | refs/heads/master | 2021-09-24T00:46:17.498207 | 2020-01-27T02:59:41 | 2020-01-27T02:59:41 | 228,442,226 | 0 | 0 | null | 2021-09-22T18:28:07 | 2019-12-16T17:44:56 | JavaScript | UTF-8 | Python | false | false | 317 | py | from django.contrib import admin
from .models import LeavenworthReview, SkillsNightReview, Squamish1Review, \
Squamish2Review, GradClimbReview, InstructorReview
admin.site.register([LeavenworthReview, SkillsNightReview, Squamish1Review,
Squamish2Review, GradClimbReview, InstructorReview])
| [
"jeanruggiero@gmail.com"
] | jeanruggiero@gmail.com |
93fc3918c0adf0e6cc267eb31cea767f7c925ba3 | 74be814f7cd10d3c91a53460bd6698aa8bc95704 | /剑指offer/面试题66. 构建乘积数组.py | 90873a93e5900fe421206f4e8f6a77e68c282d3f | [] | no_license | weiyuyan/LeetCode | 7202f7422bc3bef6bd35ea299550b51905401656 | 19db0e78826d3e3d27d2574abd9d461eb41458d1 | refs/heads/master | 2020-12-03T17:10:53.738507 | 2020-05-27T08:28:36 | 2020-05-27T08:28:36 | 231,402,839 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,536 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# author:ShidongDu time:2020/3/7
'''
给定一个数组 A[0,1,…,n-1],请构建一个数组 B[0,1,…,n-1],其中 B 中的元素 B[i]=A[0]×A[1]×…×A[i-1]×A[i+1]×…×A[n-1]。
不能使用除法。
示例:
输入: [1,2,3,4,5]
输出: [120,60,40,30,24]
提示:
所有元素乘积之和不会溢出 32 位整数
a.length <= 100000
'''
# 构造前缀树
class Solution:
def constructArr(self, A):
if not A:
return []
prefix = [1]*len(A)
prefix[0]=1
for i in range(1,len(A)):
prefix[i] = prefix[i-1]*A[i-1]
cur_suffix = 1
for i in range(len(A)-1,-1,-1):
prefix[i] = prefix[i] * cur_suffix
cur_suffix *= A[i]
return prefix
# 方法二
from typing import List
class Solution:
# 将数组分为两部分数组C和D
# C[i] = A[0]*A[1]*...*A[i-1]
# D[i] = A[i+1]*A[i+2]*...*A[n-1]
# C[i]可以用自上到下的方法算出来,即C[i] = C[i-1]*A[i-1]
# D[i]可以用自下到上的方法算出来,即D[i] = D[i+1]*A[i+1]
def constructArr(self, a: List[int]) -> List[int]:
C, D = [1]*len(a), [1]*len(a)
res = []
for i in range(1, len(a)):
C[i] = C[i-1]*a[i-1]
for j in range(len(a)-2, -1, -1):
D[j] = D[j+1]*a[j+1]
for k in range(len(a)):
res.append(C[k]*D[k])
return res
solution = Solution()
array = [1, 2]
res = solution.constructArr(array)
print(res) | [
"244128764@qq.com"
] | 244128764@qq.com |
0aaa65c940fe287810eb79b7feb929c0cc22be6e | 001002103510c9f96addeaea7a9861ca24442829 | /src/data/datasets/__init__.py | 0046350ab2049495ba41179e8cbcce31ba057395 | [] | no_license | Tung-I/Mango | d21caf95d737947730186d4369c3327a11ff00d2 | 7cf2ee84251e8c6c07f37a52dec8750f322086bb | refs/heads/master | 2022-10-25T04:58:09.701042 | 2020-06-16T07:28:51 | 2020-06-16T07:28:51 | 261,380,873 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | from .base_dataset import BaseDataset
from .mango_dataset import MangoDataset
from .cifar_dataset import CIFARDataset
from .aug_dataset import AugDataset
from .test_dataset import TestDataset | [
"dong893610@gmail.com"
] | dong893610@gmail.com |
4de0ff2cd80ffb3061918f2941c6feffb3a36ff4 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/resolve/TupleInExcept.py | 1ebd91de1cde02793b85153112647b10932d971c | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 187 | py | import errno
try:
f = open('myfile.txt')
s = f.readline()
i = int(s.strip())
except IOError as (errno, strerror):
print "I/O error({0}): {1}".format(e<ref>rrno, strerror)
| [
"yole@jetbrains.com"
] | yole@jetbrains.com |
d20e5a24ef5e784cc61dd6f91edf377bf6f668d6 | 439386f9097632d44d31d1f599df76ec2820d072 | /常规项目/约牌房/1600/YuePai/src/cases/dfqp_enter.py | 8336784584c0075cb0c7edab93f0eeade1ec7387 | [] | no_license | YiFeng0755/testcase | 33693f0940a6497aa40e2e51a0535c9eb6c12b29 | edc19480c3e94cbcbf004aa9d20099ec6d1b9304 | refs/heads/master | 2020-04-28T04:34:28.232022 | 2019-03-11T11:13:25 | 2019-03-11T11:13:25 | 146,287,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,845 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
入口
'''
import time
from runcenter.enums import EnumPriority,EnumStatus
from runcenter.testcase import debug_run_all,TestCase
from uilib.hall_page import Hall_Page
from uilib.game_page import Game_Page
from uilib.yuepai_page import Yuepai_Page
from common.common import Common
class C70432_Yuepai_display(TestCase):
'''
约牌房开启,游戏选场列表正常显示约牌房入口
'''
owner = "LucyLiu"
status = EnumStatus.Design
priority = EnumPriority.High
timeout = 10
def pre_test(self):
self.common = Common()
self.hall_page = Hall_Page()
self.game_page = Game_Page()
self.yuepai_page = Yuepai_Page()
# 初始化Luadriver
self.start_step("初始化driver")
self.luadriver = self.common.setupdriver()
# 每个用例都需要关闭活动,把这个放在初始化里面实现
self.common.closeactivity(self.luadriver)
def run_test(self):
'''
测试用例
'''
self.start_step("等待页面加载完成")
self.hall_page.wait_element("同步标志")
self.start_step("获取子游戏列表")
game_list = self.game_page.get_game_list()
for i in range(len(game_list)):
game_list[i].click()
self.game_page.game_is_download()
if (self.game_page.element_is_exist("约牌按钮")==True):
self.game_page.screenshot("%s.png" %game_list[i].get_attribute("name"))
self.game_page.wait_element("约牌按钮").click()
self.start_step("进入约牌房")
self.game_page.wait_element("返回").click()
time.sleep(3)
else:
self.log_info("无约牌房")
try:
self.game_page.wait_element("返回1",20).click()
except:
self.log_info("返回失败")
def post_test(self):
'''
测试用例执行完成后,清理测试环境
'''
self.common.closedriver()
class C70433_Yuepai_display(TestCase):
'''
约牌房关闭,游戏选场列表正常隐藏约牌房入口
'''
owner = "LucyLiu"
status = EnumStatus.Design
priority = EnumPriority.High
timeout = 10
def pre_test(self):
self.common = Common()
self.hall_page = Hall_Page()
self.game_page = Game_Page()
self.yuepai_page = Yuepai_Page()
# 初始化Luadriver
self.start_step("初始化driver")
self.luadriver = self.common.setupdriver()
# 每个用例都需要关闭活动,把这个放在初始化里面实现
self.common.closeactivity(self.luadriver)
def run_test(self):
'''
测试用例
'''
self.start_step("等待页面加载完成")
self.hall_page.wait_element("同步标志")
self.start_step("获取子游戏列表")
game_list = self.game_page.get_game_list()
for i in range(len(game_list)):
game_list[i].click()
self.game_page.game_is_download()
if (self.game_page.element_is_exist("约牌按钮") == False):
self.game_page.screenshot("%s.png" % game_list[i].get_attribute("name"))
else:
self.log_info("有约牌房")
try:
self.game_page.wait_element("返回1", 20).click()
except:
self.log_info("返回失败")
def post_test(self):
'''
测试用例执行完成后,清理测试环境
'''
self.common.closedriver()
# __qtaf_seq_tests__ = [C70524_Recorddisplay]
if __name__ == '__main__':
# C039_DFQP_Activity = C039_DFQP_Activity()
# C039_DFQP_Activity.debug_run()
debug_run_all()
| [
"YoungLiu@boyaa.com"
] | YoungLiu@boyaa.com |
d793f58074e7c3e61444249240e95137417aac64 | 39a1d46fdf2acb22759774a027a09aa9d10103ba | /model-optimizer/unit_tests/extensions/front/CTCGreedyDecoderReplacement_test.py | 063d71173e1f7e5c923ffaf1190fff21491ce5c9 | [
"Apache-2.0"
] | permissive | mashoujiang/openvino | 32c9c325ffe44f93a15e87305affd6099d40f3bc | bc3642538190a622265560be6d88096a18d8a842 | refs/heads/master | 2023-07-28T19:39:36.803623 | 2021-07-16T15:55:05 | 2021-07-16T15:55:05 | 355,786,209 | 1 | 3 | Apache-2.0 | 2021-06-30T01:32:47 | 2021-04-08T06:22:16 | C++ | UTF-8 | Python | false | false | 5,203 | py | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import unittest
from extensions.front.CTCGreedyDecoderReplacement import CTCGreedyDecoderReplacement, CTCGreedyDecoderWithSparseToDenseShapeReplacement
from mo.front.common.partial_infer.utils import int64_array
from mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, const
class CTCGreedyDecoderReplacementTests(unittest.TestCase):
    """Tests for the front-phase CTCGreedyDecoder replacement transforms.

    Both tests build a TF-style ``CTCGreedyDecoderSeqLen -> SparseToDense``
    subgraph, run a replacer, and expect the canonical
    ``Transpose -> CTCGreedyDecoderSeqLen -> Result`` graph.  The node table
    and the reference graph were duplicated verbatim in both tests, so they
    are factored into helpers; the public ``test1``/``test2`` entry points
    are unchanged.
    """

    @staticmethod
    def _nodes_attributes():
        # One-line purpose: node table shared by both test graphs.
        return {
            # nodes from original graph
            'logits': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
            'seq_len': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
            'order_arr': {'kind': 'op', 'op': 'Const'},
            'transpose': {'type': 'Transpose', 'kind': 'op', 'op': 'Transpose'},
            'decoder': {'kind': 'op', 'op': 'CTCGreedyDecoderSeqLen', 'merge_repeated': True},
            'cast': {'kind': 'op', 'op': 'Cast'},
            'sparse_to_dense': {'kind': 'op', 'op': 'SparseToDense'},
            'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'},

            # new nodes
            'new_decoder': {'kind': 'op', 'op': 'CTCGreedyDecoderSeqLen', 'use_mask_format': True},
            **const('squeeze_axes', int64_array([2, 3])),
            'squeeze_dec_seq': {'kind': 'op', 'op': 'Squeeze'},
            'cast_to_int': {'kind': 'op', 'op': 'Cast'},
        }

    def _run_replacement_test(self, replacer, graph_edges):
        # One-line purpose: build graph, apply replacer, compare to reference.
        nodes_attributes = self._nodes_attributes()
        graph = build_graph(nodes_attributes, graph_edges,
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        replacer.find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes,
                                [('logits', 'transpose', {'out': 0, 'in': 0}),
                                 ('order_arr', 'transpose', {'out': 0, 'in': 1}),
                                 ('transpose', 'decoder', {'out': 0, 'in': 0}),
                                 ('seq_len', 'decoder', {'out': 0, 'in': 1}),
                                 ('decoder', 'last', {'out': 0, 'in': 0}),
                                 ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph, graph_ref, 'last', check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test1(self):
        # SparseToDense consumes both decoder indices (out 0) and the
        # dense-shape output (out 2).
        self._run_replacement_test(
            CTCGreedyDecoderWithSparseToDenseShapeReplacement(),
            [('logits', 'decoder', {'out': 0, 'in': 0}),
             ('seq_len', 'decoder', {'out': 0, 'in': 1}),
             ('decoder', 'sparse_to_dense', {'out': 0, 'in': 0}),
             ('decoder', 'sparse_to_dense', {'out': 2, 'in': 1}),
             ('decoder', 'cast', {'out': 1, 'in': 0}),
             ('cast', 'sparse_to_dense', {'out': 0}),
             ('sparse_to_dense', 'last', {'out': 0, 'in': 0}),
             ])

    def test2(self):
        # Same pattern but without the dense-shape edge into SparseToDense.
        self._run_replacement_test(
            CTCGreedyDecoderReplacement(),
            [('logits', 'decoder', {'out': 0, 'in': 0}),
             ('seq_len', 'decoder', {'out': 0, 'in': 1}),
             ('decoder', 'sparse_to_dense', {'out': 0, 'in': 0}),
             ('decoder', 'cast', {'out': 1, 'in': 0}),
             ('cast', 'sparse_to_dense', {'out': 0}),
             ('sparse_to_dense', 'last', {'out': 0, 'in': 0}),
             ])
| [
"noreply@github.com"
] | mashoujiang.noreply@github.com |
ac7b07e642d05dc0d1ff270fac4e20c2f046348c | 200bc48000f6821b5d449ddc3d3269b8e10623be | /dashboard/dashboard/delete_old_tests.py | 3ef7ab087b74c2a09d26736697a77184db18b203 | [
"BSD-3-Clause"
] | permissive | marcelfarres/catapult | 97dab55fc2b231e47fe245c12e76f7169dfa7227 | f49c20888bb8ea3208efa29a2eb625ffb926ebc4 | refs/heads/master | 2021-01-18T04:39:58.000436 | 2016-06-28T17:49:43 | 2016-06-28T17:49:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,276 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A cron job which queues old tests for deletion."""
import datetime
from google.appengine.api import taskqueue
from google.appengine.datastore import datastore_query
from dashboard import datastore_hooks
from dashboard import list_tests
from dashboard import request_handler
from dashboard import utils
from dashboard.models import graph_data
# Age threshold: a test whose newest Row is older than this is deletable.
# Note this is a duration (timedelta), not a date, despite the name.
_CUTOFF_DATE = datetime.timedelta(days=183)  # Six months ago
# Page size for each scan of TestMetadata entities.
_TESTS_TO_CHECK_AT_ONCE = 100
# Queue name needs to be listed in queue.yaml.
_TASK_QUEUE_NAME = 'delete-old-tests-queue'
_DELETE_TASK_QUEUE_NAME = 'delete-tests-queue'
class DeleteOldTestsHandler(request_handler.RequestHandler):
    """Finds tests with no new data, and queues them for deletion."""

    def post(self):
        """Query for tests, and put ones with no new data on the delete queue."""
        datastore_hooks.SetPrivilegedRequest()
        # Resume scanning from the cursor passed by the previous task (if any).
        cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
        tests, next_cursor, more = graph_data.TestMetadata.query().fetch_page(
            _TESTS_TO_CHECK_AT_ONCE, keys_only=True, start_cursor=cursor)
        if more:
            # More pages remain: re-enqueue this handler to continue the scan.
            taskqueue.add(
                url='/delete_old_tests',
                params={'cursor': next_cursor.urlsafe()},
                queue_name=_TASK_QUEUE_NAME)
        for test in tests:
            # Delete this test if:
            # 1) It has no Rows newer than the cutoff
            # 2) It has no descendant tests
            no_new_rows = False
            # Newest Row for this test (descending timestamp, take first).
            last_row = graph_data.Row.query(
                graph_data.Row.parent_test == utils.OldStyleTestKey(test)).order(
                    -graph_data.Row.timestamp).get()
            if last_row:
                if last_row.timestamp < datetime.datetime.today() - _CUTOFF_DATE:
                    no_new_rows = True
            else:
                # A test with no Rows at all also counts as stale.
                no_new_rows = True
            descendants = list_tests.GetTestDescendants(test, keys_only=True)
            descendants.remove(test)
            if not descendants and no_new_rows:
                # Stale leaf test: hand it to the actual deletion queue.
                taskqueue.add(
                    url='/delete_test_data',
                    params={
                        'test_path': utils.TestPath(test),  # For manual inspection.
                        'test_key': test.urlsafe(),
                    },
                    queue_name=_DELETE_TASK_QUEUE_NAME)
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
85f92496e765df21ac2cd7377c68dff80637ed1d | 7b102f9c8f2e3f9240090d1d67af50333a2ba98d | /shared_code/central_comp/non_fatal/dismod/cascade_ode/run_children.py | 9b87545d7d12131ab52a929d1b26c53cba7a0b94 | [] | no_license | Nermin-Ghith/ihme-modeling | 9c8ec56b249cb0c417361102724fef1e6e0bcebd | 746ea5fb76a9c049c37a8c15aa089c041a90a6d5 | refs/heads/main | 2023-04-13T00:26:55.363986 | 2020-10-28T19:51:51 | 2020-10-28T19:51:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,316 | py | import logging
from copy import copy
import sys
import drill
from drill import Cascade, Cascade_loc
import pandas as pd
import multiprocessing as mp
import gc
import os
from jobmon import job
# Set the default file-creation mask so new files are readable by all users.
# (The original comment's "dUSERt" is a scrubbed "default".)
os.umask(0o0002)
def run_loc(args):
    """Run the dismod cascade for a single child location.

    ``args`` is a tuple ``(loc_id, sex_id, year, full_timespan, debug)``.
    Returns ``(loc_id, 0)`` on success.  In non-debug mode any failure is
    caught, logged, and returned as ``(loc_id, error_message)`` so that
    ``pool.map`` can keep processing the remaining locations; in debug mode
    exceptions propagate so they surface immediately.
    """
    gc.collect()
    loc_id, sex_id, year, full_timespan, debug = args
    if debug:
        return _run_single_loc(loc_id, sex_id, year, full_timespan)
    try:
        return _run_single_loc(loc_id, sex_id, year, full_timespan)
    except Exception as e:
        logging.exception("Failure running location {}".format(loc_id))
        return loc_id, str(e)


def _run_single_loc(loc_id, sex_id, year, full_timespan):
    # Shared worker body (previously duplicated verbatim in the debug and
    # non-debug branches of run_loc).  Relies on the module-level globals
    # `c` (Cascade) and `cl_parent` set in the __main__ block below.
    if full_timespan:
        cl = Cascade_loc(loc_id, sex_id, year, c, timespan=50,
                         parent_loc=cl_parent)
    else:
        cl = Cascade_loc(loc_id, sex_id, year, c, parent_loc=cl_parent)
    cl.run_dismod()
    cl.summarize_posterior()
    cl.draw()
    cl.predict()
    return loc_id, 0
if __name__ == "__main__":
    # Positional CLI args: model_version_id, location_id, sex, year, cv_iter,
    # and an optional 6th "debug" flag.
    mvid = int(sys.argv[1])
    location_id = int(sys.argv[2])
    sex = sys.argv[3]
    y = int(sys.argv[4])
    cv_iter = int(sys.argv[5])
    try:
        if sys.argv[6]=="debug":
            debug = True
        else:
            debug = False
    except:
        # NOTE(review): bare except — in practice this catches the IndexError
        # raised when no 6th argument is supplied.
        debug = False

    # Encode sex as the +/-0.5 covariate value used by the cascade.
    if sex=='male':
        sex_id = 0.5
    elif sex=='female':
        sex_id = -0.5

    c = Cascade(mvid, reimport=False, cv_iter=cv_iter)

    # Best-effort jobmon bookkeeping; failures are logged but never fatal.
    try:
        j = job.Job(os.path.normpath(os.path.join(c.root_dir, '..')))
        j.start()
    except IOError as e:
        logging.exception(e)
    except Exception as e:
        logging.exception(e)

    year_split_lvl = c.model_version_meta.fix_year.values[0]-1
    lt = c.loctree
    this_lvl = lt.get_nodelvl_by_id(location_id)
    # Location 1 (global) uses fixed sex/year placeholders for the parent fit.
    if location_id == 1:
        cl_parent = Cascade_loc(location_id, 0, 2000, c, reimport=False)
    else:
        cl_parent = Cascade_loc(location_id, sex_id, y, c, reimport=False)

    num_children = len(lt.get_node_by_id(location_id).children)
    num_cpus = mp.cpu_count()
    if not debug:
        # Cap worker count at 10 regardless of CPUs/children.
        pool = mp.Pool(min(num_cpus, num_children, 10))

    # Run child locations
    arglist = []
    for child_loc in lt.get_node_by_id(location_id).children:
        # Deep-enough levels are run per-year instead of the full timespan.
        if this_lvl>=(year_split_lvl-1):
            full_timespan = False
        else:
            full_timespan = True
        arglist.append((
            child_loc.id, sex_id, y,
            full_timespan, debug))

    if debug:
        # NOTE(review): this bare string is a no-op statement, not a print.
        '..... RUNNING IN SINGLE PROCESS DEBUG MODE .....'
        res = map(run_loc, arglist)
    else:
        res = pool.map(run_loc, arglist)
        pool.close()
        pool.join()

    # Collect per-location failures; run_loc returns (loc_id, 0) on success.
    errors = ['%s: %s' % (str(r[0]), r[1]) for r in res if r[1] != 0]
    try:
        if len(errors) == 0:
            j.finish()
        else:
            error_msg = "; ".join(errors)
            j.log_error(error_msg)
            j.failed()
    except NameError as e:
        # `j` may be undefined if Job construction failed above.
        logging.exception(e)
    except Exception as e:
        logging.exception(e)
| [
"nsidles@uw.edu"
] | nsidles@uw.edu |
17a4bc2d55265f97b424078b0d607270b98f569a | 32c56293475f49c6dd1b0f1334756b5ad8763da9 | /google-cloud-sdk/lib/third_party/kubernetes/client/models/v1_handler.py | 8fdb314fd03254108947f916c63f52e75d99f2df | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] | permissive | bopopescu/socialliteapp | b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494 | 85bb264e273568b5a0408f733b403c56373e2508 | refs/heads/master | 2022-11-20T03:01:47.654498 | 2020-02-01T20:29:43 | 2020-02-01T20:29:43 | 282,403,750 | 0 | 0 | MIT | 2020-07-25T08:31:59 | 2020-07-25T08:31:59 | null | UTF-8 | Python | false | false | 4,438 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen
https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1Handler(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name and the value is attribute
        type.
      attribute_map (dict): The key is attribute name and the value is json key
        in definition.
    """
    swagger_types = {
        '_exec': 'V1ExecAction',
        'http_get': 'V1HTTPGetAction',
        'tcp_socket': 'V1TCPSocketAction'
    }

    attribute_map = {
        '_exec': 'exec',
        'http_get': 'httpGet',
        'tcp_socket': 'tcpSocket'
    }

    def __init__(self, _exec=None, http_get=None, tcp_socket=None):
        """
        V1Handler - a model defined in Swagger
        """
        # `self.__exec` is name-mangled to `_V1Handler__exec`, the backing
        # field for the `_exec` property below (same mangling in the setter).
        self.__exec = None
        self._http_get = None
        self._tcp_socket = None
        self.discriminator = None

        # Route non-None constructor args through the property setters.
        if _exec is not None:
            self._exec = _exec
        if http_get is not None:
            self.http_get = http_get
        if tcp_socket is not None:
            self.tcp_socket = tcp_socket

    @property
    def _exec(self):
        """
        Gets the _exec of this V1Handler.
        One and only one of the following should be specified. Exec specifies
        the action to take.

        :return: The _exec of this V1Handler.
        :rtype: V1ExecAction
        """
        return self.__exec

    @_exec.setter
    def _exec(self, _exec):
        """
        Sets the _exec of this V1Handler.
        One and only one of the following should be specified. Exec specifies
        the action to take.

        :param _exec: The _exec of this V1Handler.
        :type: V1ExecAction
        """
        self.__exec = _exec

    @property
    def http_get(self):
        """
        Gets the http_get of this V1Handler.
        HTTPGet specifies the http request to perform.

        :return: The http_get of this V1Handler.
        :rtype: V1HTTPGetAction
        """
        return self._http_get

    @http_get.setter
    def http_get(self, http_get):
        """
        Sets the http_get of this V1Handler.
        HTTPGet specifies the http request to perform.

        :param http_get: The http_get of this V1Handler.
        :type: V1HTTPGetAction
        """
        self._http_get = http_get

    @property
    def tcp_socket(self):
        """
        Gets the tcp_socket of this V1Handler.
        TCPSocket specifies an action involving a TCP port. TCP hooks not yet
        supported

        :return: The tcp_socket of this V1Handler.
        :rtype: V1TCPSocketAction
        """
        return self._tcp_socket

    @tcp_socket.setter
    def tcp_socket(self, tcp_socket):
        """
        Sets the tcp_socket of this V1Handler.
        TCPSocket specifies an action involving a TCP port. TCP hooks not yet
        supported

        :param tcp_socket: The tcp_socket of this V1Handler.
        :type: V1TCPSocketAction
        """
        self._tcp_socket = tcp_socket

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        # Recursively serialize nested swagger models (anything exposing
        # to_dict), lists of models, and dict values that are models.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(
                    map(lambda x: x.to_dict() if hasattr(x, 'to_dict') else x, value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(
                    map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], 'to_dict') else item, value.items()))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1Handler):
            return False

        # Structural equality over all instance attributes.
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| [
"jonathang132298@gmail.com"
] | jonathang132298@gmail.com |
b55fbb662ee45a07998c60bbe7ca4a437076ce1f | acd9ff3b087317a9f1261be1d376af80f12351cc | /snippets/sample.py | a6d2c160fc14edadd40a4273770b23f18dcae3e7 | [
"MIT"
] | permissive | tgandor/urban_oculus | 6f18f5fd39eae0db9309aebd5b206bea0cdb50e9 | 069a69ea35fe5072377d9b9cac15d285920d29e8 | refs/heads/master | 2022-10-03T19:34:37.518871 | 2022-09-26T23:45:00 | 2022-09-26T23:45:00 | 212,092,683 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | # mogrify to successive qualities 50 imgs at a time
# Re-encode JPEGs in the current directory in batches of 50 per quality
# level (1..100), then prefix each file name with the quality it received.
# Sized for COCO val2017 (5000 images => 100 * 50).
import glob
import os

image_paths = sorted(glob.glob("*.jpg"))
# 1,1,...(x50),2,2,...(x50),...,100 — one quality value per image.
quality_levels = (level for level in range(1, 101) for _ in range(50))

for quality, path in zip(quality_levels, image_paths):
    print(path)
    os.system(f"mogrify -quality {quality} {path}")
    os.rename(path, f"{quality:03d}_{path}")
| [
"tomasz.gandor@gmail.com"
] | tomasz.gandor@gmail.com |
ba3fe86f04f4feeb9d8e21a759d4a3d71dcbfa87 | 015383d460fa4321391d964c4f65c4d0c044dcc1 | /.venv/lib/python3.7/site-packages/faker/providers/color/uk_UA/__init__.py | 082c0e3c70d4d54b748de7d862ec0568cffeaf26 | [
"Unlicense"
] | permissive | kobbyrythm/temperature_stories_django | 8f400c8d3c8190b0e83f7bcfece930d696c4afe9 | 552d39f1f6f3fc1f0a2f7308a7da61bf1b9b3de3 | refs/heads/main | 2023-07-03T21:28:46.020709 | 2021-07-20T09:44:29 | 2021-07-20T09:44:29 | 468,728,039 | 3 | 0 | Unlicense | 2022-03-11T11:41:47 | 2022-03-11T11:41:46 | null | UTF-8 | Python | false | false | 10,523 | py | from collections import OrderedDict
from .. import Provider as ColorProvider
class Provider(ColorProvider):
    """Implement color provider for ``uk_UA`` locale.

    Sources:

    - https://uk.wikipedia.org/wiki/Список_кольорів
    """

    # Ukrainian color name -> hex RGB value.  Kept as an OrderedDict so the
    # base ColorProvider sees a stable ordering.
    # NOTE(review): 'Лляний' and 'Яскраво-зелений' each appear twice below;
    # dict construction keeps only the LAST value for a repeated key.
    all_colors = OrderedDict((
        ('Абрикосовий', '#FBCEB1'),
        ('Аквамариновий', '#7FFFD4'),
        ('Алізариновий червоний', '#E32636'),
        ('Амарантовий', '#E52B50'),
        ('Амарантово-рожевий', '#F19CBB'),
        ('Аметистовий', '#9966CC'),
        ('Андроїдний зелений', '#A4C639'),
        ('Арсеновий', '#3B444B'),
        ('Атомний мандаріновий', '#FF9966'),
        ('Багряний', '#FF2400'),
        ('Баклажановий', '#990066'),
        ('Барвінковий', '#CCCCFF'),
        ('Бежевий', '#F5F5DC'),
        ('Берлінська лазур', '#003153'),
        ('Блаватний', '#6495ED'),
        ('Блакитний', '#AFEEEE'),
        ('Блакитний Брандейса', '#0070FF'),
        ('Блакитно-зелений', '#00DDDD'),
        ('Блакитно-фіолетовий', '#8A2BE2'),
        ('Блідий рожево-ліловий', '#996666'),
        ('Блідо-брунатний', '#987654'),
        ('Блідо-волошковий', '#ABCDEF'),
        ('Блідо-карміновий', '#AF4035'),
        ('Блідо-каштановий', '#DDADAF'),
        ('Блідо-пурпуровий', '#F984E5'),
        ('Блідо-пісочний', '#DABDAB'),
        ('Блідо-рожевий', '#FADADD'),
        ('Болотний', '#ACB78E'),
        ('Бронзовий', '#CD7F32'),
        ('Брунатний', '#964B00'),
        ('Брунато-малиновий', '#800000'),
        ('Будяковий', '#D8BFD8'),
        ('Бузковий', '#C8A2C8'),
        ('Бургундський', '#900020'),
        ('Бурий', '#755A57'),
        ('Бурштиновий', '#FFBF00'),
        ('Білий', '#FFFFFF'),
        ('Білий навахо', '#FFDEAD'),
        ('Бірюзовий', '#30D5C8'),
        ('Бістр', '#3D2B1F'),
        ('Вода пляжа Бонді', '#0095B6'),
        ('Вохра', '#CC7722'),
        ('Відбірний жовтий', '#FFBA00'),
        ('Візантійський', '#702963'),
        ('Гарбуз', '#FF7518'),
        ('Гарячо-рожевий', '#FC0FC0'),
        ('Геліотроп', '#DF73FF'),
        ('Глибокий фіолетовий', '#423189'),
        ('Глицінія', '#C9A0DC'),
        ('Грушевий', '#D1E231'),
        ('Гумігут', '#E49B0F'),
        ('Гірчичний', '#FFDB58'),
        ('Дерева', '#79443B'),
        ('Джинсовий', '#1560BD'),
        ('Діамантово-рожевий', '#FF55A3'),
        ('Жовтий', '#FFFF00'),
        ('Жовто-зелений', '#ADFF2F'),
        ('Жовто-персиковий', '#FADFAD'),
        ('Захисний синій', '#1E90FF'),
        ('Зелена весна', '#00FF7F'),
        ('Зелена мʼята', '#98FF98'),
        ('Зелена сосна', '#01796F'),
        ('Зелене море', '#2E8B57'),
        ('Зелений', '#00FF00'),
        ('Зелений армійський', '#4B5320'),
        ('Зелений мох', '#ADDFAD'),
        ('Зелений папороть', '#4F7942'),
        ('Зелений чай', '#D0F0C0'),
        ('Зелено-сірий чай', '#CADABA'),
        ('Зеленувато-блакитний', '#008080'),
        ('Золотаво-березовий', '#DAA520'),
        ('Золотий', '#FFD700'),
        ('Золотисто-каштановий', '#6D351A'),
        ('Індиго', '#4B0082'),
        ('Іржавий', '#B7410E'),
        ('Кардинал (колір)', '#C41E3A'),
        ('Карміновий', '#960018'),
        ('Каштановий', '#CD5C5C'),
        ('Кобальтовий', '#0047AB'),
        ('Колір жовтого шкільного автобуса', '#FFD800'),
        ('Колір засмаги', '#D2B48C'),
        ('Колір морської піни', '#FFF5EE'),
        ('Колір морської хвилі', '#00FFFF'),
        ('Кораловий', '#FF7F50'),
        ('Королівський синій', '#4169E1'),
        ('Кремовий', '#FFFDD0'),
        ('Кукурудзяний', '#FBEC5D'),
        ('Кіновар', '#FF4D00'),
        ('Лавандний', '#E6E6FA'),
        ('Лазуровий', '#007BA7'),
        ('Лазурово-синій', '#2A52BE'),
        ('Лайм', '#CCFF00'),
        ('Латунний', '#B5A642'),
        ('Лимонний', '#FDE910'),
        ('Лимонно-кремовий', '#FFFACD'),
        ('Лляний', '#EEDC82'),
        ('Лляний', '#FAF0E6'),
        ('Лососевий', '#FF8C69'),
        ('Ліловий', '#DB7093'),
        ('Малахітовий', '#0BDA51'),
        ('Малиновий', '#DC143C'),
        ('Мандариновий', '#FFCC00'),
        ('Мисливський', '#004225'),
        ('Морквяний', '#ED9121'),
        ('Мідний', '#B87333'),
        ('Міжнародний помаранчевий', '#FF4F00'),
        ('Нефритовий', '#00A86B'),
        ('Ніжно-блакитний', '#E0FFFF'),
        ('Ніжно-оливковий', '#6B8E23'),
        ('Ніжно-рожевий', '#FB607F'),
        ('Оливковий', '#808000'),
        ('Опівнічно-синій', '#003366'),
        ('Орхідея', '#DA70D6'),
        ('Палена сіена', '#E97451'),
        ('Палений оранжевий', '#CC5500'),
        ('Панг', '#C7FCEC'),
        ('Паросток папаї', '#FFEFD5'),
        ('Пастельно-зелений', '#77DD77'),
        ('Пастельно-рожевий', '#FFD1DC'),
        ('Персиковий', '#FFE5B4'),
        ('Перський синій', '#6600FF'),
        ('Помаранчевий', '#FFA500'),
        ('Помаранчево-персиковий', '#FFCC99'),
        ('Помаранчево-рожевий', '#FF9966'),
        ('Пурпурний', '#FF00FF'),
        ('Пурпуровий', '#660099'),
        ('Пшеничний', '#F5DEB3'),
        ('Пісочний колір', '#F4A460'),
        ('Рожевий', '#FFC0CB'),
        ('Рожевий Маунтбеттена', '#997A8D'),
        ('Рожево-лавандний', '#FFF0F5'),
        ('Рожево-ліловий', '#993366'),
        ('Салатовий', '#7FFF00'),
        ('Сангрія', '#92000A'),
        ('Сапфіровий', '#082567'),
        ('Світло-синій', '#007DFF'),
        ('Сепія', '#704214'),
        ('Сиваво-зелений', '#ACE1AF'),
        ('Сигнально-помаранчевий', '#FF9900'),
        ('Синя пил', '#003399'),
        ('Синя сталь', '#4682B4'),
        ('Сині яйця малинівки', '#00CCCC'),
        ('Синій', '#0000FF'),
        ('Синій (RYB)', '#0247FE'),
        ('Синій (пігмент)', '#333399'),
        ('Синій ВПС', '#5D8AA8'),
        ('Синій Клейна', '#3A75C4'),
        ('Сливовий', '#660066'),
        ('Смарагдовий', '#50C878'),
        ('Спаржевий', '#7BA05B'),
        ('Срібний', '#C0C0C0'),
        ('Старе золото', '#CFB53B'),
        ('Сіра спаржа', '#465945'),
        ('Сірий', '#808080'),
        ('Сірий шифер', '#708090'),
        ('Темний весняно-зелений', '#177245'),
        ('Темний жовто-брунатний', '#918151'),
        ('Темний зелений чай', '#BADBAD'),
        ('Темний пастельно-зелений', '#03C03C'),
        ('Темний хакі', '#BDB76B'),
        ('Темний індиго', '#310062'),
        ('Темно-аспідний сірий', '#2F4F4F'),
        ('Темно-брунатний', '#654321'),
        ('Темно-бірюзовий', '#116062'),
        ('Темно-зелений', '#013220'),
        ('Темно-зелений хакі', '#78866B'),
        ('Темно-золотий', '#B8860B'),
        ('Темно-карміновий', '#560319'),
        ('Темно-каштановий', '#986960'),
        ('Темно-кораловий', '#CD5B45'),
        ('Темно-лазурний', '#08457E'),
        ('Темно-лососевий', '#E9967A'),
        ('Темно-мандариновий', '#FFA812'),
        ('Темно-оливковий', '#556832'),
        ('Темно-персиковий', '#FFDAB9'),
        ('Темно-рожевий', '#E75480'),
        ('Темно-синій', '#000080'),
        ('Ультрамариновий', '#120A8F'),
        ('Умбра', '#734A12'),
        ('Умбра палена', '#8A3324'),
        ('Фуксія', '#FF00FF'),
        ('Фіолетовий', '#8B00FF'),
        ('Фіолетово-баклажановий', '#991199'),
        ('Фіолетово-червоний', '#C71585'),
        ('Хакі', '#C3B091'),
        ('Цинамоновий', '#7B3F00'),
        ('Циннвальдит', '#EBC2AF'),
        ('Ціан (колір)', '#00FFFF'),
        ('Ціано-блакитний', '#F0F8FF'),
        ('Червоний', '#FF0000'),
        ('Червоно-буро-помаранчевий', '#CD5700'),
        ('Червоновато-брунатний', '#CC8899'),
        ('Чорний', '#000000'),
        ('Шафрановий', '#F4C430'),
        ('Шкіра буйвола', '#F0DC82'),
        ('Шоколадний', '#D2691E'),
        ('Яскраво-бурштиновий', '#FF7E00'),
        ('Яскраво-бірюзовий', '#08E8DE'),
        ('Яскраво-зелений', '#66FF00'),
        ('Яскраво-зелений', '#40826D'),
        ('Яскраво-рожевий', '#FF007F'),
        ('Яскраво-фіолетовий', '#CD00CD'),
        ('Ясно-брунатний', '#CD853F'),
        ('Ясно-вишневий', '#DE3163'),
        ('Ясно-лазуровий', '#007FFF'),
        ('Ясно-лазуровий (веб)', '#F0FFFF'),
    ))
| [
"b.scharlau@abdn.ac.uk"
] | b.scharlau@abdn.ac.uk |
fdb8b901086777739d2ab22a08e6e45e12e6d7f2 | 3ae62276c9aad8b9612d3073679b5cf3cb695e38 | /easyleetcode/leetcodes/SYL_4数学和位运算_9Fast Power.py | ce4086955015bc4ab044ceec5c0c1ae6f05889ed | [
"Apache-2.0"
] | permissive | gongtian1234/easy_leetcode | bc0b33c3c4f61d58a6111d76707903efe0510cb4 | d2b8eb5d2cafc71ee1ca633ce489c1a52bcc39ce | refs/heads/master | 2022-11-16T17:48:33.596752 | 2020-07-13T02:55:03 | 2020-07-13T02:55:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py |
class Solution:
    """
    @param a, b, n: 32bit integers
    @return: An integer
    """

    def fastPower(self, a, b, n):
        """Return (a ** n) % b.

        Contract preserved from the original implementation:
        n == 0 yields 1 % b, and a negative exponent yields the
        sentinel -1.
        """
        if n < 0:
            # Original code treated a negative exponent as invalid input.
            return -1
        # Built-in three-argument pow() does modular exponentiation in C
        # with O(log n) multiplications, replacing the manual recursion.
        # The old code halved the exponent with int(n / 2), which goes
        # through float division and silently loses precision for
        # n >= 2**53; pow() has no such limit.
        return pow(a, n, b)
# English translation of the note below: there is no need to compute 2**32
# first and then reduce — modular reduction distributes over + and *:
#   (a + b) % p == ((a % p) + (b % p)) % p
#   (a * b) % p == ((a % p) * (b % p)) % p
'''
没必要算2^32再不断取%!!
(a+b) % p =((a % p)+(b % p))% p
(a*b) % p =((a % p)*(b % p))% p
'''
# Ad-hoc smoke test run on import.
s = Solution()
print(s.fastPower(2, 3, 31))
print(s.fastPower(100, 1000, 1000))
| [
"425776024@qq.com"
] | 425776024@qq.com |
44eb876e9aaf0e2fd5d2c0b3e7b4f9bda5960f39 | 7393987b67f845cd5db4c83e3063b3d36108aa58 | /ansible/roles/cloud_master/files/api_srv/do_api.py | 77bcdca698bd9bbf5cfd3ce948fd211766bf3407 | [] | no_license | HackerDom/ructfe-2020 | 2d859afe113203813b1f65e9a55d275963b3af65 | a7a13546389bac2d39ef51e65eb3320569d02247 | refs/heads/main | 2023-02-04T21:10:40.035902 | 2020-12-26T20:37:25 | 2020-12-26T20:37:25 | 310,278,476 | 9 | 2 | null | null | null | null | UTF-8 | Python | false | false | 7,932 | py | # Developed by Alexander Bersenev from Hackerdom team, bay@hackerdom.ru
"""Common functions that make requests to digital ocean api"""
import requests
import time
import json
import sys
from do_token import TOKEN
# When True, log() writes diagnostics to stderr.
VERBOSE = True

# Headers sent with every Digital Ocean API request (JSON body + bearer auth).
HEADERS = {
    "Content-Type": "application/json",
    "Authorization": "Bearer %s" % TOKEN,
}
def log(*params):
    """Print *params* to stderr when the module-level VERBOSE flag is set."""
    if VERBOSE:
        print(*params, file=sys.stderr)
def get_all_vms(attempts=5, timeout=10):
    """Return a list of all droplet dicts, following pagination.

    Retries each failed page fetch up to `attempts` times, sleeping
    `timeout` seconds between tries; returns None on persistent failure
    rather than a partial listing.
    """
    vms = {}
    url = "https://api.digitalocean.com/v2/droplets?per_page=200"

    cur_attempt = 1
    while True:
        try:
            resp = requests.get(url, headers=HEADERS)
            if not str(resp.status_code).startswith("2"):
                log(resp.status_code, resp.headers, resp.text)
                raise Exception("bad status code %d" % resp.status_code)

            data = json.loads(resp.text)
            # Key by droplet id so a retried page never duplicates entries.
            for droplet in data["droplets"]:
                vms[droplet["id"]] = droplet

            # Follow the "next page" link provided by the API, if present.
            if ("links" in data and "pages" in data["links"] and
                    "next" in data["links"]["pages"]):
                url = data["links"]["pages"]["next"]
            else:
                break
        except Exception as e:
            log("get_all_vms trying again %s" % (e,))
            cur_attempt += 1
            if cur_attempt > attempts:
                return None  # do not return parts of the output
            time.sleep(timeout)
    return list(vms.values())
def get_ids_by_vmname(vm_name):
    """Return the set of droplet ids named `vm_name`, or None on API failure."""
    ids = set()
    droplets = get_all_vms()
    if droplets is None:
        return None
    for droplet in droplets:
        if droplet["name"] == vm_name:
            ids.add(droplet['id'])
    return ids
def check_vm_exists(vm_name):
    """Return True if a droplet named `vm_name` exists, False if not,
    or None if the droplet listing could not be fetched.

    Delegates to get_ids_by_vmname() instead of re-implementing the
    same list-and-filter loop (the original duplicated that logic).
    """
    ids = get_ids_by_vmname(vm_name)
    if ids is None:
        return None
    return bool(ids)
def create_vm(vm_name, image, ssh_keys,
              region="ams2", size="s-1vcpu-1gb", attempts=10, timeout=20):
    """Create a droplet and return its id, or None after `attempts` failures.

    NOTE(review): this function checks `status_code not in [200, 201, 202]`
    while the other helpers accept any 2xx via startswith("2") — confirm
    whether the stricter check here is intentional.
    """
    for i in range(attempts):
        try:
            data = json.dumps({
                "name": vm_name,
                "region": region,
                "size": size,
                "image": image,
                "ssh_keys": ssh_keys,
                "backups": False,
                "ipv6": False,
                "user_data": "#!/bin/bash\n\n",
                "private_networking": None,
                "volumes": None,
                "tags": []  # tags are too unstable in DO
            })

            log("creating new")
            url = "https://api.digitalocean.com/v2/droplets"
            resp = requests.post(url, headers=HEADERS, data=data)
            if resp.status_code not in [200, 201, 202]:
                log(resp.status_code, resp.headers, resp.text)
                raise Exception("bad status code %d" % resp.status_code)
            droplet_id = json.loads(resp.text)["droplet"]["id"]
            return droplet_id
        except Exception as e:
            log("create_vm trying again %s" % (e,))
            time.sleep(timeout)
    return None
def delete_vm_by_id(droplet_id, attempts=10, timeout=20):
    """Delete the droplet with `droplet_id`; True on success, False otherwise."""
    for i in range(attempts):
        try:
            log("deleting droplet")
            url = "https://api.digitalocean.com/v2/droplets/%d" % droplet_id
            resp = requests.delete(url, headers=HEADERS)
            if not str(resp.status_code).startswith("2"):
                log(resp.status_code, resp.headers, resp.text)
                raise Exception("bad status code %d" % resp.status_code)
            return True
        except Exception as e:
            log("delete_vm_by_id trying again %s" % (e,))
            time.sleep(timeout)
    return False
def get_ip_by_id(droplet_id, attempts=5, timeout=20):
    """Return the droplet's public IPv4 address, or None on failure.

    Takes the first v4 network entry, skipping it if it is a private
    10.x address (in that case the second entry is used).
    """
    for i in range(attempts):
        try:
            url = "https://api.digitalocean.com/v2/droplets/%d" % droplet_id
            resp = requests.get(url, headers=HEADERS)
            data = json.loads(resp.text)
            ip = data['droplet']['networks']['v4'][0]['ip_address']
            if ip.startswith("10."):
                # take next
                ip = data['droplet']['networks']['v4'][1]['ip_address']
            return ip
        except Exception as e:
            log("get_ip_by_id trying again %s" % (e,))
            time.sleep(timeout)
    log("failed to get ip by id")
    return None
def get_ip_by_vmname(vm_name):
    """Return the public IPv4 of the droplet named `vm_name`, or None.

    Delegates the listing/filtering to get_ids_by_vmname() instead of
    duplicating it (the original repeated the same loop inline).  If
    several droplets share the name, one of them is picked arbitrarily,
    as before, after logging a warning.
    """
    ids = get_ids_by_vmname(vm_name)
    if ids is None:
        return None
    if len(ids) > 1:
        log("warning: there are more than one droplet with name " + vm_name +
            ", using random :)")
    if not ids:
        return None
    return get_ip_by_id(next(iter(ids)))
def get_all_domain_records(domain, attempts=5, timeout=20):
    """Return all DNS records of `domain` as a list of dicts, or None.

    Same pagination/retry scheme as get_all_vms(): each failed page fetch
    is retried, and persistent failure returns None rather than a partial
    result.
    """
    records = {}
    url = ("https://api.digitalocean.com/v2/domains/" + domain +
           "/records?per_page=200")

    cur_attempt = 1
    while True:
        try:
            resp = requests.get(url, headers=HEADERS)
            if not str(resp.status_code).startswith("2"):
                log(resp.status_code, resp.headers, resp.text)
                raise Exception("bad status code %d" % resp.status_code)

            data = json.loads(resp.text)
            # Key by record id so a retried page never duplicates entries.
            for record in data["domain_records"]:
                records[record["id"]] = record

            if ("links" in data and "pages" in data["links"] and
                    "next" in data["links"]["pages"]):
                url = data["links"]["pages"]["next"]
            else:
                break
        except Exception as e:
            log("get_all_domain_records trying again %s" % (e,))
            cur_attempt += 1
            if cur_attempt > attempts:
                return None  # do not return parts of the output
            time.sleep(timeout)
    return list(records.values())
def get_domain_ids_by_hostname(host_name, domain, print_warning_on_fail=False):
    """Return the set of ids of A-records named `host_name` in `domain`.

    Returns None if the record listing failed; an empty set (optionally
    with a logged warning) if no matching record exists.
    """
    ids = set()
    records = get_all_domain_records(domain)
    if records is None:
        return None
    for record in records:
        if record["type"] == "A" and record["name"] == host_name:
            ids.add(record['id'])
    if not ids:
        if print_warning_on_fail:
            log("failed to get domain ids by hostname", host_name)
    return ids
def create_domain_record(name, ip, domain, attempts=10, timeout=20):
    """Create an A-record `name` -> `ip` (TTL 30) in `domain`.

    Returns True on success; None after exhausting all attempts.
    NOTE(review): the failure value is None here but False in
    delete_domain_record() — confirm whether callers rely on that.
    """
    for i in range(attempts):
        try:
            data = json.dumps({
                "type": "A",
                "name": name,
                "data": ip,
                "ttl": 30
            })

            url = "https://api.digitalocean.com/v2/domains/%s/records" % domain
            resp = requests.post(url, headers=HEADERS, data=data)
            if not str(resp.status_code).startswith("2"):
                log(resp.status_code, resp.headers, resp.text)
                raise Exception("bad status code %d" % resp.status_code)
            return True
        except Exception as e:
            log("create_domain_record trying again %s" % (e,))
            time.sleep(timeout)
    return None
def delete_domain_record(domain_id, domain, attempts=10, timeout=20):
    """Delete DNS record `domain_id` from `domain`; True on success."""
    for i in range(attempts):
        try:
            log("deleting domain record %d" % domain_id)
            url = ("https://api.digitalocean.com/v2/domains" +
                   "/%s/records/%d" % (domain, domain_id))
            resp = requests.delete(url, headers=HEADERS)
            if not str(resp.status_code).startswith("2"):
                log(resp.status_code, resp.headers, resp.text)
                raise Exception("bad status code %d" % resp.status_code)
            return True
        except Exception as e:
            log("delete_domain_record trying again %s" % (e,))
            time.sleep(timeout)
    return False
| [
"bay@hackerdom.ru"
] | bay@hackerdom.ru |
6d49234b323cdcab62ce20e342bac60b8ce76fdb | 5ee3aa64cffb7cd13df824b0e669145cf41ca106 | /kinoko/misc/__init__.py | 6a575e89380da378b3e32bb85991341812c11671 | [
"MIT"
] | permissive | youxiaoxing/kinoko | aa0af77bde7a8349293c29a02e977d147c06f9d1 | 4750d8e6b1a68ba771cd89b352989ef05b293d45 | refs/heads/master | 2022-03-25T19:28:55.737172 | 2019-10-19T17:56:03 | 2019-10-19T17:56:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 expandtab number
"""
Authors: qianweishuo<qzy922@gmail.com>
Date: 2019/6/27 下午11:20
"""
| [
"koyo922@qq.com"
] | koyo922@qq.com |
a0508c2b3dce01a6195c9765c62d941647c47e98 | a03eba726a432d8ef133f2dc55894ba85cdc4a08 | /events/mixins.py | c91997975613113f8e2e94939e013eea469b1be0 | [
"MIT"
] | permissive | mansonul/events | 2546c9cfe076eb59fbfdb7b4ec8bcd708817d59b | 4f6ca37bc600dcba3f74400d299826882d53b7d2 | refs/heads/master | 2021-01-15T08:53:22.442929 | 2018-01-30T16:14:20 | 2018-01-30T16:14:20 | 99,572,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 697 | py | from django.http import JsonResponse
from django.forms.models import model_to_dict
class AjaxFormMixin(object):
    """Mixin for Django form views that answers AJAX requests with JSON
    while keeping the normal redirect/re-render behavior for plain
    requests.  The parent view's handler is always invoked first so its
    side effects (e.g. saving the form) still happen.
    """

    def form_invalid(self, form):
        default_response = super(AjaxFormMixin, self).form_invalid(form)
        if not self.request.is_ajax():
            return default_response
        # AJAX caller: hand back the validation errors as JSON.
        return JsonResponse(form.errors, status=400)

    def form_valid(self, form):
        default_response = super(AjaxFormMixin, self).form_valid(form)
        if not self.request.is_ajax():
            return default_response
        # AJAX caller: echo the saved instance's key fields as JSON.
        payload = {
            'title': form.instance.title,
            'description': form.instance.description,
        }
        return JsonResponse(payload)
| [
"contact@dragosnicu.com"
] | contact@dragosnicu.com |
c3ebfd0b4f069d5706c9ede95f3ab0a751c57bf6 | 4f2cdd9a34fce873ff5995436edf403b38fb2ea5 | /Data-Structures/String/part1/P008.py | 5734c472a41b5a886b102af012eaecfaaa7e3e87 | [] | no_license | sanjeevseera/Python-Practice | 001068e9cd144c52f403a026e26e9942b56848b0 | 5ad502c0117582d5e3abd434a169d23c22ef8419 | refs/heads/master | 2021-12-11T17:24:21.136652 | 2021-08-17T10:25:01 | 2021-08-17T10:25:01 | 153,397,297 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | """
Write a Python function that takes a list of words and returns the length of the longest one
"""
def Lword(wlist):
word=wlist[0]
for w in wlist[1:]:
if len(w)>len(word):
word=w
return word
wlist=input("enter the words by comma separated:...").split(',')
print(Lword(wlist)) | [
"seerasanjeev@gmail.com"
] | seerasanjeev@gmail.com |
3aa4e4c2b6fe388c724541de0a28a0383afd51c3 | 5ba3115523fb052d32db827e09443248ec5f6629 | /algorithm/PycharmProjects/0211/ladder.py | eb6210c9100757556cc04b234568b9a34172aa51 | [] | no_license | oliviaspark0825/TIL | 841095003ae794e14bd8c7e8c883826667c25f37 | 8bc66836f9a1eea5f42e9e1172f81f005abc042d | refs/heads/master | 2023-01-10T22:14:15.341489 | 2019-08-22T09:09:52 | 2019-08-22T09:09:52 | 162,099,057 | 0 | 0 | null | 2023-01-04T07:52:28 | 2018-12-17T08:32:43 | Jupyter Notebook | UTF-8 | Python | false | false | 1,040 | py | import sys
sys.stdin = open("ladder_input.txt")
T = 10
SIZE = 100
for tc in range(T):
data = [[0 for i in range(100)] for j in range(100)]
data = list(map(int, input().split()))
while x < 100 and y < 100:
if data[x][y] == 1 and data[x][y-1] == 1:
x -= 1
elif data[x][y] == 1 and data[x][y-1] != 1:
while x !=0 and x <99:
for x in range(100) and y in range(100):
# for x in range(100):
# for y in range(100):
# ans = 0
# # 다음 칸이 1이 아닐 경우는 아래로 이동
# if data[x][y] == 1 and data[x][y+1] != 1:
# ans = x
# x = x
# y += 1
# # 다음 칸도 1일 경우는 옆으로 이동
# elif data[x][y] == 1 and data[x][y+1] == 1:
# x += 1
# y = y
# if data[x][y] == 2:
# return ans
# 다시 못돌아가게 하려면, 이미 지나온 1은 값을 바꿔야 함
# print("{} {}".format()) | [
"suhyunpark0825@gmail.com"
] | suhyunpark0825@gmail.com |
63fa17d294b0132764b6e2dcc7e328765d7b7745 | d3d730cda1d4fd89dc2f52bcb5366c8e7dd8e1db | /Tenka1/D2.py | 4ad83d559b672ddd0b07c717e3cc39720f530999 | [] | no_license | sasakishun/atcoder | 86e3c161f306d96e026172138aca06ac9a90f3ea | 687afaa05b5a98a04675ab24ac7a53943a295d8e | refs/heads/master | 2020-03-19T19:58:07.717059 | 2019-05-04T14:39:00 | 2019-05-04T14:39:00 | 136,882,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | N=int(input())
k=2
while N>=(k-1)*k/2:
if N==(k-1)*k/2:
print("Yes")
print(k)
a=list([] for i in range(k))
t=1
for i in range(0,k):
for n in range(1,k-i):
a[i].append(t)
a[i+n].append(t)
t+=1
for i in range(k):
print(str(k-1)+" "+" ".join(map(str,a[i])))
exit()
else:
k+=1
print("No")
| [
"Pakka-xeno@keio.jp"
] | Pakka-xeno@keio.jp |
a5b7bc2c28b4c11679b81349b8926af80e4b08ab | 1c2428489013d96ee21bcf434868358312f9d2af | /ultracart/models/gift_certificate_response.py | 199e3478d7a6a976125ce016d228ad964699033a | [
"Apache-2.0"
] | permissive | UltraCart/rest_api_v2_sdk_python | 7821a0f6e0e19317ee03c4926bec05972900c534 | 8529c0bceffa2070e04d467fcb2b0096a92e8be4 | refs/heads/master | 2023-09-01T00:09:31.332925 | 2023-08-31T12:52:10 | 2023-08-31T12:52:10 | 67,047,356 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,234 | py | # coding: utf-8
"""
UltraCart Rest API V2
UltraCart REST API Version 2 # noqa: E501
OpenAPI spec version: 2.0.0
Contact: support@ultracart.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class GiftCertificateResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'error': 'Error',
'gift_certificate': 'GiftCertificate',
'metadata': 'ResponseMetadata',
'success': 'bool',
'warning': 'Warning'
}
attribute_map = {
'error': 'error',
'gift_certificate': 'gift_certificate',
'metadata': 'metadata',
'success': 'success',
'warning': 'warning'
}
def __init__(self, error=None, gift_certificate=None, metadata=None, success=None, warning=None): # noqa: E501
"""GiftCertificateResponse - a model defined in Swagger""" # noqa: E501
self._error = None
self._gift_certificate = None
self._metadata = None
self._success = None
self._warning = None
self.discriminator = None
if error is not None:
self.error = error
if gift_certificate is not None:
self.gift_certificate = gift_certificate
if metadata is not None:
self.metadata = metadata
if success is not None:
self.success = success
if warning is not None:
self.warning = warning
@property
def error(self):
"""Gets the error of this GiftCertificateResponse. # noqa: E501
:return: The error of this GiftCertificateResponse. # noqa: E501
:rtype: Error
"""
return self._error
@error.setter
def error(self, error):
"""Sets the error of this GiftCertificateResponse.
:param error: The error of this GiftCertificateResponse. # noqa: E501
:type: Error
"""
self._error = error
@property
def gift_certificate(self):
"""Gets the gift_certificate of this GiftCertificateResponse. # noqa: E501
:return: The gift_certificate of this GiftCertificateResponse. # noqa: E501
:rtype: GiftCertificate
"""
return self._gift_certificate
@gift_certificate.setter
def gift_certificate(self, gift_certificate):
"""Sets the gift_certificate of this GiftCertificateResponse.
:param gift_certificate: The gift_certificate of this GiftCertificateResponse. # noqa: E501
:type: GiftCertificate
"""
self._gift_certificate = gift_certificate
@property
def metadata(self):
"""Gets the metadata of this GiftCertificateResponse. # noqa: E501
:return: The metadata of this GiftCertificateResponse. # noqa: E501
:rtype: ResponseMetadata
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this GiftCertificateResponse.
:param metadata: The metadata of this GiftCertificateResponse. # noqa: E501
:type: ResponseMetadata
"""
self._metadata = metadata
@property
def success(self):
"""Gets the success of this GiftCertificateResponse. # noqa: E501
Indicates if API call was successful # noqa: E501
:return: The success of this GiftCertificateResponse. # noqa: E501
:rtype: bool
"""
return self._success
@success.setter
def success(self, success):
"""Sets the success of this GiftCertificateResponse.
Indicates if API call was successful # noqa: E501
:param success: The success of this GiftCertificateResponse. # noqa: E501
:type: bool
"""
self._success = success
@property
def warning(self):
"""Gets the warning of this GiftCertificateResponse. # noqa: E501
:return: The warning of this GiftCertificateResponse. # noqa: E501
:rtype: Warning
"""
return self._warning
@warning.setter
def warning(self, warning):
"""Sets the warning of this GiftCertificateResponse.
:param warning: The warning of this GiftCertificateResponse. # noqa: E501
:type: Warning
"""
self._warning = warning
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(GiftCertificateResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GiftCertificateResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"perry@ultracart.com"
] | perry@ultracart.com |
1f5101cb29cfe1daa30392aa6f2aeeafa8e4209b | eba02c3c98f00288e81b5898a201cc29518364f7 | /chapter_005/exercises/more_conditional_tests.py | 9f4d2560c6178f258467c7d80860c4bb30f28dcb | [] | no_license | kengru/pcrash-course | 29f3cf49acfd4a177387634410d28de71d279e06 | 5aa5b174e85a0964eaeee1874b2be1c144b7c192 | refs/heads/master | 2021-05-16T09:36:16.349626 | 2017-10-11T17:56:56 | 2017-10-11T17:56:56 | 104,481,645 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | # Exercise 5-2. Creating diferent test conditions based on what was learned.
if 'ham' == 'ham' and 'ham' != 'cheese':
print('Yes it does, ham.')
if 'ham' == 'HAM'.lower() or 'magic' != 'cool':
print('Lower works.')
if 45 > 22:
print('Math is on point.')
compilation = ['movies', 'tv', 'internet']
if 'movies' in compilation:
print('Its here.')
if 'ipad' not in compilation:
print('Yes, it is not here.')
| [
"kengrullon@gmail.com"
] | kengrullon@gmail.com |
5b8097b0e65b0a05a6362af661749b6e98f2a706 | 1a9852fe468f18e1ac3042c09286ccda000a4135 | /Specialist Certificate in Data Analytics Essentials/DataCamp/05-Working_with_Dates_and_Times/e21_what_time_did_the_bike_leave_global_edition.py | 6d9438cf0cfab5cf725998d2d137511ed7776d3e | [] | no_license | sarmabhamidipati/UCD | 452b2f1e166c1079ec06d78e473730e141f706b2 | 101ca3152207e2fe67cca118923896551d5fee1c | refs/heads/master | 2023-08-14T15:41:24.312859 | 2021-09-22T17:33:01 | 2021-09-22T17:33:01 | 386,592,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,105 | py | """
What time did the bike leave? (Global edition)
When you need to move a datetime from one timezone into another, use .astimezone() and tz.
Often you will be moving things into UTC, but for fun let's try moving things from 'America/New_York'
into a few different time zones.
Set uk to be the timezone for the UK: 'Europe/London'.
Change local to be in the uk timezone and assign it to notlocal.
Set ist to be the timezone for India: 'Asia/Kolkata'.
Change local to be in the ist timezone and assign it to notlocal.
Set sm to be the timezone for Samoa: 'Pacific/Apia'.
Change local to be in the sm timezone and assign it to notlocal.
"""
from dateutil import tz
from datetime import datetime, timedelta, timezone
onebike_datetimes = [
{'start': datetime(2017, 10, 1, 15, 23, 25), 'end': datetime(2017, 10, 1, 15, 26, 26)},
{'start': datetime(2017, 10, 1, 15, 42, 57), 'end': datetime(2017, 10, 1, 17, 49, 59)},
{'start': datetime(2017, 10, 2, 6, 37, 10), 'end': datetime(2017, 10, 2, 6, 42, 53)},
{'start': datetime(2017, 10, 2, 8, 56, 45), 'end': datetime(2017, 10, 2, 9, 18, 3)},
{'start': datetime(2017, 10, 2, 18, 23, 48), 'end': datetime(2017, 10, 2, 18, 45, 5)}
]
# Create the timezone object
uk = tz.gettz('Europe/London')
# Pull out the start of the first trip
local = onebike_datetimes[0]['start']
# What time was it in the UK?
notlocal = local.astimezone(uk)
# Print them out and see the difference
print(local.isoformat())
print(notlocal.isoformat())
# Create the timezone object
ist = tz.gettz('Asia/Kolkata')
# Pull out the start of the first trip
local = onebike_datetimes[0]['start']
# What time was it in India?
notlocal = local.astimezone(ist)
# Print them out and see the difference
print(local.isoformat())
print(notlocal.isoformat())
# Create the timezone object
sm = tz.gettz('Pacific/Apia')
# Pull out the start of the first trip
local = onebike_datetimes[0]['start']
# What time was it in Samoa?
notlocal = local.astimezone(sm)
# Print them out and see the difference
print(local.isoformat())
print(notlocal.isoformat()) | [
"b_vvs@yahoo.com"
] | b_vvs@yahoo.com |
1da4536c0af19d8e8bde11b153ecb6d410d36f41 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_moderator.py | 74b75d194a30c3cf654b57a41026b96f7db76daf | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py |
#calss header
class _MODERATOR():
def __init__(self,):
self.name = "MODERATOR"
self.definitions = [u'someone who tries to help other people come to an agreement: ', u'someone who makes certain that a formal discussion happens without problems and follows the rules: ', u'someone who makes certain that all the people marking an examination use the same standards: ', u'someone who makes sure that the rules of an internet discussion are not broken, for example by removing any threatening or offensive messages']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
62d9c8feb5e10a434cb0452b1e2a854e61bd7836 | 4751fd86184b64316d694a98671d34faae76ffe6 | /plannerrr/migrations/0025_schedules_course_title.py | 3e43b6f4e109551e60a1553e79f52dd5b692afbb | [] | no_license | mohammedaliyu136/dg_planner | 8a6a4888cc109d6c3a1cb115494a1e6decbb864a | a0fb87e182527e541e7758a2c4720ddbb2438145 | refs/heads/master | 2020-04-03T08:09:02.020426 | 2018-10-29T19:57:16 | 2018-10-29T19:57:16 | 155,124,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2018-01-29 06:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('planner', '0024_auto_20180129_0715'),
]
operations = [
migrations.AddField(
model_name='schedules',
name='course_title',
field=models.CharField(max_length=50, null=True),
),
]
| [
"mohammedaliyu136@gmail.com"
] | mohammedaliyu136@gmail.com |
7b0e8af35ae9596fa6784ec9856f4ceaa39818e0 | 97886c65242f9fa3814f205b509483890b709e8a | /1_Zadania/Dzien_2/4_Iteratory_generatory/zad_1.py | 2b92f19d7f42761f1100686273b493a56f09f56c | [] | no_license | Danutelka/Coderslab-Python-progr-obiektowe | d16cad0711079c9dd83676066f8f44dedb9013a2 | b68aeda14024be48fdb4fb1b5e3d48afbaac0b8c | refs/heads/master | 2020-08-04T15:26:20.479103 | 2019-05-05T18:16:04 | 2019-05-05T18:25:56 | 212,183,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | import random
class Dice:
def __init__(self, type):
self._types = [3, 4, 6, 8, 10, 12, 20, 100]
self.type = type
@property
def type(self):
return self._type
@type.setter
def type(self, type):
if type in self._types:
self._type = type
else:
self._type = 6
def roll(self):
return random.randint(1, self.type)
d = Dice(10)
print(d.roll())
| [
"kawecka.d@gmail.com"
] | kawecka.d@gmail.com |
9631e0cb94c18704e11042ac1a62aa26b73253be | 3a547785455c4b447de5f43e134aee1f57388a7e | /SWEA/4406.py | 66281b078c22f92d03c424cb7c51ce997463f0e7 | [] | no_license | Jungwoo-20/Algorithm | bfdbca1b87500e508307a639dc2af5a86258c227 | 7767bf5b0ce089155809743af0b562b076e75d9b | refs/heads/master | 2023-08-17T05:59:25.083859 | 2021-09-22T03:26:45 | 2021-09-22T03:26:45 | 289,666,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | T = int(input())
arr = ['a','e','i','o','u']
for cnt in range(1, T + 1):
n = list(map(str, input()))
temp = []
result = ''
for i in n:
if i not in arr:
temp.append(i)
for i in temp:
result +=i
print('#' + str(cnt) + ' ' + str(result)) | [
"jungwoo7250@naver.com"
] | jungwoo7250@naver.com |
a3e2fd3c796bc8fc667d472ac22d714eeb9e8107 | 4175c20f89fc408696d22a488a29b46836e15cbf | /travelly/travelly/wsgi.py | b96b60b26a9cae9a3f63b84f092c6c3d187f1e2f | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | UPstartDeveloper/fiercely-souvenir | 47df7885a153b8df9e6af1aac72579da4af85e63 | 65d933c64a3bf830f51ac237f5781ddfb69f342c | refs/heads/master | 2022-12-09T21:37:19.810414 | 2021-04-30T23:47:39 | 2021-04-30T23:47:39 | 228,493,146 | 0 | 0 | MIT | 2022-12-08T03:24:23 | 2019-12-16T23:20:11 | JavaScript | UTF-8 | Python | false | false | 393 | py | """
WSGI config for travelly project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'travelly.settings')
application = get_wsgi_application()
| [
"zainr7989@gmail.com"
] | zainr7989@gmail.com |
cc5701e8bd14efc27bacd072459b9c2c0c1f0638 | f4f147a9859c5605b22429b05dc43315b06b3215 | /manage.py | 32de577bd064ffbff0ca93d7ae91639c2e93486b | [] | no_license | nickdotreid/hit-fails | 151cebdf3ecfb5168bf7a5b3937d6ed4abdb1cb9 | 7e8f0b0af60181d1922043270de27159ba4f4337 | refs/heads/master | 2016-09-15T18:08:00.311352 | 2014-07-22T16:43:51 | 2014-07-22T16:43:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
# Load the Heroku environment.
from herokuapp.env import load_env
load_env(__file__, "hitfails")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hitfails.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"nickreid@nickreid.com"
] | nickreid@nickreid.com |
eacc91c40d88de42dd69c15079c96805027b981b | e4cd0810417fecc5aaa5f1e5ccaf4af75c57b4cd | /data_set/error_row_parsing.py | 268e86abffd99f42508fc86ab90101e8b92bedbd | [] | no_license | Areum120/epis_data_project | c7c9d859d70df1f9bef4b7dd691a09c27d078e8f | 567c51aa89139666521e45f76c9fd23029d2660b | refs/heads/master | 2023-06-01T15:03:16.908647 | 2021-04-14T01:39:37 | 2021-04-14T01:39:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | import json
with open('bds_safe_restaurant_error.json', encoding='utf-8') as f:
ls = f.readlines()
for l in ls:
print(json.loads(l))
| [
"oceanfog1@gmail.com"
] | oceanfog1@gmail.com |
ad79bcdb94077ac1c13af74936728b2ff0f4b9bf | 1577e1cf4e89584a125cffb855ca50a9654c6d55 | /pyobjc/pyobjc/pyobjc-framework-Cocoa-2.5.1/PyObjCTest/test_cfdictionary.py | be87f85973a7b9246d9e2b734926df2bda623acc | [
"MIT"
] | permissive | apple-open-source/macos | a4188b5c2ef113d90281d03cd1b14e5ee52ebffb | 2d2b15f13487673de33297e49f00ef94af743a9a | refs/heads/master | 2023-08-01T11:03:26.870408 | 2023-03-27T00:00:00 | 2023-03-27T00:00:00 | 180,595,052 | 124 | 24 | null | 2022-12-27T14:54:09 | 2019-04-10T14:06:23 | null | UTF-8 | Python | false | false | 6,093 | py | from CoreFoundation import *
from Foundation import NSDictionary, NSMutableDictionary, NSCFDictionary
from PyObjCTools.TestSupport import *
try:
long
except NameError:
long = int
class TestCFDictionary (TestCase):
def testCreation(self):
dictionary = CFDictionaryCreate(None,
('aap', 'noot', 'mies', 'wim'),
('monkey', 'nut', 'missy', 'john'),
4, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assert_(isinstance(dictionary, CFDictionaryRef))
self.assertEqual(dictionary, {
'aap': 'monkey',
'noot': 'nut',
'mies': 'missy',
'wim': 'john'
})
dictionary = CFDictionaryCreateMutable(None, 0, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assert_(isinstance(dictionary, CFMutableDictionaryRef))
CFDictionarySetValue(dictionary, 'hello', 'world')
self.assertEqual(dictionary, {'hello': 'world'})
def testApplyFunction(self):
dictionary = CFDictionaryCreate(None,
('aap', 'noot', 'mies', 'wim'),
('monkey', 'nut', 'missy', 'john'), 4, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
context = []
def function(key, value, context):
context.append((key, value))
self.assertArgIsFunction(CFDictionaryApplyFunction, 1, b'v@@@', False)
self.assertArgHasType(CFDictionaryApplyFunction, 2, b'@')
CFDictionaryApplyFunction(dictionary, function, context)
context.sort()
self.assertEqual(len(context) , 4)
self.assertEqual(context,
[
(b'aap'.decode('ascii'), b'monkey'.decode('ascii')),
(b'mies'.decode('ascii'), b'missy'.decode('ascii')),
(b'noot'.decode('ascii'), b'nut'.decode('ascii')),
(b'wim'.decode('ascii'), b'john'.decode('ascii'))
])
def testTypeID(self):
self.assertIsInstance(CFDictionaryGetTypeID(), (int, long))
def testCreation(self):
dct = CFDictionaryCreate(None, [b"key1".decode('ascii'), b"key2".decode('ascii')], [42, 43], 2, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertIsInstance(dct, CFDictionaryRef)
dct = CFDictionaryCreateCopy(None, dct)
self.assertIsInstance(dct, CFDictionaryRef)
dct = CFDictionaryCreateMutable(None, 0, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertIsInstance(dct, CFDictionaryRef)
dct = CFDictionaryCreateMutableCopy(None, 0, dct)
self.assertIsInstance(dct, CFDictionaryRef)
def testInspection(self):
dct = CFDictionaryCreate(None, [b"key1".decode('ascii'), b"key2".decode('ascii')], [42, 42], 2, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertIsInstance(dct, CFDictionaryRef)
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertEqual(CFDictionaryGetCountOfKey(dct, b"key1".decode('ascii')) , 1)
self.assertEqual(CFDictionaryGetCountOfKey(dct, b"key3".decode('ascii')) , 0)
self.assertEqual(CFDictionaryGetCountOfValue(dct, 42) , 2)
self.assertEqual(CFDictionaryGetCountOfValue(dct, 44) , 0)
self.assertResultHasType(CFDictionaryContainsKey, objc._C_NSBOOL)
self.assertTrue(CFDictionaryContainsKey(dct, b"key1".decode('ascii')))
self.assertFalse(CFDictionaryContainsKey(dct, b"key3".decode('ascii')))
self.assertResultHasType(CFDictionaryContainsValue, objc._C_NSBOOL)
self.assertTrue(CFDictionaryContainsValue(dct, 42))
self.assertFalse(CFDictionaryContainsValue(dct, b"key3".decode('ascii')))
self.assertEqual(CFDictionaryGetValue(dct, "key2") , 42)
self.assertIs(CFDictionaryGetValue(dct, "key3"), None)
self.assertResultHasType(CFDictionaryGetValueIfPresent, objc._C_NSBOOL)
self.assertArgIsOut(CFDictionaryGetValueIfPresent, 2)
ok, value = CFDictionaryGetValueIfPresent(dct, "key2", None)
self.assertTrue(ok)
self.assertEqual(value , 42)
ok, value = CFDictionaryGetValueIfPresent(dct, "key3", None)
self.assertFalse(ok)
self.assertIs(value, None)
keys, values = CFDictionaryGetKeysAndValues(dct, None, None)
self.assertEqual(values , (42, 42))
keys = list(keys)
keys.sort()
self.assertEqual(keys , ['key1', 'key2'])
def testMutation(self):
dct = CFDictionaryCreateMutable(None, 0, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertEqual(CFDictionaryGetCount(dct) , 0)
CFDictionaryAddValue(dct, b"key1".decode('ascii'), b"value1".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 1)
self.assertTrue(CFDictionaryContainsKey(dct, b"key1".decode('ascii')))
CFDictionarySetValue(dct, b"key2".decode('ascii'), b"value2".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertTrue(CFDictionaryContainsKey(dct, b"key2".decode('ascii')))
CFDictionaryReplaceValue(dct, b"key2".decode('ascii'), b"value2b".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertTrue(CFDictionaryContainsKey(dct, b"key2".decode('ascii')))
self.assertEqual(CFDictionaryGetValue(dct, "key2") , b"value2b".decode('ascii'))
CFDictionaryReplaceValue(dct, b"key3".decode('ascii'), b"value2b".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertFalse(CFDictionaryContainsKey(dct, b"key3".decode('ascii')))
CFDictionaryRemoveValue(dct, b"key1".decode('ascii'))
self.assertFalse(CFDictionaryContainsKey(dct, b"key1".decode('ascii')))
CFDictionaryRemoveAllValues(dct)
self.assertFalse(CFDictionaryContainsKey(dct, b"key2".decode('ascii')))
self.assertEqual(CFDictionaryGetCount(dct) , 0)
if __name__ == "__main__":
main()
| [
"opensource@apple.com"
] | opensource@apple.com |
4347b0b5ff4fe7dbd42fa72e6f26f59966cde029 | f336bcdc1eeab553e0d3d1de2ca6da64cd7f27bc | /macd/ma.py | 095e1514d3c74ba792b173092c2ffe23a7839f1a | [] | no_license | tonylibing/stockpractice | 04568c017a96815e3796c895e74f11fa128d3ffe | 039e144b3a4cc00e400338174b31fa277df55517 | refs/heads/main | 2023-09-05T03:53:02.565539 | 2021-10-30T22:08:16 | 2021-10-30T22:08:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,019 | py | # coding:utf-8
# 1000元实盘练习程序
# 测试判断牛熊的指标
# 根据《阿佩尔均线操盘术》第二章
import pandas as pd
import numpy as np
import akshare as ak
import run
import tools
import efinance as ef
import datetime, quandl
import matplotlib.pyplot as plt
import os
from backtest import BackTest
import strategy as st
# 对策略进行回测
@run.change_dir
def backTest(refresh = False):
month = 15*12
code = "000300" # 沪深300指数
benchmark = tools.getBenchmarkData(month = month, refresh = refresh, path = "./stockdata/")
backtest = BackTest(codes = [code], strategy = st.MA, benchmark = benchmark, month = month, cash = 1000000, refresh = refresh, path = "./stockdata/", bOpt = False)
results = backtest.getResults()
print(results)
backtest.drawResults(code + "result")
# res = backtest.optRun(period = range(5,200))
# print("测试c", res)
if __name__ == "__main__":
tools.init()
backTest(refresh = False)
| [
"zwdnet@163.com"
] | zwdnet@163.com |
1f6538e3cfedc22ca80c5652086f6deb7f4bf652 | 6fce025097cebfd9d1dd37f6611e7fdfdbea90e6 | /data_sync/nwp_prec_map.py | 6c629d1d70cd7fceffb4ccd6c33fc27c2260f5bf | [] | no_license | ANU-WALD/pluvi_pondus | ec0439d19acdcf4fdf712d6b14a1714297d661b2 | ff8680f7115ab2cb75138bf6705abb59618e47d1 | refs/heads/master | 2021-07-01T14:32:14.501631 | 2020-08-22T09:41:28 | 2020-08-22T09:41:28 | 138,804,652 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,533 | py | import xarray as xr
import numpy as np
import sys
import imageio
import os
if len(sys.argv) != 3:
sys.exit(1)
ds = xr.open_dataset(sys.argv[1])
print(ds['tp'].shape)
p = ds['tp'][0,:,:].data * 1000
p = np.clip(p, 0, 150)
p = np.log(1 + p)
norm_p = np.log(1 + p) / 5.01728
im = np.zeros((p.shape[0], p.shape[1], 4), dtype=np.float64)
im[:,:,2] = 1
im[:,:,3] = norm_p
im = (im*255).astype(np.uint8)
fname, _ = os.path.splitext(sys.argv[2])
imageio.imwrite(sys.argv[2], im)
os.system("gdal_translate -of GTiff -a_ullr -180 90 180 -90 -a_srs EPSG:4326 {}.png {}.tif".format(fname, fname))
os.system("gdalwarp -of GTiff -s_srs EPSG:4326 -t_srs EPSG:3857 -te_srs EPSG:4326 -te -180 -85.0511 180 85.0511 {}.tif {}_proj.tif".format(fname, fname))
os.system("gdal_translate -of PNG {}_proj.tif {}.png".format(fname, fname))
os.system("rm *.tif")
print(ds['cp'].shape)
p = ds['cp'][0,:,:].data * 1000
p = np.clip(p, 0, 150)
p = np.log(1 + p)
norm_p = np.log(1 + p) / 5.01728
im = np.zeros((p.shape[0], p.shape[1], 4), dtype=np.float64)
im[:,:,2] = 1
im[:,:,3] = norm_p
im = (im*255).astype(np.uint8)
fname = "CP-" + fname
imageio.imwrite("{}.png".format(fname), im)
os.system("gdal_translate -of GTiff -a_ullr -180 90 180 -90 -a_srs EPSG:4326 {}.png {}.tif".format(fname, fname))
os.system("gdalwarp -of GTiff -s_srs EPSG:4326 -t_srs EPSG:3857 -te_srs EPSG:4326 -te -180 -85.0511 180 85.0511 {}.tif {}_proj.tif".format(fname, fname))
os.system("gdal_translate -of PNG {}_proj.tif {}.png".format(fname, fname))
os.system("rm *.tif")
| [
"pablo.larraondo@anu.edu.au"
] | pablo.larraondo@anu.edu.au |
31c568493d455ebdff42337b27ee809862d80424 | 16516732031deb7f7e074be9fe757897557eee2d | /朝活/朝活/20200420/A - C-Filter.py | 8f6a3a88e641497c59026898a80ef871bdebcde2 | [] | no_license | cale-i/atcoder | 90a04d3228864201cf63c8f8fae62100a19aefa5 | c21232d012191ede866ee4b9b14ba97eaab47ea9 | refs/heads/master | 2021-06-24T13:10:37.006328 | 2021-03-31T11:41:59 | 2021-03-31T11:41:59 | 196,288,266 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | # https://atcoder.jp/contests/digitalarts2012/tasks/digitalarts_1
import re
s=list(input().split())
n=int(input())
t=[input().replace('*','.') for _ in range(n)]
for pat in t:
regex=re.compile(r'^{}$'.format(pat))
for i in range(len(s)):
has_word=regex.search(s[i])
if has_word:
s[i]='*'*len(s[i])
print(*s) | [
"calei078029@gmail.com"
] | calei078029@gmail.com |
2eb4a12253b0787e1d7727e128ff4724255853eb | a4b938b953d25bb529564d0e3f025b5a93a73d8b | /gui/http_api_e2e_test.py | 5bc9859a94d4cf20f236341bd64619f36fb03bcb | [
"DOC",
"Apache-2.0"
] | permissive | greg-gallaway/grr | 7887d3ecca33f9b5544f297d1bb1320672678078 | 919a844c396136bd49c457f18d853dd10b79abed | refs/heads/master | 2021-01-18T17:27:19.111359 | 2015-06-05T09:44:25 | 2015-06-05T09:44:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,378 | py | #!/usr/bin/env python
"""End-to-end tests for HTTP API.
HTTP API plugins are tested with their own dedicated unit-tests that are
protocol- and server-independent. Tests in this file test the full GRR server
stack with regards to the HTTP API.
"""
import json
import requests
import logging
from grr.gui import runtests
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import test_lib
class HTTPApiEndToEndTestProgram(test_lib.GrrTestProgram):
server_port = None
def setUp(self):
self.trd = runtests.DjangoThread()
self.trd.StartAndWaitUntilServing()
class CSRFProtectionTest(test_lib.GRRBaseTest):
"""Tests GRR's CSRF protection logic for the HTTP API."""
def setUp(self):
super(CSRFProtectionTest, self).setUp()
port = (HTTPApiEndToEndTestProgram.server_port or
config_lib.CONFIG["AdminUI.port"])
self.base_url = "http://localhost:%s" % port
def testGETRequestWithoutCSRFTokenSucceeds(self):
response = requests.get(self.base_url + "/api/config")
self.assertEquals(response.status_code, 200)
# Assert XSSI protection is in place.
self.assertEquals(response.text[:5], ")]}'\n")
def testPOSTRequestWithoutCSRFTokenFails(self):
data = {
"client_ids": ["C.0000000000000000"],
"labels": ["foo", "bar"]
}
response = requests.post(self.base_url + "/api/clients/labels/add",
data=json.dumps(data))
self.assertEquals(response.status_code, 403)
self.assertTrue("CSRF" in response.text)
def testPOSTRequestWithCSRFTokenSucceeds(self):
# Fetch csrf token from the cookie set on the main page.
index_response = requests.get(self.base_url)
csrf_token = index_response.cookies.get("csrftoken")
headers = {
"x-csrftoken": csrf_token,
"x-requested-with": "XMLHttpRequest"
}
data = {
"client_ids": ["C.0000000000000000"],
"labels": ["foo", "bar"]
}
cookies = {
"csrftoken": csrf_token
}
response = requests.post(self.base_url + "/api/clients/labels/add",
headers=headers, data=json.dumps(data),
cookies=cookies)
self.assertEquals(response.status_code, 200)
def main(argv):
HTTPApiEndToEndTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
| [
"github@mailgreg.com"
] | github@mailgreg.com |
f2d7d79f569684d8a1acd419e1f4cded176a399e | 71894f980d1209017837d7d02bc38ffb5dbcb22f | /audio/DIYAmazonAlexa/DIYAmazonAlexa.py | 9238be8e205d02461cb2611d43648d5199630f4c | [] | no_license | masomel/py-iot-apps | 0f2418f8d9327a068e5db2cdaac487c321476f97 | 6c22ff2f574a37ba40a02625d6ed68d7bc7058a9 | refs/heads/master | 2021-03-22T04:47:59.930338 | 2019-05-16T06:48:32 | 2019-05-16T06:48:32 | 112,631,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,807 | py | #! /usr/bin/env python
import os
import random
import time
import random
from creds import *
import requests
import json
import re
import subprocess
from memcache import Client
# Setup
recorded = False
servers = ["127.0.0.1:11211"]
mc = Client(servers, debug=1)
path = os.path.realpath(__file__).rstrip(os.path.basename(__file__))
def internet_on():
print("Checking Internet Connection")
try:
r = requests.get('https://api.amazon.com/auth/o2/token')
print("Connection OK")
return True
except:
print("Connection Failed")
return False
def gettoken():
token = mc.get("access_token")
refresh = refresh_token
if token:
return token
elif refresh:
payload = {"client_id": Client_ID, "client_secret": Client_Secret,
"refresh_token": refresh, "grant_type": "refresh_token", }
url = "https://api.amazon.com/auth/o2/token"
print("payload=")
print(payload)
r = requests.post(url, data=payload)
print("res=")
print((r.text))
resp = json.loads(r.text)
mc.set("access_token", resp['access_token'], 3570)
return resp['access_token']
else:
return False
def alexa():
    """Send recording.wav to the Alexa Voice Service and play back the reply.

    Posts the recorded audio as a multipart AVS "recognize" request, then
    locates the MP3 speech part inside the multipart response, writes it to
    response.mp3 and plays it with mpg123.

    NOTE(review): written for Python 2 -- ``r.content`` (bytes) is split with
    str patterns below, which would fail under Python 3.
    """
    url = 'https://access-alexa-na.amazon.com/v1/avs/speechrecognizer/recognize'
    headers = {'Authorization': 'Bearer %s' % gettoken()}
    d = {  # JSON metadata part of the AVS request
        "messageHeader": {
            "deviceContext": [
                {
                    "name": "playbackState",
                    "namespace": "AudioPlayer",
                    "payload": {
                        "streamId": "",
                        "offsetInMilliseconds": "0",
                        "playerActivity": "IDLE"
                    }
                }
            ]
        },
        "messageBody": {
            "profile": "alexa-close-talk",
            "locale": "en-us",
            "format": "audio/L16; rate=16000; channels=1"
        }
    }
    with open(path + 'recording.wav') as inf:
        files = [  # multipart body: JSON request part + raw audio part
            ('file', ('request', json.dumps(d), 'application/json; charset=UTF-8')),
            ('file', ('audio', inf, 'audio/L16; rate=16000; channels=1'))
        ]
        print(type(files))
        print(type(d))
        r = requests.post(url, headers=headers, files=files)
    if r.status_code == 200:
        # Recover the multipart boundary token from the Content-Type header.
        for v in r.headers['content-type'].split(";"):
            if re.match('.*boundary.*', v):
                boundary = v.split("=")[1]
        data = r.content.split(boundary)
        # Heuristic: any part of at least 1 KiB is assumed to be the MP3 audio.
        for d in data:
            if (len(d) >= 1024):
                # Payload follows the blank line after the part headers; the
                # trailing '--' closes the final multipart boundary.
                audio = d.split('\r\n\r\n')[1].rstrip('--')
                print(type(audio))
                with open(path + "response.mp3", 'wb') as f:
                    f.write(audio)
        os.system(
            'mpg123 -q {}1sec.mp3 {}response.mp3'.format(path + "/assets/", path))
    else:
        print("requests returned r.status_code = %r" % r.status_code)
def start():
    """Record one utterance from the MATRIX mic array and forward it to Alexa.

    Blocks until the external ``micarray_dump`` helper finishes capturing,
    writes the captured audio to recording.wav next to this script, then
    hands the file to :func:`alexa`.
    """
    print("Touch MATRIX Creator IR Sensor")
    process = subprocess.Popen(
        ['./micarray/build/micarray_dump'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # stderr is captured but intentionally ignored.
    audio, err = process.communicate()
    # Context manager guarantees the file is flushed and closed even if the
    # write fails, so alexa() always sees a complete recording.
    with open(path + 'recording.wav', 'w') as rf:
        rf.write(audio)
    alexa()
if __name__ == "__main__":
    print("This is a MATRIX Creator demo - not ready for production")
    print("Running workaround for GPIO 16 (IR-RX) ")
    # Unload the LIRC kernel module so GPIO 16 (the IR receiver pin) is free.
    subprocess.Popen(['sudo', 'rmmod', 'lirc_rpi'])
    # Block startup until the AVS endpoint is reachable.
    while internet_on() == False:
        print(".")
    token = gettoken()
    # Play the short lead-in clip followed by the greeting.
    os.system('mpg123 -q {}1sec.mp3 {}hello.mp3'.format(path +
              "/assets/", path + "/assets/"))
    # Main loop: arm edge detection on GPIO 16, then record/converse once.
    while True:
        subprocess.Popen(['gpio', 'edge', '16', 'both'])
        start()
| [
"msmelara@gmail.com"
] | msmelara@gmail.com |
067bb5ca47e251d38571d4f8c1e9fea477cedd2b | bd8d89a09438328e0e9b76b1ed8bc7517cfd0f79 | /pifify/materials/inconel.py | c510e89e163c0968faac5471f318f885d8af349d | [] | no_license | bkappes/pifify | 3361925b875ce3ce216361d0657251f058e30d82 | 92ed2d27d7bca26c23db4604e155c7565f14413c | refs/heads/master | 2021-01-01T03:35:37.989045 | 2016-11-17T17:28:12 | 2016-11-17T17:28:12 | 58,249,456 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,777 | py | import sys, os
# sys.path.append(os.path.dirname(os.path.realpath(__file__) + \
# os.path.sep + os.path.pardir + \
# os.path.sep + os.path.pardir))
# this is specific the location of pypif, since I haven't
# installed pypif
sys.path.append('/Users/bkappes/src/citrine/pypif')
from pypif import pif
from alloy import AlloyBase
class Inconel718(AlloyBase):
    """Alloy definition record for Inconel 718, built on pypif objects.

    Populates names, a datasheet reference, optional preparation steps and
    the nominal composition (weight percent, iron as balance).
    """

    # Nominal composition limits in weight percent: (element, (min, max)).
    # Iron is the balance element and is derived from these ranges below.
    _COMPOSITION_SPEC = (
        ('Ni', (50., 55.)),
        ('Cr', (17., 21.)),
        ('Nb', (4.75, 5.5)),
        ('Mo', (2.8, 3.3)),
        ('Ti', (0.65, 1.15)),
        ('Al', (0.2, 0.8)),
        ('Co', (0.0, 1.0)),
        ('C', (0.0, 0.08)),
        ('Mn', (0.0, 0.35)),
        ('Si', (0.0, 0.35)),
        ('P', (0.0, 0.015)),
        ('S', (0.0, 0.015)),
        ('B', (0.0, 0.006)),
        ('Cu', (0.0, 0.30)),
    )

    def __init__(self, **kwds):
        super(Inconel718, self).__init__(**kwds)
        # Aliases under which this alloy is commonly specified.
        self.names = ['Inconel', 'Inconel 718', '718', 'UNS N07718',
                      'W.Nr. 2.4668', 'AMS 5596', 'ASTM B637']
        # Manufacturer datasheet used as the source of the composition limits.
        datasheet = 'http://www.specialmetals.com/documents/Inconel%20alloy%20718.pdf'
        self.references = [pif.Reference(url=datasheet)]
        # Optional processing-history steps supplied by the caller.
        self.preparation = kwds.get('preparation', [])
        # Whatever weight percent is not claimed by the alloying elements
        # remains available for the iron balance.
        remaining_low = 100.
        remaining_high = 100.
        components = []
        for symbol, (lo, hi) in self._COMPOSITION_SPEC:
            remaining_low -= hi
            remaining_high -= lo
            components.append(pif.Composition(
                element=symbol,
                ideal_weight_percent=pif.Scalar(minimum=lo, maximum=hi)))
        assert remaining_low >= 0.0
        assert remaining_high >= 0.0
        components.append(pif.Composition(
            element='Fe',
            ideal_weight_percent=pif.Scalar(minimum=remaining_low,
                                            maximum=remaining_high)))
        self.composition = components
# end 'class Inconel718'
| [
"bkappes@mines.edu"
] | bkappes@mines.edu |
ac66cdaca079fc5ed364b91e7b7c335ff66e2240 | 09e5cfe06e437989a2ccf2aeecb9c73eb998a36c | /modules/cctbx_project/gltbx/viewer_utils.py | cf30f350988463aeda5d40e35106362d81ad140a | [
"BSD-3-Clause",
"BSD-3-Clause-LBNL",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jorgediazjr/dials-dev20191018 | b81b19653624cee39207b7cefb8dfcb2e99b79eb | 77d66c719b5746f37af51ad593e2941ed6fbba17 | refs/heads/master | 2020-08-21T02:48:54.719532 | 2020-01-25T01:41:37 | 2020-01-25T01:41:37 | 216,089,955 | 0 | 1 | BSD-3-Clause | 2020-01-25T01:41:39 | 2019-10-18T19:03:17 | Python | UTF-8 | Python | false | false | 1,298 | py | from __future__ import absolute_import, division, print_function
import scitbx.array_family.flex # import dependency
import time
import boost.python
ext = boost.python.import_ext("gltbx_viewer_utils_ext")
from gltbx_viewer_utils_ext import *
def read_pixels_to_str(x, y, width, height):
    """Read the RGB framebuffer contents as a raw byte string.

    NOTE(review): the ``x`` and ``y`` arguments are accepted but ignored --
    the read always starts at the origin (0, 0).  Confirm whether offset
    reads were ever intended.
    """
    from gltbx.gl import glPixelStorei, glReadPixels, \
        GL_PACK_ALIGNMENT, GL_RGB, GL_UNSIGNED_BYTE
    # Byte-align rows so the returned buffer carries no per-row padding.
    glPixelStorei(GL_PACK_ALIGNMENT, 1)
    # The gltbx binding appends the captured buffer to this output list.
    pixels = []
    glReadPixels(
        x=0, y=0, width=width, height=height,
        format=GL_RGB, type=GL_UNSIGNED_BYTE,
        pixels=pixels)
    return pixels[0]
def read_pixels_to_pil_image(x, y, width, height):
    """Return the framebuffer as a PIL image, or None if PIL is unavailable.

    Callers should treat a None result as "screenshot support missing".
    """
    try:
        import PIL.Image
    except ImportError:
        # Best-effort: PIL is an optional dependency here.
        return None
    mode = "RGB"
    size = (width, height)
    data = read_pixels_to_str(x=x, y=y, width=width, height=height)
    decoder_name = "raw"
    raw_mode = "RGB"
    stride = 0
    # orientation -1 flips the bottom-to-top OpenGL rows into image order.
    orientation = -1
    return PIL.Image.frombytes(
        mode, size, data, decoder_name, raw_mode, stride, orientation)
class fps_monitor(object):
    """Prints a frames-per-second estimate after every tenth call to update()."""

    def __init__(self):
        self._n = 0                   # frames counted since the last report
        self._t_start = time.time()   # wall-clock time of the last report

    def update(self):
        """Register one rendered frame; report the rate once per batch of ten."""
        self._n += 1
        if self._n % 10 != 0:
            return
        now = time.time()
        elapsed = now - self._t_start
        self._t_start = now
        print("%.2f fps" % (10 / elapsed))
        self._n = 0
| [
"jorge7soccer@gmail.com"
] | jorge7soccer@gmail.com |
ff9d75b69e1c2286b7676c81629e2cea48dce8fd | 24dfab72bd987988a0f1d7786ba98281287704f7 | /proposed_algorithms/RF_DT_xgboost_demo.py | a87b41a354680189a11e8b79999b7570916a5dbe | [] | no_license | yougwypf1991/application_classification | 435432aea5b2ad055c67889057047291ef200feb | 667a86b98eb7cc2d8bd87eb1dcdad0efeaca38a7 | refs/heads/master | 2022-11-14T19:13:17.054673 | 2020-07-13T03:15:32 | 2020-07-13T03:15:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,893 | py | import random
from sklearn import metrics
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from numpy_load_and_arff import load_npy_data
from xgboost import XGBClassifier
from sklearn.preprocessing import StandardScaler
random.seed(20)
def main_xgboost(X, Y, session_size=2000, test_percent=0.1):
    """Train a DecisionTreeClassifier on growing feature prefixes and report accuracy.

    Splits (X, Y) into train/test once, then for each prefix length in a fixed
    truncation list fits a tree on the first ``i`` feature columns and prints
    the confusion matrices and train/test accuracies.

    :param X: 2-D feature matrix (sessions x bytes); assumed to support
        numpy-style slicing -- confirm against load_npy_data's output.
    :param Y: label vector aligned with the rows of X.
    :param session_size: largest prefix length to evaluate.
    :param test_percent: fraction of the data held out for testing.
    """
    input_size = 500  # only used by the (disabled) PCA reduction below
    reduce_feature_flg = False
    if reduce_feature_flg:
        print(f'Using PCA to reduce features.')
        sc = StandardScaler()
        # Fixed: the scaled output was previously assigned to X_train and
        # discarded; X must carry the standardized features into PCA.
        X = sc.fit_transform(X)
        pca_model = PCA(n_components=input_size, random_state=0)
        # Fixed: previously referenced the module-level global ``y`` instead
        # of the ``Y`` parameter (a latent NameError outside script use).
        pca_model.fit_transform(X, Y)
        X = pca_model.transform(X)
        explained_variance = pca_model.explained_variance_ratio_
        print(f'explained_variance={explained_variance}')
    print(f'X.shape={X.shape}')
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=test_percent, random_state=42)
    print(f'train_test_ratio:[{1-test_percent}:{test_percent}]')
    result = []
    value = 100  # historical n_estimators setting, kept for the log line below
    print("n_estimators: ", value)
    # Prefix lengths to evaluate; the caller's session_size caps the sweep.
    truncatelist = [10, 100, 300, 500, 2000, 3000, 6000, 7000, 8000, session_size]
    print(f'{truncatelist}')
    for i in truncatelist:
        print(f'session_size:{i}')
        clf = DecisionTreeClassifier(random_state=20)
        # Truncate both splits to the first i feature columns.
        X_train_t = X_train[:, :i]
        X_test_t = X_test[:, :i]
        print("before input....")
        print(f'X_train_t.shape:{X_train_t.shape}')
        print(y_train.shape)
        print(f'X_test_t.shape:{X_test_t.shape}')
        print(y_test.shape)
        clf.fit(X_train_t, y_train)
        predtrain = clf.predict(X_train_t)
        print(confusion_matrix(y_train, predtrain))
        predtest = clf.predict(X_test_t)
        print(confusion_matrix(y_test, predtest))
        print("train acc:", metrics.accuracy_score(y_train, predtrain))
        print("test acc", metrics.accuracy_score(y_test, predtest))
        result.append(metrics.accuracy_score(y_test, predtest))
    print(f'test acc: {result}')
if __name__ == '__main__':
    # Earlier dataset choices are kept for provenance; each assignment below
    # overrides the previous one, so only the last input_file is used.
    input_file = '../input_data/trdata-8000B_payload.npy'
    input_file = '../input_data/trdata_P_8000.npy'
    input_file = '../input_data/newapp_10220_pt.npy'
    # Number of payload bytes per session in the selected dataset.
    session_size = 10220
    X, y = load_npy_data(input_file, session_size)
    main_xgboost(X, y, session_size)
| [
"kun.bj@foxmail.com"
] | kun.bj@foxmail.com |
3cbf273f544246dd4945252ce79cf5936764e9eb | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adverbs/_enormously.py | 08cbf1915bafcb261f9de99d048c67794cf5bd02 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py |
#calss header
class _ENORMOUSLY():
def __init__(self,):
self.name = "ENORMOUSLY"
self.definitions = [u'extremely or very much: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adverbs'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
887f2b39b26d4a530f903ceabe283c002de6052c | 05c5349fff1c85c41c92c9894006e2fe2464177b | /lib/api/mineration/mineration_controller.py | ea6b03f5a61d5d0bfeec33029e3474d5a6c76236 | [] | no_license | gabrielmoreira-dev/blockchain-flask | 2a0b637a4a3e1d3f732f06cc59ae6e01422efd76 | df70ed9535e397d192ddaff04be017a15b621253 | refs/heads/main | 2023-04-26T21:18:43.490144 | 2021-05-30T18:04:21 | 2021-05-30T18:04:21 | 361,319,236 | 0 | 0 | null | 2021-05-30T18:04:22 | 2021-04-25T03:11:00 | Python | UTF-8 | Python | false | false | 2,376 | py | from domain.model.block import Block
from domain.use_case.add_transaction_uc import AddTransactionUC, AddTransactionUCParams
from domain.use_case.create_block_uc import CreateBlockUC, CreateBlockUCParams
from domain.use_case.get_hash_uc import GetHashUC, GetHashUCParams
from domain.use_case.get_address_uc import GetAddressUC
from domain.use_case.get_previous_block_uc import GetPreviousBlockUC
from domain.use_case.get_proof_of_work_uc import GetProofOfWorkUC, GetProofOfWorkUCParams
from .mineration_mapper import MinerationMapper
class MinerationController:
    def __init__(self, add_transaction_uc: AddTransactionUC,
                 get_address_uc: GetAddressUC,
                 get_previous_block_uc: GetPreviousBlockUC,
                 get_proof_of_work_uc: GetProofOfWorkUC,
                 get_hash_uc: GetHashUC, create_block_uc: CreateBlockUC):
        """Store the injected use-case collaborators needed to mine a block."""
        self.add_transaction_uc = add_transaction_uc
        self.get_address_uc = get_address_uc
        self.get_previous_block_uc = get_previous_block_uc
        self.get_proof_of_work_uc = get_proof_of_work_uc
        self.get_hash_uc = get_hash_uc
        self.create_block_uc = create_block_uc
    def mine_block(self):
        """Mine one block and return it serialized as a dict.

        Solves proof-of-work against the chain tail, hashes the previous
        block, credits this node's mining reward, then assembles the block.
        """
        previous_block = self._get_previous_block()
        proof = self._get_proof_of_work(previous_proof=previous_block.proof)
        previous_hash = self._generate_block_hash(previous_block)
        # Reward is added before block creation -- presumably so the reward
        # transaction is included in the new block; confirm in CreateBlockUC.
        self._get_reward()
        block = self._create_block(proof, previous_hash)
        return MinerationMapper.toDict(block)
    def _get_previous_block(self):
        """Return the current tail block of the chain."""
        return self.get_previous_block_uc.execute()
    def _get_proof_of_work(self, previous_proof: str):
        """Solve and return the proof-of-work derived from *previous_proof*."""
        params = GetProofOfWorkUCParams(previous_proof)
        return self.get_proof_of_work_uc.execute(params)
    def _generate_block_hash(self, block: Block):
        """Return the hash of *block* as produced by the hashing use case."""
        params = GetHashUCParams(block)
        return self.get_hash_uc.execute(params)
    def _get_reward(self):
        """Register the mining reward: 1 unit sent to this node's own address.

        The empty-string sender marks the transaction as a coinbase-style
        reward rather than a user transfer.
        """
        node_address = self.get_address_uc.execute()
        params = AddTransactionUCParams(sender='',
                                        receiver=node_address,
                                        amount=1)
        self.add_transaction_uc.execute(params)
def _create_block(self, proof: str, previous_hash: str):
params = CreateBlockUCParams(proof, previous_hash)
return self.create_block_uc.execute(params) | [
"="
] | = |
6ce349ee762970cd72732c32964c64ab5a9aa36f | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/Practical_Python_Programming_Practices/Practice 39. How to get Percentage of Uppercase and NUKE.py | ebdaf0316b7c79b9a2f73adaa920b50ccc796029 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 328 | py | s.. input("Insert some strings of Uppercase and Lowercase: ")
len_str l..(s..)
upper lower 0
___ i __ s..:
__ 'a' < i < 'z':
lower + 1
____ 'A' < i < 'Z':
upper + 1
print("Percentage of Uppercase: %.2f %%" % (upper/len_str * 100))
print("Percentage of Lowercase: %.2f %%" % (lower/len_str * 100)) | [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
d6bd4f9b0d5c2cf392d5313ef024de5f1b8ae429 | cfd93c9d0a39c1f1a2778a23977e7b3bd5fd9b84 | /baseline2018a-doc/baseline-doc/configs/astro-lsst-01_2022/deepdrillingcosmology1_prop.py | 43bbee995904d3508477450e9a88d2b4e4213039 | [
"CC-BY-4.0"
] | permissive | lsst-pst/survey_strategy | ec3f7a277b60559c6d7f0fad4a837e22255565ea | 47aa3e00576172bfeec264e2b594c99355b875ea | refs/heads/main | 2023-06-11T17:36:24.000124 | 2023-05-25T23:07:29 | 2023-05-25T23:07:29 | 102,892,062 | 10 | 6 | null | 2022-09-23T21:08:27 | 2017-09-08T18:24:28 | Jupyter Notebook | UTF-8 | Python | false | false | 8,041 | py | import lsst.sims.ocs.configuration.science.deep_drilling_cosmology1
assert type(config)==lsst.sims.ocs.configuration.science.deep_drilling_cosmology1.DeepDrillingCosmology1, 'config is of type %s.%s instead of lsst.sims.ocs.configuration.science.deep_drilling_cosmology1.DeepDrillingCosmology1' % (type(config).__module__, type(config).__name__)
# The maximum airmass allowed for any field.
config.sky_constraints.max_airmass=1.5
# The maximum fraction of clouds allowed for any field.
config.sky_constraints.max_cloud=0.7
# Flag to use 2 degree exclusion zone around bright planets.
config.sky_constraints.exclude_planets=True
# The minimum distance (units=degrees) from the moon a field must be.
config.sky_constraints.min_distance_moon=30.0
# Name for the proposal.
config.name='DeepDrillingCosmology1'
# Sky user regions for the proposal as a list of field Ids.
config.sky_user_regions=[290, 744, 1427, 2412, 2786]
config.sub_sequences={}
config.sub_sequences[0]=lsst.sims.ocs.configuration.proposal.sub_sequence.SubSequence()
# Time (units=seconds) between subsequent visits for a field/filter combination. Must be non-zero if number of grouped visits is greater than one.
config.sub_sequences[0].time_interval=259200.0
# The number of visits required for each filter in the sub-sequence.
config.sub_sequences[0].visits_per_filter=[20, 10, 20, 26, 20]
# Relative time when the window reaches maximum rank for subsequent grouped visits.
config.sub_sequences[0].time_window_max=1.0
# The number of required events for the sub-sequence.
config.sub_sequences[0].num_events=27
# The maximum number of events the sub-sequence is allowed to miss.
config.sub_sequences[0].num_max_missed=0
# Weighting factor for scaling the shape of the time window.
config.sub_sequences[0].time_weight=1.0
# The list of filters required for the sub-sequence.
config.sub_sequences[0].filters=['r', 'g', 'i', 'z', 'y']
# Relative time when the window opens for subsequent grouped visits.
config.sub_sequences[0].time_window_start=0.8
# Relative time when the window ends for subsequent grouped visits.
config.sub_sequences[0].time_window_end=1.4
# The identifier for the sub-sequence.
config.sub_sequences[0].name='main'
config.sub_sequences[1]=lsst.sims.ocs.configuration.proposal.sub_sequence.SubSequence()
# Time (units=seconds) between subsequent visits for a field/filter combination. Must be non-zero if number of grouped visits is greater than one.
config.sub_sequences[1].time_interval=86400.0
# The number of visits required for each filter in the sub-sequence.
config.sub_sequences[1].visits_per_filter=[20]
# Relative time when the window reaches maximum rank for subsequent grouped visits.
config.sub_sequences[1].time_window_max=1.0
# The number of required events for the sub-sequence.
config.sub_sequences[1].num_events=7
# The maximum number of events the sub-sequence is allowed to miss.
config.sub_sequences[1].num_max_missed=0
# Weighting factor for scaling the shape of the time window.
config.sub_sequences[1].time_weight=1.0
# The list of filters required for the sub-sequence.
config.sub_sequences[1].filters=['u']
# Relative time when the window opens for subsequent grouped visits.
config.sub_sequences[1].time_window_start=0.8
# Relative time when the window ends for subsequent grouped visits.
config.sub_sequences[1].time_window_end=1.4
# The identifier for the sub-sequence.
config.sub_sequences[1].name='u-band'
config.filters={}
config.filters['g']=lsst.sims.ocs.configuration.proposal.band_filter.BandFilter()
# Brightest magnitude limit for filter.
config.filters['g'].bright_limit=19.5
# Darkest magnitude limit for filter.
config.filters['g'].dark_limit=30.0
# The maximum seeing limit for filter
config.filters['g'].max_seeing=1.5
# Band name of the filter.
config.filters['g'].name='g'
# The list of exposure times (units=seconds) for the filter
config.filters['g'].exposures=[15.0, 15.0]
config.filters['i']=lsst.sims.ocs.configuration.proposal.band_filter.BandFilter()
# Brightest magnitude limit for filter.
config.filters['i'].bright_limit=19.5
# Darkest magnitude limit for filter.
config.filters['i'].dark_limit=30.0
# The maximum seeing limit for filter
config.filters['i'].max_seeing=1.5
# Band name of the filter.
config.filters['i'].name='i'
# The list of exposure times (units=seconds) for the filter
config.filters['i'].exposures=[15.0, 15.0]
config.filters['r']=lsst.sims.ocs.configuration.proposal.band_filter.BandFilter()
# Brightest magnitude limit for filter.
config.filters['r'].bright_limit=19.5
# Darkest magnitude limit for filter.
config.filters['r'].dark_limit=30.0
# The maximum seeing limit for filter
config.filters['r'].max_seeing=1.5
# Band name of the filter.
config.filters['r'].name='r'
# The list of exposure times (units=seconds) for the filter
config.filters['r'].exposures=[15.0, 15.0]
config.filters['u']=lsst.sims.ocs.configuration.proposal.band_filter.BandFilter()
# Brightest magnitude limit for filter.
config.filters['u'].bright_limit=21.3
# Darkest magnitude limit for filter.
config.filters['u'].dark_limit=30.0
# The maximum seeing limit for filter
config.filters['u'].max_seeing=1.5
# Band name of the filter.
config.filters['u'].name='u'
# The list of exposure times (units=seconds) for the filter
config.filters['u'].exposures=[15.0, 15.0]
config.filters['y']=lsst.sims.ocs.configuration.proposal.band_filter.BandFilter()
# Brightest magnitude limit for filter.
config.filters['y'].bright_limit=17.5
# Darkest magnitude limit for filter.
config.filters['y'].dark_limit=30.0
# The maximum seeing limit for filter
config.filters['y'].max_seeing=1.5
# Band name of the filter.
config.filters['y'].name='y'
# The list of exposure times (units=seconds) for the filter
config.filters['y'].exposures=[15.0, 15.0]
config.filters['z']=lsst.sims.ocs.configuration.proposal.band_filter.BandFilter()
# Brightest magnitude limit for filter.
config.filters['z'].bright_limit=17.5
# Darkest magnitude limit for filter.
config.filters['z'].dark_limit=30.0
# The maximum seeing limit for filter
config.filters['z'].max_seeing=1.5
# Band name of the filter.
config.filters['z'].name='z'
# The list of exposure times (units=seconds) for the filter
config.filters['z'].exposures=[15.0, 15.0]
# Flag to restart sequences that were lost due to observational constraints.
config.scheduling.restart_lost_sequences=True
# Bonus to apply to fields giving precedence to low arimass ones. Bonus runs from 0 to 1.
config.scheduling.airmass_bonus=0.0
# Bonus to apply to fields giving precedence to fields near the meridian. Bonus runs from 0 to 1.
config.scheduling.hour_angle_bonus=0.3
# The maximum number of visits requested for the proposal over the lifetime of the survey. This effects the time-balancing for the proposal, but does not prevent more visits from being taken.
config.scheduling.max_visits_goal=250000
# Flag to determine if consecutive visits are accepted.
config.scheduling.accept_consecutive_visits=True
# Flag to restart sequences that were already completed.
config.scheduling.restart_complete_sequences=True
# Maximum hour angle (units=hours) for the bonus factor calculation. Hour angles larger will cause the bonus to be negative. Range is 0.1 to 12.
config.scheduling.hour_angle_max=6.0
# The maximum number of targets the proposal will propose.
config.scheduling.max_num_targets=100
# Flag to determine if observations other than proposal's top target are accepted.
config.scheduling.accept_serendipity=False
# The sun altitude (units=degrees) for twilight consideration.
config.sky_nightly_bounds.twilight_boundary=-12.0
# LST extent (units=degrees) before sunset LST (-) and after sunrise LST (+) for providing a region of the sky to select.
config.sky_nightly_bounds.delta_lst=60.0
config.master_sub_sequences={}
# Angle (units=degrees) around the observing site's latitude for which to create a Declination window for field selection.
config.sky_exclusion.dec_window=90.0
config.sky_exclusion.selections={}
| [
"lynnej@uw.edu"
] | lynnej@uw.edu |
4161acb2fdd7cfcdf83223d02609b5bd32490bb8 | 42eaacac77b57d7bd1379afe249e2d3286596fe4 | /problems/1/problem173.py | bbdea0dd73ae7f3abe9553900c94ffebec17e20c | [] | no_license | JustinKnueppel/ProjectEuler | 08256bda59a4ad6c40d33bada17c59e3338c3525 | 21a805a061383bc75a2ec6eb7473975e377e701a | refs/heads/master | 2021-09-27T07:46:03.073046 | 2018-11-07T01:20:37 | 2018-11-07T01:20:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 542 | py | """Solution to problem 173 on project euler"""
# https://projecteuler.net/problem=173
# We shall define a square lamina to be a square outline with a square "hole" so that the shape possesses vertical and horizontal symmetry. For example, using exactly thirty-two square tiles we can form two different square laminae:
# With one-hundred tiles, and not necessarily using all of the tiles at one time, it is possible to form forty-one different square laminae.
# Using up to one million tiles how many different square laminae can be formed?
| [
"justinknueppel@gmail.com"
] | justinknueppel@gmail.com |
8048ac30dfdd0589ef3441d32f3d6debb2b76c92 | 039f2c747a9524daa1e45501ada5fb19bd5dd28f | /ARC041/ARC041d.py | 0d72600f2ec256a877e7d9897a34350f18b80ed0 | [
"Unlicense"
] | permissive | yuto-moriizumi/AtCoder | 86dbb4f98fea627c68b5391bf0cc25bcce556b88 | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | refs/heads/master | 2023-03-25T08:10:31.738457 | 2021-03-23T08:48:01 | 2021-03-23T08:48:01 | 242,283,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | #ARC041d
def main():
    """Scaffold for ARC041 problem D -- the solution body is not written yet."""
    import sys
    # Competitive-programming boilerplate: deep recursion limit + fast reader.
    sys.setrecursionlimit(10 ** 6)
    input = sys.stdin.readline


if __name__ == '__main__':
    main()
"kurvan1112@gmail.com"
] | kurvan1112@gmail.com |
72a4439b0560de9d63f09cece712d914e09dd71d | 1046db6bc56b41d01b5ccb885f3686918c657ecc | /matrix/argparser.py | bb228ab7d929d67c5622d509a5757d29198ba904 | [] | no_license | astsu-dev/matrix | 016d4044da640337ec3fde1611befcbbbb76f749 | 1835804fba08ad2d24cb430c6d3234d736001074 | refs/heads/master | 2023-04-26T08:49:21.993850 | 2021-05-30T10:23:50 | 2021-05-30T10:23:50 | 307,655,496 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | import argparse
from .consts import AVAILABLE_COLORS
from .defaults import DEFAULT_CHARS, DEFAULT_COLOR, DEFAULT_SPEED
def setup_argparser(parser: argparse.ArgumentParser) -> None:
    """Attach the matrix renderer's command-line options to *parser*."""
    option_table = (
        (("--color", "-c"),
         dict(default=DEFAULT_COLOR, choices=AVAILABLE_COLORS,
              help="matrix characters color")),
        (("--speed", "-s"),
         dict(type=int, default=DEFAULT_SPEED, help="lines per second")),
        (("--chars", "-ch"),
         dict(type=list, default=DEFAULT_CHARS,
              help="matrix will consist of these characters")),
    )
    for option_strings, option_kwargs in option_table:
        parser.add_argument(*option_strings, **option_kwargs)
| [
"None"
] | None |
64ed1cf2b4a71b236997456f92b6ad8258b2fd68 | 9fb52109b2fb6e6e2ebc49d646e5436406bc60c2 | /tests/pools/test_add_liquidity_initial.py | 5c4e5361bf7bc6979040788c2e6c73affcf2f90f | [] | no_license | lurium/curve-factory | d5083c116b006f3a68f6500081d3494d3a96d317 | d6f0ef79f0fbb215033330cd1b61e78eee5cb0a1 | refs/heads/master | 2023-03-13T01:35:57.683184 | 2021-03-01T16:52:03 | 2021-03-01T16:52:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 995 | py | import brownie
import pytest
pytestmark = pytest.mark.usefixtures("mint_alice", "approve_alice")
@pytest.mark.parametrize("min_amount", [0, 2 * 10**18])
def test_initial(
    alice, swap, wrapped_coins, min_amount, wrapped_decimals, initial_amounts, base_pool
):
    """First liquidity deposit mints LP tokens close to the ideal amount.

    Deposits one whole unit of every wrapped coin, then checks that the
    balances moved into the pool and that the minted LP amount lands within
    0.01% below the value implied by the base pool's virtual price.
    """
    # One whole token per coin, scaled by each coin's decimals.
    amounts = [10**i for i in wrapped_decimals]
    swap.add_liquidity(amounts, min_amount, {'from': alice})
    # Every deposited amount left alice and is now held by the pool.
    for coin, amount, initial in zip(wrapped_coins, amounts, initial_amounts):
        assert coin.balanceOf(alice) == initial - amount
        assert coin.balanceOf(swap) == amount
    # Ideal LP mint: 1e18 plus the base pool's virtual price (1e18-scaled).
    ideal = 10**18 + base_pool.get_virtual_price()
    assert 0.9999 < swap.balanceOf(alice) / ideal < 1
    # alice holds the entire LP supply after the very first deposit.
    assert swap.balanceOf(alice) == swap.totalSupply()
@pytest.mark.parametrize("idx", range(2))
def test_initial_liquidity_missing_coin(alice, swap, idx, wrapped_decimals):
    """The very first deposit must supply a nonzero amount of every coin."""
    amounts = [10**i for i in wrapped_decimals]
    amounts[idx] = 0  # zero out one coin in turn; the deposit must revert
    with brownie.reverts():
        swap.add_liquidity(amounts, 0, {'from': alice})
| [
"ben@hauser.id"
] | ben@hauser.id |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.