blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c99be8ab1fbd55dd291d94871960eb9885eac72f | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/communication/azure-communication-email/samples/send_email_to_multiple_recipients_sample.py | 4009aaf0f805e47a8f8b3d1e2f8df2a6da58972b | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 3,050 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: send_email_to_multiple_recipient_sample.py
DESCRIPTION:
This sample demonstrates sending an email to multiple recipients. The Email client is
authenticated using a connection string.
USAGE:
python send_email_to_single_recipient_sample.py
Set the environment variable with your own value before running the sample:
1) COMMUNICATION_CONNECTION_STRING - the connection string in your ACS resource
2) SENDER_ADDRESS - the address found in the linked domain that will send the email
3) RECIPIENT_ADDRESS - the address that will receive the email
4) SECOND_RECIPIENT_ADDRESS - the second address that will receive the email
"""
import os
import sys

from azure.core.exceptions import HttpResponseError
from azure.communication.email import EmailClient

# Make sibling sample modules importable when the script is run from this
# directory.
sys.path.append("..")
class EmailMultipleRecipientSample(object):
    """Send one email to multiple to/cc/bcc recipients through Azure
    Communication Services, authenticating with a connection string."""

    # NOTE(review): the module docstring documents COMMUNICATION_CONNECTION_STRING,
    # but the code reads COMMUNICATION_CONNECTION_STRING_EMAIL -- confirm which
    # environment variable name is intended.
    connection_string = os.getenv("COMMUNICATION_CONNECTION_STRING_EMAIL")
    sender_address = os.getenv("SENDER_ADDRESS")
    recipient_address = os.getenv("RECIPIENT_ADDRESS")
    second_recipient_address = os.getenv("SECOND_RECIPIENT_ADDRESS")

    def send_email_to_multiple_recipients(self):
        """Build a message addressed to both recipients (to/cc/bcc), send it,
        and print the resulting operation id (or the HTTP error)."""
        # creating the email client
        email_client = EmailClient.from_connection_string(self.connection_string)

        # creating the email message
        message = {
            "content": {
                "subject": "This is the subject",
                "plainText": "This is the body",
                # Bug fix: the HTML payload was missing the opening '<' of the
                # <html> tag (it read "html><h1>..."), producing malformed HTML.
                "html": "<html><h1>This is the body</h1></html>"
            },
            "recipients": {
                "to": [
                    {"address": self.recipient_address, "displayName": "Customer Name"},
                    {"address": self.second_recipient_address, "displayName": "Customer Name 2"}
                ],
                "cc": [
                    {"address": self.recipient_address, "displayName": "Customer Name"},
                    {"address": self.second_recipient_address, "displayName": "Customer Name 2"}
                ],
                "bcc": [
                    {"address": self.recipient_address, "displayName": "Customer Name"},
                    {"address": self.second_recipient_address, "displayName": "Customer Name 2"}
                ]
            },
            "senderAddress": self.sender_address
        }

        try:
            # sending the email message; begin_send returns a long-running
            # operation poller, result() blocks until it completes
            poller = email_client.begin_send(message)
            response = poller.result()
            print("Operation ID: " + response['id'])
        except HttpResponseError as ex:
            print(ex)
# Allow running the sample directly from the command line.
if __name__ == '__main__':
    EmailMultipleRecipientSample().send_email_to_multiple_recipients()
| [
"noreply@github.com"
] | Azure.noreply@github.com |
77ba68c5ea388336d8038d5e9e64d2d07a52abb5 | 5342c87436c514f6428524d8f9fca33f6745a791 | /ros2cli/ros2param/ros2param/verb/get.py | 1a7c3f1943742ebee688e9b2ed5beb9385c2aec3 | [
"Apache-2.0"
] | permissive | hfz-Nick/ROS | 9d64cb220539a29c65fb6ae8ae0f5e42c5ad955b | 1c8909c9709a0cbaed7f3084557ee4c3fb1ff380 | refs/heads/main | 2022-12-28T05:01:44.255695 | 2020-10-10T01:24:43 | 2020-10-10T01:24:43 | 302,788,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,227 | py | # Copyright 2018 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rcl_interfaces.msg import ParameterType
from ros2cli.node.direct import DirectNode
from ros2cli.node.strategy import add_arguments
from ros2cli.node.strategy import NodeStrategy
from ros2node.api import get_node_names
from ros2node.api import NodeNameCompleter
from ros2param.api import call_get_parameters
from ros2param.verb import VerbExtension
class GetVerb(VerbExtension):
    """Get parameter."""

    def add_arguments(self, parser, cli_name):
        """Register the CLI arguments and completers for the 'get' verb."""
        add_arguments(parser)
        arg = parser.add_argument(
            'node_name', help='Name of the ROS node')
        arg.completer = NodeNameCompleter(
            include_hidden_nodes_key='include_hidden_nodes')
        parser.add_argument(
            '--include-hidden-nodes', action='store_true',
            help='Consider hidden nodes as well')
        parser.add_argument(
            'name', help='Name of the parameter')
        parser.add_argument(
            '--hide-type', action='store_true',
            help='Hide the type information')

    def main(self, *, args):
        """Fetch a single parameter from the target node and print its value.

        Returns an error string (reported by the CLI framework) when the node
        or the parameter cannot be found, otherwise None.
        """
        with NodeStrategy(args) as node:
            node_names = get_node_names(
                node=node, include_hidden_nodes=args.include_hidden_nodes)
            if args.node_name not in node_names:
                return 'Node not found'

        with DirectNode(args) as node:
            response = call_get_parameters(
                node=node, node_name=args.node_name,
                parameter_names=[args.name])

            # We requested exactly one parameter name, so the service should
            # report at most one value.
            assert len(response.values) <= 1

            # requested parameter not set
            if not response.values:
                return 'Parameter not set'

            # extract type specific value
            pvalue = response.values[0]

            # Table-driven replacement for the original if/elif chain:
            # ParameterType -> (printed label, message field holding the value).
            typed_outputs = {
                ParameterType.PARAMETER_BOOL:
                    ('Boolean value is:', 'bool_value'),
                ParameterType.PARAMETER_INTEGER:
                    ('Integer value is:', 'integer_value'),
                ParameterType.PARAMETER_DOUBLE:
                    ('Double value is:', 'double_value'),
                ParameterType.PARAMETER_STRING:
                    ('String value is:', 'string_value'),
                ParameterType.PARAMETER_BYTE_ARRAY:
                    ('Byte values are:', 'byte_array_value'),
                ParameterType.PARAMETER_BOOL_ARRAY:
                    ('Boolean values are:', 'bool_array_value'),
                ParameterType.PARAMETER_INTEGER_ARRAY:
                    ('Integer values are:', 'integer_array_value'),
                ParameterType.PARAMETER_DOUBLE_ARRAY:
                    ('Double values are:', 'double_array_value'),
                ParameterType.PARAMETER_STRING_ARRAY:
                    ('String values are:', 'string_array_value'),
            }

            if pvalue.type == ParameterType.PARAMETER_NOT_SET:
                # Parameter exists but carries no value: print the label only.
                label, value = 'Parameter not set.', None
            elif pvalue.type in typed_outputs:
                label, field_name = typed_outputs[pvalue.type]
                value = getattr(pvalue, field_name)
            else:
                return "Unknown parameter type '{}'".format(pvalue.type)

            # output response
            if not args.hide_type:
                print(label, value) if value is not None else print(label)
            else:
                print(value)
| [
"you@example.com"
] | you@example.com |
681cb012173559ca0073167310544329505a424b | a7266a2c39e309bdc0fdd4c771942412465d0fb5 | /McCoy Group Code Academy/Exercises/LegendreDVR.py | 308ba21907ec13ef8c94b648285571a964cb0c65 | [] | no_license | McCoyGroup/References | 68a930280e865d3efd4d7d29d7a961126258494d | 7bcf80bebfed92f7967135cc909e7280b2365680 | refs/heads/gh-pages | 2023-04-28T11:24:37.881869 | 2022-08-18T15:38:31 | 2022-08-18T15:38:31 | 127,345,918 | 1 | 6 | null | 2023-04-12T05:21:47 | 2018-03-29T20:49:01 | Mathematica | UTF-8 | Python | false | false | 534 | py | # THIS IS A PLACEHOLDER: AS WE ADD THINGS WE'LL FILL THIS OUT
"""
Goal: ...
Fundamentals: ... (Comma-separated list)
Related Exercises: ... (Comma-separated list)
"""
## Imports: put all import statments here
## Exports: put all the names things we might want to use in other scripts here
__all__ = [
]
## Objects: put all the classes we're defining here
...
## Functions: put all the functions we're defining here
...
## Run Script: put the script we'd want to run from the command line here
if __name__ == '__main__':
... | [
"b3m2a1@gmail.com"
] | b3m2a1@gmail.com |
ddab472e1041d209d6ee9169203e5b8e135d0abe | bd3a1843e2b0dc15837628c77f73e95a9bb1264f | /PyFunceble/helpers/file.py | 9f4d1b0cfbd3975feb10c42d58ddc346c3e4026d | [
"Apache-2.0"
] | permissive | funilrys/PyFunceble | 404c64d1b281d4ae06a939b54f4088d63e12b828 | 214a57d0eca3df7c4ed3421937aaff9998452ba6 | refs/heads/dev | 2023-06-24T18:39:29.372775 | 2023-06-18T13:15:39 | 2023-06-18T13:15:39 | 106,995,518 | 267 | 62 | Apache-2.0 | 2023-08-25T16:05:17 | 2017-10-15T08:25:14 | Python | UTF-8 | Python | false | false | 6,582 | py | """
The tool to check the availability or syntax of domain, IP or URL.
::
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
Provides the file helpers.
Author:
Nissar Chababy, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/#/special-thanks
Contributors:
https://pyfunceble.github.io/#/contributors
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/dev/
Project homepage:
https://pyfunceble.github.io/
License:
::
Copyright 2017, 2018, 2019, 2020, 2022, 2023 Nissar Chababy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import shutil
from typing import Any, Optional
from PyFunceble.helpers.directory import DirectoryHelper
class FileHelper:
    """
    Simplify the file manipulations.

    :param str path: The file path to work with.
    """

    _path: Optional[str] = None

    def __init__(self, path: Optional[str] = None):
        # An empty/None path leaves the helper unconfigured until set_path().
        if path:
            self.path = path

    @property
    def path(self) -> Optional[str]:
        """
        Provides the path we are currently working with.
        """
        return self._path

    @path.setter
    def path(self, value: str) -> None:
        """
        Sets the path to work with.

        :param value:
            The path to work with.

        :raise TypeError:
            When :code:`value` is not a string.
        """
        if isinstance(value, str):
            self._path = value
        else:
            raise TypeError(f"<value> should be {str}, {type(value)} is given.")

    def set_path(self, value: str) -> "FileHelper":
        """
        Sets the path to work with and returns the helper (fluent style).

        :param value:
            The path to work with.
        """
        self.path = value
        return self

    def join_path(self, *args) -> str:
        """
        Joins the given arguments onto the current path.
        """
        return os.path.join(self.path, *args)

    def exists(self) -> bool:
        """
        Checks whether the current path points to an existing regular file.
        """
        return os.path.isfile(self.path)

    def get_size(self) -> int:
        """
        Provides the size (in bytes) of the file at the current path.
        """
        return os.stat(self.path).st_size

    def is_empty(self) -> bool:
        """
        Checks whether the file at the current path is empty.
        """
        return self.get_size() <= 0

    def delete(self) -> "FileHelper":
        """
        Deletes the file at the current path when it exists.
        """
        if self.exists():
            os.remove(self.path)
        return self

    def write(
        self, data: Any, *, overwrite: bool = False, encoding: str = "utf-8"
    ) -> "FileHelper":
        """
        Writes *data* to the file: truncates when overwriting or creating a
        new file, otherwise appends.

        :param data: The data to write.
        :param encoding: The encoding to use while opening the file.
        """
        creating = overwrite or not self.exists()
        if creating:
            # Make sure the parent directory exists before creating the file.
            DirectoryHelper(os.path.dirname(self.path)).create()

        mode = "w" if creating else "a"
        with self.open(mode, encoding=encoding) as file_stream:
            file_stream.write(data)

        return self

    def read(self, *, encoding: str = "utf-8") -> Optional[str]:
        """
        Reads the file and returns its text content, or None when the file
        does not exist.

        :param str encoding: The encoding to use.
        """
        if not self.exists():
            return None

        with self.open("r", encoding=encoding) as file_stream:
            return file_stream.read()

    def read_bytes(self) -> Optional[bytes]:
        """
        Reads the file and returns its raw bytes, or None when the file does
        not exist.
        """
        if not self.exists():
            return None

        with self.open("rb") as file_stream:
            return file_stream.read()

    def open(self, *args, **kwargs) -> "open":
        """
        A wrapper around the built-in :py:class:`open` targeting the current
        path.
        """
        return open(self.path, *args, **kwargs)  # pylint: disable=unspecified-encoding

    def copy(self, destination: str) -> "FileHelper":
        """
        Copies the file at the current path to the given destination.

        :param str destination: The destination of the copy.
        """
        if self.exists():
            shutil.copy(self.path, destination)
        return self

    def move(self, destination) -> "FileHelper":
        """
        Moves the file at the current path to the given destination.

        :param str destination: The destination of the file.
        """
        if self.exists():
            shutil.move(self.path, destination)
        return self
| [
"contact@funilrys.com"
] | contact@funilrys.com |
82ce73e6415e6d017e3700546ee09e7625280e80 | 3c114c083af073421fc0becfa4b4471ba1d77de5 | /google/sparse_matrix.py | 0dd73b7847553e67b60991e24b0e2ed383dac254 | [] | no_license | alonsovidales/interview_questions | 99f757c7e35c5ede450be25d3bebd54a18b1312b | 5e63e238950c2f6bdfd3ff48311d6c69a676d382 | refs/heads/master | 2021-01-17T12:06:48.419891 | 2018-03-25T08:44:14 | 2018-03-25T08:44:14 | 30,909,319 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,795 | py | """
Given a sparse matrix, implement below two methods:
void set(int row, int col, int val) /*Update value at given row and col*/
int sum(int row, int col) /*give sum from top left corner to given row, col sub-matrix*/
"""
class SortedArray(object):
    """Maps scores to values and answers ordered "all values with score <= s"
    prefix queries, sorting lazily only when a query arrives."""

    def __init__(self):
        self._dirty = False   # True when _arr needs re-sorting
        self._arr = []        # list of (value, score) pairs, sorted on demand
        self._elems = {}      # score -> value, for O(1) point lookups

    def set(self, val, score):
        """Insert or replace the value stored under *score*.

        Bug fix: the original always appended to ``_arr`` even when *score*
        was already present, leaving a stale duplicate pair that prefix
        queries (and SparseMatrix.sum) would double count.
        """
        if score in self._elems:
            self._arr = [(v, s) for (v, s) in self._arr if s != score]
        self._elems[score] = val
        self._arr.append((val, score))
        self._dirty = True

    def get_by_score(self, score):
        """Return the value stored under *score*, or None when absent."""
        return self._elems.get(score)

    def get_to_score(self, score):
        """Return all values whose score is <= *score*, in ascending score
        order."""
        if self._dirty:
            # Sort lazily; range(...) replaces the Python-2-only xrange of
            # the original (and we iterate pairs directly instead).
            self._arr.sort(key=lambda pair: pair[1])
            self._dirty = False
        result = []
        for val, item_score in self._arr:
            if item_score > score:
                break
            result.append(val)
        return result
class SparseMatrix(object):
    """Sparse 2-D matrix backed by a SortedArray of rows, each of which is a
    SortedArray of column values."""

    def __init__(self):
        self._rows = SortedArray()

    def set(self, row, col, v):
        """Store value *v* at position (*row*, *col*), creating the row index
        on first use."""
        row_entries = self._rows.get_by_score(row)
        if row_entries is None:
            row_entries = SortedArray()
            self._rows.set(row_entries, row)
        row_entries.set(v, col)

    def sum(self, row, col):
        """Return the sum of every stored value in the top-left sub-matrix
        bounded (inclusively) by *row* and *col*."""
        return sum(
            value
            for row_entries in self._rows.get_to_score(row)
            for value in row_entries.get_to_score(col))
import unittest
class TestSparseMatrix(unittest.TestCase):
    """Exercises SparseMatrix.set and SparseMatrix.sum together."""

    def test_set_sum(self):
        matrix = SparseMatrix()
        # (row, col, value) triples to populate the matrix with.
        for row, col, value in ((1, 2, 1), (3, 2, 2), (9, 1, 3), (3, 8, 4)):
            matrix.set(row, col, value)
        self.assertEqual(matrix.sum(1, 2), 1)
        self.assertEqual(matrix.sum(9, 9), 10)
        self.assertEqual(matrix.sum(3, 2), 3)
# Run the SparseMatrix unit tests when executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"alonso.vidales@tras2.es"
] | alonso.vidales@tras2.es |
9a13592fb7c388eae0315d097c09293ad3beca18 | 238c16de19b2b5928eeba6ca35abffdbfaa961e4 | /tests/conftest.py | d730fbb8a3dfcce7244fa8d564c373d6d0aec9b9 | [] | no_license | multiscripter/random-phrase-fastapi | dd7637d25ea7326659d8dfb7925697ab37d14c8f | 5cba715f898309530fa393a4cf434d45725ba6ed | refs/heads/master | 2022-12-28T23:35:08.435193 | 2020-09-28T21:00:18 | 2020-09-28T21:00:18 | 299,430,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | import pytest
from Database import Database
from db import PhraseInput
@pytest.hookimpl()
def pytest_sessionstart(session):
    """Seed the database with three test phrases before any test runs."""
    db = Database()
    for index in range(1, 4):
        db.add(PhraseInput(author=f'test-author-{index}',
                           text=f'test-text-{index}'))
    print('created:')
    print(list(db.items.keys()))
@pytest.hookimpl()
def pytest_sessionfinish(session, exitstatus):
    """Delete every phrase key from the database once all tests finished."""
    db = Database()
    # scan_iter yields every key matching the 'phrase*' pattern.
    for phrase_key in db.items.scan_iter('phrase*'):
        db.items.delete(phrase_key)
    print('deletion completed')
    print(list(db.items.keys()))
| [
"ILL-JAH@yandex.ru"
] | ILL-JAH@yandex.ru |
20b01ef20b8f2d6b57de84d0d28e2bc0e71557c9 | e707164df1aa8edb5d276179538bd1eb1805f759 | /CODE/fedora_application/env/lib/python2.7/site-packages/rube/core/__init__.py | fcfa0e415c10c22ed928fa448f8bd7ebcccd9801 | [] | no_license | beckastar/cleaner_markov | af5816c14c94a8cb7924728179470e7db9ed2bc0 | a6de3fd87db77c0d80789cbce0ff409c222b4e67 | refs/heads/master | 2021-01-02T22:52:08.989862 | 2013-11-10T04:51:04 | 2013-11-10T04:51:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,358 | py | # -*- coding: utf-8 -*-
# This file is part of Rube.
#
# Rube is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rube is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rube. If not, see <http://www.gnu.org/licenses/>.
import logging
import unittest
import selenium.webdriver.support.ui as ui
from selenium.webdriver.support.expected_conditions import title_is
from pyvirtualdisplay import Display
from selenium import webdriver
from testconfig import config
from utils import (
prompt_for_auth,
expects_zmqmsg,
tolerant,
skip_logout,
collect_har,
ensures_after,
)
# Selenium's webdriver logger is very chatty at DEBUG; cap it at INFO.
selenium_logger = logging.getLogger("selenium.webdriver")
selenium_logger.setLevel(logging.INFO)

# Module-level singletons, created lazily by get_driver_and_proxy() and torn
# down by tearDown().
display = None
driver = None
proxy = None
def get_driver_and_proxy():
    """Lazily create and cache the module-level Firefox driver and, when HAR
    collection is enabled in the config, the browsermob proxy.

    Returns the (driver, proxy) pair; proxy is None when HAR collection is
    disabled."""
    global display
    global driver
    global proxy

    if driver:
        return driver, proxy

    # Optional HAR-collecting proxy, enabled through the test config.
    if int(config.get('browsermob', {}).get('collect-har', 0)):
        from browsermobproxy import Server
        har_server = Server(config['browsermob']['path'])
        har_server.start()
        proxy = har_server.create_proxy()

    # Optional headless virtual display (started before the browser).
    if int(config.get('xconfig', {}).get('headless', 0)):
        display = Display(visible=0, size=(800, 600))
        display.start()

    firefox_profile = webdriver.FirefoxProfile()
    if proxy:
        firefox_profile.set_proxy(proxy.selenium_proxy())
    driver = webdriver.Firefox(firefox_profile=firefox_profile)
    driver.implicitly_wait(60)
    return driver, proxy
def tearDown():
    """Shut down the shared browser, virtual display and HAR proxy, if any."""
    global display, driver, proxy
    # Shut each resource down in the same order as before: driver, display,
    # then proxy.
    for resource, shutdown_method in ((driver, 'close'),
                                      (display, 'stop'),
                                      (proxy, 'close')):
        if resource:
            getattr(resource, shutdown_method)()
class RubeTest(unittest.TestCase):
    """Base TestCase that wires the shared Selenium driver into simple page
    smoke tests; subclasses set base/title/logout_url/realm."""

    base = None        # URL the test navigates to
    title = None       # expected page title
    logout_url = None  # visited after each test unless auth is disabled
    # NOTE(review): this value is handed to WebDriverWait, whose timeout is
    # expressed in seconds -- 20000 looks like a milliseconds value; confirm.
    timeout = 20000

    # If you subclass and set this to True, then we won't prompt you for auth.
    no_auth = False

    # Change this in your subclass to use a different realm in the keyring.
    realm = None

    # Internally used to skip logout and whatnot during teardown.
    # (Deliberately shared across instances as class-level state.)
    _no_teardown = []

    def setUp(self):
        """Acquire the shared driver/proxy and, when configured, prompt for
        credentials for this class's realm."""
        self.driver, self.proxy = get_driver_and_proxy()
        self.driver.delete_all_cookies()

        # not no_auth ~= yes auth
        if not self.no_auth and self.realm:
            self.auth = prompt_for_auth(self.realm)

    def tearDown(self):
        """Visit the logout URL after each test, unless this test opted out."""
        if self._testMethodName in self._no_teardown:
            return  # skip the teardown

        if not self.no_auth and self.logout_url:
            self.driver.get(self.logout_url)

    def wait_for(self, target):
        """Block until *target* appears in the page source (or timeout)."""
        wait = ui.WebDriverWait(self.driver, self.timeout)
        wait.until(lambda d: target in d.page_source)

    @collect_har()
    @tolerant()
    def test_title(self):
        """Open the base URL and verify the page title."""
        self.driver.get(self.base)
        # Bug fix: title_is(...) returns an ExpectedCondition object, which is
        # always truthy, so the original `assert title_is(self.title)` could
        # never fail. The condition must be evaluated against the driver.
        assert title_is(self.title)(self.driver), self.driver.title
# Bug fix: the module defines get_driver_and_proxy, not get_driver; the stale
# name made `from rube.core import *` raise an AttributeError.
__all__ = [
    'RubeTest',
    'expects_zmqmsg',
    'tolerant',
    'get_driver_and_proxy',
    'skip_logout'
]
| [
"rebecca.robbins.et@gmail.com"
] | rebecca.robbins.et@gmail.com |
6a39bf88ed26fd334ca58f0d9fa50928ff9d0a6a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_1/208.py | 3cff053a0d8355a115d24f870f7da1280b84ee66 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | #!/usr/bin/python
import sys
def handle_case(case_no, engiens, words):
    """Solve one case of GCJ 'Saving the Universe' and print the answer:
    count how often the search engine must be switched, given that a switch
    is forced once the queries seen since the last switch cover all but one
    of the engines."""
    saw = {}
    answer = 0
    for w in words:
        if w not in saw.keys():
            # New engine name: if we have already seen every engine but one
            # since the last switch, we are forced to switch now and reset.
            if len(saw.keys()) == (len(engiens) - 1):
                answer += 1
                saw = {}
            saw[w] = 1
    # Python 2 print statement (this file predates Python 3).
    print "Case #%d: %s" % (case_no, answer)
def main():
    """Read the Code Jam input file named on the command line and solve each
    test case in turn."""
    filename = sys.argv[1]
    fsock = open(filename, "r")
    # First line: total number of test cases.
    size = int(fsock.readline())
    for case in range(1, size + 1):
        # Each case: engine count, engine names, query count, query names.
        engiens_no = int(fsock.readline())
        engiens = []
        for e in range(1, engiens_no + 1):
            engiens.append(fsock.readline().rstrip("\n"))
        words_no = int(fsock.readline())
        words = []
        for w in range(1, words_no + 1):
            words.append(fsock.readline().rstrip("\n"))
        handle_case(case, engiens, words)
    fsock.close()
if __name__ == "__main__":
main()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
aec1b46ee90e6ad349ec1ef3880aafad45dd2339 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_17926.py | 3f65a0747b3ebe817fa6bb84dae8bed99654f41d | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | # Django-Models: TypeError: coercing to Unicode: need string or buffer, User found
# Python 2 Django model helper: supplies a text representation so Django does
# not raise "coercing to Unicode: need string or buffer" when rendering this
# object.
def __unicode__(self):
    # NOTE(review): the enclosing class is not visible here; presumably a
    # model with a `user` field -- confirm against the full source.
    return '%s' % (self.user)
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
9558aff2a7a109b33beb176497844e5998bf15cd | d88397be1c6a31985bc2283280e743fd3b988dd1 | /beta/examples/tensorflow/common/optimizer.py | ebf3e1b4079c43f388b1f9256596c3a7c9103cec | [
"Apache-2.0"
] | permissive | sshyran/openvino-nncf-pytorch | f5e09066a216fa786927937a91a0e6742f347660 | fd02652950cd803a36f5283f5a5df999bb45433b | refs/heads/develop | 2023-04-18T06:58:54.646669 | 2021-03-12T15:41:39 | 2021-03-12T15:41:39 | 347,374,166 | 0 | 0 | Apache-2.0 | 2023-04-03T23:52:21 | 2021-03-13T13:11:32 | null | UTF-8 | Python | false | false | 3,837 | py | """
Copyright (c) 2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tensorflow as tf
import tensorflow_addons as tfa
from beta.examples.tensorflow.common.logger import logger
def build_optimizer(config, scheduler):
    """Construct the tf.keras optimizer described by ``config['optimizer']``.

    The learning rate is driven by *scheduler*; depending on the params the
    base optimizer may additionally be wrapped in a MovingAverage and/or a
    Lookahead optimizer. Raises ValueError for an unknown optimizer type."""
    optimizer_config = config.get('optimizer', {})
    optimizer_type = optimizer_config.get('type', 'adam').lower()
    optimizer_params = optimizer_config.get("optimizer_params", {})
    logger.info('Building %s optimizer with params %s', optimizer_type, optimizer_params)

    if optimizer_type == 'sgd':
        logger.info('Using SGD optimizer')
        optimizer = tf.keras.optimizers.SGD(
            learning_rate=scheduler,
            nesterov=optimizer_params.get('nesterov', False))
    elif optimizer_type == 'momentum':
        logger.info('Using momentum optimizer')
        optimizer = tf.keras.optimizers.SGD(
            learning_rate=scheduler,
            momentum=optimizer_params.get('momentum', 0.9),
            nesterov=optimizer_params.get('nesterov', False))
    elif optimizer_type == 'rmsprop':
        logger.info('Using RMSProp')
        optimizer = tf.keras.optimizers.RMSprop(
            learning_rate=scheduler,
            rho=optimizer_params.get('rho', 0.9),
            momentum=optimizer_params.get('momentum', 0.9),
            epsilon=optimizer_params.get('epsilon', 1e-07))
    elif optimizer_type == 'adam':
        logger.info('Using Adam')
        optimizer = tf.keras.optimizers.Adam(
            learning_rate=scheduler,
            beta_1=optimizer_params.get('beta_1', 0.9),
            beta_2=optimizer_params.get('beta_2', 0.999),
            epsilon=optimizer_params.get('epsilon', 1e-07))
    elif optimizer_type == 'adamw':
        logger.info('Using AdamW')
        optimizer = tfa.optimizers.AdamW(
            weight_decay=optimizer_params.get('weight_decay', 0.01),
            learning_rate=scheduler,
            beta_1=optimizer_params.get('beta_1', 0.9),
            beta_2=optimizer_params.get('beta_2', 0.999),
            epsilon=optimizer_params.get('epsilon', 1e-07))
    else:
        raise ValueError('Unknown optimizer %s' % optimizer_type)

    # Optionally track an exponential moving average of the weights.
    moving_average_decay = optimizer_params.get('moving_average_decay', 0.)
    if moving_average_decay > 0.:
        logger.info('Including moving average decay.')
        optimizer = tfa.optimizers.MovingAverage(
            optimizer,
            average_decay=moving_average_decay,
            num_updates=None)

    # Optionally wrap with the Lookahead mechanism.
    if optimizer_params.get('lookahead', None):
        logger.info('Using lookahead optimizer.')
        optimizer = tfa.optimizers.Lookahead(optimizer)

    return optimizer
| [
"noreply@github.com"
] | sshyran.noreply@github.com |
234c2b61255e3d404e666a25ecc316deb34ed85f | 148072ce210ca4754ea4a37d83057e2cf2fdc5a1 | /src/core/w3af/w3af/core/data/kb/vuln_templates/tests/test_base_template.py | 8108d8d84b9422931d1e1c5e55f63f073e4fecaa | [] | no_license | ycc1746582381/webfuzzer | 8d42fceb55c8682d6c18416b8e7b23f5e430c45f | 0d9aa35c3218dc58f81c429cae0196e4c8b7d51b | refs/heads/master | 2021-06-14T18:46:59.470232 | 2017-03-14T08:49:27 | 2017-03-14T08:49:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,586 | py | """
test_base_template.py
Copyright 2012 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import unittest
from mock import Mock
from w3af.core.data.kb.vuln_templates.base_template import BaseTemplate
class BaseTemplateTest(unittest.TestCase):
    """Smoke test for BaseTemplate option handling and vuln-id generation."""

    def test_basic(self):
        template = BaseTemplate()
        options = template.get_options()

        # Fill in every option required to describe the vulnerability.
        for option_name, option_value in (
                ('name', 'SQL injection'),
                ('url', 'http://host.tld/foo.php'),
                ('data', 'id=3'),
                ('method', 'GET'),
                ('vulnerable_parameter', 'id')):
            options[option_name].set_value(option_value)

        template.get_vulnerability_name = Mock(return_value='unittest')
        template.set_options(options)

        # Successive ids must be consecutive integers.
        first_id = template.get_vuln_id()
        second_id = template.get_vuln_id()
        self.assertEqual(first_id + 1, second_id)
| [
"everping@outlook.com"
] | everping@outlook.com |
7eff3f83f034b7bb0330f386c24b489e1bcf3e28 | 45ddd3d0d568b3d28c25c2023839933496753d52 | /Bai34-multitaskLearning/predict.py | d2f931a016260ca73f857a1c73eacd52fe4fb4e6 | [] | no_license | phamdinhkhanh/khanhBlogTurtorial | 7d65259066b1eb6f48a2c7ef78840005d61c9fdb | 0685ef989f72057581b0268bd6c9e01981833549 | refs/heads/master | 2022-12-13T08:30:18.866016 | 2020-05-05T01:26:44 | 2020-05-05T01:26:44 | 248,902,988 | 21 | 20 | null | 2022-12-12T11:57:04 | 2020-03-21T03:57:22 | Jupyter Notebook | UTF-8 | Python | false | false | 2,173 | py | # USAGE
# python predict.py --model fashion_multitask_learning.h5 --labelbin mlb.pkl --image examples/example_01.jpg
# import the necessary packages
from tensorflow.keras.preprocessing.image import img_to_array
from tensorflow.keras.models import load_model
import numpy as np
import argparse
import pickle
import cv2
import os
import requests
import matplotlib.pyplot as plt
# Spatial size the network expects; only IMAGE_DIMS[:2] is used in this
# script. NOTE(review): a depth of 2 is unusual for color images (typically
# 3) -- confirm the third value.
IMAGE_DIMS = (96, 96, 2)

# Build the command-line interface.
# NOTE(review): the -m help text reads "model model" (duplicated word).
ap = argparse.ArgumentParser()
ap.add_argument("-m", "--model", required=True,
                help="path to trained model model")
ap.add_argument("-l", "--labelbin", required=True,
                help="path to label binarizer")
ap.add_argument("-i", "--image", required=True,
                help="url link to input image")
args = vars(ap.parse_args())
# Load the trained network and the fitted MultiLabelBinarizer from disk.
print("[INFO] loading network...")
model = load_model(args["model"])
# Fix: the original used pickle.loads(open(...).read()), which never closed
# the file handle; a context manager with pickle.load is equivalent and
# closes it promptly. NOTE: pickle is only safe on trusted files.
with open(args["labelbin"], "rb") as label_file:
    mlb = pickle.load(label_file)
# Read an image, either from a local path or from an HTTP(S) URL.
def _downloadImage(url):
    """Return an RGB image loaded from *url* (local file path or http/https
    link).

    Generalization: the CLI help advertises URL input, but the original
    implementation handed the argument straight to cv2.imread, which only
    reads local files. Remote URLs are now fetched with the already-imported
    ``requests`` and decoded in memory; local paths behave exactly as before.
    """
    if url.startswith(('http://', 'https://')):
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        raw = np.frombuffer(response.content, dtype=np.uint8)
        img = cv2.imdecode(raw, cv2.IMREAD_COLOR)
    else:
        img = cv2.imread(url)
    # OpenCV decodes to BGR; convert to RGB for the rest of the pipeline.
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    return img
# Run the classifier on an image and display the two best labels.
def _predict_image(image, model, mlb):
    """Predict labels for *image* with *model* and show the top-2 classes
    (decoded through *mlb*) overlaid on the image."""
    # numpy/OpenCV arrays are (height, width, channels); the original code
    # named these (w, h, c), which was misleading.
    img_height, img_width, _ = image.shape
    # Scale the display copy to 400 px tall, preserving the aspect ratio.
    display_width = int(img_width * 400 / img_height)
    display_image = cv2.resize(image, (display_width, 400))
    # Separate model-sized copy, normalized to [0, 1], for inference.
    model_input = cv2.resize(image, IMAGE_DIMS[:2]) / 255.0
    # Class probabilities for the single-image batch.
    probabilities = model.predict(np.expand_dims(model_input, axis=0))[0]
    # Indices of the two highest-probability classes, best first.
    top_two = np.argsort(probabilities)[::-1][:2]
    for row, class_index in enumerate(top_two):
        # Overlay "<class>: <probability>%" onto the display copy.
        label = "{}: {:.2f}%".format(
            mlb.classes_[class_index], probabilities[class_index] * 100)
        cv2.putText(display_image, label, (5, (row * 20) + 15),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (225, 0, 0), 2)
    # Show the annotated image until a key is pressed.
    display_image = cv2.cvtColor(display_image, cv2.COLOR_BGR2RGB)
    cv2.imshow("Output", display_image)
    cv2.waitKey(0)
# Load the requested image and run the predict/display pipeline.
image = _downloadImage(args['image'])
_predict_image(image, model, mlb)
| [
"phamdinhkhanh.tkt53.neu@gmail.com"
] | phamdinhkhanh.tkt53.neu@gmail.com |
b53181ff1835a17047d6c94d41b850630c4e82a4 | a08d85552ed0db1a906c3b31ed99f56bae857c60 | /PythonCourse/d2e2.py | fd961bf13b6f7ec843e3d610fd509829890c3ca1 | [] | no_license | MagdalenaZZ/Python_ditties | 90866e53f9aafa603f05735e2ceb094cf5518a18 | 757d8de1df0e53d38d4ba9854b092eabe6ec6570 | refs/heads/master | 2023-02-20T12:23:09.778092 | 2023-02-07T10:06:55 | 2023-02-07T10:06:55 | 136,293,051 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py |
def maximum(x, y):
    """Return the larger of two numbers; ties yield *y*, as before."""
    return x if x > y else y
| [
"magz@MacBook-Air.local"
] | magz@MacBook-Air.local |
f687f0ca5526f5ba9070037e195218aa01be0a95 | 339cc015ad260661e02ad32fe229807988a92487 | /accounting_addons/accounting_addons/accounting_addons/doctype/asset_depreciation_record/asset_depreciation_record.py | 172e3396e7b5bb0fe5bda91c2abe72601df8ee71 | [
"MIT"
] | permissive | bobzz-zone/gsi_accounting_addons | 009448596ca6ca114af27f0bdf35744f2835616b | 373ef3c7e8179fb7e4d003e8f4b809b36c4cfe1d | refs/heads/master | 2016-09-10T12:04:49.737175 | 2015-05-07T10:18:48 | 2015-05-07T10:18:48 | 35,213,204 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Myme and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class AssetDepreciationRecord(Document):
    """Frappe DocType controller for an Asset Depreciation Record.

    No custom server-side behaviour is defined; the class exists so the
    framework can bind the DocType to a controller.  Add validate()/
    on_submit() style hooks here if lifecycle logic is ever needed.
    """
    pass
| [
"bobzz.zone@gmail.com"
] | bobzz.zone@gmail.com |
652711257a3cd0e0ebee83693a0c9cef26803857 | a6390e0bbd5a7070c0abd2504afecc8ef6028997 | /indigo/nn/input.py | 4563f18522af8de264df296082c45aff9c167847 | [] | no_license | mlberkeley/indigo | 2f287a82c939a9d0adc41db23e59ae777fc88466 | c155b16265f13d87be0108fcf815517491b93a74 | refs/heads/master | 2021-06-14T01:21:06.702854 | 2020-05-22T08:10:47 | 2020-05-22T08:10:47 | 254,468,158 | 4 | 0 | null | 2020-04-09T20:08:16 | 2020-04-09T20:08:16 | null | UTF-8 | Python | false | false | 3,142 | py | from dataclasses import dataclass
from typing import Any
import tensorflow as tf
@dataclass
class AttentionInput(object):
"""Fields of a data class for computing multihead attention
in indigo.variables.attention.Attention
Arguments:
queries: tf.Tensor
the Queries tensor in a multihead attention mechanism
see 'Attention Is All You Need'
keys: tf.Tensor
the Keys tensor in a multihead attention mechanism
see 'Attention Is All You Need'
values: tf.Tensor
the Values tensor in a multihead attention mechanism
see 'Attention Is All You Need'
queries_mask: tf.Tensor
a boolean mask for the Queries tensor
in a multihead attention mechanism
values_mask: tf.Tensor
a boolean mask for the Keys and Values tensor
in a multihead attention mechanism
_keras_mask: tf.Tensor
a required placeholder for tf.layers.Sequential"""
# these are required for the network
queries: Any = None
keys: Any = None
values: Any = None
# if left unassigned these will not mask anything
queries_mask: Any = tf.constant([[True]])
values_mask: Any = tf.constant([[True]])
# this does not need to be set during construction
_keras_mask: Any = None
@dataclass
class TransformerInput(object):
    """Bundle of tensors consumed by the transformer
    in indigo.transformer.Transformer; unlike AttentionInput the
    Keys and Values share a single tensor

    Arguments:

    queries: tf.Tensor
        the Queries tensor in a multihead attention mechanism
        see 'Attention Is All You Need'
    values: tf.Tensor
        the shared Keys/Values tensor in a multihead attention mechanism
        see 'Attention Is All You Need'
    queries_mask: tf.Tensor
        boolean mask applied to the Queries tensor; defaults to a
        1x1 all-True constant, i.e. masks nothing
    values_mask: tf.Tensor
        boolean mask applied to the Keys and Values tensor; defaults
        to a 1x1 all-True constant, i.e. masks nothing
    _keras_mask: tf.Tensor
        a required placeholder for tf.layers.Sequential"""
    # these must be assigned before the layer is called
    queries: Any = None
    values: Any = None
    # defaults are evaluated once at class definition, so all instances
    # share the same (immutable) 1x1 True constant unless overridden
    queries_mask: Any = tf.constant([[True]])
    values_mask: Any = tf.constant([[True]])
    # filled in by Keras masking machinery, not by the constructor
    _keras_mask: Any = None
@dataclass
class RegionFeatureInput(object):
    """Bundle of per-image region tensors fed into
    indigo.transformer.Transformer

    Arguments:

    features: tf.Tensor
        per-region feature vectors (presumably from an object
        detector backbone — confirm against the data pipeline)
    boxes: tf.Tensor
        per-region bounding-box coordinates — TODO confirm format
        (e.g. normalized [y1, x1, y2, x2])
    detections: tf.Tensor
        per-region detection outputs (presumably class ids or
        scores — confirm against the data pipeline)"""
    # all three must be assigned before use; None only as a placeholder
    features: Any = None
    boxes: Any = None
    detections: Any = None
| [
"brandon@btrabucco.com"
] | brandon@btrabucco.com |
89a494a1ece606add63023592b124bfaf796cc21 | ebec8b55938903f97f66bc3629ce73db177b8bcc | /ultimatewebsite/members/forms.py | 2a879513f03f4cc84b77452c291fb16db54bd212 | [
"MIT"
] | permissive | NischalLal/class-ultimate-classof2020 | 284788f87c95e4889b10c2f9072c8e16daf15c4d | c069dc7211a640267e35c2e956ad9440a03e1ab8 | refs/heads/master | 2021-04-25T23:30:12.434127 | 2017-10-17T08:27:53 | 2017-10-17T08:27:53 | 107,240,286 | 1 | 0 | MIT | 2020-10-04T05:44:04 | 2017-10-17T08:32:21 | CSS | UTF-8 | Python | false | false | 2,464 | py | from django import forms
from members.models import Member
class MemberForm(forms.ModelForm):
    """ModelForm for creating/updating a ``Member`` with custom validation.

    Validates name length, a local 10-digit phone format, a minimal email
    sanity check, and that each social-profile URL actually points at the
    expected site.
    """

    # Rendered as a multi-line textarea instead of the default text input.
    favourite_quote = forms.CharField(
        widget = forms.Textarea)

    class Meta:
        model = Member
        fields = ('full_name', 'image', 'phone_number', 'email', 'hometown',
            'favourite_quote', 'bio', 'your_website', 'facebook_url', 'twitter_url',
            'github_url', 'instagram_url')

    def _clean_social_url(self, field_name, domain, label):
        """Shared validator for the social-profile URL fields.

        Returns the cleaned value of ``field_name``.  ``None`` (field left
        blank) passes through unchanged; a non-empty value must contain
        *domain* or a ValidationError naming *label* is raised.
        """
        url = self.cleaned_data.get(field_name)
        if url is not None and domain not in url:
            raise forms.ValidationError("We don't think this is a %s URL" % label)
        return url

    def clean_full_name(self):
        """Reject names that are implausibly short (<= 3) or long (>= 30)."""
        full_name = self.cleaned_data.get('full_name')
        length = len(full_name)
        if length <= 3 or length >= 30:
            # NOTE(review): the trailing "message" in this string looks
            # accidental, but it is user-visible behaviour — kept as-is.
            raise forms.ValidationError("WoW, Your Name is So Boring!!message")
        return full_name

    def clean_phone_number(self):
        """Require exactly 10 characters starting with '9' (local mobile format)."""
        phone_number = self.cleaned_data.get('phone_number')
        if len(phone_number) != 10 or not phone_number.startswith('9'):
            raise forms.ValidationError("Sorry! We Cannot Accept This SHIT!!")
        return phone_number

    def clean_email(self):
        """Minimal sanity check: must contain '@' and '.', and never '@.'."""
        email = self.cleaned_data.get('email')
        if '@' not in email or '@.' in email:
            raise forms.ValidationError("ERROR AT ITS BEST")
        elif '.' not in email:
            raise forms.ValidationError("Something Missing E.G '.com', '.edu', '.me', '.org'")
        return email

    # The four URL fields share one validation rule; each hook below just
    # binds the field name, required domain, and the label used in the
    # (unchanged) error message.
    def clean_facebook_url(self):
        return self._clean_social_url('facebook_url', 'facebook.com', 'facebook')

    def clean_twitter_url(self):
        return self._clean_social_url('twitter_url', 'twitter.com', 'twitter')

    def clean_instagram_url(self):
        return self._clean_social_url('instagram_url', 'instagram.com', 'instagram')

    def clean_github_url(self):
        return self._clean_social_url('github_url', 'github.com', 'Github')
"aakrist666@gmail.com"
] | aakrist666@gmail.com |
e60f622fd08209dd0fccd070ecab295d750160cd | 03ec2daac0989f9b6936b1e87d8ca1b0d99f1bce | /optfn/plastic_linear.py | 58257991529ef71676ed0cdc4dd2f255adf46f1a | [] | no_license | SSS135/optfn | f7364dce8c1857baa90d2d6564316762c574a9ba | 48ae4f5439daa89ac54921a7642e612838c724eb | refs/heads/master | 2020-05-29T15:21:38.827291 | 2020-04-29T17:51:09 | 2020-04-29T17:51:09 | 189,217,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,014 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
import math
class PlasticLinear(nn.Module):
    """Linear layer with a Hebbian plastic component.

    The effective weight of each forward pass is
    ``weight + plastic_scale * hebb`` where ``hebb`` is a per-sample
    Hebbian trace of shape (batch, out_features, in_features), updated on
    every call with Oja's rule (default) or a simple decaying Hebb rule.
    """

    def __init__(self, in_features, out_features, bias=True, single_plastic_lr=False, initial_plastic_lr=0.1, oja_rule=True):
        super().__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.single_plastic_lr = single_plastic_lr
        self.initial_plastic_lr = initial_plastic_lr
        self.oja_rule = oja_rule
        # Static weight plus a learned per-connection plastic scale.
        self.weight = nn.Parameter(torch.Tensor(out_features, in_features))
        self.plastic_scale = nn.Parameter(torch.Tensor(out_features, in_features))
        # Plastic learning rate: either one shared scalar or one per connection.
        lr_shape = torch.Tensor(1) if single_plastic_lr else torch.Tensor(out_features, in_features)
        self.plastic_lr = nn.Parameter(lr_shape)
        if bias:
            self.bias = nn.Parameter(torch.Tensor(out_features))
        else:
            self.register_parameter('bias', None)
        self.reset_parameters()

    def reset_parameters(self):
        """Initialise all parameters (same scheme and RNG order as before)."""
        bound = 1. / math.sqrt(self.weight.size(1))
        self.weight.data.uniform_(-bound, bound)
        self.plastic_scale.data.uniform_(-bound, bound)
        if self.single_plastic_lr:
            self.plastic_lr.data.fill_(self.initial_plastic_lr)
        else:
            low = min(self.initial_plastic_lr, 1e-6)
            self.plastic_lr.data.uniform_(low, self.initial_plastic_lr)
        if self.bias is not None:
            self.bias.data.uniform_(-bound, bound)

    def forward(self, input: torch.Tensor, hebb: torch.Tensor):
        """Apply the plastic linear map and return ``(output, new_hebb)``.

        ``hebb`` may be None on the first step, in which case a zero trace
        of shape (batch, out_features, in_features) is created.
        """
        if hebb is None:
            hebb = input.new_zeros((input.shape[0], self.out_features, self.in_features))
        # Per-sample effective weight, then a batched vector-matrix product.
        effective_w = self.weight.unsqueeze(0) + self.plastic_scale.unsqueeze(0) * hebb
        out = (input.unsqueeze(-2) @ effective_w.transpose(-1, -2)).squeeze(-2)
        # Hebbian trace update from the pre-bias activations.
        pre, post = input.unsqueeze(-2), out.unsqueeze(-1)
        if self.oja_rule:
            hebb = hebb + self.plastic_lr * post * (pre - post * hebb)
        else:
            hebb = self.plastic_lr * pre * post + (1 - self.plastic_lr) * hebb
        # Bias is applied after the trace update, as in the original formulation.
        return (out + self.bias, hebb) if self.bias is not None else (out, hebb)
class PlasticLinearRec(nn.Module):
    """Recurrent layer with Hebbian plastic recurrent weights.

    Each step computes ``tanh(input + last_out @ (W + scale * hebb)^T)``
    where ``hebb`` is a per-sample Hebbian trace over the recurrent
    connections, updated with Oja's rule (default) or a decaying Hebb rule.
    The memory passed between steps is the tuple ``(last_out, hebb)``.
    """

    def __init__(self, num_features, single_plastic_lr=True, initial_plastic_lr=0.01, oja_rule=True):
        super().__init__()
        self.num_features = num_features
        self.single_plastic_lr = single_plastic_lr
        self.initial_plastic_lr = initial_plastic_lr
        self.oja_rule = oja_rule
        self.weight = nn.Parameter(torch.Tensor(num_features, num_features))
        self.plastic_scale = nn.Parameter(torch.Tensor(num_features, num_features))
        self.plastic_lr = nn.Parameter(torch.Tensor(1) if single_plastic_lr else torch.Tensor(num_features, num_features))
        self.reset_parameters()

    def reset_parameters(self):
        """Initialise parameters; zero the plastic-scale diagonal."""
        stdv = 1. / math.sqrt(self.weight.size(1))
        self.weight.data.uniform_(-stdv, stdv)
        self.plastic_scale.data.uniform_(-stdv, stdv)
        # Remove self-connections from the plastic component:
        # torch.diag(matrix) extracts the diagonal, torch.diag(vector)
        # re-embeds it, so this subtraction zeroes the diagonal in place.
        self.plastic_scale.data -= torch.diag(torch.diag(self.plastic_scale.data))
        if self.single_plastic_lr:
            self.plastic_lr.data.fill_(self.initial_plastic_lr)
        else:
            self.plastic_lr.data.uniform_(min(self.initial_plastic_lr, 1e-6), self.initial_plastic_lr)

    def forward(self, input: torch.Tensor, memory):
        """Run one recurrent step; returns ``(out, (out, hebb))``.

        ``memory`` may be None on the first step, in which case the
        previous output and the Hebbian trace start at zero.
        """
        if memory is None:
            last_out, hebb = input.new_zeros((input.shape[0], self.num_features)), \
                             input.new_zeros((input.shape[0], self.num_features, self.num_features))
        else:
            last_out, hebb = memory
        out = last_out.unsqueeze(-2) @ (self.weight.unsqueeze(0) + self.plastic_scale.unsqueeze(0) * hebb).transpose(-1, -2)
        # torch.tanh replaces the deprecated torch.nn.functional.tanh
        # (numerically identical; F.tanh emits a DeprecationWarning).
        out = torch.tanh(out.squeeze(-2) + input)
        # Hebbian trace update driven by the previous output (pre) and the
        # new output (post).
        uin, uout = last_out.unsqueeze(-2), out.unsqueeze(-1)
        if self.oja_rule:
            hebb = hebb + self.plastic_lr * uout * (uin - uout * hebb)
        else:
            hebb = self.plastic_lr * uin * uout + (1 - self.plastic_lr) * hebb
        return out, (out, hebb)
"sss13594@gmail.com"
] | sss13594@gmail.com |
01c8f82a0e2570725b639af3a837aed5c0198892 | 3b504a983f1807ae7c5af51078bfab8c187fc82d | /client/gui/HudElements/ForestallingPoint.py | 79091f51fde6a17f9469bf38b316f591b2728d8a | [] | no_license | SEA-group/wowp_scripts | 7d35fd213db95ea6b3dbd1ec6d3e0f13de86ba58 | 2fe54a44df34f2dcaa6860a23b835dcd8dd21402 | refs/heads/master | 2021-09-07T23:10:13.706605 | 2018-03-02T17:23:48 | 2018-03-02T17:23:48 | 117,280,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,554 | py | # Embedded file name: scripts/client/gui/HudElements/ForestallingPoint.py
import BigWorld
import GUI
from consts import *
from gui.HUDconsts import *
from EntityHelpers import isAvatar, isTeamObject
class ForestallingPoint:
def __init__(self, offsetMtx):
self.__offsetMtx = offsetMtx
self.__centerPointOffsetMtx = GUI.OffsetMp()
self.__inited = False
self.__matrixProvider = None
return
def setTarget(self, entity):
if not self.__inited:
self.__createTarget()
if entity is not None and isAvatar(entity):
self.__matrixProvider.target = entity.matrix
self.__deflectionTarget(entity)
self.__offsetMtx.target = self.__matrixProvider
self.__centerPointOffsetMtx.target = self.__matrixProvider
if COLLISION_RECORDER:
self.__matrixProvider.targetEntity = entity
else:
self.__matrixProvider.target = None
self.__deflectionTarget(None)
if entity is not None and TEAM_OBJECT_PARALLAX_ENABLED and isTeamObject(entity):
self.__offsetMtx.target = entity.matrix
self.__centerPointOffsetMtx.target = entity.matrix
else:
self.__offsetMtx.target = None
self.__centerPointOffsetMtx.target = None
if COLLISION_RECORDER:
self.__matrixProvider.targetEntity = None
return
def setBulletSpeed(self, bulletSpeed):
if not self.__inited:
self.__createTarget()
self.__matrixProvider.bulletSpeed = bulletSpeed
def destroy(self):
self.__inited = False
self.__matrixProvider = None
self.__offsetMtx.target = None
self.__offsetMtx = None
self.__centerPointOffsetMtx.target = None
self.__centerPointOffsetMtx = None
return
def __deflectionTarget(self, entity):
BigWorld.player().deflectionTargetsInProgress += 1
BigWorld.player().cell.setDeflectionTarget(entity.id if entity is not None else 0)
return
def __createTarget(self):
self.__matrixProvider = GUI.ForestallingMp()
self.__matrixProvider.source = BigWorld.player().fakeRealMatrix
self.__matrixProvider.target = None
self.__matrixProvider.offset = self.__offsetMtx
if COLLISION_RECORDER:
self.__matrixProvider.sourceEntity = BigWorld.player()
self.__matrixProvider.targetEntity = None
self.__inited = True
return | [
"55k@outlook.com"
] | 55k@outlook.com |
aeb2278cba6e6f5ab83eeea7a5279fdb438b5902 | b66e70a8bb3c53595acd01dceb23298694884b67 | /cloudy/settings/base.py | e14d1c5960cb86cbbfc54a2d682480988098d21e | [] | no_license | flupke/cloudy-release | d7735a38d79f816c52da3d983c714512a32919b1 | 6b160188a7067f125b107eb68dc8db4bbb4bfdf4 | refs/heads/master | 2016-09-06T05:23:40.856287 | 2013-02-23T18:17:16 | 2013-02-23T18:17:16 | 8,377,854 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,892 | py | import os.path as op
ROOT_DIR = op.abspath(op.join(op.dirname(__file__), '..'))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = op.join(ROOT_DIR, '..', 'static')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
op.join(ROOT_DIR, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'h)q^_kvx$sg+%e%=lg^a+q1!z9a5-1x%vky5*76_j-_wx7am-m'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'cloudy.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'cloudy.wsgi.application'
TEMPLATE_DIRS = (
op.join(ROOT_DIR, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
try:
from .local import *
except ImportError:
pass
| [
"luper.rouch@gmail.com"
] | luper.rouch@gmail.com |
808a9f59c4c857fb35b4ea766f82d0994b5016fd | bf1e6aa6ee7687363427c87b7e5bef1d157410fc | /backend/chat/api/v1/serializers.py | 4a6ada475bba6fc32ca69b58440701201883d804 | [] | no_license | crowdbotics-apps/chatme-27551 | 8cee7a31badc71575ba7c1a31c9d8638dadacd08 | 9c10dd5c0413ab2e10ffde4c8b90f650464cfa45 | refs/heads/master | 2023-05-04T12:18:31.110095 | 2021-05-29T10:40:55 | 2021-05-29T10:40:55 | 371,938,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | from rest_framework import serializers
from chat.models import (
MessageAction,
ThreadMember,
Thread,
Message,
ThreadAction,
ForwardedMessage,
)
class ForwardedMessageSerializer(serializers.ModelSerializer):
class Meta:
model = ForwardedMessage
fields = "__all__"
class ThreadMemberSerializer(serializers.ModelSerializer):
class Meta:
model = ThreadMember
fields = "__all__"
class ThreadActionSerializer(serializers.ModelSerializer):
class Meta:
model = ThreadAction
fields = "__all__"
class MessageActionSerializer(serializers.ModelSerializer):
class Meta:
model = MessageAction
fields = "__all__"
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = Message
fields = "__all__"
class ThreadSerializer(serializers.ModelSerializer):
class Meta:
model = Thread
fields = "__all__"
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
352b6484d41dbe8b4caa795e4a050a3a6b7cc7aa | 553b34a101c54090e68f540d96369ac7d5774d95 | /python/python_koans/python2/koans/about_new_style_classes.py | 99f39683a5ef5d804c5ed0b6d1160ab2d6b98939 | [
"MIT"
] | permissive | topliceanu/learn | fd124e1885b5c0bfea8587510b5eab79da629099 | 1c5b1433c3d6bfd834df35dee08607fcbdd9f4e3 | refs/heads/master | 2022-07-16T19:50:40.939933 | 2022-06-12T15:40:20 | 2022-06-12T15:40:20 | 21,684,180 | 26 | 12 | MIT | 2020-03-26T20:51:35 | 2014-07-10T07:22:17 | JavaScript | UTF-8 | Python | false | false | 2,466 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutNewStyleClasses(Koan):
class OldStyleClass:
"An old style class"
# Original class style have been phased out in Python 3.
class NewStyleClass(object):
"A new style class"
# Introduced in Python 2.2
#
# Aside from this set of tests, Python Koans sticks exclusively to this
# kind of class
pass
def test_new_style_classes_inherit_from_object_base_class(self):
self.assertEqual(True, issubclass(self.NewStyleClass, object))
self.assertEqual(False, issubclass(self.OldStyleClass, object))
def test_new_style_classes_have_more_attributes(self):
self.assertEqual(2, len(dir(self.OldStyleClass)))
self.assertEqual("An old style class", self.OldStyleClass.__doc__)
self.assertEqual('koans.about_new_style_classes', self.OldStyleClass.__module__)
self.assertEqual(18, len(dir(self.NewStyleClass)))
# To examine the available attributes, run
# 'dir(<Class name goes here>)'
# from a python console
# ------------------------------------------------------------------
def test_old_style_classes_have_type_but_no_class_attribute(self):
self.assertEqual('classobj', type(self.OldStyleClass).__name__)
try:
cls = self.OldStyleClass.__class__.__name__
except Exception as ex:
pass
# What was that error message from the exception?
self.assertMatch("class OldStyleClass has no attribute '__class__'", ex[0])
def test_new_style_classes_have_same_class_as_type(self):
new_style = self.NewStyleClass()
self.assertEqual(type(self.NewStyleClass), self.NewStyleClass.__class__)
self.assertEqual(True,
type(self.NewStyleClass) == self.NewStyleClass.__class__)
# ------------------------------------------------------------------
def test_in_old_style_instances_class_is_different_to_type(self):
old_style = self.OldStyleClass()
self.assertEqual('OldStyleClass', old_style.__class__.__name__)
self.assertEqual('instance', type(old_style).__name__)
def test_new_style_instances_have_same_class_as_type(self):
new_style = self.NewStyleClass()
self.assertEqual('NewStyleClass', new_style.__class__.__name__)
self.assertEqual(True, type(new_style) == new_style.__class__)
| [
"alexandru.topliceanu@gmail.com"
] | alexandru.topliceanu@gmail.com |
6f5712a2576ccb525f3600b7f0802178c4a366d1 | 32c56293475f49c6dd1b0f1334756b5ad8763da9 | /google-cloud-sdk/lib/googlecloudsdk/command_lib/compute/machine_types/flags.py | 7446d105ac181ce93490efa9f4adda4468ca2080 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | bopopescu/socialliteapp | b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494 | 85bb264e273568b5a0408f733b403c56373e2508 | refs/heads/master | 2022-11-20T03:01:47.654498 | 2020-02-01T20:29:43 | 2020-02-01T20:29:43 | 282,403,750 | 0 | 0 | MIT | 2020-07-25T08:31:59 | 2020-07-25T08:31:59 | null | UTF-8 | Python | false | false | 1,181 | py | # -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags for the compute machine-types commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.command_lib.compute import completers
from googlecloudsdk.command_lib.compute import flags as compute_flags
def MakeMachineTypeArg():
return compute_flags.ResourceArgument(
resource_name='machine type',
completer=completers.MachineTypesCompleter,
zonal_collection='compute.machineTypes',
zone_explanation=compute_flags.ZONE_PROPERTY_EXPLANATION)
| [
"jonathang132298@gmail.com"
] | jonathang132298@gmail.com |
c149c43ee2040d9c4aa7f7048a612c1e6297266f | 83280aa17b415138a6f55edf9cedfdd9a45916a2 | /src/stochastic_review/cli.py | 3f05a48e626b05227ae1806c286b518dadf3e258 | [] | no_license | RubenBranco/Stochastic-Continuous-Review | dfea2b55b7c8f0d41eac289b76b1d28ced31d0ce | 9015ebe6adfc6b812cb83c2b9df06fd3d20b6fcc | refs/heads/master | 2020-05-01T12:31:50.531958 | 2019-04-28T16:50:38 | 2019-04-28T16:50:38 | 177,467,859 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,161 | py | from bullet import SlidePrompt, Bullet, Numbers
DISTRIBUTIONS = [
'Uniform',
'Normal',
]
cli = SlidePrompt(
[
Bullet(prompt="Choose the distribution(Y)", choices=DISTRIBUTIONS),
Numbers(prompt="Distribution mean(μ) / Starting point(a): ", type=float),
Numbers(prompt="Distribution standard deviation(σ) / End point(b): ", type=float),
Numbers(prompt="Delivery time(l): ", type=float),
Numbers(prompt="Fixed cost of the order(A): ", type=float),
Numbers(prompt="Unitary item cost(c): ", type=float),
Numbers(prompt="Storage cost per item per timestep(h): ", type=float),
Numbers(prompt="Out of stock cost per item(p'): ", type=float),
Numbers(prompt="Stopping rate of change(ε): ", type=float),
]
)
def get_args_from_cli(cli_obj):
args = cli_obj.launch()
return dict(
distribution=args[0][1],
mean=args[1][1],
std_deviation=args[2][1],
delivery_time=args[3][1],
order_cost=args[4][1],
unit_cost=args[5][1],
storage_cost=args[6][1],
out_of_stock=args[7][1],
stop_crit=args[8][1],
)
| [
"ruben.branco@outlook.pt"
] | ruben.branco@outlook.pt |
993d08a77bcc43f54ee71a7c8ec0e59ae70641c2 | bb150497a05203a718fb3630941231be9e3b6a32 | /framework/api/nn/test_hardsigmoid.py | f591329d91524bb078a0d9157a65d000a22cedf6 | [] | no_license | PaddlePaddle/PaddleTest | 4fb3dec677f0f13f7f1003fd30df748bf0b5940d | bd3790ce72a2a26611b5eda3901651b5a809348f | refs/heads/develop | 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 | Python | UTF-8 | Python | false | false | 1,588 | py | #!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test_hardsigmoid
"""
from apibase import APIBase
from apibase import randtool
import paddle
import pytest
import numpy as np
class TestHardsigmoid(APIBase):
"""
test
"""
def hook(self):
"""
implement
"""
self.types = [np.float32, np.float64]
# self.debug = True
# self.static = True
# enable check grad
self.delta = 1e-1
# self.enable_backward = True
obj = TestHardsigmoid(paddle.nn.Hardsigmoid)
@pytest.mark.api_nn_Hardsigmoid_vartype
def test_hardsigmoid_base():
"""
base
"""
x = randtool("float", -10, 10, [2, 2])
res = []
for i in range(len(x.flatten())):
if x.flatten()[i] <= -3:
res.append(0)
elif x.flatten()[i] >= 3:
res.append(1)
else:
res.append(x.flatten()[i] / 6 + 0.5)
res = np.array(res).reshape(x.shape)
# print(res)
obj.base(res=res, data=x)
@pytest.mark.api_nn_Hardsigmoid_parameters
def test_hardsigmoid():
"""
x = [[3, 3, 3], [-5, 0, 5], [-3, -3, -3]]
"""
x = np.array([[3, 3, 3], [-5, 0, 5], [-3, -3, -3]]).astype(np.float32)
# print(x)
res = []
for i in range(len(x.flatten())):
if x.flatten()[i] <= -3:
res.append(0)
elif x.flatten()[i] >= 3:
res.append(1)
else:
res.append(x.flatten()[i] / 6 + 0.5)
res = np.array(res).reshape(x.shape)
# print(res)
obj.run(res=res, data=x)
| [
"noreply@github.com"
] | PaddlePaddle.noreply@github.com |
6973cb6fad77d9866816625849943c3739ebab02 | 45870a80cbe343efe95eb9e8d0bd47c8c88353d1 | /特殊的函数/venv/Lib/site-packages/tensorflow/tools/api/generator/api/gfile/__init__.py | d15998ef1791b0b3d2441ba82e50f147a68d311d | [] | no_license | pippichi/IntelliJ_PYTHON | 3af7fbb2c8a3c2ff4c44e66736bbfb7aed51fe88 | 0bc6ded6fb5b5d9450920e4ed5e90a2b82eae7ca | refs/heads/master | 2021-07-10T09:53:01.264372 | 2020-07-09T13:19:41 | 2020-07-09T13:19:41 | 159,319,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,154 | py | """Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python.lib.io.file_io import copy as Copy
from tensorflow.python.lib.io.file_io import create_dir as MkDir
from tensorflow.python.lib.io.file_io import delete_file as Remove
from tensorflow.python.lib.io.file_io import delete_recursively as DeleteRecursively
from tensorflow.python.lib.io.file_io import file_exists as Exists
from tensorflow.python.lib.io.file_io import get_matching_files as Glob
from tensorflow.python.lib.io.file_io import is_directory as IsDirectory
from tensorflow.python.lib.io.file_io import list_directory as ListDirectory
from tensorflow.python.lib.io.file_io import recursive_create_dir as MakeDirs
from tensorflow.python.lib.io.file_io import rename as Rename
from tensorflow.python.lib.io.file_io import stat as Stat
from tensorflow.python.lib.io.file_io import walk as Walk
from tensorflow.python.platform.gfile import FastGFile
from tensorflow.python.platform.gfile import GFile
from tensorflow.python.platform.gfile import GFile as Open | [
"874496049@qq.com"
] | 874496049@qq.com |
dab2470950559095359d56daae0b4daa36f036d0 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /ec2_write_1/vpc-endpoint_modify.py | 30909794ef3620c2cf2d203628e316109fd9fca1 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,180 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_one_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/modify-vpc-endpoint.html
if __name__ == '__main__':
"""
create-vpc-endpoint : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/create-vpc-endpoint.html
delete-vpc-endpoints : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/delete-vpc-endpoints.html
describe-vpc-endpoints : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-vpc-endpoints.html
"""
parameter_display_string = """
# vpc-endpoint-id : The ID of the endpoint.
"""
add_option_dict = {}
#######################################################################
# parameter display string
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_one_parameter("ec2", "modify-vpc-endpoint", "vpc-endpoint-id", add_option_dict)
| [
"hcseo77@gmail.com"
] | hcseo77@gmail.com |
f32746191c1cdb68cba6ceff436e1725c23ccd61 | 09021accfc6241a7eb3b17821394881518e78c84 | /backend/settings/development.py | 7e4853ea4e2f7e29e445273125de5c9c0e65fec0 | [] | no_license | mrporsh/ecommerce-core | c37ec314b7fe2a79524ed110a014b148be1edcf1 | 20de529dad2d52df20a75956d1be1d23cfa241af | refs/heads/master | 2022-11-08T02:08:05.358421 | 2020-06-15T08:18:56 | 2020-06-15T08:18:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,016 | py | from backend.settings.base import *
SECRET_KEY = '6h03)d($%+c4r#p65#ctnk3*u21^v@q+*e^ue0+llrq%zv(94z'
DEBUG = True
ALLOWED_HOSTS = ["*", ]
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# django-cors-headers
CORS_ORIGIN_ALLOW_ALL = True
# REST_FRAMEWORK
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES':
('knox.auth.TokenAuthentication',),
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.AllowAny'
]
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
# Africa's talking
AFRICASTALKING_USERNAME = os.environ.setdefault('AFRICASTALKING_USERNAME', 'sandbox')
AFRICASTALKING_API_KEY = os.environ['AFRICASTALKING_API_KEY']
AFRICASTALKING_PAYMENT_PROD_NAME = os.environ['AFRICASTALKING_PAYMENT_PROD_NAME']
AFRICASTALKING_CURRENCY = os.environ.setdefault('AFRICASTALKING_USERNAME', 'KES')
| [
"onteripaul@gmail.com"
] | onteripaul@gmail.com |
d24c822be4efd0de82f39e9ae11c2a6453533063 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_006/ch83_2020_04_13_17_27_42_530852.py | 8bd9a1e46cf9d29bc7a0f99413f2f1b5c8b8d247 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | def medias_por_inicial(dicio):
dicio2={}
listaL=[]
a=1
for i in dicio:
if i[0] not in listaL:
listaL.append(i[0])
dicio2[i[0]]=dicio[i]
else:
a=a+1
dicio2[i[0]]=(dicio2[i[0]]+dicio[i])/a
return dicio2 | [
"you@example.com"
] | you@example.com |
2d5e425ebcdd855dda89d0709c5e580068264bb4 | 99deab5f52fd7262a26de9aa5d0163bfa738590f | /python/leetcode/string/468_valid_ip_address.py | d2bed0cf860f5b1cad696de3bf10c6009a3861a1 | [] | no_license | zchen0211/topcoder | e47fc07c928b83138e27fd6681b373ce499480b0 | 4d73e4c1f2017828ff2d36058819988146356abe | refs/heads/master | 2022-01-17T16:54:35.871026 | 2019-05-08T19:26:23 | 2019-05-13T05:19:46 | 84,052,683 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,186 | py | """
468. Validate IP Address (Medium)
Write a function to check whether an input string is a valid IPv4 address or IPv6 address or neither.
IPv4 addresses are canonically represented in dot-decimal notation, which consists of four decimal numbers, each ranging from 0 to 255, separated by dots ("."), e.g.,172.16.254.1;
Besides, leading zeros in the IPv4 is invalid. For example, the address 172.16.254.01 is invalid.
IPv6 addresses are represented as eight groups of four hexadecimal digits, each group representing 16 bits. The groups are separated by colons (":"). For example, the address 2001:0db8:85a3:0000:0000:8a2e:0370:7334 is a valid one. Also, we could omit some leading zeros among four hexadecimal digits and some low-case characters in the address to upper-case ones, so 2001:db8:85a3:0:0:8A2E:0370:7334 is also a valid IPv6 address(Omit leading zeros and using upper cases).
However, we don't replace a consecutive group of zero value with a single empty group using two consecutive colons (::) to pursue simplicity. For example, 2001:0db8:85a3::8A2E:0370:7334 is an invalid IPv6 address.
Besides, extra leading zeros in the IPv6 is also invalid. For example, the address 02001:0db8:85a3:0000:0000:8a2e:0370:7334 is invalid.
Note: You may assume there is no extra space or special characters in the input string.
Example 1:
Input: "172.16.254.1"
Output: "IPv4"
Explanation: This is a valid IPv4 address, return "IPv4".
Example 2:
Input: "2001:0db8:85a3:0:0:8A2E:0370:7334"
Output: "IPv6"
Explanation: This is a valid IPv6 address, return "IPv6".
Example 3:
Input: "256.256.256.256"
Output: "Neither"
Explanation: This is neither a IPv4 address nor a IPv6 address.
"""
"""
check ipv4 and ipv6 separately:
IPv4:
4 numbers separated by "."
each number between 0, 255
no beginning zeros
check "-0"
IPv6:
8 hex numbers separated by ":"
each digit between 0, .., 9, a, .., f
"""
class Solution(object):
    """Classify a string as an IPv4 address, an IPv6 address, or neither."""

    def validIPAddress(self, IP):
        """Return "IPv4", "IPv6", or "Neither" for the given address string.

        :type IP: str
        :rtype: str
        """
        if self.is_ipv4(IP):
            return "IPv4"
        if self.is_ipv6(IP):
            return "IPv6"
        return 'Neither'

    def is_ipv4(self, s):
        """True iff s is a dotted-quad IPv4 address with no leading zeros."""
        parts = s.split('.')
        if len(parts) != 4:
            return False
        for item in parts:
            # Require every character to be a digit.  This rejects '', '+1',
            # '-1' and ' 1', which int() would happily parse but which are not
            # valid octets (the original first-character checks let '+1' and
            # ' 1' slip through).
            if not item or not all(c in '0123456789' for c in item):
                return False
            # No leading zeros: '01' is invalid, a lone '0' is fine.
            if item[0] == '0' and len(item) > 1:
                return False
            if int(item) > 255:
                return False
        return True

    def is_ipv6(self, s):
        """True iff s is eight colon-separated groups of 1-4 hex digits."""
        parts = s.split(':')
        if len(parts) != 8:
            return False
        hex_digits = set('0123456789abcdefABCDEF')
        for item in parts:
            # Empty groups ('::' shorthand) and 5+ digit groups are invalid
            # by this problem's rules.
            if not 1 <= len(item) <= 4:
                return False
            if any(c not in hex_digits for c in item):
                return False
        return True
if __name__ == '__main__':
    # Smoke test against the problem's examples.  Parenthesized single-argument
    # print works under both Python 2 and Python 3; the original bare
    # `print expr` statements are a SyntaxError on Python 3.
    a = Solution()
    print(a.validIPAddress('172.16.254.1'))
    print(a.validIPAddress('192.0.0.1'))
    print(a.validIPAddress('172.16.254.01'))
    print(a.validIPAddress('2001:0db8:85a3:0000:0000:8a2e:0370:7334'))
    print(a.validIPAddress('2001:db8:85a3:0:0:8A2E:0370:7334'))
    print(a.validIPAddress('2001:0db8:85a3::8A2E:0370:7334'))
| [
"chenzhuoyuan07@gmail.com"
] | chenzhuoyuan07@gmail.com |
00bc2dabdcd7e010bc2735303ed4cc9a35ed7325 | 776fa03e088c148578c5fe4b361734d1b5517249 | /comments/signals.py | c1ff8c39e82d81fdff911228614f931b6a3f1618 | [] | no_license | zhexuejia53/book_share_demo | de8b8801bf9af757f19e280cbf0d98d4ea80bfa7 | 7c810b06bc1f810650e471fd8bbe902c657c048b | refs/heads/master | 2020-04-06T14:40:03.226213 | 2018-11-14T14:17:47 | 2018-11-14T14:17:47 | 157,549,811 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | # encoding: utf-8
"""
@author: Sunmouren
@contact: sunxuechao1024@gmail.com
@time: 2018/9/29 16:58
@desc: data change signals
"""
from django.db.models.signals import m2m_changed
from django.dispatch import receiver
from .models import Comment
@receiver(m2m_changed, sender=Comment.like_user.through)
def like_user_changed(sender, instance, **kwargs):
instance.like_number = instance.like_user.count()
instance.save() | [
"sunxuechao1024@gmail.com"
] | sunxuechao1024@gmail.com |
bf63be0f63eb2aec45ffda446384b490c422dca9 | b129b450cf5edce677f284858a1ab14c003edca6 | /project/chapter6/user.py | e2fbbd4ec01ad4e9adca8fee5e79f4ce95fe0312 | [] | no_license | mentalclear/PythonCrashCourse | 7ab5a7691cd6ece83043ded2b91049723945b6e0 | 11d96ed6c7e36d254158ee49586ee40aa09493a1 | refs/heads/master | 2023-07-18T17:24:49.046848 | 2021-08-16T14:05:08 | 2021-08-16T14:05:08 | 233,722,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,476 | py | user_0 = {
'username': 'efermi',
'first': 'enrico',
'last': 'fermi',
}
# Print every key/value pair stored for the sample user record.
for key, value in user_0.items():
    print("\nKey: " + key)
    print("Value: " + value)

# Fav languages sample too: maps each person's name to one language.
favorite_languages = {
    'jen': 'python',
    'sarah': 'c',
    'edward': 'ruby',
    'phil': 'python',
    }

# Report each person's favorite language.
for name, language in favorite_languages.items():
    print(name.title() + "'s favorite language is " +
          language.title() + ".")
print("\n")

# Iterating a dict yields its keys; same as favorite_languages.keys().
for name in favorite_languages:
    print(name.title())
print("\n")

# Greet only the names that appear in the friends list.
friends = ['phil', 'sarah']
for name in favorite_languages.keys():
    print(name.title())
    if name in friends:
        print(" Hi " + name.title() +
              ", I see your favorite language is " +
              favorite_languages[name].title() + "!")

# Invite anyone who has not answered yet.
if 'erin' not in favorite_languages.keys():
    print("Erin, please take our poll!")
print("\n")

# Printing it out sorted alphabetically by name.
for name in sorted(favorite_languages.keys()):
    print(name.title() + ", thank you for taking the poll.")

# Printing values.
print("\nThe following languages have been mentioned:")
for language in favorite_languages.values():
    print(language.title())
# This approach pulls all the values from the dictionary without checking
# for repeats.

# To exclude duplications, wrap the values in a set first.
print("\nExcluding duplications: ")
for language in set(favorite_languages.values()):
    print(language.title())
# Only unique items will be printed out (set iteration order is arbitrary).
| [
"mentalclear@gmail.com"
] | mentalclear@gmail.com |
a991f2bbc4f316c01fa0ef216d76f54796d6cf5f | a59d55ecf9054d0750168d3ca9cc62a0f2b28b95 | /.install/.backup/lib/googlecloudsdk/gcloud/sdktools/auth/revoke.py | 563c17681dffdf72d8b48e802ca6e428a1e526f5 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | bopopescu/google-cloud-sdk | bb2746ff020c87271398196f21a646d9d8689348 | b34e6a18f1e89673508166acce816111c3421e4b | refs/heads/master | 2022-11-26T07:33:32.877033 | 2014-06-29T20:43:23 | 2014-06-29T20:43:23 | 282,306,367 | 0 | 0 | NOASSERTION | 2020-07-24T20:04:47 | 2020-07-24T20:04:46 | null | UTF-8 | Python | false | false | 1,945 | py | # Copyright 2013 Google Inc. All Rights Reserved.
"""Revoke credentials being used by the CloudSDK.
"""
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions as c_exc
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.credentials import store as c_store
class Revoke(base.Command):
  """Revoke authorization for credentials.

  Revoke credentials. If no account is provided, the currently active account is
  used.
  """

  @staticmethod
  def Args(parser):
    """Register the positional accounts argument and the --all flag."""
    parser.add_argument('accounts', nargs='*',
                        help='Accounts whose credentials shall be revoked.')
    parser.add_argument('--all', action='store_true',
                        help='Revoke all known credentials.')

  @c_exc.RaiseToolExceptionInsteadOf(c_store.Error)
  def Run(self, args):
    """Revoke credentials and update active account."""
    accounts = args.accounts or []
    # argparse with nargs='*' normally yields a list; normalize a bare string
    # defensively.  isinstance() is the idiomatic type check and, unlike the
    # original `type(accounts) is str`, also matches str subclasses.
    if isinstance(accounts, str):
      accounts = [accounts]
    available_accounts = c_store.AvailableAccounts()
    # Reject any requested account we have no stored credentials for.
    unknown_accounts = set(accounts) - set(available_accounts)
    if unknown_accounts:
      raise c_exc.UnknownArgumentException(
          'accounts', ' '.join(unknown_accounts))
    if args.all:
      accounts = available_accounts
    active_account = properties.VALUES.core.account.Get()
    # Default to revoking the currently active account when none were given.
    if not accounts and active_account:
      accounts = [active_account]
    if not accounts:
      raise c_exc.InvalidArgumentException(
          'accounts', 'No credentials available to revoke.')
    for account in accounts:
      # Clear the active-account property before revoking its credentials so
      # the SDK is not left pointing at a revoked account.
      if active_account == account:
        properties.PersistProperty(properties.VALUES.core.account, None)
      c_store.Revoke(account)
    return accounts

  def Display(self, unused_args, result):
    """Print the revoked accounts, then re-list the remaining credentials."""
    if result:
      log.Print('Revoked credentials for {account}.'.format(
          account=', '.join(result)))
    self.entry_point.auth.list()
| [
"alfred.wechselberger@technologyhatchery.com"
] | alfred.wechselberger@technologyhatchery.com |
0bd5fa45d9a802e9ef2aad12ba344f7a145ca37c | f6688132ec14a9d03c8bb05e85819f810fd3e4e6 | /tfold/nets/nets_factory.py | 2f93c133b7f6deb9c1be5c4a6d13a25aadb0c6aa | [
"Apache-2.0"
] | permissive | mariusionescu/tfold | 44515b9eba027a8d4a9265e6f7299dc08294dc42 | b6a9913d29a62326bfc3086fa14ed317d1e02a0a | refs/heads/master | 2020-04-08T19:59:39.676558 | 2018-12-05T19:47:57 | 2018-12-05T19:47:57 | 159,679,441 | 0 | 0 | Apache-2.0 | 2018-11-29T14:33:13 | 2018-11-29T14:33:12 | null | UTF-8 | Python | false | false | 7,298 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains a factory for building various models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import tensorflow as tf
from nets import alexnet
from nets import cifarnet
from nets import inception
from nets import lenet
from nets import mobilenet_v1
from nets import overfeat
from nets import resnet_v1
from nets import resnet_v2
from nets import vgg
from nets.mobilenet import mobilenet_v2
from nets.nasnet import nasnet
from nets.nasnet import pnasnet
slim = tf.contrib.slim
# Maps each supported model name to the function that builds its graph.
# Keys here must stay in sync with arg_scopes_map below.
networks_map = {'alexnet_v2': alexnet.alexnet_v2,
                'cifarnet': cifarnet.cifarnet,
                'overfeat': overfeat.overfeat,
                'vgg_a': vgg.vgg_a,
                'vgg_16': vgg.vgg_16,
                'vgg_19': vgg.vgg_19,
                'inception_v1': inception.inception_v1,
                'inception_v2': inception.inception_v2,
                'inception_v3': inception.inception_v3,
                'inception_v4': inception.inception_v4,
                'inception_resnet_v2': inception.inception_resnet_v2,
                'lenet': lenet.lenet,
                'resnet_v1_50': resnet_v1.resnet_v1_50,
                'resnet_v1_101': resnet_v1.resnet_v1_101,
                'resnet_v1_152': resnet_v1.resnet_v1_152,
                'resnet_v1_200': resnet_v1.resnet_v1_200,
                'resnet_v2_50': resnet_v2.resnet_v2_50,
                'resnet_v2_101': resnet_v2.resnet_v2_101,
                'resnet_v2_152': resnet_v2.resnet_v2_152,
                'resnet_v2_200': resnet_v2.resnet_v2_200,
                'mobilenet_v1': mobilenet_v1.mobilenet_v1,
                'mobilenet_v1_075': mobilenet_v1.mobilenet_v1_075,
                'mobilenet_v1_050': mobilenet_v1.mobilenet_v1_050,
                'mobilenet_v1_025': mobilenet_v1.mobilenet_v1_025,
                'mobilenet_v2': mobilenet_v2.mobilenet,
                'mobilenet_v2_140': mobilenet_v2.mobilenet_v2_140,
                'mobilenet_v2_035': mobilenet_v2.mobilenet_v2_035,
                'nasnet_cifar': nasnet.build_nasnet_cifar,
                'nasnet_mobile': nasnet.build_nasnet_mobile,
                'nasnet_large': nasnet.build_nasnet_large,
                'pnasnet_large': pnasnet.build_pnasnet_large,
                'pnasnet_mobile': pnasnet.build_pnasnet_mobile,
               }

# Maps each model name to the arg_scope callable that supplies its default
# hyperparameters (weight decay, batch-norm settings, etc.).  Several models
# deliberately share one scope (e.g. all VGG variants, all ResNet variants).
arg_scopes_map = {'alexnet_v2': alexnet.alexnet_v2_arg_scope,
                  'cifarnet': cifarnet.cifarnet_arg_scope,
                  'overfeat': overfeat.overfeat_arg_scope,
                  'vgg_a': vgg.vgg_arg_scope,
                  'vgg_16': vgg.vgg_arg_scope,
                  'vgg_19': vgg.vgg_arg_scope,
                  'inception_v1': inception.inception_v3_arg_scope,
                  'inception_v2': inception.inception_v3_arg_scope,
                  'inception_v3': inception.inception_v3_arg_scope,
                  'inception_v4': inception.inception_v4_arg_scope,
                  'inception_resnet_v2':
                  inception.inception_resnet_v2_arg_scope,
                  'lenet': lenet.lenet_arg_scope,
                  'resnet_v1_50': resnet_v1.resnet_arg_scope,
                  'resnet_v1_101': resnet_v1.resnet_arg_scope,
                  'resnet_v1_152': resnet_v1.resnet_arg_scope,
                  'resnet_v1_200': resnet_v1.resnet_arg_scope,
                  'resnet_v2_50': resnet_v2.resnet_arg_scope,
                  'resnet_v2_101': resnet_v2.resnet_arg_scope,
                  'resnet_v2_152': resnet_v2.resnet_arg_scope,
                  'resnet_v2_200': resnet_v2.resnet_arg_scope,
                  'mobilenet_v1': mobilenet_v1.mobilenet_v1_arg_scope,
                  'mobilenet_v1_075': mobilenet_v1.mobilenet_v1_arg_scope,
                  'mobilenet_v1_050': mobilenet_v1.mobilenet_v1_arg_scope,
                  'mobilenet_v1_025': mobilenet_v1.mobilenet_v1_arg_scope,
                  'mobilenet_v2': mobilenet_v2.training_scope,
                  'mobilenet_v2_035': mobilenet_v2.training_scope,
                  'mobilenet_v2_140': mobilenet_v2.training_scope,
                  'nasnet_cifar': nasnet.nasnet_cifar_arg_scope,
                  'nasnet_mobile': nasnet.nasnet_mobile_arg_scope,
                  'nasnet_large': nasnet.nasnet_large_arg_scope,
                  'pnasnet_large': pnasnet.pnasnet_large_arg_scope,
                  'pnasnet_mobile': pnasnet.pnasnet_mobile_arg_scope,
                 }
def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False):
  """Return a network_fn such as `logits, end_points = network_fn(images)`.

  Args:
    name: The name of the network; must be a key of `networks_map`.
    num_classes: The number of classes to use for classification. If 0 or None,
      the logits layer is omitted and its input features are returned instead.
    weight_decay: The l2 coefficient for the model weights.
    is_training: `True` if the model is being used for training and `False`
      otherwise.

  Returns:
    network_fn: A function that applies the model to a batch of images with
      signature `net, end_points = network_fn(images)`.  `images` has shape
      [batch_size, height, width, 3] with
      height = width = network_fn.default_image_size (when the underlying
      model defines one).  `end_points` is a dict of intermediate
      activations; `net` is the topmost layer — a [batch_size, num_classes]
      logits tensor when `num_classes` is a non-zero integer, otherwise the
      pre-logits features (dropout not applied).

  Raises:
    ValueError: If network `name` is not recognized.
  """
  if name not in networks_map:
    raise ValueError('Name of network unknown %s' % name)
  base_fn = networks_map[name]

  @functools.wraps(base_fn)
  def network_fn(images, **kwargs):
    # Look the scope up at call time so the returned closure always tracks
    # the registry, exactly like a direct `arg_scopes_map[name]` access.
    scope = arg_scopes_map[name](weight_decay=weight_decay)
    with slim.arg_scope(scope):
      return base_fn(images, num_classes, is_training=is_training, **kwargs)

  if hasattr(base_fn, 'default_image_size'):
    network_fn.default_image_size = base_fn.default_image_size
  return network_fn
| [
"marius@mi.www.ro"
] | marius@mi.www.ro |
022ecb8993edbe27c3273a1472d037923525e9ec | ff8aa03818c31db7dea740d65b79f5517385ec79 | /lib/flows/general/checks_test.py | b19f9ae9dbc87be3b99214a1b50e989da81dbd48 | [
"DOC",
"Apache-2.0"
] | permissive | pchaigno/grr | cdaf4db3289cf80359441fef5be39bbf0729d3ac | 69c81624c281216a45c4bb88a9d4e4b0613a3556 | refs/heads/master | 2021-01-21T08:24:45.699745 | 2015-08-03T17:01:30 | 2015-08-03T17:01:30 | 25,120,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,783 | py | #!/usr/bin/env python
"""Test the collector flows."""
import os
from grr.client import vfs
from grr.lib import action_mocks
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import flow
from grr.lib import test_lib
from grr.lib.checks import checks
from grr.lib.checks import checks_test_lib
# pylint: disable=unused-import
from grr.lib.flows.general import checks as _
# pylint: enable=unused-import
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import paths as rdf_paths
# pylint: mode=test
class TestCheckFlows(test_lib.FlowTestsBaseclass,
                     checks_test_lib.HostCheckTest):
  """End-to-end tests for the CheckRunner flow against a fake client."""

  # Class-level cache: truthy (the loaded check ids) after the first setUp so
  # the YAML check definitions are only parsed once per test run.
  checks_loaded = False

  def setUp(self, **kwargs):
    super(TestCheckFlows, self).setUp(**kwargs)
    # Only load the checks once.
    if self.checks_loaded is False:
      self.checks_loaded = self.LoadChecks()
    if not self.checks_loaded:
      raise RuntimeError("No checks to test.")
    test_lib.ClientFixture(self.client_id, token=self.token)
    # Route OS pathspecs to the canned test-data VFS handler so flows read
    # fixture files instead of the real filesystem.
    vfs.VFS_HANDLERS[
        rdf_paths.PathSpec.PathType.OS] = test_lib.FakeTestDataVFSHandler
    self.client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile",
                                               "Find", "HashBuffer",
                                               "ListDirectory", "HashFile",
                                               "FingerprintFile")

  def SetLinuxKB(self):
    """Populate the test client with a minimal Linux knowledge base."""
    client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
    kb = client.Schema.KNOWLEDGE_BASE()
    kb.os = "Linux"
    user = rdf_client.KnowledgeBaseUser(username="user1", homedir="/home/user1")
    kb.users = [user]
    client.Set(client.Schema.KNOWLEDGE_BASE, kb)
    client.Set(client.Schema.SYSTEM("Linux"))
    client.Set(client.Schema.OS_VERSION("12.04"))
    client.Flush()

  def SetWindowsKB(self):
    """Populate the test client with a minimal Windows knowledge base."""
    client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
    kb = client.Schema.KNOWLEDGE_BASE()
    kb.os = "Windows"
    client.Set(client.Schema.KNOWLEDGE_BASE, kb)
    client.Set(client.Schema.SYSTEM("Windows"))
    client.Set(client.Schema.OS_VERSION("6.2"))
    client.Flush()

  def RunFlow(self):
    """Run CheckRunner; return (flow object, CheckResults keyed by check id)."""
    session_id = None
    # Instrument SendReply to capture every reply the flow emits.
    with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
      for session_id in test_lib.TestFlowHelper(
          "CheckRunner", client_mock=self.client_mock, client_id=self.client_id,
          token=self.token):
        pass
    session = aff4.FACTORY.Open(session_id, token=self.token)
    results = {r.check_id: r for _, r in send_reply.args if isinstance(
        r, checks.CheckResult)}
    return session, results

  def LoadChecks(self):
    """Load the checks, returning the names of the checks that were loaded."""
    config_lib.CONFIG.Set("Checks.max_results", 5)
    checks.CheckRegistry.Clear()
    check_configs = ("sshd.yaml", "sw.yaml", "unix_login.yaml")
    cfg_dir = os.path.join(config_lib.CONFIG["Test.data_dir"], "checks")
    chk_files = [os.path.join(cfg_dir, f) for f in check_configs]
    checks.LoadChecksFromFiles(chk_files)
    return checks.CheckRegistry.checks.keys()

  def testSelectArtifactsForChecks(self):
    # The artifacts requested by the flow must match the host OS.
    self.SetLinuxKB()
    session, _ = self.RunFlow()
    self.assertTrue("DebianPackagesStatus" in session.state.artifacts_wanted)
    self.assertTrue("SshdConfigFile" in session.state.artifacts_wanted)
    self.SetWindowsKB()
    session, _ = self.RunFlow()
    self.assertTrue("WMIInstalledSoftware" in session.state.artifacts_wanted)

  def testCheckFlowSelectsChecks(self):
    """Confirm the flow runs checks for a target machine."""
    self.SetLinuxKB()
    _, results = self.RunFlow()
    expected = ["SHADOW-HASH", "SSHD-CHECK", "SSHD-PERMS", "SW-CHECK"]
    self.assertRanChecks(expected, results)

  def testChecksProcessResultContext(self):
    """Test the flow returns parser results."""
    self.SetLinuxKB()
    _, results = self.RunFlow()
    # Detected by result_context: PARSER
    exp = "Found: Sshd allows protocol 1."
    self.assertCheckDetectedAnom("SSHD-CHECK", results, exp)
    # Detected by result_context: RAW
    exp = "Found: The filesystem supports stat."
    found = ["/etc/ssh/sshd_config"]
    self.assertCheckDetectedAnom("SSHD-PERMS", results, exp, found)
    # Detected by result_context: ANOMALY
    exp = "Found: Unix system account anomalies."
    found = ["Accounts with invalid gid.",
             "Mismatched passwd and shadow files."]
    self.assertCheckDetectedAnom("ODD-PASSWD", results, exp, found)
    # No findings.
    self.assertCheckUndetected("SHADOW-HASH", results)
    self.assertCheckUndetected("SW-CHECK", results)
def main(argv):
  """Entry point used by flags.StartMain."""
  # Run the full test suite
  test_lib.GrrTestProgram(argv=argv)


if __name__ == "__main__":
  flags.StartMain(main)
| [
"github@mailgreg.com"
] | github@mailgreg.com |
e3bd418f73a95ee66b1b3560cdfc93ce323bfebc | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/surface/deploy/delivery_pipelines/describe.py | 627fd4b62b1af7f91ba485c9069aa7c4c39558ac | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 3,850 | py | # -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Describes a Gcloud Deploy delivery pipeline resource."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.clouddeploy import delivery_pipeline
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.deploy import describe
from googlecloudsdk.command_lib.deploy import resource_args
from googlecloudsdk.command_lib.deploy import target_util
from googlecloudsdk.core import log
_DETAILED_HELP = {
'DESCRIPTION':
'{description}',
'EXAMPLES':
""" \
To describe a delivery pipeline called 'test-pipeline' in region 'us-central1', run:
$ {command} test-pipeline --region=us-central1
""",
}
def _CommonArgs(parser):
  """Register flags for this command.

  Shared by every release track's Describe command (see below).

  Args:
    parser: An argparse.ArgumentParser-like object. It is mocked out in order
      to capture some information, but behaves like an ArgumentParser.
  """
  resource_args.AddDeliveryPipelineResourceArg(parser, positional=True)
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
                    base.ReleaseTrack.GA)
class Describe(base.DescribeCommand):
  """Describes details specific to the individual target, delivery pipeline qualified.

  The output contains the following sections:

  Delivery Pipeline:
    - detail of the delivery pipeline to be described.

  Targets:
    - target name.
    - active release in the target.
    - timestamp of the last successful deployment.
    - list of the rollouts that require approval.
  """
  detailed_help = _DETAILED_HELP

  @staticmethod
  def Args(parser):
    _CommonArgs(parser)

  def Run(self, args):
    """This is what gets called when the user runs this command.

    Returns:
      A dict with a 'Delivery Pipeline' entry and a 'Targets' list, rendered
      by the declarative output framework.
    """
    pipeline_ref = args.CONCEPTS.delivery_pipeline.Parse()
    # Check if the pipeline exists; surface API errors as gcloud exceptions.
    try:
      pipeline = delivery_pipeline.DeliveryPipelinesClient().Get(
          pipeline_ref.RelativeName())
    except apitools_exceptions.HttpError as error:
      raise exceptions.HttpException(error)
    output = {'Delivery Pipeline': pipeline}
    region = pipeline_ref.AsDict()['locationsId']
    targets = []
    # Output the deployment status of the targets in the pipeline.
    for stage in pipeline.serialPipeline.stages:
      target_ref = target_util.TargetReference(
          stage.targetId,
          pipeline_ref.AsDict()['projectsId'], region)
      try:
        target_obj = target_util.GetTarget(target_ref)
      except apitools_exceptions.HttpError as error:
        # Best effort: an unreadable target is reported and skipped rather
        # than failing the whole describe.
        log.debug('Failed to get target {}: {}'.format(stage.targetId, error))
        log.status.Print('Unable to get target {}'.format(stage.targetId))
        continue
      detail = {'Target': target_ref.RelativeName()}
      current_rollout = target_util.GetCurrentRollout(target_ref, pipeline_ref)
      detail = describe.SetCurrentReleaseAndRollout(current_rollout, detail)
      if target_obj.requireApproval:
        detail = describe.ListPendingApprovals(target_ref, pipeline_ref, detail)
      targets.append(detail)
    output['Targets'] = targets
    return output
| [
"cloudsdk.mirror@gmail.com"
] | cloudsdk.mirror@gmail.com |
99ede0972e7ffe258fd362c118aec60acac4a6b8 | 1034ae0e71c91b5258364e216ed724cfeed563f8 | /benchbuild/projects/benchbuild/python.py | 1a76ea30ae3b86d66b5514c305b6b900485ec4b2 | [
"MIT"
] | permissive | PolyJIT/benchbuild.projects | 4367b27f8380fc036ac8bc1ec5a767f29fdf18d6 | 878e2906aff2ae13abdd7f8515b8643bc7cf1f15 | refs/heads/master | 2020-12-20T14:07:01.130075 | 2020-01-24T23:42:54 | 2020-01-24T23:42:54 | 236,102,422 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,511 | py | from plumbum import local
from benchbuild.project import Project
from benchbuild.environments import container
from benchbuild.source import HTTP
from benchbuild.utils.cmd import make, tar
class Python(Project):
    """ python benchmarks """

    # NOTE(review): compile() and run_tests() below reference `compiler`,
    # `run` and `wrapping`, none of which are imported in this file as shown —
    # this looks like missing `benchbuild.utils.{compiler,run,wrapping}`
    # imports; confirm against the upstream benchbuild sources.

    NAME: str = 'python'
    DOMAIN: str = 'compilation'
    GROUP: str = 'benchbuild'
    SOURCE = [
        HTTP(remote={
            '3.4.3':
                'https://www.python.org/ftp/python/3.4.3/Python-3.4.3.tar.xz'
        },
             local='python.tar.xz')
    ]

    CONTAINER = container.Buildah().from_('debian:buster-slim')

    def compile(self):
        # Unpack the CPython tarball and build it statically with clang.
        python_source = local.path(self.source_of('python.tar.xz'))
        python_version = self.version_of('python.tar.xz')
        tar("xfJ", python_source)
        unpack_dir = local.path(f'Python-{python_version}')

        clang = compiler.cc(self)
        clang_cxx = compiler.cxx(self)
        with local.cwd(unpack_dir):
            configure = local["./configure"]
            configure = run.watch(configure)
            with local.env(CC=str(clang), CXX=str(clang_cxx)):
                configure("--disable-shared", "--without-gcc")
            make_ = run.watch(make)
            make_()

    def run_tests(self):
        # Wrap the built interpreter for measurement, then run CPython's own
        # test suite (-i: keep going past individual test failures).
        python_version = self.version_of('python.tar.xz')
        unpack_dir = local.path(f'Python-{python_version}')
        wrapping.wrap(unpack_dir / "python", self)
        with local.cwd(unpack_dir):
            make_ = run.watch(make)
            make_("-i", "test")
| [
"simbuerg@fim.uni-passau.de"
] | simbuerg@fim.uni-passau.de |
17caa79403632a3b3f95dbf1c801017b4f05e9e9 | 8e2e28a191fa5ec5a6c070ec7e9ccad98c8b4a0b | /test/26-踢足球游戏.py | abb3a54a35b9b9bb2ac3c5ace47cca32545f1481 | [
"Apache-2.0"
] | permissive | kellanfan/python | 4cd61cbc062e2eee3a900fa7447ca5f0b8f1a999 | 912dc05a3bd0ded9544166a68da23ca0a97b84da | refs/heads/master | 2023-04-06T03:04:38.851928 | 2023-04-01T02:45:56 | 2023-04-01T02:45:56 | 65,542,280 | 3 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,534 | py | # pylint: disable=no-member
# -*- encoding: utf-8 -*-
'''
@File : 26-踢足球游戏.py
@Time : 2019/05/13 12:02:17
@Author : Kellan Fan
@Version : 1.0
@Contact : kellanfan1989@gmail.com
@Desc : None
'''
# here put the import lib
from random import choice
def kick():
    """Play one round: the player shoots first, then defends.

    Mutates the module-level ``score`` list: score[0] holds the player's
    goals, score[1] the computer's.
    """
    options = ('right', 'centor', 'left')

    # --- player shoots, computer keeps goal ---
    print('=== Now, You kick!===')
    shot = input("please choice which onside you shot, eg:<right,centor,left>: ")
    print('you kicked ' + shot)
    keeper = choice(options)
    if shot not in options:
        print("please input 'right','centor','left'")
    elif shot != keeper:
        print('gold!!!')
        score[0] += 1
    else:
        print('oh!no!!!')
    print("now the score is %d:%d" % (score[0], score[1]))

    # --- computer shoots, player keeps goal ---
    print('=== Now, You save!===')
    save = input("please choice which onside you save, eg:<right,centor,left>: ")
    print('you saved ' + save)
    shooter = choice(options)
    if save not in options:
        print("please input 'right','centor','left'")
    elif save != shooter:
        print('oh!no!!!')
        score[1] += 1
    else:
        print('yes!!!')
    print("now the score is %d:%d" % (score[0], score[1]))
if __name__ == '__main__':
    # score[0]: player goals, score[1]: computer goals (shared with kick()).
    score = [0, 0]
    # Five regulation rounds.
    for i in range(5):
        print('====Round %d===='%(i+1))
        kick()
    # Sudden-death extra rounds until the tie is broken; `i` keeps counting
    # from where the regulation loop left off.
    while (score[0] == score[1]):
        i=i+1
        print('====add time Round %d'%(i+1))
        kick()
    if score[0] > score[1]:
        print('you win!!!')
    else:
        print('you lose!!')
| [
"icyfk1989@163.com"
] | icyfk1989@163.com |
7f850a7311c4ab54b2e4f5163041ce9a69043473 | 10717fe6f68c4ee9bcf27ee62e89581f4a030b8e | /extractor/yinyuetai.py | f2d6b0987afa3d3aec66c8646cac574045296fd1 | [] | no_license | HagerHosny199/Testing_Project | ff7f9a54b7a213c9d9ade0c5192845c2a29adc8b | 9bc170263e239cc24ccfb2aa33b9913ff799ffe9 | refs/heads/master | 2020-05-17T20:57:01.750640 | 2019-05-08T22:13:06 | 2019-05-08T22:13:06 | 183,954,736 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,906 | py | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from utils import ExtractorError
class YinYueTaiIE(InfoExtractor):
    """Extractor for single music videos hosted on v.yinyuetai.com."""

    IE_NAME = 'yinyuetai:video'
    IE_DESC = '音悦Tai'
    _VALID_URL = r'https?://v\.yinyuetai\.com/video(?:/h5)?/(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://v.yinyuetai.com/video/2322376',
        'md5': '6e3abe28d38e3a54b591f9f040595ce0',
        'info_dict': {
            'id': '2322376',
            'ext': 'mp4',
            'title': '少女时代_PARTY_Music Video Teaser',
            'creator': '少女时代',
            'duration': 25,
            'thumbnail': r're:^https?://.*\.jpg$',
        },
    }, {
        'url': 'http://v.yinyuetai.com/video/h5/2322376',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        """Fetch the mv metadata JSON and map it to an info dict."""
        video_id = self._match_id(url)
        info = self._download_json(
            'http://ext.yinyuetai.com/main/get-h-mv-info?json=true&videoId=%s' % video_id, video_id,
            'Downloading mv info')['videoInfo']['coreVideoInfo']
        # The endpoint reports failures in-band rather than via HTTP status.
        if info['error']:
            raise ExtractorError(info['errorMsg'], expected=True)
        # One format entry per quality level offered by the site.
        formats = [{
            'url': format_info['videoUrl'],
            'format_id': format_info['qualityLevel'],
            'format': format_info.get('qualityLevelName'),
            'filesize': format_info.get('fileSize'),
            # though URLs ends with .flv, the downloaded files are in fact mp4
            'ext': 'mp4',
            'tbr': format_info.get('bitrate'),
        } for format_info in info['videoUrlModels']]
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': info['videoName'],
            'thumbnail': info.get('bigHeadImage'),
            'creator': info.get('artistNames'),
            'duration': info.get('duration'),
            'formats': formats,
        }
| [
"hagarhosny19@gmail.com"
] | hagarhosny19@gmail.com |
dd14425012d9898f41e0b4d01a2bf77781ef2e0f | dc7cdeecb1ed52a7bdd18cd20c69aa43897f0830 | /tests/test_client.py | 30074fd13d9005482b77f433b0d3db42355a31fb | [
"MIT"
] | permissive | hurricane1260/wechatpy | 421b0a27b78bbb3bcc33bc6e6685b6beacd55dde | 0d7916e1a894f208dcea18b33803751166378c3d | refs/heads/master | 2021-01-17T18:37:14.535895 | 2014-11-02T16:27:31 | 2014-11-02T16:27:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,581 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import unittest
import six
from httmock import urlmatch, HTTMock, response
from wechatpy import WeChatClient
from wechatpy._compat import json
_TESTS_PATH = os.path.abspath(os.path.dirname(__file__))
_FIXTURE_PATH = os.path.join(_TESTS_PATH, 'fixtures')
@urlmatch(netloc=r'(.*\.)?api\.weixin\.qq\.com$')
def wechat_api_mock(url, request):
    """Serve a canned JSON fixture for any WeChat API request.

    The request path is flattened into a fixture file name under
    _FIXTURE_PATH; when the fixture is missing or not valid JSON, a
    sentinel error payload is returned instead.
    """
    fixture_name = url.path.replace('/cgi-bin/', '').replace('/', '_')
    fixture_path = os.path.join(_FIXTURE_PATH, '%s.json' % fixture_name)
    headers = {'Content-Type': 'application/json'}
    # Fallback payload used when no fixture can be read.
    content = {'errcode': 99999, 'errmsg': 'can not find fixture'}
    try:
        with open(fixture_path) as fixture:
            content = json.loads(fixture.read())
    except (IOError, ValueError):
        pass
    return response(200, content, headers, request=request)
class WeChatClientTestCase(unittest.TestCase):
    """Exercise WeChatClient endpoints against the mocked HTTP layer.

    Every test wraps its API call in HTTMock(wechat_api_mock) so responses
    come from the JSON fixtures instead of the real WeChat servers.
    """

    # Dummy credentials; never sent to a live endpoint.
    app_id = '123456'
    secret = '123456'

    def setUp(self):
        self.client = WeChatClient(self.app_id, self.secret)

    def test_fetch_access_token(self):
        with HTTMock(wechat_api_mock):
            token = self.client.fetch_access_token()
            self.assertEqual('1234567890', token['access_token'])
            self.assertEqual(7200, token['expires_in'])
            # The client caches the token it just fetched.
            self.assertEqual('1234567890', self.client.access_token)

    def test_upload_media(self):
        media_file = six.StringIO('nothing')
        with HTTMock(wechat_api_mock):
            media = self.client.upload_media('image', media_file)
            self.assertEqual('image', media['type'])
            self.assertEqual('12345678', media['media_id'])

    def test_create_group(self):
        with HTTMock(wechat_api_mock):
            group = self.client.create_group('test')
            self.assertEqual(1, group['group']['id'])
            self.assertEqual('test', group['group']['name'])

    def test_send_text_message(self):
        with HTTMock(wechat_api_mock):
            result = self.client.send_text_message(1, 'test')
            self.assertEqual(0, result['errcode'])

    def test_send_image_message(self):
        with HTTMock(wechat_api_mock):
            result = self.client.send_image_message(1, '123456')
            self.assertEqual(0, result['errcode'])

    def test_send_voice_message(self):
        with HTTMock(wechat_api_mock):
            result = self.client.send_voice_message(1, '123456')
            self.assertEqual(0, result['errcode'])

    def test_send_video_message(self):
        with HTTMock(wechat_api_mock):
            result = self.client.send_video_message(
                1, '123456', 'test', 'test'
            )
            self.assertEqual(0, result['errcode'])

    def test_send_music_message(self):
        with HTTMock(wechat_api_mock):
            result = self.client.send_music_message(
                1, 'http://www.qq.com', 'http://www.qq.com',
                '123456', 'test', 'test'
            )
            self.assertEqual(0, result['errcode'])

    def test_send_articles_message(self):
        with HTTMock(wechat_api_mock):
            articles = [{
                'title': 'test',
                'description': 'test',
                'url': 'http://www.qq.com',
                'image': 'http://www.qq.com'
            }]
            result = self.client.send_articles_message(1, articles)
            self.assertEqual(0, result['errcode'])

    def test_create_menu(self):
        with HTTMock(wechat_api_mock):
            result = self.client.create_menu({
                'button': [
                    {
                        'type': 'click',
                        'name': 'test',
                        'key': 'test'
                    }
                ]
            })
            self.assertEqual(0, result['errcode'])

    def test_get_menu(self):
        with HTTMock(wechat_api_mock):
            menu = self.client.get_menu()
            self.assertTrue('menu' in menu)

    def test_delete_menu(self):
        with HTTMock(wechat_api_mock):
            result = self.client.delete_menu()
            self.assertEqual(0, result['errcode'])

    def test_update_menu(self):
        with HTTMock(wechat_api_mock):
            result = self.client.update_menu({
                'button': [
                    {
                        'type': 'click',
                        'name': 'test',
                        'key': 'test'
                    }
                ]
            })
            self.assertEqual(0, result['errcode'])
"messense@icloud.com"
] | messense@icloud.com |
8ed1681fc62924eb80b4530f47c9b3d96fe5780c | d4f1bd5e52fe8d85d3d0263ede936928d5811bff | /Python/Problem Solving/BOJ/boj4447.py | 78b645da41f81e8c32a268b9227f79879a0f3d92 | [] | no_license | ambosing/PlayGround | 37f7d071c4402599995a50cac1e7f1a85c6d10dd | 0d5262dbb2fa2128ecb3fd969244fa647b104928 | refs/heads/master | 2023-04-08T04:53:31.747838 | 2023-03-23T06:32:47 | 2023-03-23T06:32:47 | 143,112,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 312 | py | for _ in range(int(input())):
g_cnt = 0
b_cnt = 0
s = input()
g_cnt = s.count("g") + s.count("G")
b_cnt = s.count("b") + s.count("B")
if g_cnt > b_cnt:
print("%s is GOOD" % s)
elif g_cnt < b_cnt:
print("%s is A BADDY" % s)
else:
print("%s is NEUTRAL" % s)
| [
"ambosing_@naver.com"
] | ambosing_@naver.com |
437621c6e78ad2b8f5d18b9dee728605c4e2ab83 | 5b4fe473179b5fadaf59ec96d55b2ec4cb326f65 | /test/runtime/frontend_test/chainer_test/deconvolution_2d_test.py | aefdcd02fcb65a69c034af9228ac2b131f2cd466 | [
"Zlib",
"MIT"
] | permissive | TarrySingh/webdnn | 13d3f1ec4936916abacfb67e270f48571e2fcff2 | b31b19de0798d8ca198b78d19cb06e4fce1bc260 | refs/heads/master | 2021-05-07T02:24:47.500746 | 2017-11-13T13:00:24 | 2017-11-13T13:00:24 | 110,582,816 | 0 | 1 | null | 2017-11-13T18:03:46 | 2017-11-13T18:03:46 | null | UTF-8 | Python | false | false | 1,218 | py | import chainer
import numpy as np
from test.util import generate_kernel_test_case, wrap_template
from webdnn.frontend.chainer.converter import ChainerConverter
@wrap_template
def template(ksize=3, stride=1, pad=0, nobias=True, description=""):
    """Build a Chainer L.Deconvolution2D layer, convert it to a WebDNN graph,
    and emit a kernel test case checking the graph output against Chainer's.

    Parameters mirror Deconvolution2D: kernel size, stride and padding may be
    scalars or (h, w) tuples; ``nobias`` drops the bias term.
    """
    link = chainer.links.Deconvolution2D(4, 10, ksize=ksize, stride=stride, pad=pad, nobias=nobias)
    # randomize the weights; input shape is fixed at (batch=2, ch=4, h=6, w=11)
    link.W.data = np.random.rand(*link.W.data.shape).astype(np.float32)
    vx = chainer.Variable(np.random.rand(*(2, 4, 6, 11)).astype(np.float32))
    vy = link(vx)
    graph = ChainerConverter().convert([vx], [vy])
    x = graph.inputs[0]
    y = graph.outputs[0]
    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=1e-2  # loose tolerance: backends compute in float32/WebGL precision
    )
def test():
    """Default kernel/stride/padding."""
    template()

def test_nobias():
    """Deconvolution without a bias term."""
    template(nobias=True)

def test_irregular_kernel_size():
    """Non-square kernel (h != w)."""
    template(ksize=(3, 4))

def test_irregular_stride_size():
    """Different vertical/horizontal strides."""
    template(stride=(2, 3))

def test_irregular_padding_size():
    """Different vertical/horizontal padding."""
    template(pad=(1, 2))

def test_irregular_size():
    """All three parameters irregular at once."""
    template(ksize=(3, 5), stride=(2, 3), pad=(1, 3))
| [
"y.kikura@gmail.com"
] | y.kikura@gmail.com |
b0929f0060f43e6b4f8c23bb6e328c6a8eba810c | 00d7e9321d418a2d9a607fb9376b862119f2bd4e | /utils/html_listdir.py | 95e8bd4b432f439975c4e5bad7b9591c6079d854 | [
"MIT"
] | permissive | baluneboy/pims | 92b9b1f64ed658867186e44b92526867696e1923 | 5a07e02588b1b7c8ebf7458b10e81b8ecf84ad13 | refs/heads/master | 2021-11-16T01:55:39.223910 | 2021-08-13T15:19:48 | 2021-08-13T15:19:48 | 33,029,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 898 | py | #!/usr/bin/env python
import os
# customize as needed
def is_hz_file(filename):
    """Return True when the filename contains the 'hz_' marker."""
    return filename.find('hz_') >= 0
def createHTML(input_dir, output_html, predicate=None):
    """Write an HTML bullet list of links to the files in ``input_dir``.

    Files are listed newest-name-first (reverse lexicographic). Paths under
    the yoda plots tree are rewritten to their public pims URL.

    :param input_dir: directory whose entries are listed
    :param output_html: path of the HTML file to (over)write
    :param predicate: optional callable(filename) -> bool; when given, only
                      filenames it accepts are included
    """
    files = os.listdir(input_dir)
    files.sort(reverse=True)
    with open(output_html, "w") as f:
        f.write("<html><body><ul>\n")
        for filename in files:
            fullpath = os.path.join(input_dir, filename).replace('/misc/yoda/www/plots', 'http://pims.grc.nasa.gov/plots')
            # single guarded write replaces the duplicated if/else branches
            if predicate is None or predicate(filename):
                f.write('<li><a href="%s">%s</a></li>\n' % (fullpath, filename))
        f.write("</ul></body></html>\n")
if __name__ == "__main__":
createHTML('/misc/yoda/www/plots/screenshots', '/misc/yoda/www/plots/user/pims/screenshots.html', predicate=None)
| [
"ken@macmini3.local"
] | ken@macmini3.local |
536c72531fb568599059bffe6a9f02954ffcda14 | efdadd6e203b362531f342129040c592d4078936 | /bin/wheel | 9ae90fd44c2312bf46e13f0888ec161f5fac19d9 | [] | no_license | rameshkonatala/ML_coursera | 7ce57bc1d16f8de1258bbea49ff726145f7dea1b | 6e7413e70da82932834dc88cd52d878f2b4d53da | refs/heads/master | 2021-01-23T07:49:47.588761 | 2017-06-08T09:16:16 | 2017-06-08T09:16:16 | 86,448,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | #!/home/ramesh/Desktop/ML_coursera/bin/python2
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
    # strip setuptools' "-script.pyw"/".exe" launcher suffix so the tool
    # sees a clean program name in argv[0]
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"konatalaramesh@gmail.com"
] | konatalaramesh@gmail.com | |
c04ae99a874b0f3d9592e05d4a459edae6af9a79 | e4183adfc4ffc9ba7c464160d4fa1ccecdeb6b69 | /scripts/gen_hcd_random.py | ef03523b1be0f9db297e4a755138cded9d5a1eef | [] | no_license | akrherz/LADOT | 0013597e6ef26077b6db08d48cb9f5b4fb6b9d6c | a3a689429dea1713760378f0c353a3a8834abe0a | refs/heads/master | 2016-09-05T15:53:58.668906 | 2015-11-02T21:00:38 | 2015-11-02T21:00:38 | 16,581,711 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,136 | py | """
Generate .hcd file for Pavement Design Guide
YYYYMMDDHH,Temperature (F),Wind speed (mph),% Sun shine, Precipitation,
Relative humidity.
"""
import math
import numpy as np
import netCDF4
import mx.DateTime
import subprocess
periods = [
[mx.DateTime.DateTime(1970,1,1), mx.DateTime.DateTime(1974,12,31)],
[mx.DateTime.DateTime(1975,1,1), mx.DateTime.DateTime(1978,12,31)],
[mx.DateTime.DateTime(1979,1,1), mx.DateTime.DateTime(1982,12,31)],
[mx.DateTime.DateTime(1983,1,1), mx.DateTime.DateTime(1987,12,31)],
[mx.DateTime.DateTime(1988,1,1), mx.DateTime.DateTime(1991,12,31)],
[mx.DateTime.DateTime(1992,1,1), mx.DateTime.DateTime(1996,12,31)],
[mx.DateTime.DateTime(1997,1,1), mx.DateTime.DateTime(2002,12,31)],
[mx.DateTime.DateTime(2003,1,1), mx.DateTime.DateTime(2009,12,31)],
]
# Result of my random sorting [914, 12, 335, 654, 162, 644, 294, 903]
periods2 = [
[mx.DateTime.DateTime(1975,1,1), mx.DateTime.DateTime(1978,12,31,23)],
[mx.DateTime.DateTime(1988,1,1), mx.DateTime.DateTime(1991,12,31,23)],
[mx.DateTime.DateTime(1997,1,1), mx.DateTime.DateTime(2002,12,31,23)],
[mx.DateTime.DateTime(1979,1,1), mx.DateTime.DateTime(1982,12,31,23)],
[mx.DateTime.DateTime(1992,1,1), mx.DateTime.DateTime(1996,12,31,23)],
[mx.DateTime.DateTime(1983,1,1), mx.DateTime.DateTime(1987,12,31,23)],
[mx.DateTime.DateTime(2003,1,1), mx.DateTime.DateTime(2009,12,31,23)],
[mx.DateTime.DateTime(1970,1,1), mx.DateTime.DateTime(1974,12,31,23)],
]
onehour = mx.DateTime.RelativeDateTime(hours=1)
anc = netCDF4.Dataset("../data/asosgrid.nc", 'r')
atmpk = anc.variables['tmpk']
asmps = anc.variables['smps']
askyc = anc.variables['skyc']
arelh = anc.variables['relh']
ap01m = anc.variables['p01m']
cnc = netCDF4.Dataset("../data/coopgrid.nc", 'r')
ahigh = cnc.variables['high']
alow = cnc.variables['low']
ap01d = cnc.variables['p01d']
def hourly_fitter_temp(asos, base, trange):
    """Rescale the hourly ASOS temperature curve onto [base, base + trange].

    The hourly shape is preserved: values are min-max normalized so the
    period minimum maps to ``base`` and the maximum to ``base + trange``.
    """
    lo = np.min(asos)
    hi = np.max(asos)
    shape = (asos - lo) / (hi - lo)
    return base + (trange * shape)
def hourly_fitter_precip(asos, coop):
    """Distribute the COOP daily precipitation total over the hours.

    The hourly ASOS trace supplies the weights, so the returned values sum
    to ``coop``. A zero daily total yields a flat zero list.
    """
    hours = len(asos)
    if coop == 0:
        return [0.] * hours
    # COOP observed rain but ASOS recorded none: inject a synthetic
    # afternoon storm so there is a pattern to spread the total over
    if np.sum(asos) == 0 and coop > 0:
        asos = [0] * hours
        asos[15:19] = [1., 2., 1., 1.]
    fraction = asos / np.sum(asos)
    return coop * fraction
def boundschk(val, pval, lower, upper):
    """Replace out-of-range entries of ``val`` with the previous values.

    Entries below ``lower`` or above ``upper`` are substituted element-wise
    from ``pval`` (trimmed to the same length). The cleaned array is
    returned twice so the caller can keep it as the new "previous" array.
    """
    prev = pval[:len(val)]
    cleaned = np.where(val >= lower, val, prev)
    cleaned = np.where(cleaned <= upper, cleaned, prev)
    return cleaned, cleaned
def k2f(thisk):
    """Convert a temperature from Kelvin to degrees Fahrenheit."""
    return (thisk - 273.15) * (9.00 / 5.00) + 32.00
def computeIJ(lon, lat):
    """Return the (i, j) indices of the grid cell closest to (lon, lat).

    Brute-force nearest-neighbour search over the lat/lon axes of the
    module-level ``anc`` (ASOS) netCDF file.
    """
    lats = anc.variables['lat'][:]
    lons = anc.variables['lon'][:]
    mindist = 100000
    for j in range(len(lats)):
        for i in range(len(lons)):
            # NOTE(review): plain Euclidean distance in degree space, not a
            # great-circle distance -- adequate for a small regional grid
            dist = math.sqrt( (lons[i] - lon)**2 + (lats[j] - lat)**2 )
            if dist < mindist:
                mindist = dist
                mini = i
                minj = j
    return mini, minj
def runner():
    """Generate a <station_id>.hcd file for every station in station.dat.

    For each station: find the nearest grid cell, blend hourly ASOS fields
    (temperature, wind, sunshine, precip, humidity) with daily COOP
    highs/lows/precip into hourly records, then write the records out in the
    shuffled chronological order defined by the module-level ``periods``
    list and run the external QC script on the result.
    """
    for line in open('../data/station.dat'):
        tokens = line.split(",")
        stid = tokens[0]
        #if stid != '00070':
        #    continue
        lat = float(tokens[3])
        lon = float(tokens[4])
        gridx, gridy = computeIJ( lon, lat )
        print stid, tokens[1], gridx, gridy
        # slice out this grid cell's full time series from the netCDF files
        s_atmpk = atmpk[:,gridy,gridx]
        s_asmps = asmps[:,gridy,gridx]
        s_askyc = askyc[:,gridy,gridx]
        s_ap01m = ap01m[:,gridy,gridx]
        s_arelh = arelh[:,gridy,gridx]
        s_high = ahigh[:,gridy,gridx]
        s_low = alow[:,gridy,gridx]
        s_p01d = ap01d[:,gridy,gridx]
        out = open("%s.hcd" % (stid,), 'w')
        # output record: temp(F), wind(mph), %sunshine, precip(in), RH(%)
        fmt = ",%.1f,%.1f,%.1f,%.2f,%.1f\n"
        sts = mx.DateTime.DateTime(1970,1,1,7)
        ets = mx.DateTime.DateTime(2010,1,1,7)
        BASE = mx.DateTime.DateTime(1970,1,1,0)
        END = mx.DateTime.DateTime(2010,1,1,0)
        MAXSZ = int((END - BASE).hours )
        MAXDSZ = int((END - BASE).days )
        interval = mx.DateTime.RelativeDateTime(days=1)
        now = sts
        # "previous" arrays feed boundschk's bad-value substitution
        p_tmpf = [0]*24
        p_mph = [0]*24
        p_psun = [0]*24
        p_phour = [0]*24
        p_relh = [0]*24
        ds = {}
        # We need to bootstrap the first 7 hours
        tmpf = k2f( s_atmpk[:7] ) # Stored in K
        mph = s_asmps[:7] * 2.0
        psun = 100. - s_askyc[:7]
        phour = s_ap01m[:7] / 25.4 # Convert to inches
        relh = s_arelh[:7]
        for i in range(len(tmpf)):
            ts = now - mx.DateTime.RelativeDateTime(hours=(7-i))
            ds[ts] = fmt % ( tmpf[i], mph[i],
                psun[i], phour[i], relh[i])
        while now < ets:
            # hourly ASOS offsets for this 24 h window, clamped to the data
            aoffset1 = int((now - BASE).hours )
            aoffset2 = int(((now + mx.DateTime.RelativeDateTime(days=1)) - BASE).hours )
            if aoffset1 < 0:
                aoffset1 = 0
            if aoffset2 >= MAXSZ:
                aoffset2 = MAXSZ
            coffset = int((now - BASE).days ) + 1
            if coffset >= MAXDSZ:
                coffset = MAXDSZ - 1
            tmpf = k2f( s_atmpk[aoffset1:aoffset2] ) # Stored in K
            mph = s_asmps[aoffset1:aoffset2] * 2.0
            psun = 100. - s_askyc[aoffset1:aoffset2]
            phour = s_ap01m[aoffset1:aoffset2] / 25.4 # Convert to inches
            relh = s_arelh[aoffset1:aoffset2]
            high = k2f( s_high[coffset] )
            low = k2f( s_low[coffset] )
            p01d = s_p01d[coffset] / 25.4 # Convert to inches
            # we smear the temperature data
            tmpf = hourly_fitter_temp(tmpf, low, high - low)
            tmpf, p_tmpf = boundschk(tmpf, p_tmpf, -20., 120.)
            # we smear the precipitation data
            phour = hourly_fitter_precip(phour, p01d)
            phour, p_phour = boundschk(phour, p_phour, 0.0, 10.) # 0,10 inches
            #if p01d > 4:
            #    print phour, p01d
            # can't touch these
            mph, p_mph = boundschk(mph, p_mph, 0.0, 100.)
            psun, p_psun = boundschk(psun, p_psun, 0.0, 100.)
            relh, p_relh = boundschk(relh, p_relh, 0.0, 100.) # 0,100 %
            for i in range(len(tmpf)):
                ts = now + mx.DateTime.RelativeDateTime(hours=i)
                ds[ts] = fmt % (tmpf[i], mph[i],
                    psun[i], phour[i], relh[i])
            now += interval
        # Okay, time for magic shifter...
        # replay the stored hours period-by-period (in the shuffled order of
        # ``periods``) while stamping a continuous 1970-2010 clock
        realts = mx.DateTime.DateTime(1970,1,1,0)
        for (sts,ets) in periods:
            now = sts
            while now <= ets:
                out.write( realts.strftime("%Y%m%d%H") + ds[now] )
                now += onehour
                realts += onehour
        out.close()
        subprocess.call("python qc_hcd.py %s.hcd" % (stid,), shell=True)
if __name__ == '__main__':
runner()
| [
"akrherz@iastate.edu"
] | akrherz@iastate.edu |
d62acf7a2f6c04a99d7d888c3b7f1b86e68b4223 | 1342deb03620f60f0e91c9d5b579667c11cb2d6d | /3rd/pil_test2.py | 9bc143f0720b8aae2d5edb76b3fcd7307e3fd1a9 | [] | no_license | ahuer2435/python_study | 678501ff90a9fc403105bff7ba96bcf53c8f53e2 | 8a7cc568efefdc993c5738ffa2100c2e051acdb7 | refs/heads/master | 2021-01-15T18:00:43.851039 | 2018-04-04T05:37:50 | 2018-04-04T05:37:50 | 99,775,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | # -*- coding: utf-8 -*-
import Image,ImageFilter
# Load the source image, apply PIL's built-in box blur, save a blurred copy.
im = Image.open("./test.jpg")
im2 = im.filter(ImageFilter.BLUR)
im2.save("./test2.jpg","jpeg")
#使用ImageFilter模块使图像变模糊。 | [
"18221079843@139.com"
] | 18221079843@139.com |
4b6ef532a08cccc3f197c7a8b880fd26edb0bc16 | 813b0d666d9ff31644814d35ab9ca26eab5b66e7 | /demo/q_model_demo.py | 4d1c01ebd1b49ce5269215c80ba9280443a1dd3b | [] | no_license | Seraphli/TankAI | 81548c74868ed52b32972b9ae8cd39def1c2b4b8 | 79020201e07d90eb6cbfe542147252b668d65d1e | refs/heads/master | 2020-04-05T11:57:48.251054 | 2018-11-01T09:02:32 | 2018-11-01T09:02:32 | 156,852,312 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,566 | py | from tank.env import Env
from qlearn.q_learn import QLearn as AI
from qlearn.state import State
import numpy as np
import time
ai = AI('tank_test', 5, False)
ai.epsilon = 0.01
ai.nn.load('./model')
env = Env()
for g_id in range(10):
ai.logger.debug(f'=== Game start ===')
end = env.reset()
ai.logger.debug(f'game map {env.game.map}')
state = [[State(12, 3), State(12, 3)],
[State(12, 3), State(12, 3)]]
s = [[0, 0], [0, 0]]
a = [[0, 0], [0, 0]]
r = [[0, 0], [0, 0]]
t = [[0, 0], [0, 0]]
s_ = [[0, 0], [0, 0]]
for p in [0, 1]:
for i in [0, 1]:
if end[p * 2 + i + 1]:
continue
_s = env.get_state(p, i)
# ai.logger.debug(f'side:{p} index:{i} state {_s}')
state[p][i].update(_s)
game_end = False
while not game_end:
for p in [0, 1]:
for i in [0, 1]:
if end[p * 2 + i + 1]:
continue
s[p][i] = state[p][i].get_state()
_state = np.reshape(s[p][i], (1, *s[p][i].shape))
a_mask = env.get_action(p, i)
ai.logger.debug(f'side:{p} index:{i} a_mask {a_mask}')
a[p][i], debug = ai.get_action(_state, a_mask)
ai.logger.debug(f'side:{p} index:{i} a {a[p][i]}')
env.take_action(p, i, a[p][i])
end = env.step()
ai.logger.debug(f'game map {env.game.map}')
for p in [0, 1]:
for i in [0, 1]:
if t[p][i] == 0:
if end[p * 2 + i + 1]:
t[p][i] = 1
_s = env.get_state(p, i)
# ai.logger.debug(f'side:{p} index:{i} state {_s}')
state[p][i].update(_s)
s_[p][i] = state[p][i].get_state()
r[p][i] = env.get_reward(p, i)
if r[p][i] != 0:
ai.logger.info(f'side:{p} index:{i} r {r[p][i]}')
else:
ai.logger.debug(f'side:{p} index:{i} r {r[p][i]}')
ai.logger.debug(f'side:{p} index:{i} t {t[p][i]}')
ai.logger.debug(f'r {r}')
ai.logger.debug(f't {t}')
ai.logger.debug(f'step {env.game.step_count}')
game_end = end[0]
ai.logger.info(f'step {env.game.step_count}')
ai.logger.info(f'base {env.game.map[4, 0]}, {env.game.map[4, 8]}')
ai.logger.debug(f'=== Game end ===')
ai.logger.info(f'Game num {g_id}')
time.sleep(1)
env.save_replay('.')
| [
"seraphlivery@gmail.com"
] | seraphlivery@gmail.com |
7701324dac2ab0297fab37a0659b155df30e6258 | 6df0d7a677129e9b325d4fdb4bbf72d512dd08b2 | /PycharmProjects/my_python_v03/spider/pingpangball.py | 5fb5d4dcbb27a14b8d00e96072d6a49500c5b795 | [] | no_license | yingxingtianxia/python | 01265a37136f2ad73fdd142f72d70f7c962e0241 | 3e1a7617a4b6552bce4a7e15a182f30e1bae221e | refs/heads/master | 2021-06-14T15:48:00.939472 | 2019-12-13T05:57:36 | 2019-12-13T05:57:36 | 152,200,507 | 0 | 0 | null | 2021-06-10T20:54:26 | 2018-10-09T06:40:10 | Python | UTF-8 | Python | false | false | 1,664 | py | #!/usr/bin/env python3
#__*__coding: utf8__*__
import pygame
from pygame.locals import *
from sys import exit
import random

# --- game state -------------------------------------------------------------
basket_x = 0
basket_y = 600
ball_x = 10
ball_y = 10
screen_width = 1000
screen_height = 800
score = 0

pygame.init()
screen = pygame.display.set_mode((screen_width, screen_height))
pygame.display.set_caption('接球')
basket = pygame.image.load('lanzi.jpg').convert()
# Bug fix: pygame Surface has no ``get`` attribute -- the size accessor is
# ``get_size()``; the original ``basket.get.size()`` raised AttributeError.
basket_w, basket_h = basket.get_size()
ball = pygame.image.load('ball.jpg').convert()
ball_w, ball_h = ball.get_size()

def update_basket():
    """Center the basket horizontally under the mouse and draw it."""
    global basket_x
    global basket_y
    basket_x, ignore = pygame.mouse.get_pos()
    basket_x = basket_x-basket_w/2
    screen.blit(basket, (basket_x, basket_y))

def update_ball():
    """Advance the ball one pixel down with slight horizontal jitter;
    respawn it at a random column once it falls past the basket line."""
    global ball_x
    global ball_y
    ball_y += 1
    if ball_y+ball_h>basket_y:
        ball_y = 0
        ball_x = random.randint(0, screen_width-ball_w)
    ball_x += random.randint(-1, 1)
    # keep the ball inside the screen horizontally
    if ball_x <= 0:
        ball_x = 0
    if ball_x > screen_width-ball_w:
        ball_x = screen_width-ball_w
    screen.blit(ball, (ball_x, ball_y))

def display(message):
    """Render ``message`` in the top-left corner of the screen."""
    font = pygame.font.Font(None, 36)
    text = font.render(message,1,(10, 10, 10))
    screen.blit(text, (0, 0))

def check_for_catch():
    """Score a point when the ball reaches the basket line inside the
    basket's horizontal span, then redraw the score text."""
    global score
    if ball_y+ball_h ==basket_y and ball_x>basket_x and ball_x<basket_x+basket_w-ball_w:
        score += 1
    display('分数:'+str(score))

clock = pygame.time.Clock()
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            exit()
    screen.fill((255,255,255))
    update_ball()
    update_basket()
    check_for_catch()
    pygame.display.update()
    clock.tick(1000)
"root@room8pc205.tedu.cn"
] | root@room8pc205.tedu.cn |
b9b704a2706e4fbeac5baf13eb273c69a7d11a4f | 0093f254452db5f88803ea628374fa3c7cb90a9b | /single_class_lab_start_code/team_class/tests/team_test.py | fe7333549c4587ffec356d8a197e2b1f2004ecec | [] | no_license | klamb95/classes_lab | 571b7917a0e3e3b8d9935250df08b6e6328b27c8 | 7a450335b1c925372232c7b1631f62434cb32230 | refs/heads/main | 2023-03-30T21:46:04.600475 | 2021-04-05T14:47:39 | 2021-04-05T14:47:39 | 354,841,095 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,954 | py | import unittest
from src.team import Team
class TestTeam(unittest.TestCase):
    """Behavioural tests for Team: construction, roster management,
    membership lookup and the 3-points-for-a-win scoring rule."""

    def setUp(self):
        roster = [
            "Derice Bannock",
            "Sanka Coffie",
            "Junior Bevil",
            "Yul Brenner",
        ]
        self.team = Team("Cool Runnings", roster, "Irv Blitzer")

    def test_team_has_name(self):
        self.assertEqual("Cool Runnings", self.team.name)

    def test_team_has_players(self):
        self.assertEqual(4, len(self.team.players))

    def test_team_has_coach(self):
        self.assertEqual("Irv Blitzer", self.team.coach)

    def test_coach_can_be_changed(self):
        self.team.coach = "John Candy"
        self.assertEqual("John Candy", self.team.coach)

    def test_can_add_new_player_to_team(self):
        self.team.add_player("Jeff")
        self.assertEqual(5, len(self.team.players))

    def test_check_player_in_team__found(self):
        self.assertEqual(True, self.team.has_player("Junior Bevil"))

    def test_check_player_in_team__not_found(self):
        self.assertEqual(False, self.team.has_player("Usain Bolt"))

    def test_team_has_points(self):
        # a fresh team starts with zero points
        self.assertEqual(0, self.team.points)

    def test_play_game__win(self):
        self.team.play_game(True)
        self.assertEqual(3, self.team.points)

    def test_play_game__lose(self):
        self.team.play_game(False)
        self.assertEqual(0, self.team.points)
"klamb1995@gmail.com"
] | klamb1995@gmail.com |
082e25c98d24038da1c0d418b1754b577b7e5b3f | d3b77550a40b860970450e702b6bcd28d5f9b3e4 | /LeetCode/code_night/reverse_string.py | ea53e4669e81124fafeac0940165a0e8030ec430 | [] | no_license | CateGitau/Python_programming | 47bc9277544814ad853b44a88f129713f1a40697 | 6ae42b3190134c4588ad785d62e08b0763cf6b3a | refs/heads/master | 2023-07-08T03:08:46.236063 | 2021-08-12T09:38:03 | 2021-08-12T09:38:03 | 228,712,021 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 8 03:58:29 2020
@author: aims
"""
text = ["h","e","l","l","o"]
# Reverse the list in place by swapping symmetric pairs from both ends.
left, right = 0, len(text) - 1
while left < right:
    text[left], text[right] = text[right], text[left]
    left, right = left + 1, right - 1
print(text)
"catherinegitau94@gmail.com"
] | catherinegitau94@gmail.com |
9a0e5d2e12de67097ad5e464ddf79e47e12149f5 | f1238c2f2079cd4fdf63cf47fe8a389f77d256fc | /homeassistant/helpers/event.py | 3934a6c52ef9f55cbcfb2c63a0aa2a02d2371b39 | [
"MIT"
] | permissive | williamluke4/home-assistant | 2d305b7133303829c38946bf5b1a626e46488d61 | 2e899bd61c0ff5ae4d576ff3cb8413fc90534e43 | refs/heads/dev | 2023-04-07T01:26:13.029469 | 2016-01-04T02:39:21 | 2016-01-04T02:39:21 | 48,983,167 | 0 | 0 | MIT | 2023-04-03T23:42:14 | 2016-01-04T08:09:21 | Python | UTF-8 | Python | false | false | 5,557 | py | """
Helpers for listening to events
"""
import functools as ft
from ..util import dt as dt_util
from ..const import (
ATTR_NOW, EVENT_STATE_CHANGED, EVENT_TIME_CHANGED, MATCH_ALL)
def track_state_change(hass, entity_ids, action, from_state=None,
                       to_state=None):
    """
    Track specific state changes.
    entity_ids, from_state and to_state can be string or list.
    Use list to match multiple.

    ``action`` is called as action(entity_id, old_state, new_state).

    Returns the listener that listens on the bus for EVENT_STATE_CHANGED.
    Pass the return value into hass.bus.remove_listener to remove it.
    """
    # normalize the match params to MATCH_ALL or a tuple of options
    from_state = _process_match_param(from_state)
    to_state = _process_match_param(to_state)

    # Ensure it is a lowercase list with entity ids we want to match on
    if isinstance(entity_ids, str):
        entity_ids = (entity_ids.lower(),)
    else:
        entity_ids = tuple(entity_id.lower() for entity_id in entity_ids)

    @ft.wraps(action)
    def state_change_listener(event):
        """ The listener that listens for specific state changes. """
        if event.data['entity_id'] not in entity_ids:
            return

        # old_state is absent when the entity first appears
        if 'old_state' in event.data:
            old_state = event.data['old_state'].state
        else:
            old_state = None

        if _matcher(old_state, from_state) and \
           _matcher(event.data['new_state'].state, to_state):
            action(event.data['entity_id'],
                   event.data.get('old_state'),
                   event.data['new_state'])

    hass.bus.listen(EVENT_STATE_CHANGED, state_change_listener)

    return state_change_listener
def track_point_in_time(hass, action, point_in_time):
    """
    Adds a listener that fires once after a specific point in time.

    ``point_in_time`` may be in any timezone; ``action`` receives the
    trigger time converted to local time.
    """
    utc_point_in_time = dt_util.as_utc(point_in_time)

    @ft.wraps(action)
    def utc_converter(utc_now):
        """ Converts passed in UTC now to local now. """
        action(dt_util.as_local(utc_now))

    # delegate the actual scheduling to the UTC variant
    return track_point_in_utc_time(hass, utc_converter, utc_point_in_time)
def track_point_in_utc_time(hass, action, point_in_time):
    """
    Adds a listener that fires once after a specific point in UTC time.

    Returns the bus listener (already removed by the time ``action`` runs).
    """
    # Ensure point_in_time is UTC
    point_in_time = dt_util.as_utc(point_in_time)

    @ft.wraps(action)
    def point_in_time_listener(event):
        """ Listens for matching time_changed events. """
        now = event.data[ATTR_NOW]

        if now >= point_in_time and \
           not hasattr(point_in_time_listener, 'run'):

            # Set variable so that we will never run twice.
            # Because the event bus might have to wait till a thread comes
            # available to execute this listener it might occur that the
            # listener gets lined up twice to be executed. This will make
            # sure the second time it does nothing.
            point_in_time_listener.run = True

            hass.bus.remove_listener(EVENT_TIME_CHANGED,
                                     point_in_time_listener)

            action(now)

    hass.bus.listen(EVENT_TIME_CHANGED, point_in_time_listener)
    return point_in_time_listener
# pylint: disable=too-many-arguments
def track_utc_time_change(hass, action, year=None, month=None, day=None,
                          hour=None, minute=None, second=None, local=False):
    """ Adds a listener that will fire if time matches a pattern.

    Each field may be None/MATCH_ALL (match any), a single value, or a
    list of values. ``local=True`` compares against local time instead of
    UTC. Returns the bus listener for later removal.
    """
    # We do not have to wrap the function with time pattern matching logic
    # if no pattern given
    if all(val is None for val in (year, month, day, hour, minute, second)):
        @ft.wraps(action)
        def time_change_listener(event):
            """ Fires every time event that comes in. """
            action(event.data[ATTR_NOW])

        hass.bus.listen(EVENT_TIME_CHANGED, time_change_listener)
        return time_change_listener

    pmp = _process_match_param
    year, month, day = pmp(year), pmp(month), pmp(day)
    hour, minute, second = pmp(hour), pmp(minute), pmp(second)

    @ft.wraps(action)
    def pattern_time_change_listener(event):
        """ Listens for matching time_changed events. """
        now = event.data[ATTR_NOW]

        if local:
            now = dt_util.as_local(now)

        mat = _matcher

        # pylint: disable=too-many-boolean-expressions
        if mat(now.year, year) and \
           mat(now.month, month) and \
           mat(now.day, day) and \
           mat(now.hour, hour) and \
           mat(now.minute, minute) and \
           mat(now.second, second):

            action(now)

    hass.bus.listen(EVENT_TIME_CHANGED, pattern_time_change_listener)
    return pattern_time_change_listener
# pylint: disable=too-many-arguments
def track_time_change(hass, action, year=None, month=None, day=None,
                      hour=None, minute=None, second=None):
    """ Adds a listener that will fire if local time matches a pattern.

    Returns the listener so callers can later remove it via
    hass.bus.remove_listener, mirroring the other track_* helpers.
    """
    # Bug fix: the listener created by track_utc_time_change was silently
    # discarded, so callers could never unsubscribe. Return it, as every
    # sibling helper in this module does. (Docstring also corrected: with
    # local=True this matches local time, not UTC.)
    return track_utc_time_change(hass, action, year, month, day, hour, minute,
                                 second, local=True)
def _process_match_param(parameter):
    """ Wraps parameter in a tuple if it is not one and returns it. """
    # None and MATCH_ALL both mean "match anything"
    if parameter is None or parameter == MATCH_ALL:
        return MATCH_ALL
    # a bare scalar (or a string, which is iterable) becomes a 1-tuple
    if isinstance(parameter, str) or not hasattr(parameter, '__iter__'):
        return (parameter,)
    return tuple(parameter)
def _matcher(subject, pattern):
    """ Returns True if subject matches the pattern.

    Pattern is either a tuple of allowed subjects or a `MATCH_ALL`.
    """
    if pattern == MATCH_ALL:
        return True
    return subject in pattern
| [
"paulus@paulusschoutsen.nl"
] | paulus@paulusschoutsen.nl |
18187ad1700d5c8dae585133239b763f6a402a8d | c4a57dced2f1ed5fd5bac6de620e993a6250ca97 | /huaxin/huaxin_restful_service/restful_xjb_service/v1_services_account_matchsalarycard_entity.py | c33344a6c2cb1dc4e942635adb7d56ee975d07c7 | [] | no_license | wanglili1703/firewill | f1b287b90afddfe4f31ec063ff0bd5802068be4f | 1996f4c01b22b9aec3ae1e243d683af626eb76b8 | refs/heads/master | 2020-05-24T07:51:12.612678 | 2019-05-17T07:38:08 | 2019-05-17T07:38:08 | 187,169,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,748 | py | import json
from code_gen.lib.basic_troop_service_entity_handler import BasicTroopServiceEntityHandler
DOMAIN_NAME = u'10.199.111.2'
URL = u'http://%s/V1/services/account/matchSalaryCard'
BODY_DATA = u'{}'
_BODY_DATA = ''
if BODY_DATA:
_BODY_DATA = json.loads(BODY_DATA)
QUERY_DATA = ''
METHOD_TYPE = u'post'
CONTENT_TYPE = 'json'
REQUEST_DATA = (_BODY_DATA or QUERY_DATA)
HAS_DATA_PATTERN = True
DATA_PATTERN = {"certNo": "621100197601237892", "employeeId": "17", "timestamp": "1497516592672",
"noncestr": "z9ynqcu9hqn3hp64", "signature": "FFA69A9EF4E79AC76902C8552EE3ED1CC1B425A6"}
class V1ServicesAccountMatchsalarycardEntity(BasicTroopServiceEntityHandler):
    """
    Handler for POST /V1/services/account/matchSalaryCard.

    accessible attribute list for response data:
    %s
    ==================
    kwargs for request:
    Please refer to the constants BODY_DATA or QUERY_DATA request parameters
    """

    def __init__(self, domain_name=DOMAIN_NAME, token=None, **kwargs):
        # wire the module-level constants (URL, method, content type,
        # request payload) into the generic base request handler
        super(V1ServicesAccountMatchsalarycardEntity, self).__init__(domain_name=domain_name, url_string=URL,
                                                                     data=REQUEST_DATA,
                                                                     method_type=METHOD_TYPE,
                                                                     request_content_type=CONTENT_TYPE,
                                                                     has_data_pattern=HAS_DATA_PATTERN,
                                                                     token=token, **kwargs)

    def _set_data_pattern(self, *args, **kwargs):
        # expected response-field pattern consumed by the base handler
        self._current_data_pattern = DATA_PATTERN
if (__name__ == '__main__'):
e = V1ServicesAccountMatchsalarycardEntity()
e.send_request()
| [
"wanglili@shhxzq.com"
] | wanglili@shhxzq.com |
7f0e2f52088c6e75b7368d2a10b3685d21df0cfd | af6e7f0927517375cb4af833f4c52e301bad0af5 | /corpus_processor/topic_aware/filter_qa_corpus_by_douban_tags.py | 92a050d0aef2a7f2289a7de7ace4392858d4757f | [] | no_license | wolfhu/DialogPretraining | 470334fd815e1299981b827fdc933d237a489efd | eeeada92146d652d81ca6e961d1298924ac8435d | refs/heads/main | 2023-06-25T15:22:54.728187 | 2021-07-21T01:40:23 | 2021-07-21T01:40:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,889 | py | # encoding: utf-8
import sys
import json
from util.trie import Trie
douban_tag_file_path = '/home/t-yuniu/xiaoice/yuniu/dataset/douban_title/douban_title.json'
tag_black_dict = {}
tag_black_dict.setdefault('游戏', True)
tag_trie = Trie()
def detect_tag(sentence):
    """
    Collect every Douban tag from the module-level trie that occurs in
    the sentence.

    (Docstring corrected: this returns the matched tags, not a boolean.)

    :param sentence: query or answer text
    :return: list of matched tag strings, possibly with duplicates;
             empty list when no tag is found.
    """
    length = len(sentence)
    detected_tags = []
    for idx in range(length):
        # walk the trie from every start position, recording each word-end
        # node reached so nested/overlapping tags are all captured
        node = tag_trie.lookup
        idx_tmp = idx
        while True:
            if idx_tmp >= length:
                break
            if sentence[idx_tmp] in node:
                node = node[sentence[idx_tmp]]
                idx_tmp += 1
                if Trie.END in node:
                    detected_tags.append(sentence[idx:idx_tmp])
            else:
                break
    return detected_tags
if __name__ == '__main__':
    # build trie from tag file (one JSON object with a "Tag" list per line)
    with open(douban_tag_file_path) as douban_tag_file:
        for line in douban_tag_file.readlines():
            line = line.strip()
            tags = json.loads(line)['Tag']
            for tag in tags:
                # skip single-character tags (too noisy) and blacklisted ones
                if len(tag) == 1 or tag in tag_black_dict:
                    continue
                tag_trie.insert(tag)
    # filter stdin corpus: keep tab-separated query/answer pairs in which at
    # least one tag occurs; emit "tags<TAB>query<TAB>answer"
    while True:
        line = sys.stdin.readline().strip()
        if line:
            try:
                line = line.replace('#', '')
                query, answer = line.split('\t')[:2]
                detected_tags = detect_tag(query)
                detected_tags.extend(detect_tag(answer))
                if len(detected_tags) > 0:
                    print('\t'.join([' '.join(set(detected_tags)), query, answer]))
            except ValueError:
                # line did not split into query/answer
                sys.stdout.write('Illegal line.\n')
        else:
            break
| [
"yuwu1@microsoft.com"
] | yuwu1@microsoft.com |
4a223198785e6cf114ac528d49f2445079b91eae | 392e81cad1a563eb3a63c38e4d32782b14924cd2 | /openregistry/lots/loki/tests/blanks/transferring.py | 0775b6747de9ab5526e2e8d09bc6479b4f081441 | [
"Apache-2.0"
] | permissive | EBRD-ProzorroSale/openregistry.lots.loki | 8de71ee4e6a0db5f3fb6e527658722f7a664fc1a | 178768ca5d4ffefa428740502bce0ef48d67aa61 | refs/heads/master | 2020-09-29T17:57:00.696627 | 2019-06-25T09:07:22 | 2019-06-25T09:07:22 | 227,088,853 | 0 | 0 | Apache-2.0 | 2019-12-10T10:19:08 | 2019-12-10T10:19:07 | null | UTF-8 | Python | false | false | 1,028 | py | # -*- coding: utf-8 -*-
def switch_mode(self):
    """A lot switched to 'test' mode can still change ownership.

    Creates a lot as ``first_owner``, flips its mode to 'test' directly in
    the database, then posts an ownership transfer as ``test_owner`` and
    asserts the owner changed.
    """
    # set test mode and try to change ownership
    auth = ('Basic', (self.first_owner, ''))
    self.__class__.resource_name = self.resource_name
    resource = self.create_resource(auth=auth)
    resource_access_transfer = self.resource_transfer
    self.__class__.resource_name = ''

    # decision that was created from asset can't be updated (by patch)
    # so write mode=test straight into the stored document
    document = self.db.get(resource['id'])
    document['mode'] = 'test'
    self.db.save(document)

    self.app.authorization = ('Basic', (self.test_owner, ''))
    transfer = self.create_transfer()
    req_data = {"data": {"id": transfer['data']['id'],
                         'transfer': resource_access_transfer}}
    req_url = '{}/{}/ownership'.format(self.resource_name, resource['id'])

    response = self.app.post_json(req_url, req_data)
    self.assertEqual(response.status, '200 OK')
    self.assertIn('owner', response.json['data'])
    self.assertEqual(response.json['data']['owner'], self.test_owner)
"leitsius@gmail.com"
] | leitsius@gmail.com |
5478e186191f05f9c3c4401549ee3ff8e1687157 | d308fffe3db53b034132fb1ea6242a509f966630 | /pirates/effects/HitStar.py | be3298d218d9fc5e5af3d31213510d77d2b026f3 | [
"BSD-3-Clause"
] | permissive | rasheelprogrammer/pirates | 83caac204965b77a1b9c630426588faa01a13391 | 6ca1e7d571c670b0d976f65e608235707b5737e3 | refs/heads/master | 2020-03-18T20:03:28.687123 | 2018-05-28T18:05:25 | 2018-05-28T18:05:25 | 135,193,362 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,103 | py | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.effects.HitStar
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.particles import ParticleEffect
from direct.particles import Particles
from direct.particles import ForceGroup
from EffectController import EffectController
from PooledEffect import PooledEffect
import random
class HitStar(PooledEffect, EffectController):
__module__ = __name__
cardScale = 64.0
def __init__(self):
PooledEffect.__init__(self)
EffectController.__init__(self)
model = loader.loadModel('models/effects/particleMaps')
self.card = model.find('**/effectCandle')
self.setDepthWrite(0)
self.setLightOff()
self.setFogOff()
self.setColorScaleOff()
self.setBillboardPointEye(1.0)
self.f = ParticleEffect.ParticleEffect('HitStar')
self.f.reparentTo(self)
self.p0 = Particles.Particles('particles-1')
self.p0.setFactory('ZSpinParticleFactory')
self.p0.setRenderer('SpriteParticleRenderer')
self.p0.setEmitter('SphereSurfaceEmitter')
self.f.addParticles(self.p0)
self.p0.setPoolSize(32)
self.p0.setBirthRate(0.02)
self.p0.setLitterSize(32)
self.p0.setLitterSpread(0)
self.p0.setSystemLifespan(0.0)
self.p0.setLocalVelocityFlag(1)
self.p0.setSystemGrowsOlderFlag(0)
self.p0.factory.setLifespanBase(0.2)
self.p0.factory.setLifespanSpread(0.05)
self.p0.factory.setMassBase(1.0)
self.p0.factory.setMassSpread(0.0)
self.p0.factory.setTerminalVelocityBase(400.0)
self.p0.factory.setTerminalVelocitySpread(0.0)
self.p0.factory.setInitialAngle(0.0)
self.p0.factory.setInitialAngleSpread(360.0)
self.p0.factory.enableAngularVelocity(1)
self.p0.factory.setAngularVelocity(0.0)
self.p0.factory.setAngularVelocitySpread(0.0)
self.p0.renderer.setAlphaMode(BaseParticleRenderer.PRALPHAINOUT)
self.p0.renderer.setUserAlpha(0.5)
self.p0.renderer.setFromNode(self.card)
self.p0.renderer.setColor(Vec4(1.0, 1.0, 1.0, 1.0))
self.p0.renderer.setXScaleFlag(1)
self.p0.renderer.setYScaleFlag(1)
self.p0.renderer.setAnimAngleFlag(1)
self.p0.renderer.setInitialXScale(0.0001 * self.cardScale)
self.p0.renderer.setFinalXScale(0.01 * self.cardScale)
self.p0.renderer.setInitialYScale(0.0005 * self.cardScale)
self.p0.renderer.setFinalYScale(0.06 * self.cardScale)
self.p0.renderer.setNonanimatedTheta(0.0)
self.p0.renderer.setAlphaBlendMethod(BaseParticleRenderer.PPBLENDLINEAR)
self.p0.renderer.setAlphaDisable(0)
self.p0.renderer.setColorBlendMode(ColorBlendAttrib.MAdd, ColorBlendAttrib.OIncomingColor, ColorBlendAttrib.OOneMinusIncomingAlpha)
self.p0.emitter.setEmissionType(BaseParticleEmitter.ETRADIATE)
self.p0.emitter.setAmplitude(0.0)
self.p0.emitter.setAmplitudeSpread(0.0)
self.p0.emitter.setOffsetForce(Vec3(0.0, 0.0, 0.0))
self.p0.emitter.setExplicitLaunchVector(Vec3(0.0, 0.0, 0.0))
self.p0.emitter.setRadiateOrigin(Point3(0.0, 0.0, 0.0))
self.p0.emitter.setRadius(0.0001)
def createTrack(self):
    """Build the interval sequences that start, run and tear down the effect."""
    # Start: restore the normal birth rate, reset live particles, start the
    # particle system and attach it to this effect node.
    self.startEffect = Sequence(Func(self.p0.setBirthRate, 0.02), Func(self.p0.clearToInitial), Func(self.f.start, self, self), Func(self.f.reparentTo, self))
    # End: throttle births (2.0s between litters), wait for live particles to
    # expire, then clean up and return the effect to the pool.
    self.endEffect = Sequence(Func(self.p0.setBirthRate, 2.0), Wait(1.5), Func(self.cleanUpEffect))
    self.track = Sequence(self.startEffect, Wait(0.2), self.endEffect)
def play(self):
    """Play the effect once, if the particle system exists."""
    if not self.p0:
        return
    self.createTrack()
    self.track.start()
def cleanUpEffect(self):
    # Run the shared cleanup, then check this effect back into the pool so it
    # can be reused.
    EffectController.cleanUpEffect(self)
    self.checkInEffect(self)
def destroy(self):
    # Destroy both bases explicitly; EffectController first, pool bookkeeping last.
    EffectController.destroy(self)
    PooledEffect.destroy(self)
"33942724+itsyaboyrocket@users.noreply.github.com"
] | 33942724+itsyaboyrocket@users.noreply.github.com |
2bced5615b9527f6da87e4a188c01cf1caa2d008 | 49536aafb22a77a6caf249c7fadef46d63d24dfe | /tensorflow/tensorflow/python/ops/bitwise_ops_test.py | a54f76c6da8a73255621281de950eac643a22a4e | [
"Apache-2.0"
] | permissive | wangzhi01/deeplearning-1 | 4e5ad93f0d9ecd302b74352f80fe1fa6ae70bf0d | 46ab82253d956953b8aa98e97ceb6cd290e82288 | refs/heads/master | 2020-05-28T03:14:55.687567 | 2018-09-12T16:52:09 | 2018-09-12T16:52:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,208 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for bitwise operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import six
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import bitwise_ops
from tensorflow.python.ops import gen_bitwise_ops
from tensorflow.python.platform import googletest
class BitwiseOpTest(test_util.TensorFlowTestCase):
    """Tests for elementwise bitwise ops: and/or/xor, popcount, invert, shifts."""

    def __init__(self, method_name="runTest"):
        super(BitwiseOpTest, self).__init__(method_name)

    def testBinaryOps(self):
        # Integer dtypes supported by the binary bitwise kernels.
        dtype_list = [dtypes.int8, dtypes.int16, dtypes.int32, dtypes.int64,
                      dtypes.uint8, dtypes.uint16]
        with self.test_session(use_gpu=True) as sess:
            for dtype in dtype_list:
                lhs = constant_op.constant([0, 5, 3, 14], dtype=dtype)
                rhs = constant_op.constant([5, 0, 7, 11], dtype=dtype)
                and_result, or_result, xor_result = sess.run(
                    [bitwise_ops.bitwise_and(lhs, rhs),
                     bitwise_ops.bitwise_or(lhs, rhs),
                     bitwise_ops.bitwise_xor(lhs, rhs)])
                self.assertAllEqual(and_result, [0, 0, 3, 10])
                self.assertAllEqual(or_result, [5, 5, 7, 15])
                self.assertAllEqual(xor_result, [5, 5, 4, 5])

    def testPopulationCountOp(self):
        dtype_list = [dtypes.int8, dtypes.int16,
                      dtypes.int32, dtypes.int64,
                      dtypes.uint8, dtypes.uint16]
        # Values covering sign boundaries and powers of two for each width;
        # out-of-range values wrap when cast to the narrower numpy dtype.
        raw_inputs = [0, 1, -1, 3, -3, 5, -5, 14, -14,
                      127, 128, 255, 256, 65535, 65536,
                      2**31 - 1, 2**31, 2**32 - 1, 2**32, -2**32 + 1, -2**32,
                      -2**63 + 1, 2**63 - 1]

        def count_bits(x):
            # Reference popcount: count set bits over the raw byte representation.
            return sum([bin(z).count("1") for z in six.iterbytes(x.tobytes())])

        for dtype in dtype_list:
            with self.test_session(use_gpu=True) as sess:
                print("PopulationCount test: ", dtype)
                inputs = np.array(raw_inputs, dtype=dtype.as_numpy_dtype)
                truth = [count_bits(x) for x in inputs]
                input_tensor = constant_op.constant(inputs, dtype=dtype)
                popcnt_result = sess.run(gen_bitwise_ops.population_count(input_tensor))
                self.assertAllEqual(truth, popcnt_result)

    def testInvertOp(self):
        dtype_list = [dtypes.int8, dtypes.int16, dtypes.int32, dtypes.int64,
                      dtypes.uint8, dtypes.uint16]
        inputs = [0, 5, 3, 14]
        with self.test_session(use_gpu=True) as sess:
            for dtype in dtype_list:
                # Because of issues with negative numbers, let's test this indirectly.
                # 1. invert(a) and a = 0
                # 2. invert(a) or a = invert(0)
                input_tensor = constant_op.constant(inputs, dtype=dtype)
                not_a_and_a, not_a_or_a, not_0 = sess.run(
                    [bitwise_ops.bitwise_and(
                        input_tensor, bitwise_ops.invert(input_tensor)),
                     bitwise_ops.bitwise_or(
                         input_tensor, bitwise_ops.invert(input_tensor)),
                     bitwise_ops.invert(constant_op.constant(0, dtype=dtype))])
                self.assertAllEqual(not_a_and_a, [0, 0, 0, 0])
                self.assertAllEqual(not_a_or_a, [not_0] * 4)
                # For unsigned dtypes let's also check the result directly.
                if dtype.is_unsigned:
                    inverted = sess.run(bitwise_ops.invert(input_tensor))
                    expected = [dtype.max - x for x in inputs]
                    self.assertAllEqual(inverted, expected)

    def testShiftsWithPositiveLHS(self):
        dtype_list = [np.int8, np.int16, np.int32, np.int64,
                      np.uint8, np.uint16, np.uint32, np.uint64]
        with self.test_session(use_gpu=True) as sess:
            for dtype in dtype_list:
                lhs = np.array([0, 5, 3, 14], dtype=dtype)
                rhs = np.array([5, 0, 7, 3], dtype=dtype)
                left_shift_result, right_shift_result = sess.run(
                    [bitwise_ops.left_shift(lhs, rhs),
                     bitwise_ops.right_shift(lhs, rhs)])
                # numpy's shifts serve as the reference implementation.
                self.assertAllEqual(left_shift_result, np.left_shift(lhs, rhs))
                self.assertAllEqual(right_shift_result, np.right_shift(lhs, rhs))

    def testShiftsWithNegativeLHS(self):
        dtype_list = [np.int8, np.int16, np.int32, np.int64]
        with self.test_session(use_gpu=True) as sess:
            for dtype in dtype_list:
                lhs = np.array([-1, -5, -3, -14], dtype=dtype)
                rhs = np.array([5, 0, 7, 11], dtype=dtype)
                left_shift_result, right_shift_result = sess.run(
                    [bitwise_ops.left_shift(lhs, rhs),
                     bitwise_ops.right_shift(lhs, rhs)])
                self.assertAllEqual(left_shift_result, np.left_shift(lhs, rhs))
                self.assertAllEqual(right_shift_result, np.right_shift(lhs, rhs))

    def testImplementationDefinedShiftsDoNotCrash(self):
        dtype_list = [np.int8, np.int16, np.int32, np.int64]
        with self.test_session(use_gpu=True) as sess:
            for dtype in dtype_list:
                lhs = np.array([-1, -5, -3, -14], dtype=dtype)
                rhs = np.array([-2, 64, 101, 32], dtype=dtype)
                # We intentionally do not test for specific values here since the exact
                # outputs are implementation-defined. However, we should not crash or
                # trigger an undefined-behavior error from tools such as
                # AddressSanitizer.
                sess.run([bitwise_ops.left_shift(lhs, rhs),
                          bitwise_ops.right_shift(lhs, rhs)])
if __name__ == "__main__":
    # Run the tests above under TensorFlow's googletest runner.
    googletest.main()
| [
"hanshuobest@163.com"
] | hanshuobest@163.com |
a80ff9f1386aa36409ab1e29afa7136b24ce7cf5 | 8f8ac99fd3ed9ceb36778b404f6fdd0b6899d3f4 | /pyobjc-framework-UserNotifications/PyObjCTest/test_unnotificationcontent.py | 07f1f32f52d7b614a570c4574cae012390965548 | [
"MIT"
] | permissive | strogo/pyobjc | ac4201c7742eb75348328eeecb7eedf4e3458de3 | 2579c5eaf44b0c5af77ee195c417d2c65e72dfda | refs/heads/master | 2023-07-13T00:41:56.448005 | 2021-08-24T06:42:53 | 2021-08-24T06:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 884 | py | from PyObjCTools.TestSupport import TestCase, min_sdk_level, min_os_level
import UserNotifications
import objc
class TestUNNotificationContent(TestCase):
    """Validate UserNotifications constants, protocol and method metadata."""

    def test_constants(self):
        # Interruption-level enum values as defined by the framework.
        self.assertEqual(UserNotifications.UNNotificationInterruptionLevelPassive, 0)
        self.assertEqual(UserNotifications.UNNotificationInterruptionLevelActive, 1)
        self.assertEqual(
            UserNotifications.UNNotificationInterruptionLevelTimeSensitive, 2
        )
        self.assertEqual(UserNotifications.UNNotificationInterruptionLevelCritical, 3)

    @min_sdk_level("12.0")
    def test_protocols12_0(self):
        # Looking up the protocol raises if the bridge does not expose it.
        objc.protocolNamed("UNNotificationContentProviding")

    @min_os_level("12.0")
    def test_methods12_0(self):
        # Argument 1 (the NSError**) must be bridged as an "out" parameter.
        self.assertArgIsOut(
            UserNotifications.UNNotificationContent.contentByUpdatingWithProvider_error_,
            1,
        )
| [
"ronaldoussoren@mac.com"
] | ronaldoussoren@mac.com |
bfd02018abdd0ca950150f959ccfe9a90d5e08e0 | cc95bf6a35fa6e17cfc87867a531b1fc01f1e49a | /py/rsync_dash_changes/rsync_dash_changes.py | 2ae7a019127a9344351854c81892bc3f466289a9 | [] | no_license | SMAPPNYU/smapputil | d88d9c65c79afd6a65f7cb991c09015f53ccff0a | 242a541c1e8687e003c37f1807a92112423b40d6 | refs/heads/master | 2023-01-04T03:09:34.725955 | 2019-06-25T15:37:07 | 2019-06-25T15:37:07 | 50,049,153 | 7 | 3 | null | 2022-12-26T20:24:55 | 2016-01-20T18:01:30 | Python | UTF-8 | Python | false | false | 4,010 | py | import os, sys, csv
import logging
import paramiko
import argparse
import subprocess
from os.path import expanduser
def paramiko_list_crontab(collector_machine, username, key):
    """Run ``crontab -l`` on a remote collector over SSH.

    :param collector_machine: hostname to connect to
    :param username: SSH user name
    :param key: path to the private key file
    :return: the paramiko stdout channel file; callers read() the crontab
             text from it.
    """
    logger = logging.getLogger(__name__)
    # login to paramiko and list the crontab
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(collector_machine, username=username, key_filename=key)
    ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('crontab -l')
    # BUG FIX: stderr is a stream, so it can only be read once. The original
    # called ssh_stderr.read() in the condition and again in the log call,
    # which always logged an empty message. Read once and reuse.
    err = ssh_stderr.read()
    if err:
        logger.info('error from paramiko exec_command: %s', err)
    return ssh_stdout
def build_collection_list(crontab_entries):
    """Extract collection names (the quoted ``-n`` flag value) from crontab lines."""
    # Parser recognizing the name flag plus the longer -nfs* flags so they
    # are not mistaken for -n.
    cron_parser = argparse.ArgumentParser()
    cron_parser.add_argument('-n')
    cron_parser.add_argument('-nfsb')
    cron_parser.add_argument('-nfsm')
    names = []
    for entry in crontab_entries:
        if ' -n ' not in entry:
            continue
        known, _unknown = cron_parser.parse_known_args(entry.split(' '))
        # The value carries surrounding quote characters; strip them.
        names.append(known.n[1:-1])
    return names
def list_collections(collector_machine, username, key):
    """Return the names of the collections actively collecting on a machine.

    Lists the remote crontab and keeps only entries that run a collect
    operation (``-op collect``); the name extraction itself is delegated to
    ``build_collection_list`` instead of duplicating its parser logic here
    (the original re-implemented the whole -n/-nfsb/-nfsm parsing inline).
    """
    # get the crontab
    paramiko_cron_output = paramiko_list_crontab(collector_machine, username, key)
    # read the crontab from stdout
    crontab = paramiko_cron_output.read()
    crontab_entries = crontab.decode().split('\n')
    # only 'collect' jobs count as active collections
    collect_entries = [entry for entry in crontab_entries if '-op collect' in entry]
    return build_collection_list(collect_entries)
def parse_args(args):
    """Build the CLI parser and parse *args* (typically ``sys.argv[1:]``)."""
    option_specs = (
        (('-i', '--input'),
         dict(dest='input', required=True,
              help='Path to a file listing the servers you want to count.')),
        (('-l', '--log'),
         dict(dest='log', required=True,
              help='This is the path to where your output log should be.')),
        (('-k', '--key'),
         dict(dest='key',
              help='Specify your key, this is necessary on hpc where this was made to run as the key has a weird name.')),
    )
    parser = argparse.ArgumentParser()
    for flags, options in option_specs:
        parser.add_argument(*flags, **options)
    return parser.parse_args(args)
if __name__ == '__main__':
    args = parse_args(sys.argv[1:])
    logging.basicConfig(filename=args.log, level=logging.INFO)
    logger = logging.getLogger(__name__)
    with open(expanduser(args.input), 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip the CSV header row
        for row in reader:
            # Row layout (inferred from usage below): [0] user, [1] host,
            # [2] destination path, [3] source path -- TODO confirm against
            # the input CSV's header.
            k = row[1]
            v = list_collections(row[1], row[0], args.key)
            # rsync include list: each collection dir plus top-level json,
            # metadata and filters entries.
            incl = ','.join(v)+',"*.json",metadata,filters'
            # needs to look like:
            # /share/apps/utils/rsync.sh -a /scratch/olympus/ yvan@192.241.158.221:/mnt/olympus-stage/ --include={"*.json",whale_test,metadata,filters} --exclude='*' --update
            run_cmd = '/share/apps/utils/rsync.sh -a {source} {uname}@{dest}:{dest_path} --include={{{params}}} --exclude="*" --update'.format(uname=row[0], source=row[3], dest=k, dest_path=row[2], params=incl)
            logger.info('running: '+run_cmd)
            # NOTE(review): shell=True with an interpolated command string is
            # shell-injection prone if the CSV is untrusted; the brace
            # expansion in --include currently requires a shell, though.
            process = subprocess.Popen([run_cmd], stdin=None, stdout=None, stderr=None, shell=True)
            out, err = process.communicate()
            logger.info('rsync subprocess output:\n {}'.format(out))
            # BUG FIX: the original format string had no {} placeholder, so
            # the captured stderr was silently dropped from the log.
            logger.info('rsync subprocess error:\n{}'.format(err))
| [
"yvanscher@gmail.com"
] | yvanscher@gmail.com |
a174ee4171661f8f51a4134585318720494b7f9c | 870639af1487cf59b548f56c9cd1a45928c1e2c2 | /homeassistant/components/renault/const.py | 2a0ea3a0d491d12cc02ae3fb6c36daf208f1c918 | [
"Apache-2.0"
] | permissive | atmurray/home-assistant | 9f050944d26c084f8f21e8612a7b90c0ae909763 | 133cb2c3b0e782f063c8a30de4ff55a5c14b9b03 | refs/heads/dev | 2023-03-19T04:26:40.743852 | 2021-11-27T05:58:25 | 2021-11-27T05:58:25 | 234,724,430 | 2 | 0 | Apache-2.0 | 2023-02-22T06:18:36 | 2020-01-18T11:27:02 | Python | UTF-8 | Python | false | false | 826 | py | """Constants for the Renault component."""
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN
from homeassistant.components.select import DOMAIN as SELECT_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
# Integration domain used in config entries and entity registries.
DOMAIN = "renault"

# Config-entry option keys.
CONF_LOCALE = "locale"
CONF_KAMEREON_ACCOUNT_ID = "kamereon_account_id"
DEFAULT_SCAN_INTERVAL = 300  # 5 minutes

# Entity platforms this integration sets up for each config entry.
PLATFORMS = [
    BINARY_SENSOR_DOMAIN,
    BUTTON_DOMAIN,
    DEVICE_TRACKER_DOMAIN,
    SELECT_DOMAIN,
    SENSOR_DOMAIN,
]

# Custom device classes used for renault-specific sensor state translation.
DEVICE_CLASS_PLUG_STATE = "renault__plug_state"
DEVICE_CLASS_CHARGE_STATE = "renault__charge_state"
DEVICE_CLASS_CHARGE_MODE = "renault__charge_mode"
| [
"noreply@github.com"
] | atmurray.noreply@github.com |
c167a5ed864f0c71faeaa6557cdedbd3318036e3 | bb970bbe151d7ac48d090d86fe1f02c6ed546f25 | /arouse/_dj/db/models/sql/datastructures.py | 8cd5c8082763c6c5b5c61f9a8c74fca055a950b4 | [
"Python-2.0",
"BSD-3-Clause"
] | permissive | thektulu/arouse | 95016b4028c2b8e9b35c5062a175ad04286703b6 | 97cadf9d17c14adf919660ab19771a17adc6bcea | refs/heads/master | 2021-01-13T12:51:15.888494 | 2017-01-09T21:43:32 | 2017-01-09T21:43:32 | 78,466,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,623 | py | """
Useful auxiliary data structures for query construction. Not useful outside
the SQL domain.
"""
from arouse._dj.db.models.sql.constants import INNER, LOUTER
class EmptyResultSet(Exception):
    # Sentinel exception used by the SQL layer; presumably raised when a
    # query can be proven to return no rows -- confirm at the raise sites.
    pass
class MultiJoin(Exception):
    """
    Used by join construction code to indicate the point at which a
    multi-valued join was attempted (if the caller wants to treat that
    exceptionally).
    """
    def __init__(self, names_pos, path_with_names):
        # Position within the field-name path at which the join fanned out.
        self.level = names_pos
        # The path travelled, this includes the path to the multijoin.
        self.names_with_path = path_with_names
class Empty(object):
    # Bare placeholder class with no behavior of its own.
    pass
class Join(object):
    """
    Used by sql.Query and sql.SQLCompiler to generate JOIN clauses into the
    FROM entry. For example, the SQL generated could be
        LEFT OUTER JOIN "sometable" T1 ON ("othertable"."sometable_id" = "sometable"."id")

    This class is primarily used in Query.alias_map. All entries in alias_map
    must be Join compatible by providing the following attributes and methods:
        - table_name (string)
        - table_alias (possible alias for the table, can be None)
        - join_type (can be None for those entries that aren't joined from
          anything)
        - parent_alias (which table is this join's parent, can be None similarly
          to join_type)
        - as_sql()
        - relabeled_clone()
    """
    def __init__(self, table_name, parent_alias, table_alias, join_type,
                 join_field, nullable):
        # Join table
        self.table_name = table_name
        self.parent_alias = parent_alias
        # Note: table_alias is not necessarily known at instantiation time.
        self.table_alias = table_alias
        # LOUTER or INNER
        self.join_type = join_type
        # A list of 2-tuples to use in the ON clause of the JOIN.
        # Each 2-tuple will create one join condition in the ON clause.
        self.join_cols = join_field.get_joining_columns()
        # Along which field (or ForeignObjectRel in the reverse join case)
        self.join_field = join_field
        # Is this join nullabled?
        self.nullable = nullable

    def as_sql(self, compiler, connection):
        """
        Generates the full
            LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params
        clause for this join.
        """
        join_conditions = []
        params = []
        qn = compiler.quote_name_unless_alias
        qn2 = connection.ops.quote_name
        # Add a join condition for each pair of joining columns.
        for index, (lhs_col, rhs_col) in enumerate(self.join_cols):
            join_conditions.append('%s.%s = %s.%s' % (
                qn(self.parent_alias),
                qn2(lhs_col),
                qn(self.table_alias),
                qn2(rhs_col),
            ))
        # Add a single condition inside parentheses for whatever
        # get_extra_restriction() returns.
        extra_cond = self.join_field.get_extra_restriction(
            compiler.query.where_class, self.table_alias, self.parent_alias)
        if extra_cond:
            extra_sql, extra_params = compiler.compile(extra_cond)
            join_conditions.append('(%s)' % extra_sql)
            params.extend(extra_params)
        if not join_conditions:
            # This might be a rel on the other end of an actual declared field.
            declared_field = getattr(self.join_field, 'field', self.join_field)
            raise ValueError(
                "Join generated an empty ON clause. %s did not yield either "
                "joining columns or extra restrictions." % declared_field.__class__
            )
        on_clause_sql = ' AND '.join(join_conditions)
        # Only emit an alias when it differs from the table name itself.
        alias_str = '' if self.table_alias == self.table_name else (' %s' % self.table_alias)
        sql = '%s %s%s ON (%s)' % (self.join_type, qn(self.table_name), alias_str, on_clause_sql)
        return sql, params

    def relabeled_clone(self, change_map):
        # Remap the parent and own aliases per change_map, keeping all other state.
        new_parent_alias = change_map.get(self.parent_alias, self.parent_alias)
        new_table_alias = change_map.get(self.table_alias, self.table_alias)
        return self.__class__(
            self.table_name, new_parent_alias, new_table_alias, self.join_type,
            self.join_field, self.nullable)

    def __eq__(self, other):
        # Two joins compare equal when they join the same table via the same
        # field from the same parent; aliases and nullability are ignored.
        if isinstance(other, self.__class__):
            return (
                self.table_name == other.table_name and
                self.parent_alias == other.parent_alias and
                self.join_field == other.join_field
            )
        return False

    def demote(self):
        # Return a copy of this join downgraded to an INNER join.
        new = self.relabeled_clone({})
        new.join_type = INNER
        return new

    def promote(self):
        # Return a copy of this join upgraded to a LEFT OUTER join.
        new = self.relabeled_clone({})
        new.join_type = LOUTER
        return new
class BaseTable(object):
    """
    Represents the base (unjoined) table reference of a FROM clause, e.g.
    the "foo" in:

        SELECT * FROM "foo" WHERE somecond
    """
    # Base tables are never joined from anything, so the join-related
    # attributes required of alias_map entries are fixed to None.
    join_type = None
    parent_alias = None

    def __init__(self, table_name, alias):
        self.table_name = table_name
        self.table_alias = alias

    def as_sql(self, compiler, connection):
        quoted = compiler.quote_name_unless_alias(self.table_name)
        if self.table_alias == self.table_name:
            # No distinct alias: just the quoted table name, no parameters.
            return quoted, []
        return '%s %s' % (quoted, self.table_alias), []

    def relabeled_clone(self, change_map):
        # Only the alias can be remapped; the table name is fixed.
        new_alias = change_map.get(self.table_alias, self.table_alias)
        return self.__class__(self.table_name, new_alias)
"michal.s.zukowski@gmail.com"
] | michal.s.zukowski@gmail.com |
df53c41904066323666c62a3aee1435fa56e1656 | 1577e1cf4e89584a125cffb855ca50a9654c6d55 | /pyobjc/pyobjc/pyobjc-framework-Cocoa-2.5.1/Examples/Twisted/WebServicesTool/Main.py | 03773e75ad513f0b55d85bedd372b220d75cb187 | [
"MIT"
] | permissive | apple-open-source/macos | a4188b5c2ef113d90281d03cd1b14e5ee52ebffb | 2d2b15f13487673de33297e49f00ef94af743a9a | refs/heads/master | 2023-08-01T11:03:26.870408 | 2023-03-27T00:00:00 | 2023-03-27T00:00:00 | 180,595,052 | 124 | 24 | null | 2022-12-27T14:54:09 | 2019-04-10T14:06:23 | null | UTF-8 | Python | false | false | 299 | py | import sys
from PyObjCTools import AppHelper

# Install the threaded-select reactor before anything imports
# twisted.internet.reactor; install() returns the reactor it registered.
from twisted.internet._threadedselect import install
reactor = install()

# import classes required to start application
import WSTApplicationDelegateClass
import WSTConnectionWindowControllerClass

# pass control to the AppKit
AppHelper.runEventLoop()
| [
"opensource@apple.com"
] | opensource@apple.com |
6f42b29f6189ff7c8f8ff3c4b7144ce247a6c9d6 | 131cf803a1f7b9638ab0a604d61ab2de22906014 | /tests/system/_test_send_data_log.py | 9de000f4cc9269b98beb90050bb6822169108554 | [
"Apache-2.0"
] | permissive | dimensigon/dimensigon | 757be1e61e57f7ce0a610a9531317761393eaad0 | 079d7c91a66e10f13510d89844fbadb27e005b40 | refs/heads/master | 2023-03-09T06:50:55.994738 | 2021-02-21T11:45:01 | 2021-02-21T11:45:01 | 209,486,736 | 2 | 0 | Apache-2.0 | 2021-02-26T02:59:18 | 2019-09-19T07:11:35 | Python | UTF-8 | Python | false | false | 7,017 | py | import os
import time
from asynctest import patch, TestCase
from testfixtures import LogCapture
from dimensigon.domain.entities.log import Log
from dimensigon.utils.helpers import encode
from dimensigon.web import create_app, repo, interactor
from tests.helpers import set_response_from_mock, wait_mock_called
from tests.system.data import Server1, Server2
DEST_FOLDER = os.path.dirname(os.path.abspath(__file__))  # temp log files live next to this module
class TestSendDataLog(TestCase):
    """End-to-end test of log shipping between two in-process app instances.

    server1 tails ``file1`` and posts its new content to server2's /socket
    endpoint (HTTP mocked); server2 writes the received data into ``file2``.
    """

    def setUp(self) -> None:
        self.file1 = os.path.join(DEST_FOLDER, 'server1.tempfile.log')
        self.file2 = os.path.join(DEST_FOLDER, 'server2.tempfile.log')
        self.remove_files()
        self.app1 = create_app(Server1())
        self.app2 = create_app(Server2())
        self.client1 = self.app1.test_client()
        self.client2 = self.app2.test_client()
        # Chunks appended one at a time by append_data(); self.i tracks which
        # chunk goes next.
        self.lines = ['line 1\nline 2\n', 'line 3\n', 'line 4\nline 5\n']
        self.i = 0
        self.append_data()

    def append_data(self):
        # Append the next pending chunk to the source log file.
        with open(self.file1, 'a') as temp:
            temp.write(self.lines[self.i])
        self.i += 1

    def get_file_offset(self):
        # Second line of the pygtail-style .offset file, or None if missing.
        try:
            with open(self.file1 + '.offset', 'r') as temp:
                return int(temp.readlines()[1].strip())
        except:
            return None

    def get_current_offset(self):
        # Byte length of the source file (seek to end, report position).
        with open(self.file1, 'r') as fd:
            fd.seek(0, 2)
            return fd.tell()

    def remove_files(self):
        # Best-effort removal of both log files and their .offset companions.
        for file in (self.file1, self.file2):
            try:
                os.remove(file)
            except:
                pass
            try:
                os.remove(file + '.offset')
            except:
                pass

    def tearDown(self, c=0) -> None:
        with self.app1.app_context():
            interactor.stop_send_data_logs()
        self.remove_files()

    @patch('dimensigon.network.encryptation.requests.post')
    def test_send_data_log(self, mock_post):
        set_response_from_mock(mock_post, url='http://server2.localdomain:81/socket?', status=200, json='')
        with self.app1.app_context():
            log = Log(file=self.file1, server=repo.ServerRepo.find('bbbbbbbb-1234-5678-1234-56781234bbb2'),
                      dest_folder=DEST_FOLDER, dest_name=os.path.basename(self.file2))
            repo.LogRepo.add(log)
            del log
            interactor._delay = None
            resp = interactor.send_data_logs(blocking=False, delay=None)
            # The background sender should post the first chunk exactly once.
            wait_mock_called(mock_post, 1, 10)
            mock_post.assert_called_once_with('http://server2.localdomain:81/socket',
                                              json={'destination': 'bbbbbbbb-1234-5678-1234-56781234bbb2',
                                                    'data': encode(filename=os.path.basename(self.file2),
                                                                   data_log=self.lines[0], dest_folder=DEST_FOLDER)})
        with self.app2.app_context():
            # Replay the captured payload against server2 for real.
            self.client2.post('/socket', json={'destination': 'bbbbbbbb-1234-5678-1234-56781234bbb2',
                                               'data': encode(filename=os.path.basename(self.file2),
                                                              data_log=self.lines[0], dest_folder=DEST_FOLDER)})
        # server2 writes asynchronously; poll up to ~0.5s for the file.
        c = 0
        while not os.path.exists(self.file2) and c < 50:
            time.sleep(0.01)
            c += 1
        self.assertTrue(os.path.exists(self.file2))
        with open(self.file2) as fd:
            self.assertEqual(self.lines[0], fd.read())
        with self.app1.app_context():
            self.append_data()
            # force to awake thread
            interactor._awake.set()
            # wait until it reads the new data
            wait_mock_called(mock_post, 2, 10)
            interactor._awake.clear()
            mock_post.assert_called_with('http://server2.localdomain:81/socket',
                                         json={'destination': 'bbbbbbbb-1234-5678-1234-56781234bbb2',
                                               'data': encode(filename=os.path.basename(self.file2),
                                                              data_log=self.lines[1], dest_folder=DEST_FOLDER)})
        with self.app2.app_context():
            self.client2.post('/socket', json={'destination': 'bbbbbbbb-1234-5678-1234-56781234bbb2',
                                               'data': encode(filename=os.path.basename(self.file2),
                                                              data_log=self.lines[1], dest_folder=DEST_FOLDER)})
        c = 0
        while not os.path.exists(self.file2) and c < 50:
            time.sleep(0.01)
            c += 1
        self.assertTrue(os.path.exists(self.file2))
        with open(self.file2) as fd:
            # Destination now holds both chunks, in order.
            self.assertEqual(''.join(self.lines[0:2]), fd.read())

    @patch('dimensigon.network.encryptation.requests.post')
    def test_send_data_log_with_error(self, mock_post):
        with self.app1.app_context():
            log = Log(file=self.file1, server=repo.ServerRepo.find('bbbbbbbb-1234-5678-1234-56781234bbb2'),
                      dest_folder=DEST_FOLDER, dest_name=os.path.basename(self.file2))
            repo.LogRepo.add(log)
            del log
            # First attempt fails server-side with HTTP 500.
            set_response_from_mock(mock_post, url='http://server2.localdomain:81/socket?', status=500,
                                   json='{"error": "Permission Denied"}')
            interactor._delay = None
            with LogCapture() as l:
                resp = interactor.send_data_logs(blocking=False, delay=None)
                wait_mock_called(mock_post, 1, 50)
                self.assertTrue(True)
            # On failure no offset file must be written: the chunk is not
            # considered delivered and will be re-sent.
            self.assertFalse(os.path.exists(self.file1 + '.offset'))
            mock_post.assert_called_once_with('http://server2.localdomain:81/socket',
                                              json={'destination': 'bbbbbbbb-1234-5678-1234-56781234bbb2',
                                                    'data': encode(filename=os.path.basename(self.file2),
                                                                   data_log=self.lines[0], dest_folder=DEST_FOLDER)})
            # Server recovers; the next wake-up must resend from the start.
            set_response_from_mock(mock_post, url='http://server2.localdomain:81/socket?', status=200, json='')
            self.append_data()
            # force to awake thread
            interactor._awake.set()
            # wait until it reads the new data
            wait_mock_called(mock_post, 2, 50)
            interactor._awake.clear()
            mock_post.assert_called_with('http://server2.localdomain:81/socket',
                                         json={'destination': 'bbbbbbbb-1234-5678-1234-56781234bbb2',
                                               'data': encode(filename=os.path.basename(self.file2),
                                                              data_log=''.join(self.lines[0:2]),
                                                              dest_folder=DEST_FOLDER)})
| [
"joan.prat@knowtrade.eu"
] | joan.prat@knowtrade.eu |
70bd17e954d28ddca64b7dd70aeef2d4453d70f9 | bb876209ee0dd24e0e8703776993ddae574ab2e5 | /scheduler/models.py | 9c07645aebb37f7d8df20ca56a431bd48cf57a5d | [] | no_license | VBGI/scheduler | 76e382e50198749f458e0ca42801c509c1223c5e | bab7994499399b00f5132950a8a8c15ae5f2725b | refs/heads/master | 2020-12-25T14:12:42.235567 | 2019-05-28T23:30:45 | 2019-05-28T23:30:45 | 61,006,849 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,837 | py | #coding: utf-8
import datetime
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext as _
from cms.models.pluginmodel import CMSPlugin
from django.contrib.auth import get_user_model
import uuid
class ScheduleName(models.Model):
    """A named schedule: defines the registration window and slot parameters."""
    name = models.CharField(max_length=300, blank=False,
                            verbose_name="Название", default="Без имени")
    # Minimum slot interval, in minutes.
    mininterval = models.IntegerField(default=60, verbose_name=_("Интервал, мин."))
    starttime = models.TimeField(default=datetime.time(hour=11), verbose_name=_("Начало"))
    endtime = models.TimeField(default=datetime.time(hour=11), verbose_name=_("Конец"))
    # Maximum participants per slot; used by ScheduleTimes.get_free_places.
    maxnumber = models.IntegerField(default=3, verbose_name=_("Число участников max."))
    user = models.ForeignKey(get_user_model(), blank=True, null=True, related_name='+')

    def __unicode__(self):
        return self.name

    class Meta:
        verbose_name = _('Наименование расписания')
        verbose_name_plural = _('Наименования расписаний')
        permissions = (('can_edit_all', 'Can edit all objects'),)
class ScheduleDates(models.Model):
    """A concrete calendar date on which a schedule takes place."""
    name = models.ForeignKey(ScheduleName, verbose_name="Расписание", related_name='dates')
    # BUG FIX: pass the callable, not its result. `default=timezone.now()`
    # was evaluated once at import time, freezing the default date for the
    # whole process lifetime; `timezone.now` is evaluated per new row.
    date = models.DateField(default=timezone.now)
    user = models.ForeignKey(get_user_model(), blank=True, null=True, related_name='+')
    dateonly = models.BooleanField(default=False, blank=True)

    def __unicode__(self):
        return self.name.name + '|' + str(self.date)

    class Meta:
        verbose_name = _('Дата события')
        verbose_name_plural = _('Даты событий')
        ordering = ('date',)
def _make_hashid():
    """Return a fresh 32-char hex identifier (evaluated per record, usable by migrations)."""
    return uuid.uuid4().hex


class ScheduleModel(models.Model):
    """A single registration record bound to a time slot."""
    THENUM = ((1, 'Один'), (2, 'Два'), (3, 'Три'), (4, 'Четыре'), (5, 'Пять'))
    username = models.CharField(max_length=100, default='', blank=True, verbose_name=_("ФИО детей"))
    phone = models.CharField(max_length=20, default='', blank=True, verbose_name=_("Телефон"))
    email = models.EmailField(blank=True)
    num = models.IntegerField(default=1, choices=THENUM, verbose_name=_("Число участников"), blank=True)
    time = models.ForeignKey('ScheduleTimes', null=True, related_name='registered', verbose_name=_("Время"))
    user = models.ForeignKey(get_user_model(), blank=True, null=True, related_name='+')
    # BUG FIX: the original used `default=uuid.uuid4().hex`, which is
    # evaluated once at import time, so every record shared one "unique"
    # hash. Passing a callable makes Django generate a new value per row.
    hashid = models.CharField(max_length=32, default=_make_hashid, editable=False, blank=True)

    def __unicode__(self):
        return self.username + '|' + self.phone + '|' + str(self.time.date.date) + '|' + str(self.time.time)

    class Meta:
        verbose_name = _('Запись регистрации')
        verbose_name_plural = _('Записи регистрации')
class ScheduleTimes(models.Model):
    """A time slot within a scheduled date."""
    date = models.ForeignKey(ScheduleDates, verbose_name="Дата", related_name='times')
    # BUG FIX: callable default -- `timezone.now()` was evaluated once at
    # import time; `timezone.now` is evaluated for each new row.
    time = models.TimeField(default=timezone.now)
    user = models.ForeignKey(get_user_model(), blank=True, null=True, related_name='+')

    def __unicode__(self):
        return self.date.name.name + '|' + str(self.date.date) + '|' + str(self.time)

    class Meta:
        verbose_name = _('Время регистрации')
        verbose_name_plural = _('Времена регистрации')
        ordering = ('time',)

    @property
    def get_registered(self):
        # Total participants over all registrations for this slot (0 if none).
        return self.registered.aggregate(models.Sum('num'))['num__sum'] or 0

    @property
    def get_free_places(self):
        return self.date.name.maxnumber - self.get_registered
class SchedulePlugin(CMSPlugin):
    # django-cms plugin model: links a placed plugin to the schedule it renders.
    schedule = models.ForeignKey(ScheduleName, verbose_name=u"Название расписания")
| [
"kislov@easydan.com"
] | kislov@easydan.com |
eea43de376cb897b20a61bb982a7b05e4f2fae81 | 453ca12d912f6498720152342085636ba00c28a1 | /leetcode/design/python/moving_average_from_data_stream.py | 55d1641ad0ec216e4ab718f3c23ae61e9a5ad5d4 | [] | no_license | yanbinkang/problem-bank | f9aa65d83a32b830754a353b6de0bb7861a37ec0 | bf9cdf9ec680c9cdca1357a978c3097d19e634ae | refs/heads/master | 2020-06-28T03:36:49.401092 | 2019-05-20T15:13:48 | 2019-05-20T15:13:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py | """
https://leetcode.com/problems/moving-average-from-data-stream/
Given a stream of integers and a window size, calculate the moving average of all integers in the sliding window.
Example:
MovingAverage m = new MovingAverage(3);
m.next(1) = 1
m.next(10) = (1 + 10) / 2
m.next(3) = (1 + 10 + 3) / 3
m.next(5) = (10 + 3 + 5) / 3
"""
import collections
class MovingAverage(object):
    """Tracks the moving average over the last ``size`` values seen."""

    def __init__(self, size):
        """
        Initialize your data structure here.
        :type size: int
        """
        # A bounded deque evicts the oldest element automatically once the
        # window is full, so next() needs no manual bookkeeping.
        self._window = collections.deque(maxlen=size)

    def next(self, val):
        """
        :type val: int
        :rtype: float
        """
        self._window.append(val)
        return sum(self._window) / float(len(self._window))
if __name__ == "__main__":
obj = MovingAverage(3)
obj.next(1)
obj.next(10)
obj.next(3)
obj.next(5)
| [
"albert.agram@gmail.com"
] | albert.agram@gmail.com |
eca07cb004b4613516ccb414988d5bb9a8e160c7 | 6a253ee7b47c5f70c826bbc97bb8e33cd1dab3b6 | /1.Working with Big Data/Filtering WDI data in chunks.py | 47f95bac4b95fdcdf114f490697e9b72817d33e9 | [] | no_license | Mat4wrk/Parallel-Programming-with-Dask-in-Python-Datacamp | 19a646d6d16ff46173964c25639ff923407c8f32 | 535f69b78adb50cffc7f402f81ddff19f853eea1 | refs/heads/main | 2023-03-06T19:52:39.495066 | 2021-02-13T13:27:06 | 2021-02-13T13:27:06 | 338,565,569 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | # Create empty list: dfs
# Collected filtered chunks: dfs
dfs = []

# Stream 'WDI.csv' in 1000-row chunks to bound memory use.
for chunk in pd.read_csv('WDI.csv', chunksize=1000):
    # Mask: rows reporting the urban-population indicator
    is_urban = chunk['Indicator Name']=='Urban population (% of total)'
    # Mask: rows for Australia
    is_AUS = chunk['Country Code']=='AUS'
    # Keep only rows matching both conditions: filtered
    filtered = chunk.loc[is_urban & is_AUS]
    # Append the filtered chunk to the list dfs
    dfs.append(filtered)
| [
"noreply@github.com"
] | Mat4wrk.noreply@github.com |
cf384d5aaa2c2e59d5a966f204789d3d44decda4 | 20c20938e201a0834ccf8b5f2eb5d570d407ad15 | /abc010/abc010_3/8100691.py | 6ae85719796706bdbb385f17a52778fd58ebedee | [] | no_license | kouhei-k/atcoder_submissions | 8e1a1fb30c38e0d443b585a27c6d134bf1af610a | 584b4fd842ccfabb16200998fe6652f018edbfc5 | refs/heads/master | 2021-07-02T21:20:05.379886 | 2021-03-01T12:52:26 | 2021-03-01T12:52:26 | 227,364,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 452 | py | tmp = list(map(int, input().split()))
# Two endpoints read from the first input line, plus time T and speed V.
t = [[tmp[0], tmp[1]], [tmp[2], tmp[3]]]
T = tmp[4]
V = tmp[5]
n = int(input())
home = [list(map(int, input().split())) for i in range(n)]


def dis(a, b, c, d):
    """Euclidean distance between points (a, b) and (c, d)."""
    return (abs(c-a)**2 + abs(d-b)**2)**0.5


# Feasible iff some detour start -> home[i] -> goal fits within distance T*V.
# (Removed a dead `tmp = 0` local the original reset on every iteration.)
for i in range(n):
    d = dis(t[0][0], t[0][1], home[i][0], home[i][1])
    d += dis(home[i][0], home[i][1], t[1][0], t[1][1])
    if d <= T*V:
        print("YES")
        exit(0)
print("NO")
| [
"kouhei.k.0116@gmail.com"
] | kouhei.k.0116@gmail.com |
c1bef48cd261ed57b3cab96e4aa30c381dbf94b6 | 53ae656e1e06c6ef46e2222043ae49b0c005218c | /pdfstream/callbacks/composer.py | 2641a2f65fd3d93d93d5f9958c9cb420c9a19b2f | [
"BSD-3-Clause"
] | permissive | st3107/pdfstream | de5e6cedec0f4eb034dc7a8fec74f0dd773d6260 | 6e1829d889e5f5400386513efe993ad0596da8a5 | refs/heads/master | 2023-02-15T21:13:56.288827 | 2021-01-14T01:02:39 | 2021-01-14T01:02:39 | 315,751,081 | 0 | 0 | BSD-3-Clause | 2020-11-24T20:59:20 | 2020-11-24T20:59:20 | null | UTF-8 | Python | false | false | 1,805 | py | """Event model run composer from files."""
import time
import typing as tp
import uuid
import numpy as np
from event_model import compose_run, ComposeDescriptorBundle
def gen_stream(
        data_lst: tp.List[dict],
        metadata: dict,
        uid: str = None
) -> tp.Generator[tp.Tuple[str, dict], None, None]:
    """Yield a synthetic (name, document) event-model stream from data and metadata."""
    run_uid = uid if uid else str(uuid.uuid4())
    run_bundle = compose_run(metadata=metadata, uid=run_uid)
    yield "start", run_bundle.start_doc
    if data_lst:
        # One primary descriptor derived from the first event's data.
        descriptor_bundle: ComposeDescriptorBundle = run_bundle.compose_descriptor(
            name="primary",
            data_keys=compose_data_keys(data_lst[0])
        )
        yield "descriptor", descriptor_bundle.descriptor_doc
        for datum in data_lst:
            yield "event", descriptor_bundle.compose_event(
                data=datum, timestamps=compose_timestamps(datum))
    yield "stop", run_bundle.compose_stop()
def compose_data_keys(data: tp.Dict[str, tp.Any]) -> tp.Dict[str, dict]:
    """Build the descriptor data_keys mapping for one event's data."""
    data_keys = {}
    for key, value in data.items():
        info = compose_data_info(value)
        data_keys[key] = dict(**info, source="PV:{}".format(key.upper()))
    return data_keys
def compose_data_info(value: tp.Any) -> dict:
    """Classify *value* into an event-model dtype/shape descriptor."""
    if isinstance(value, str):
        return {"dtype": "string", "shape": []}
    if isinstance(value, float):
        return {"dtype": "number", "shape": []}
    if isinstance(value, bool):
        # bool is tested before int because bool is an int subclass.
        return {"dtype": "boolean", "shape": []}
    if isinstance(value, int):
        return {"dtype": "integer", "shape": []}
    # Anything else is treated as an array; np.shape handles nested sequences.
    return {"dtype": "array", "shape": np.shape(value)}
def compose_timestamps(data: tp.Dict[str, tp.Any]) -> tp.Dict[str, float]:
    """Stamp every key of *data* with the current wall-clock time."""
    stamps = {}
    for key in data:
        stamps[key] = time.time()
    return stamps
| [
"st3107@columbia.edu"
] | st3107@columbia.edu |
e2be5f3aac3643696d95714676176f061a50f3d0 | f195c155cea434ec4f300f149ee84ecb3feb3cbc | /2019/08 August/dp08252019.py | 2ae41c879f7eaf13b9d1ccef4c4acab76f43bf91 | [
"MIT"
] | permissive | ourangzeb/DailyPracticeProblemsDIP | 7e1491dbec81fa88e50600ba1fd44677a28559ad | 66c07af88754e5d59b243e3ee9f02db69f7c0a77 | refs/heads/master | 2022-04-08T15:51:06.997833 | 2020-02-24T05:51:11 | 2020-02-24T05:51:11 | 257,056,516 | 1 | 0 | MIT | 2020-04-19T17:08:23 | 2020-04-19T17:08:22 | null | UTF-8 | Python | false | false | 1,223 | py | # This problem was recently asked by Google:
# You are given a hash table where the key is a course code, and the value is a list of all the course codes
# that are prerequisites for the key. Return a valid ordering in which we can complete the courses.
# If no such ordering exists, return NULL.
import operator
def courses_to_take(course_to_prereqs):
    """Return an ordering of courses that satisfies every prerequisite.

    Uses Kahn's topological sort: repeatedly schedule any course whose
    prerequisites have all been taken. Returns the ordering as a list, or
    the string 'NULL' when no valid ordering exists (the prerequisite
    graph contains a cycle).

    The previous frequency-count heuristic was not a topological sort
    (ties between courses with equal counts could be ordered invalidly)
    and wrongly returned 'NULL' whenever no course had any prerequisite.
    """
    # indegree[c] = number of prerequisites of c not yet scheduled.
    indegree = {course: 0 for course in course_to_prereqs}
    # dependents[p] = courses that list p as a prerequisite.
    dependents = {course: [] for course in course_to_prereqs}
    for course, prereqs in course_to_prereqs.items():
        for prereq in prereqs:
            # Tolerate prerequisites that have no entry of their own.
            if prereq not in indegree:
                indegree[prereq] = 0
                dependents[prereq] = []
            dependents[prereq].append(course)
            indegree[course] += 1
    # Start with every course that has no prerequisites.
    ready = [course for course, degree in indegree.items() if degree == 0]
    order = []
    while ready:
        course = ready.pop(0)  # FIFO keeps the ordering deterministic
        order.append(course)
        for dependent in dependents[course]:
            indegree[dependent] -= 1
            if indegree[dependent] == 0:
                ready.append(dependent)
    # If some course was never scheduled, the graph has a cycle.
    if len(order) != len(indegree):
        return 'NULL'
    return order
# Example from the prompt: CSC100 has no prerequisites, CSC200 needs CSC100,
# and CSC300 needs both.
courses = {
    'CSC300': ['CSC100', 'CSC200'],
    'CSC200': ['CSC100'],
    'CSC100': []
}
print (courses_to_take(courses))
# ['CSC100', 'CSC200', 'CSC300']
| [
"vishrutkmr7@gmail.com"
] | vishrutkmr7@gmail.com |
97b49a82997a7b5e9e070a347ec462d9e32909cc | 3d8871ed3dc79f47c2972b6169f1e7d169276a5e | /tests/examples/test_sirs.py | 8fcf1d9fdddde4e61ec4970d2f89f6c639ef1220 | [
"MIT"
] | permissive | clprenz/de_sim | 70bb19af92cc611e2ba0ab8578aed1300d4cd148 | 3944a1c46c4387a78e1c412d760b7f6ade27a1c0 | refs/heads/master | 2022-11-18T14:27:50.033529 | 2020-07-15T19:54:40 | 2020-07-15T19:54:40 | 280,269,848 | 0 | 0 | MIT | 2020-07-16T22:15:47 | 2020-07-16T22:15:46 | null | UTF-8 | Python | false | false | 2,906 | py | """ Test SIR model
:Author: Arthur Goldberg <Arthur.Goldberg@mssm.edu>
:Date: 2020-07-09
:Copyright: 2020, Karr Lab
:License: MIT
"""
import unittest
import random
import warnings
from capturer import CaptureOutput
from de_sim.examples.sirs import SIR, SIR2, RunSIRs
class TestSIRs(unittest.TestCase):
    """ Test SIR models using parameters from Fig. 1 of Allen (2017).

    Allen, L.J., 2017. A primer on stochastic epidemic models: Formulation, numerical simulation, and analysis.
    Infectious Disease Modelling, 2(2), pp.128-142.
    """

    def setUp(self):
        # Silence warnings raised by the simulator during the tests.
        warnings.simplefilter("ignore")

    def run_sir_test(self, sir_class):
        # Run one seeded epidemic with the Fig. 1 parameters and check that
        # the printed history contains the expected header fields, the final
        # time, and the completion message.
        with CaptureOutput(relay=False) as capturer:
            sir_args = dict(name='sir',
                            s=98,
                            i=2,
                            N=100,
                            beta=0.3,
                            gamma=0.15,
                            recording_period=10)
            sir = RunSIRs.main(sir_class, time_max=60, seed=17, **sir_args)
            RunSIRs.print_history(sir)
            expected_output_strings = ['time', '\ts\t', '60\t', 'Executed']
            for expected_output_string in expected_output_strings:
                self.assertIn(expected_output_string, capturer.get_text())

        with CaptureOutput(relay=False):
            # test lambda_val == 0: start with no infected individuals, the
            # run must still complete without error.
            sir_args['i'] = 0
            RunSIRs.main(sir_class, time_max=20, seed=13, **sir_args)

    def test_run_sir(self):
        self.run_sir_test(SIR)
        self.run_sir_test(SIR2)

    def run_P_minor_outbreak_test(self, sir_class):
        # Allen (2017) estimates P[minor outbreak] for the SIR model shown in Fig. 1 as 0.25
        ensemble_size = 50
        num_minor_outbreaks = 0
        with CaptureOutput(relay=False):
            for _ in range(ensemble_size):
                sir_args = dict(name='sir',
                                s=98,
                                i=2,
                                N=100,
                                beta=0.3,
                                gamma=0.15,
                                recording_period=10)
                seed = random.randrange(1E6)
                sir = RunSIRs.main(sir_class, time_max=60, seed=seed, **sir_args)
                # consider an outbreak to be minor if no infections remain and fewer than 10 people were infected
                if sir.history[-1]['i'] == 0 and 90 < sir.history[-1]['s']:
                    num_minor_outbreaks += 1
        p_minor_outbreak = num_minor_outbreaks / ensemble_size
        expected_p_minor_outbreak = 0.25
        # Loose 2x bounds: the empirical rate over 50 stochastic runs is noisy.
        self.assertGreater(p_minor_outbreak, 0.5 * expected_p_minor_outbreak)
        self.assertLess(p_minor_outbreak, 2 * expected_p_minor_outbreak)

    def test_P_minor_outbreak(self):
        self.run_P_minor_outbreak_test(SIR)
        self.run_P_minor_outbreak_test(SIR2)
| [
"artgoldberg@gmail.com"
] | artgoldberg@gmail.com |
84574100863381fff6d24ce33e761d3e93026164 | bce797646db81b18625f71bb7427de2ff3c006fc | /core/db/mongo_pool.py | eb6fe4d32e0aa23fdf8f642ad7b512c1263a54d8 | [] | no_license | lihaineng/IPProxyPool | 3389cd437e80997d3f4bab4c96f9ada77d859d51 | 46818b454fc9e8422fc3fd459288a8b755653315 | refs/heads/master | 2020-04-26T07:20:37.676838 | 2019-03-04T08:12:54 | 2019-03-04T08:12:54 | 173,387,841 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,199 | py | import random
import pymongo
from pymongo import MongoClient
from domain import Proxy
from settings import MONGO_URL
from utils.log import logger
class MongodbPool(object):
    """MongoDB-backed storage pool for proxy IP records."""

    def __init__(self):
        # 1.1. Open the database connection in __init__
        self.client = MongoClient(MONGO_URL)
        # 1.2 Grab the collection this pool operates on
        self.proxies = self.client['proxies_pool']['proxies']

    def __del__(self):
        # Close the database connection
        self.client.close()

    def insert_one(self, proxy):
        # Insert one proxy record, skipping IPs that are already stored
        count = self.proxies.count_documents({'_id': proxy.ip})
        if count == 0:
            # Use proxy.ip as the MongoDB primary key: _id
            dic = proxy.__dict__
            dic['_id'] = proxy.ip  # add the primary-key field to dic
            self.proxies.insert_one(dic)
            logger.info('插入新的代理:{}'.format(proxy))
        else:
            logger.warning("已经存在的代理:{}".format(proxy))

    def update_one(self, proxy):
        """2.2 Update an existing proxy record (keyed by its IP)."""
        self.proxies.update_one({'_id': proxy.ip}, {'$set': proxy.__dict__})

    def delete_one(self, proxy):
        """2.3 Delete a proxy record by its IP address."""
        self.proxies.delete_one({'_id': proxy.ip})
        logger.info("删除代理IP: {}".format(proxy))

    def find_all(self):
        """2.4 Yield every proxy IP stored in the collection."""
        cursor = self.proxies.find()
        for item in cursor:
            # Drop the _id key so the document maps onto Proxy's fields
            item.pop('_id')
            proxy = Proxy(**item)
            yield proxy  # generator makes downstream iteration convenient

    def find(self, conditions={}, count=0):
        """
        3.1 Query proxies by condition, optionally limiting the count; results
        are sorted by score descending then speed ascending so the best
        proxies come first.
        :param conditions: query-condition dict
        :param count: maximum number of proxy IPs to return (0 = no limit)
        :return: list of matching Proxy objects
        """
        # NOTE(review): the mutable default {} is safe here because
        # `conditions` is never mutated, but `conditions=None` with an
        # explicit fallback would be more idiomatic.
        cursor = self.proxies.find(conditions, limit=count).sort([
            ('score', pymongo.DESCENDING), ('speed', pymongo.ASCENDING)
        ])
        # Accumulate the matching proxy IPs
        proxy_list = []
        # Walk the cursor
        for item in cursor:
            item.pop('_id')
            proxy = Proxy(**item)
            proxy_list.append(proxy)
        # Return the list of matching Proxy objects
        return proxy_list

    def get_proxies(self, protocol=None, domain=None, count=0, nick_type=0):
        """
        3.2 Fetch a list of proxy IPs filtered by protocol and target domain.
        :param protocol: protocol: http, https
        :param domain: domain name, e.g. jd.com
        :param count: limit on the number of proxies returned (default: all)
        :param nick_type: anonymity level (default 0: highly anonymous)
        :return: list of matching proxies
        """
        # Build the query conditions
        conditions = {'nick_type': nick_type}
        # Constrain the protocol field according to the requested protocol
        if protocol is None:
            # No protocol given: return proxies that support both http and https
            conditions['protocol'] = 2
        elif protocol.lower() == 'http':
            conditions['protocol'] = {'$in': [0, 2]}
        else:
            conditions['protocol'] = {'$in': [1, 2]}
        if domain:
            # Exclude proxies already disabled for this domain
            conditions['disable_domains'] = {'$nin': [domain]}
        # List of proxy IPs that satisfy the conditions
        return self.find(conditions=conditions, count=count)

    def random_proxy(self, protocol=None, domain=None, count=0, nick_type=0):
        """
        3.3 Pick one random proxy IP filtered by protocol and target domain.
        :param protocol: protocol: http, https
        :param domain: domain name, e.g. jd.com
        :param count: limit on the candidate pool size (default: all)
        :param nick_type: anonymity level (default 0: highly anonymous)
        :return: one randomly chosen matching Proxy object
        """
        proxy_list = self.get_proxies(protocol=protocol, domain=domain, count=count, nick_type=nick_type)
        # Pick one proxy IP at random from the candidate list
        return random.choice(proxy_list)

    def disable_domain(self, ip, domain):
        """
        3.4 Append *domain* to the disable_domains list of the proxy *ip*.
        :param ip: IP address
        :param domain: domain name
        :return: True if the domain was added, False if it was already present
        """
        # print(self.proxies.count_documents({'_id': ip, 'disable_domains':domain}))
        if self.proxies.count_documents({'_id': ip, 'disable_domains': domain}) == 0:
            # Only push the domain if it is not yet in disable_domains
            self.proxies.update_one({'_id': ip}, {'$push': {'disable_domains': domain}})
            return True
        return False
if __name__ == "__main__":
    # Ad-hoc smoke test: print every proxy currently stored in the pool.
    mongo = MongodbPool()
    # proxy = Proxy('202.104.113.35', '53281')
    # proxy = Proxy('202.104.113.122', '9999')
    # mongo.delete_one(proxy)
    for i in mongo.get_proxies():
        print(i)
| [
"123456@qq.com"
] | 123456@qq.com |
49e6ceb93b732efc5346d3b2ccd762bd11f2c49b | a3385f7636ceb232e97ae30badee0ba9145138f8 | /egs/thchs30/s5/local/dae/add-noise-mod.py | 8327fc325ee911b1c8db5ca3c470637e82c131fa | [
"Apache-2.0"
] | permissive | samsucik/prosodic-lid-globalphone | b6a6ccdcece11d834fc89abaa51031fc9f9e37e1 | ca6a8e855441410ab85326d27b0f0076d48d3f33 | refs/heads/master | 2022-11-29T09:17:24.753115 | 2021-02-03T18:17:16 | 2021-02-03T18:17:16 | 149,014,872 | 3 | 2 | Apache-2.0 | 2022-09-23T22:17:01 | 2018-09-16T16:38:53 | Shell | UTF-8 | Python | false | false | 4,788 | py | #!/usr/bin/env python
# Copyright 2016 Tsinghua University (Author: Chao Liu, Dong Wang). Apache 2.0.
from __future__ import print_function
import optparse
import random
import bisect
import re
import logging
import wave
import math
import struct
import sys
import os
try:
import pyximport; pyximport.install()
from thchs30_util import *
except:
print("Cython possibly not installed, using standard python code. The process might be slow", file=sys.stderr)
def energy(mat):
    """Mean squared amplitude (average power) of a sample buffer."""
    squared_total = sum(sample * sample for sample in mat)
    return float(squared_total) / len(mat)
def mix(mat, noise, pos, scale):
    # Mix scale-weighted noise into the signal `mat`, reading the noise
    # buffer circularly starting at `pos`.
    # Returns (next noise position, mixed samples).
    # NOTE(review): uses xrange, so this function is Python 2 only.
    ret = []
    l = len(noise)
    for i in xrange(len(mat)):
        x = mat[i]
        d = int(x + scale * noise[pos])
        #if d > 32767 or d < -32768:
        #    logSys.debug('overflow occurred!')
        # Clamp to the signed 16-bit sample range.
        d = max(min(d, 32767), -32768)
        ret.append(d)
        pos += 1
        if pos == l:
            pos = 0
    return (pos, ret)
def dirichlet(params):
    # Draw a probability vector from Dirichlet(params) (normalized gamma
    # variates; zero weight for non-positive params), then sample one
    # categorical index from it.
    # NOTE(review): uses xrange, so this function is Python 2 only.
    samples = [random.gammavariate(x, 1) if x > 0 else 0. for x in params]
    samples = [x / sum(samples) for x in samples]
    # Turn the probabilities into a cumulative distribution in place.
    for x in xrange(1, len(samples)):
        samples[x] += samples[x - 1]
    # Invert the CDF at a uniform random point.
    return bisect.bisect_left(samples, random.random())
def wave_mat(wav_filename):
    """Read every frame of a wav file into a list of 16-bit sample ints."""
    wav = wave.open(wav_filename, 'r')
    frame_count = wav.getnframes()
    raw = wav.readframes(frame_count)
    wav.close()
    return list(struct.unpack('%dh' % frame_count, raw))
def num_samples(mat):
    # Number of 16-bit samples in a decoded wav buffer.
    return len(mat)
def scp(scp_filename):
    """Yield each line of an scp file as a tuple of whitespace-separated fields."""
    with open(scp_filename) as scp_file:
        for line in scp_file:
            yield tuple(line.strip().split())
def wave_header(sample_array, sample_rate):
    """Build the 44-byte RIFF/WAVE header for 16-bit mono PCM data.

    :param sample_array: sequence of 16-bit samples (only its length is used)
    :param sample_rate: samples per second
    :return: header bytes (the caller appends the packed sample data)

    BUGFIX: the 'c' struct fields were fed text literals ('R', 'I', ...),
    which is Python-2-only; byte literals (b'R', ...) produce identical
    output on Python 2 and also work on Python 3.
    """
    byte_count = (len(sample_array)) * 2  # short
    # write the header
    hdr = struct.pack('<ccccIccccccccIHHIIHH',
                      b'R', b'I', b'F', b'F',
                      byte_count + 0x2c - 8,  # header size
                      b'W', b'A', b'V', b'E', b'f', b'm', b't', b' ',
                      0x10,  # size of 'fmt ' header
                      1,  # format 1
                      1,  # channels
                      sample_rate,  # samples / second
                      sample_rate * 2,  # bytes / second
                      2,  # block alignment
                      16)  # bits / sample
    hdr += struct.pack('<ccccI',
                       b'd', b'a', b't', b'a', byte_count)
    return hdr
def output(tag, mat):
    # Stream "<tag> <wav bytes>" to stdout (Kaldi-style wav pipe output).
    # NOTE(review): Python 2 only -- Python 3 would need sys.stdout.buffer
    # to write the binary payload.
    sys.stdout.write(tag + ' ')
    sys.stdout.write(wave_header(mat, 16000))
    sys.stdout.write(struct.pack('%dh' % len(mat), *mat))
def output_wave_file(dir, tag, mat):
    """Write the samples *mat* as <dir>/<tag>.wav (16 kHz, 16-bit mono PCM).

    BUGFIX: the file is now opened in binary mode ('wb'); the payload is
    raw bytes, and text mode corrupts it on platforms that translate
    line endings.
    """
    with open('%s/%s.wav' % (dir, tag), 'wb') as f:
        f.write(wave_header(mat, 16000))
        f.write(struct.pack('%dh' % len(mat), *mat))
def main():
    # Parse the command-line options describing the noise-mixing job.
    parser = optparse.OptionParser()
    parser.add_option('--noise-level', type=float, help='')
    parser.add_option('--noise-src', type=str, help='')
    parser.add_option('--noise-prior', type=str, help='')
    parser.add_option('--seed', type=int, help='')
    parser.add_option('--sigma0', type=float, help='')
    parser.add_option('--wav-src', type=str, help='')
    parser.add_option('--verbose', type=int, help='')
    parser.add_option('--wavdir', type=str, help='')
    (args, dummy) = parser.parse_args()
    random.seed(args.seed)
    # Dirichlet prior over noise types; index 0 means "keep the wav clean".
    params = [float(x) for x in args.noise_prior.split(',')]
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    global noises
    # noise_energies[t] / noises[t]: energy and (read position, samples) for
    # noise type t; index 0 is the "no noise" placeholder.
    noise_energies = [0.]
    noises = [(0, [])]
    # Load every noise wav listed in the noise scp.
    for tag, wav in scp(args.noise_src):
        logging.debug('noise wav: %s', wav)
        mat = wave_mat(wav)
        e = energy(mat)
        logging.debug('noise energy: %f', e)
        noise_energies.append(e)
        noises.append((0, mat))
    # Mix (or pass through) every speech wav listed in the input scp.
    for tag, wav in scp(args.wav_src):
        logging.debug('wav: %s', wav)
        # Output name: basename of the wav path without its extension.
        fname = wav.split("/")[-1].split(".")[0]
        # Sample a per-utterance noise level around the requested mean.
        noise_level = random.gauss(args.noise_level, args.sigma0)
        logging.debug('noise level: %f', noise_level)
        mat = wave_mat(wav)
        signal = energy(mat)
        logging.debug('signal energy: %f', signal)
        # Target noise energy for the sampled level (interpreted in dB).
        noise = signal / (10 ** (noise_level / 10.))
        logging.debug('noise energy: %f', noise)
        # Draw the noise type from the Dirichlet prior; 0 keeps the wav clean.
        type = dirichlet(params)
        logging.debug('selected type: %d', type)
        if type == 0:
            if args.wavdir != 'NULL':
                output_wave_file(args.wavdir, fname, mat)
            else:
                output(fname, mat)
        else:
            p, n = noises[type]
            # Recompute the noise energy over exactly the segment that will
            # be mixed, wrapping around the end of the noise buffer if needed.
            if p + len(mat) > len(n):
                noise_energies[type] = energy(n[p::] + n[0:len(n) - p:])
            else:
                noise_energies[type] = energy(n[p:p + len(mat):])
            scale = math.sqrt(noise / noise_energies[type])
            logging.debug('noise scale: %f', scale)
            pos, result = mix(mat, n, p, scale)
            # Remember where we stopped reading in this noise source.
            noises[type] = (pos, n)
            if args.wavdir != 'NULL':
                output_wave_file(args.wavdir, fname, result)
            else:
                output(fname, result)


if __name__ == '__main__':
    main()
| [
"s1531206@ed.ac.uk"
] | s1531206@ed.ac.uk |
3b0048baefe9388272b2c05c43ada7d692e0f571 | 8e235a4ba4521497dd0ef66630ed47cbdf0f7bc7 | /fixed_headers.py | b4d7ee7aad48fc2f9ccd6aabbe084ed42a00894b | [] | no_license | albertogeniola/mqtt_dissector | 86264b681845dc362b2f8e65664a84e35e20a4f8 | 4e75db9ae64fbe3e4ff0a2f9ac21280b4f41b748 | refs/heads/master | 2021-06-04T07:54:53.106429 | 2020-11-14T12:26:01 | 2020-11-14T12:26:01 | 148,671,707 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,204 | py | from myutils import Byte
from protocol_constants import ControlType, QoS
class FixedHeader(object):
    """MQTT control-packet fixed header: packet type, flags, remaining length."""

    length = 2  # Fixed length of 2 bytes!

    def __init__(self):
        self.control_packet_type = None
        self.flags = 0
        self.formatted_flags = None
        # The Remaining Length is the number of bytes remaining within the current packet, including data in the variable
        # header and the payload. The Remaining Length does not include the bytes used to encode the Remaining Length.
        self.remaining_length = 0

    @staticmethod
    def try_parse(data):
        """Best-effort parse: return (True, header) on success, else (False, None)."""
        if len(data) < 2:
            return False, None
        first_byte = Byte(data[0])
        try:
            control_packet_type = ControlType(first_byte._high_nibble())
            if control_packet_type == ControlType.PUBLISH:
                return True, PublishFixedHeader(data)
            # TODO: implement remaining fixed headers?
            else:
                return True, GenericFixedHeader(data)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed while probing partial buffers.
            return False, None

    @staticmethod
    def parse(data):
        """Parse a fixed header, raising on malformed input."""
        if len(data) < 2:
            raise Exception("Invalid data. Fixed header should be at least 2 bytes")
        first_byte = Byte(data[0])
        control_packet_type = ControlType(first_byte._high_nibble())
        if control_packet_type == ControlType.PUBLISH:
            return PublishFixedHeader(data)
        else:
            return GenericFixedHeader(data)

    @staticmethod
    def parse_remaining_length(data):
        """Decode the MQTT variable-length Remaining Length field.

        Returns (value, bytes_consumed). Raises on encodings longer than
        the 4-byte maximum allowed by the specification.
        """
        counter = 0
        multiplier = 1
        value = 0
        while True:
            encodedByte = data[counter]
            value += (encodedByte & 127) * multiplier
            # BUGFIX: test the multiplier BEFORE growing it, matching the
            # MQTT 3.1.1 pseudo code; the old order (multiply, then test)
            # rejected valid 4-byte encodings such as FF FF FF 7F.
            if multiplier > (128 * 128 * 128):
                raise Exception("Malformed Remaining Length")
            multiplier *= 128
            counter += 1
            if not ((encodedByte & 128) != 0):
                break
        return value, counter
class GenericFixedHeader(FixedHeader):
    """Fixed header for any MQTT control-packet type."""

    def __init__(self, data):
        super().__init__()
        leading = Byte(data[0])
        # High nibble carries the packet type, low nibble the flag bits.
        self.control_packet_type = ControlType(leading._high_nibble())
        self.flags = leading._low_nibble()
        self.formatted_flags = leading.low_nibble
        self.remaining_length, consumed = FixedHeader.parse_remaining_length(data[1:])
        # Total header size: the type/flags byte plus the length encoding.
        self.length = 1 + consumed

    def __str__(self):
        return ("Type: %s\n"
                "Flags: %s\n"
                "Remaining length: %d"
                % (self.control_packet_type, self.formatted_flags, self.remaining_length))
class PublishFixedHeader(GenericFixedHeader):
    """Fixed header for PUBLISH packets: flag bits carry DUP, QoS and RETAIN."""

    # Class-level defaults, overwritten per instance in __init__.
    dup_flag = None
    qos_level = None  # type:QoS
    retain = None

    def __init__(self, data):
        super().__init__(data)
        bits = self.formatted_flags
        # Bit layout of the low nibble: [DUP][QoS high][QoS low][RETAIN].
        self.dup_flag = bits[0] == '1'
        self.qos_level = QoS.parse(bits[1:3])
        self.retain = bits[3] == '1'

    def __str__(self):
        return ("%s\n"
                "Dup: %s\n"
                "QoS: %s\n"
                "Retain: %s" % (super().__str__(), self.dup_flag, self.qos_level, self.retain))
"albertogeniola@gmail.com"
] | albertogeniola@gmail.com |
fa5a0eb88b53a58cab15123043cbd54786388e40 | 9737a5e2cfe5521bb9731a356a7639d0dc3692de | /notes/week_2_netmiko/test_config.py | fe1ca580d2ec30f7c565cc3470cef141dc96fe08 | [] | no_license | akushnirubc/pyneta | 5c53cbcf42e2450ce6a2d7e6591d671661e84ba0 | ee68205c0b91974ea1cd79b8c06c36ae083fb02c | refs/heads/main | 2023-06-18T18:02:56.242732 | 2021-07-13T21:43:51 | 2021-07-13T21:43:51 | 358,647,513 | 0 | 0 | null | 2021-05-24T21:39:18 | 2021-04-16T15:45:34 | JavaScript | UTF-8 | Python | false | false | 533 | py | from netmiko import ConnectHandler
from getpass import getpass
# Connection parameters for the lab device; the password is prompted for
# interactively so it never lands in source control.
device1 = {
    "host": 'cisco1.lasthop.io',
    "username": 'pyclass',
    "password": getpass(),
    "device_type": 'cisco_ios',
    #"session_log": 'my_session.txt',
}

net_connect = ConnectHandler(**device1)
# Sanity check: show the prompt of the device we connected to.
print(net_connect.find_prompt())

# cfg = [
#     'logging buffered 20000',
#     'no logging console',
#     'clock timezone EST -5 0',
# ]

# Push the configuration commands stored in my_changes.txt and echo the
# device's responses.
output = net_connect.send_config_from_file(config_file='my_changes.txt')
print(output)
net_connect.disconnect()
"alex.kushnir@ubc.ca"
] | alex.kushnir@ubc.ca |
dd0af2b2f00b6883dce700c511427c06c56b72bd | 94f584fb8ed0a0d23c8a03fe402e4cfcd57aa956 | /slurm/2.import_sentences.py | 98b11a8414869cff07649eb55521617afbd60dd8 | [] | no_license | vsoch/neurosynth-nlp | 3627b198cfea20848048bc9ee30e24429385c31f | f63adcae79744b9058e4be7a2a7125ddbb647076 | refs/heads/master | 2020-12-24T12:02:21.993536 | 2015-10-10T00:12:17 | 2015-10-10T00:12:17 | 41,841,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 523 | py | #!/usr/bin/python
# This script will read in the output, and import each into the sentences table
import os
from glob import glob
# Directory holding the CoreNLP extraction output files.
output_base = '/work/02092/vsochat/wrangler/DATA/NEUROSYNTH-NLP/corenlp/extractions'

# Get number of text files with input
input_files = glob("%s/*.txt" %output_base)

# load the data into database: pipe each CSV extraction into the DeepDive
# "sentences" table via the deepdive CLI.
for i in range(len(input_files)):
    os.system('deepdive sql "COPY sentences FROM STDIN CSV" <%s' %input_files[i])

# How many error files? ~300
error_files = glob("%s/*.err" %output_base)
| [
"vsochat@stanford.edu"
] | vsochat@stanford.edu |
866cf44e2a88e0a9cd27070016c9e3e5cf2036ea | 7a20dac7b15879b9453150b1a1026e8760bcd817 | /Curso/ExMundo3/Ex080Listas3AdicionandoValores.py | ec1132f3bf525b6dd536f9554c873009a610d01b | [
"MIT"
] | permissive | DavidBitner/Aprendizado-Python | 7afbe94c48c210ddf1ab6ae21109a8475e11bdbc | e1dcf18f9473c697fc2302f34a2d3e025ca6c969 | refs/heads/master | 2023-01-02T13:24:38.987257 | 2020-10-26T19:31:22 | 2020-10-26T19:31:22 | 283,448,224 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 476 | py | lista = []
# NOTE(review): `lista` is initialized to [] just above this loop.
# Read 5 values and keep `lista` sorted in ascending order as we go.
for cont in range(0, 5):
    n = int(input('Digite um valor: '))
    # First value, or larger than the current maximum: append at the end.
    if cont == 0 or n > lista[-1]:
        lista.append(n)
        print('Valor adicionado ao fim da lista...')
    else:
        # Otherwise scan for the first element >= n and insert before it.
        pos = 0
        while pos < len(lista):
            if n <= lista[pos]:
                lista.insert(pos, n)
                print(f'Valor adicionado a posição {pos} da lista...')
                break
            pos += 1
print(f'Os valores digitados foram {lista}')
| [
"david-bitner@hotmail.com"
] | david-bitner@hotmail.com |
5ab8fd2d4d54815e373fb20810c273cf56ce74d7 | b7853adb67d24f2ee5134f87f15eb353553f9af9 | /lighter/coal_and_stick/trainer_events/trainer_event.py | 2344c8b9500d1f14b1163b6f7f631b86715f998c | [
"Apache-2.0"
] | permissive | susautw/lighter | cc1422646c134226049d2f4063ab6f7d618f3215 | 5f78e5ba595f84805fd428a5086d5566e27cb55d | refs/heads/master | 2020-08-07T23:14:34.283568 | 2019-10-21T05:00:36 | 2019-10-21T05:00:36 | 213,618,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | from .. import BaseTrainer
from ...events import Event
class TrainerEvent(Event):
def __init__(self, trainer: BaseTrainer):
super().__init__(trainer)
| [
"susautw@gmail.com"
] | susautw@gmail.com |
6991f98fb07f2dd43df9226c30b6f54d6678bf42 | 600cc377329781ab01466fe7a27ec1653a2c77bb | /app/exceptions.py | 175762a8f415d8b62fec4df328dcb8cef023233e | [] | no_license | volitilov/Flask_microblog | 3efa83f6a34eebc1fcdf7b4ba4519d398503e159 | 7769628d622508b16c9e42aad00109bdd4e36167 | refs/heads/master | 2022-12-12T22:54:28.070099 | 2018-09-16T11:54:52 | 2018-09-16T11:54:52 | 104,367,584 | 0 | 0 | null | 2022-12-08T02:08:08 | 2017-09-21T15:44:01 | Python | UTF-8 | Python | false | false | 223 | py | # app/exceptions.py
#
# :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
class ValidationError(ValueError):
    """Raised when input data fails validation."""
"volitilov@gmail.com"
] | volitilov@gmail.com |
cda70d98d6053a2d048b93d31ef5d48ebfdf681b | dc7dc1ab85403a4467044d4c0c936c17fff5225a | /fstmerge/examples/Fail2ban/rev579-759/right-branch-759/server/transmitter.py | 4462e1f90f9eefa96e14a0509562cea26dfe7726 | [] | no_license | RoDaniel/featurehouse | d2dcb5f896bbce2c5154d0ba5622a908db4c5d99 | df89ce54ddadfba742508aa2ff3ba919a4a598dc | refs/heads/master | 2020-12-25T13:45:44.511719 | 2012-01-20T17:43:15 | 2012-01-20T17:43:15 | 1,919,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,943 | py | __author__ = "Cyril Jaquier"
__version__ = "$Revision: 1.1 $"
__date__ = "$Date: 2010-07-25 12:46:29 $"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier"
__license__ = "GPL"
import logging, time
logSys = logging.getLogger("fail2ban.comm")
class Transmitter:
    """Decodes client command sequences and dispatches them to the server.

    A command is a sequence whose first element names the action ("ping",
    "add", "set", "get", "status", ...); the remaining elements are that
    action's arguments.

    Changes: the "set <jail> delaction" branch read an undefined local
    (`value`), raising NameError on every delaction command -- it now reads
    the action name from the command. Python-2-only backtick-repr and
    `except E, e` syntax were replaced with `repr()` and `except E as e`,
    which behave identically on Python 2.6+ and also run on Python 3. The
    unreachable trailing `raise` in status() was removed.
    """

    def __init__(self, server):
        # The server object that actually carries out every command.
        self.__server = server

    def proceed(self, command):
        """Execute *command*, returning (0, result) on success or (1, exception) on failure."""
        logSys.debug("Command: " + repr(command))
        try:
            ret = self.__commandHandler(command)
            ack = 0, ret
        except Exception as e:
            logSys.warn("Invalid command: " + repr(command))
            ack = 1, e
        return ack

    def __commandHandler(self, command):
        """Route a command to the matching server call; raise on unknown commands."""
        if command[0] == "ping":
            return "pong"
        elif command[0] == "add":
            name = command[1]
            if name == "all":
                raise Exception("Reserved name")
            try:
                backend = command[2]
            except IndexError:
                # Backend is optional; fall back to auto-detection.
                backend = "auto"
            self.__server.addJail(name, backend)
            return name
        elif command[0] == "start":
            name = command[1]
            self.__server.startJail(name)
            return None
        elif command[0] == "stop":
            if len(command) == 1:
                # Bare "stop" shuts the whole server down.
                self.__server.quit()
            elif command[1] == "all":
                self.__server.stopAllJail()
            else:
                name = command[1]
                self.__server.stopJail(name)
            return None
        elif command[0] == "sleep":
            value = command[1]
            time.sleep(int(value))
            return None
        elif command[0] == "set":
            return self.__commandSet(command[1:])
        elif command[0] == "get":
            return self.__commandGet(command[1:])
        elif command[0] == "status":
            return self.status(command[1:])
        raise Exception("Invalid command")

    def __commandSet(self, command):
        """Handle "set ..." commands; command[0] is a jail name or a global option."""
        name = command[0]
        # Logging options are global (no jail name).
        if name == "loglevel":
            value = int(command[1])
            self.__server.setLogLevel(value)
            return self.__server.getLogLevel()
        elif name == "logtarget":
            value = command[1]
            self.__server.setLogTarget(value)
            return self.__server.getLogTarget()
        # Jail options: command[1] selects the property to set.
        elif command[1] == "idle":
            if command[2] == "on":
                self.__server.setIdleJail(name, True)
            elif command[2] == "off":
                self.__server.setIdleJail(name, False)
            return self.__server.getIdleJail(name)
        # Filter options.
        elif command[1] == "addignoreip":
            value = command[2]
            self.__server.addIgnoreIP(name, value)
            return self.__server.getIgnoreIP(name)
        elif command[1] == "delignoreip":
            value = command[2]
            self.__server.delIgnoreIP(name, value)
            return self.__server.getIgnoreIP(name)
        elif command[1] == "addlogpath":
            value = command[2:]
            for path in value:
                self.__server.addLogPath(name, path)
            return self.__server.getLogPath(name)
        elif command[1] == "dellogpath":
            value = command[2]
            self.__server.delLogPath(name, value)
            return self.__server.getLogPath(name)
        elif command[1] == "addfailregex":
            value = command[2]
            self.__server.addFailRegex(name, value)
            return self.__server.getFailRegex(name)
        elif command[1] == "delfailregex":
            value = int(command[2])
            self.__server.delFailRegex(name, value)
            return self.__server.getFailRegex(name)
        elif command[1] == "addignoreregex":
            value = command[2]
            self.__server.addIgnoreRegex(name, value)
            return self.__server.getIgnoreRegex(name)
        elif command[1] == "delignoreregex":
            value = int(command[2])
            self.__server.delIgnoreRegex(name, value)
            return self.__server.getIgnoreRegex(name)
        elif command[1] == "findtime":
            value = command[2]
            self.__server.setFindTime(name, int(value))
            return self.__server.getFindTime(name)
        elif command[1] == "maxretry":
            value = command[2]
            self.__server.setMaxRetry(name, int(value))
            return self.__server.getMaxRetry(name)
        # Action options.
        elif command[1] == "bantime":
            value = command[2]
            self.__server.setBanTime(name, int(value))
            return self.__server.getBanTime(name)
        elif command[1] == "banip":
            value = command[2]
            return self.__server.setBanIP(name, value)
        elif command[1] == "addaction":
            value = command[2]
            self.__server.addAction(name, value)
            return self.__server.getLastAction(name).getName()
        elif command[1] == "delaction":
            # BUGFIX: the action name was read from an undefined local
            # (`value`), raising NameError; read it from the command.
            value = command[2]
            self.__server.delAction(name, value)
            return None
        elif command[1] == "setcinfo":
            act = command[2]
            key = command[3]
            value = command[4]
            self.__server.setCInfo(name, act, key, value)
            return self.__server.getCInfo(name, act, key)
        elif command[1] == "delcinfo":
            act = command[2]
            key = command[3]
            self.__server.delCInfo(name, act, key)
            return None
        elif command[1] == "actionstart":
            act = command[2]
            value = command[3]
            self.__server.setActionStart(name, act, value)
            return self.__server.getActionStart(name, act)
        elif command[1] == "actionstop":
            act = command[2]
            value = command[3]
            self.__server.setActionStop(name, act, value)
            return self.__server.getActionStop(name, act)
        elif command[1] == "actioncheck":
            act = command[2]
            value = command[3]
            self.__server.setActionCheck(name, act, value)
            return self.__server.getActionCheck(name, act)
        elif command[1] == "actionban":
            act = command[2]
            value = command[3]
            self.__server.setActionBan(name, act, value)
            return self.__server.getActionBan(name, act)
        elif command[1] == "actionunban":
            act = command[2]
            value = command[3]
            self.__server.setActionUnban(name, act, value)
            return self.__server.getActionUnban(name, act)
        raise Exception("Invalid command (no set action or not yet implemented)")

    def __commandGet(self, command):
        """Handle "get ..." commands; command[0] is a jail name or a global option."""
        name = command[0]
        if name == "loglevel":
            return self.__server.getLogLevel()
        elif name == "logtarget":
            return self.__server.getLogTarget()
        elif command[1] == "logpath":
            return self.__server.getLogPath(name)
        elif command[1] == "ignoreip":
            return self.__server.getIgnoreIP(name)
        elif command[1] == "failregex":
            return self.__server.getFailRegex(name)
        elif command[1] == "ignoreregex":
            return self.__server.getIgnoreRegex(name)
        elif command[1] == "findtime":
            return self.__server.getFindTime(name)
        elif command[1] == "maxretry":
            return self.__server.getMaxRetry(name)
        elif command[1] == "bantime":
            return self.__server.getBanTime(name)
        elif command[1] == "addaction":
            return self.__server.getLastAction(name).getName()
        elif command[1] == "actionstart":
            act = command[2]
            return self.__server.getActionStart(name, act)
        elif command[1] == "actionstop":
            act = command[2]
            return self.__server.getActionStop(name, act)
        elif command[1] == "actioncheck":
            act = command[2]
            return self.__server.getActionCheck(name, act)
        elif command[1] == "actionban":
            act = command[2]
            return self.__server.getActionBan(name, act)
        elif command[1] == "actionunban":
            act = command[2]
            return self.__server.getActionUnban(name, act)
        raise Exception("Invalid command (no get action or not yet implemented)")

    def status(self, command):
        """Return the whole-server status, or one jail's status when a name is given."""
        if len(command) == 0:
            return self.__server.status()
        else:
            name = command[0]
            return self.__server.statusJail(name)
| [
"joliebig"
] | joliebig |
9a2d672646bb8506167150dd31ba1b5a6c03ecfe | 62c171e0b3890d69a220353ca7c9419d2e265de1 | /django_app/introduction_to_models/migrations/0011_auto_20170607_0600.py | 27261ec4a0b474e11d81c4e641f9e613fecccdac | [] | no_license | fcdjangostudy/documentation | 45c07f22ab88d49849bb72374c1772eb10f74533 | b7bfc047e288227352c7c06e061367c4ea8e742d | refs/heads/master | 2021-01-24T07:42:30.867343 | 2017-06-08T05:08:42 | 2017-06-08T05:08:42 | 93,354,738 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 860 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-07 06:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the many-to-many `cars` field to Student and Teacher."""

    # Must run after the migration that created the Student and Teacher models.
    dependencies = [
        ('introduction_to_models', '0010_student_teacher'),
    ]

    operations = [
        migrations.AddField(
            model_name='student',
            name='cars',
            field=models.ManyToManyField(related_name='introduction_to_models_students', related_query_name='introduction_to_models_student', to='introduction_to_models.Car'),
        ),
        migrations.AddField(
            model_name='teacher',
            name='cars',
            field=models.ManyToManyField(related_name='introduction_to_models_teachers', related_query_name='introduction_to_models_teacher', to='introduction_to_models.Car'),
        ),
    ]
| [
"gaius827@gmail.com"
] | gaius827@gmail.com |
c753e9cfdf6dfe85da40b9bf8ae6beb439530e7f | 2805e59cd84d1535e405183a43990f327b0838c9 | /2020/day7/day7.py | 74bcb03ab6a71db25889c368fdb073ba2bde2452 | [] | no_license | nthistle/advent-of-code | b1ff4ae2646228fea59913c2878f26c9ae38444e | 7950850b77da77c1c2a4ca15c10f793c60e7ec73 | refs/heads/master | 2023-01-24T12:04:09.433385 | 2022-12-25T10:30:37 | 2022-12-25T10:30:37 | 160,138,710 | 29 | 12 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | import regex
# Pattern with a repeated capture group: the third-party `regex` module keeps
# every capture of <nums>, not just the last one.
nums_regex = regex.compile("((?P<nums>\\d+)([^\\d]*))*")

def nums(s):
    """Return every run of digits in *s* as a list of ints."""
    match = nums_regex.match(s)
    digit_runs = match.capturesdict()["nums"]
    return list(map(int, digit_runs))
# yeah, i know this should be dfs but
# Fixed-point sweep: keep adding any bag color that can (directly or
# transitively) contain a "shiny gold" bag until the set stops growing.
# NOTE(review): `rules` (color -> list of (count, color) or None) is built
# elsewhere in this file -- confirm its shape against the parsing code.
valid = set()
last_len = -1
while len(valid) != last_len:
    last_len = len(valid)
    for color in rules:
        if rules[color] is None:
            continue
        # Directly contains a shiny gold bag.
        if any(rc == "shiny gold" for rn, rc in rules[color]):
            valid.add(color)
        # Contains a bag already known to reach shiny gold.
        if any(rc in valid for rn, rc in rules[color]):
            valid.add(color)
print(len(valid))
import sys
# Containment chains can be long, so raise the recursion limit before the
# recursive walk below.
sys.setrecursionlimit(100000)
def ans(c):
    """Return the total bag count for colour *c*, including the bag itself."""
    cnt = 1
    if rules[c] is None:
        return cnt
    for rn, rc in rules[c]:
        # Each of the `rn` inner bags contributes its own full subtree count.
        cnt += rn * ans(rc)
    return cnt
# Subtract 1 so the shiny gold bag itself is not counted.
print(ans("shiny gold") - 1)
| [
"11429656+nthistle@users.noreply.github.com"
] | 11429656+nthistle@users.noreply.github.com |
1470565acd9a215c3e26945dd322f9f728fd44ba | d6aa13cb1021773d88e2ef780bc4450b38455644 | /apex/contrib/conv_bias_relu/conv_bias_relu.py | c873ebe1c60bbc3d5bc7a9bed06b0f6026cde230 | [
"BSD-3-Clause"
] | permissive | NVIDIA/apex | f54a9ced5d8b1c14f777e6bb53f11b3dc3ff2d6b | 7995de18677295c5edeeab082179edbfdb6ee16a | refs/heads/master | 2023-08-21T13:25:44.408616 | 2023-08-19T04:36:48 | 2023-08-19T04:36:48 | 130,725,814 | 7,932 | 1,381 | BSD-3-Clause | 2023-09-13T16:09:42 | 2018-04-23T16:28:52 | Python | UTF-8 | Python | false | false | 3,336 | py | import pdb
import torch
from torch.autograd import gradcheck
from apex import check_cudnn_version_and_warn
import fused_conv_bias_relu
check_cudnn_version_and_warn(__name__, 8400)
class ConvBiasReLU_(torch.autograd.Function):
    """Fused Conv2d + bias-add + ReLU backed by the cuDNN fusion extension.

    Inputs are cast to fp16 under autocast (``cast_inputs=torch.half``).
    """

    @staticmethod
    @torch.cuda.amp.custom_fwd(cast_inputs=torch.half)
    def forward(ctx, x, weight, bias, padding, stride):
        outputs = fused_conv_bias_relu.forward([x, weight, bias], padding, stride)
        # Save the ReLU output so backward can mask gradients at clamped
        # positions without re-running the forward pass.
        ctx.save_for_backward(x, weight, outputs[0])
        ctx.padding = padding
        ctx.stride = stride
        return outputs[0]

    @staticmethod
    @torch.cuda.amp.custom_bwd
    def backward(ctx, grad_output):
        bwd_args = [*ctx.saved_tensors, grad_output]
        padding = ctx.padding
        stride = ctx.stride
        grads = fused_conv_bias_relu.backward(bwd_args, padding, stride)
        # Gradients for (x, weight, bias); padding/stride take no gradient.
        return grads[0], grads[1], grads[2], None, None
class ConvBiasMaskReLU_(torch.autograd.Function):
    """Fused Conv2d + bias-add + mask + ReLU.

    NOTE(review): backward reuses the plain (unmasked) kernel and returns no
    gradient for ``mask`` -- the mask is treated as a constant input.
    """

    @staticmethod
    @torch.cuda.amp.custom_fwd(cast_inputs=torch.half)
    def forward(ctx, x, weight, bias, mask, padding, stride):
        outputs = fused_conv_bias_relu.forward_mask([x, weight, bias, mask], padding, stride)
        ctx.save_for_backward(x, weight, outputs[0])
        ctx.padding = padding
        ctx.stride = stride
        return outputs[0]

    @staticmethod
    @torch.cuda.amp.custom_bwd
    def backward(ctx, grad_output):
        bwd_args = [*ctx.saved_tensors, grad_output]
        padding = ctx.padding
        stride = ctx.stride
        grads = fused_conv_bias_relu.backward(bwd_args, padding, stride)
        # (x, weight, bias) gradients; None for mask, padding, stride.
        return grads[0], grads[1], grads[2], None, None, None
class ConvBias_(torch.autograd.Function):
    """Fused Conv2d + bias-add (no ReLU), via the ``*_no_relu`` kernels."""

    @staticmethod
    @torch.cuda.amp.custom_fwd(cast_inputs=torch.half)
    def forward(ctx, x, weight, bias, padding, stride):
        outputs = fused_conv_bias_relu.forward_no_relu([x, weight, bias], padding, stride)
        # No activation output is needed for backward, only x and weight.
        ctx.save_for_backward(x, weight)
        ctx.padding = padding
        ctx.stride = stride
        return outputs[0]

    @staticmethod
    @torch.cuda.amp.custom_bwd
    def backward(ctx, grad_output):
        bwd_args = [*ctx.saved_tensors, grad_output]
        padding = ctx.padding
        stride = ctx.stride
        grads = fused_conv_bias_relu.backward_no_relu(bwd_args, padding, stride)
        return grads[0], grads[1], grads[2], None, None
class ConvFrozenScaleBiasReLU_(torch.autograd.Function):
    """Fused Conv2d + constant scale + constant bias + ReLU.

    ``scale`` and ``bias`` are frozen: backward returns gradients only for
    ``x`` and ``weight``.
    """

    @staticmethod
    @torch.cuda.amp.custom_fwd(cast_inputs=torch.half)
    def forward(ctx, x, weight, scale, bias, padding, stride):
        output = fused_conv_bias_relu.forward_cscale_cbias_relu([x, weight, scale, bias], padding, stride)
        ctx.save_for_backward(x, weight, scale, output)
        ctx.padding = padding
        ctx.stride = stride
        return output

    @staticmethod
    @torch.cuda.amp.custom_bwd
    def backward(ctx, grad_output):
        bwd_args = [*ctx.saved_tensors, grad_output]
        padding = ctx.padding
        stride = ctx.stride
        grads = fused_conv_bias_relu.backward_cscale_cbias_relu(bwd_args, padding, stride)
        # Only x and weight receive gradients; scale/bias are frozen.
        return grads[0], grads[1], None, None, None, None
# Public entry points: calling Function.apply runs the fused forward with
# autograd tracking.
ConvBiasReLU = ConvBiasReLU_.apply
ConvBiasMaskReLU = ConvBiasMaskReLU_.apply
ConvBias = ConvBias_.apply
ConvFrozenScaleBiasReLU = ConvFrozenScaleBiasReLU_.apply
| [
"noreply@github.com"
] | NVIDIA.noreply@github.com |
f36cd201e2b22989f917f1c86478122f8d4bc944 | b580fd482147e54b1ca4f58b647fab016efa3855 | /host_im/mount/malware-classification-master/samples/not/sample_good305.py | 61286ed7edec8a126ec2feaab96109f978f73237 | [] | no_license | Barnsa/Dissertation | 1079c8d8d2c660253543452d4c32799b6081cfc5 | b7df70abb3f38dfd446795a0a40cf5426e27130e | refs/heads/master | 2022-05-28T12:35:28.406674 | 2020-05-05T08:37:16 | 2020-05-05T08:37:16 | 138,386,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | import difflib
import textwrap
import random
nterms = 210  # how many Fibonacci numbers to print
n1, n2 = 0, 1  # the two most recent terms of the sequence
if nterms <= 0:
    print("Please provide a positive integer.")
elif nterms == 1:
    print("Fibonacci sequence upto", nterms, ":")
    print(n1)
else:
    print("Fibonacci sequence:")
    count = 0
    # BUG FIX: the original condition was `0 is True and 0 < 210`, which is
    # always False (and `is` on literals raises a SyntaxWarning), so the loop
    # body never executed.  Loop until `nterms` terms have been printed.
    while count < nterms:
        print(n1)
        nth = n1 + n2
        n1 = n2
        n2 = nth
        count = count + 1
| [
"barnsa@uni.coventry.ac.uk"
] | barnsa@uni.coventry.ac.uk |
3518f1fa23fd1883fbffe373f737a23500137da8 | 30d5b6876c3ae8d792525aa1f61de7ce7fc2c1ca | /download.py | 3f9672ee8abdbc14089a1cb70e143f920e7aa9b2 | [] | no_license | codeinthehole/pyvideo2quicktime | 3b498277a8e7c7633e77791c0a595dbaf804ef8d | 0e4fd7fd6cd20a163e1f7e5c1ba8c801d007f35a | refs/heads/master | 2021-01-22T11:10:37.697696 | 2013-03-20T09:37:31 | 2013-03-20T09:37:31 | 3,751,112 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,353 | py | from urlparse import urlparse, parse_qs
import os
def download(youtube_url, filename):
    """Download a YouTube video and convert it to M4V under ./quicktime.

    Returns the target .m4v path (note: the path is returned even when the
    download failed and no conversion took place).
    """
    foldername = 'quicktime'
    if not os.path.exists(foldername):
        os.mkdir(foldername)
    notify('Downloading %s (%s)' % (youtube_url, filename))
    flv_filename = download_video(youtube_url)
    m4v_filepath = '%s/%s.m4v' % (foldername, filename)
    # BUG FIX: the original tested os.path.exists(m4v_filepath) -- the OUTPUT
    # file, which cannot exist before conversion -- so the conversion branch
    # could never run on a fresh download.  "No file downloaded" must check
    # the downloaded FLV instead.
    if not os.path.exists(flv_filename):
        notify("No file downloaded - aborting!")
    else:
        notify('Converting %s tp M4V format %s' % (flv_filename, m4v_filepath))
        convert_flv_to_m4v(flv_filename, m4v_filepath)
        notify('Conversion finished - cleaning up')
        # Remove the intermediate FLV once converted.
        os.unlink(flv_filename)
    return m4v_filepath
def download_video(url):
    """Fetch *url* with the bundled youtube-dl binary; return the FLV path.

    NOTE(review): relies on youtube-dl naming its output file after the
    YouTube video id -- confirm for the bundled version.
    """
    # Use youtube-dl to handle the download
    os.system('./youtube-dl "%s"' % url)
    # youtube-dl will download the video to a file with
    # name matching the YouTube ID of the video.
    return '%s.flv' % extract_youtube_id(url)
def extract_youtube_id(url):
    """Return the YouTube video id (the ``v`` query parameter) of *url*."""
    query_string = urlparse(url).query
    video_ids = parse_qs(query_string)['v']
    return video_ids[0]
def convert_flv_to_m4v(flv_path, m4v_path):
    """Convert *flv_path* to *m4v_path* by shelling out to ffmpeg."""
    # Shell out to ffmpeg to do the conversion - hide the
    # output as there's masses of it.
    # NOTE(review): paths are interpolated unquoted into a shell command;
    # names containing spaces or shell metacharacters will break (or worse).
    os.system('ffmpeg -i %s %s > /dev/null' % (flv_path, m4v_path))
def notify(msg):
    """Print *msg* framed above and below by a rule of '-' matching its length."""
    line = '-' * len(msg)
    # FIX: the Python-2-only `print` statement breaks on Python 3; the
    # parenthesised single-argument form produces identical output on both.
    print("\n%s\n%s\n%s\n\n" % (line, msg, line))
| [
"david.winterbottom@gmail.com"
] | david.winterbottom@gmail.com |
0ffa0f53ff82464df6d214d3fe81c5e9e8e5c6e8 | 3dcc44bf8acd3c6484b57578d8c5595d8119648d | /pdb_to_psipred_ss2.py | dd09d3224514a6565329d7f20d555550d90f8e17 | [] | no_license | rhiju/rhiju_python | f0cab4dfd4dd75b72570db057a48e3d65e1d92c6 | eeab0750fb50a3078a698d190615ad6684dc2411 | refs/heads/master | 2022-10-29T01:59:51.848906 | 2022-10-04T21:28:41 | 2022-10-04T21:28:41 | 8,864,938 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,545 | py | #!/usr/bin/python
#Adapted from phil bradley's make_coords_file.py
# Rhiju, Feb 2006.
import string
import sys
from os import popen,system
import pdb
# Abort with a usage banner unless exactly two arguments were supplied.
if len(sys.argv) !=3:
    print '\n'+'-'*75
    print 'Usage: %s <pdb> <chain> > <coords_file>'
    print '-'*75+'\n\n'
    # NOTE(review): assert is used to bail out; sys.exit(1) would be cleaner
    # (asserts disappear under `python -O`).
    assert 0==1
pdb_file = sys.argv[1]
chain = sys.argv[2]
# '_' or '-' on the command line stands for a blank chain identifier.
if chain == '_' or chain == '-':
    chain = ' '
# Run DSSP on the PDB and keep only per-residue table rows for the requested
# chain (the grep pattern matches the chain id at a fixed column -- TODO
# confirm against the DSSP output format).
lines = popen('/users/pbradley/dssp '+pdb_file+' | grep "RESIDUE AA" -A10000 | '+\
    ' grep "^.[ 0-9][ 0-9][ 0-9][ 0-9]......'+\
    chain+'"').readlines()
lowercase = list('abcdefghijklmnopqrstuvwxyz')
# Column 13 of each kept row is the one-letter amino-acid code; lowercase
# letters (presumably DSSP's disulfide-bonded cysteine convention -- confirm)
# are normalised to 'C'.
seq = map(lambda x:x[13],lines)
for i in range(len(seq)):
    if seq[i] in lowercase:
        seq[i] = 'C'
seq = string.join(seq,'')
# Column 16 holds the secondary-structure letter.
ss = string.join(map(lambda x:x[16],lines),'')
# Collapse the 8-state alphabet to 3 states: E/B -> strand (E),
# H/G -> helix (H), everything else -> loop (L).
ss3 = ''
for a in ss:
    if a not in [' ','E','B','H','G','I','S','T']:
        # Unknown character: warn on stderr and fall back to loop.
        sys.stderr.write('undefined ss character? '+a+'\n')
        ss3 = ss3+'L'
    elif a in ['E','B']:
        ss3 = ss3+'E'
    elif a in ['H','G']:
        ss3 = ss3+'H'
    else:
        ss3 = ss3+'L'
assert len(ss3) == len(seq)
# Emit one line per residue in PSIPRED .ss2 style: index, residue letter,
# SS letter, then one-hot confidence columns (coil, helix, strand).
ss3_psipred = ''
for i in range(len(seq)):
    Eweight = 0.0
    Cweight = 0.0
    Hweight = 0.0
    if ss3[i]=='E':
        Eweight = 1.0
        ss3_psipred = ss3_psipred+'E'
    if ss3[i]=='H':
        Hweight = 1.0
        ss3_psipred = ss3_psipred+'H'
    if ss3[i]=='L':
        Cweight = 1.0
        ss3_psipred = ss3_psipred+'C'
    print "%4d %s %s %4.3f %4.3f %4.3f"% (i+1, seq[i], ss3_psipred[i],Cweight,Hweight,Eweight)
| [
"rhiju@stanford.edu"
] | rhiju@stanford.edu |
dbeeca429a6289fb3a9b68ce852f30c266ab920f | a2e638cd0c124254e67963bda62c21351881ee75 | /Extensions/AMWIDealTaker/FPythonCode/FFpMLACMDeriveInstrumentType.py | b9bce2f1668eae45e83a2ac2b9b213dadaacaa39 | [] | no_license | webclinic017/fa-absa-py3 | 1ffa98f2bd72d541166fdaac421d3c84147a4e01 | 5e7cc7de3495145501ca53deb9efee2233ab7e1c | refs/heads/main | 2023-04-19T10:41:21.273030 | 2021-05-10T08:50:05 | 2021-05-10T08:50:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,982 | py | """------------------------------------------------------------------------
MODULE
FFpMLACMDeriveInstrumentType -
DESCRIPTION:
This file is used to infer the instrument type that needs to be created for an incoming FpML if the productType details are not present in the FpML
VERSION: 1.0.30
RESTRICTIONS/ LIMITATIONS:
1. Any modifications to the scripts/ encrypted modules/ clear text code within the core is not supported.
2. This module is not customizable
3. The component may not work as expected with any modifications done to this module at user end
--------------------------------------------------------------------------"""
import base64, zlib, imp, marshal
if imp.get_magic() == '\x03\xf3\r\n':
__pyc = marshal.loads(zlib.decompress(base64.b64decode("""
eNrFO1lsHMl11XORMxqSEilRpM5eZ7mktEtS2vUe0noPakhqaYiHe6jlrryb2eZ0kWpp2D3s7pFIW9wklh3YCGzYcIA4l40gl4EE
QZB8JLEBAwvEf/4L/JOPfOXHP0byEQQIgjjvvarq6Z5uShxKTnjUVFe9el313qt3VU2dyZ8s/L8N//4MFBZjt6DUmJVhDY3d0lQ9
w25lVD3LbmWZBX95ZuXYlxl7yNj7t3LYV50oIKrfzDA2+ZR+SovLszdvzJV0+Jmfby7emKksznLPvscXHD/wWlvcCVZ3m1yfLM3O
VSvGwsrqwvLSVYJfvW37+obd4Dp8tnxu6YGr284G9/TgNjSGCPQAMQS3zUB3OLd8hFvnet3jZgCjNlxPNx2Ar7tbtrOp4zR0e4OQ
ND3XatXFFCwemHbD102P644bQB/3EbvtECgOK707Z1RxgvqYMVYy5qqrxkIFZ1yd1m8sLC6sztCDmP/lKX3G2dW3XMvesOtmYLsO
TQ2R+XXPbgb+tM6durfbxGkCXKvBoane4CYske8Eet21uH7fDm7LOdRdj6iB0/NbzabrwcgpnV734pSgmMCjoOotP4Blf8Fcb3AC
ewnBENNW03VweVvmLkHed727uunrfKfJ6zghfC8QrnMFFgwTy2i/DCgPDPJgNZZemnxqP/Yv4GcpOANS+SjhqUd3wzUU4SGNMc5o
BzCUcSH+WMmioGMlh5sAK2of3Cow3sN4lvEcewg7ppce84wX6LGIOwbBSswqUOUIs3qoUmZWL1X6mFWkSj+zSlQZYNYRqhxlVpkq
x5jVR5VBZvVTZYhZA1Q5zqyjVDnBrGNUGWbWIFVOMmuIKiPMOk6VUWadoMopZg1T5TSzTlLlDLNGqHKWWdA7yr4MyuA8M6oTp4E4
daANw3/Y5qyC1DrPSGs8YKwWVjRVySAJqxMIvBQgkSeQysFlKGqPYkqttmV6d+1gzfb4CuwybAsuPX7Uhuc6wYzHHRN6aNAxGHTP
bNgWCeAi931zk0/gCoIcTp83NiZoYkwt5lFvmGruBr04jZrt2EGthth91HtMK8JvSB1NUacHCiDEHaQCNvpvQrHJA9qQ7TUm9BGq
nQ49Bbt4xVhYnCM8BpJU1PJq9gdfwoiYxGKMxlKTDSMSxM4y9YxkdMjsPlxOhNmCtbQo/4kWtRRZVUatikjdlOw/xBL9fZZ4KrLE
x3PsrQjH5lG+dBKw7lmWfRKWjYpZKLA4wLlD8OytCM8OuaylyLraTEMC2mL3HWKN/n5r/FTHGvNSZdMaP48vZmyPNPVelu28hEu+
o6F/MvvhGNsjF+VBjt3JYNPdDPNG2QNyZU4C+PYo/q2BKoc27FxjQRaxOUA6dG3wZf5rqP9u8/pdZf+jJEFnYwf9BSobfHMTPtoA
Nk48QDTzCGAj4WyS9lzYYTZg8fh4g2/6REaokBLDxlWvxWkegspZQeVgAD99QkoFDAmQ4zAPqFbclhNwj6BhTl3y47TQdH67J/ae
C4jnKHGkoBW0IW1QO6aVUQ3iwgpR/vxdyB8ss1Tm2M7Xolz6ItvLk93IR7j0NjECOHSeGBjt+jx15agrG+86T485fASetlvy1JKX
LSFm+FsDxnt/ixICZZz35NYanbzfBIo5HRJg6mRp9I2GawaTJAnTVE5SS1IqjL6Q+Qjgozm5xT1XB76Bg6ULWWHKWM2auz5Nx+jH
AllvIP0NHBcMYs1tNMBLXQGGuhYxnwxgrPkmWC6BhaQJh8WkCGeCUlSEtvVQpsST7DNOdK/AUoQJsakXfBrxnAqFSYhTkf5f0fra
YtUTFasvpYoVqoA821mKCtfbbK9AElSIiMmYFDesfyDFAR5JKeTZNlcCNoZ/JCEfk4R83CEh5EjMH0JC0qWCGG6cRJZoSkOkcXyi
J5WJRFXFxFLItsvxxxfVYyPUEk+Tq/L9s51cFXwtw2dfJsJVnEguNFga2ak9ch6RJQzZCMx8qDHv39nOK8TYjGDsRES3ZxVjJ+iR
9j+MX9meYPC3FigNAMxFbVBAbQ84P8igcOz10Et7qSyioHg/JbAerEuwXgTDxl7ZiJC9zPsJe1Bkd4qkXnrZ3QK2gKMPS9j+OTr6
WClqD3qYByUsp8SgfhJNcgk7nBFa5BG1yKImBisBw823ZGhKLAKszVK5SOX7AXoAVdjiDV5peWDH67sLyFLuBwaEsdX7ZpNswPJC
lTxw41ksxrB4DvldVipmCeKz4LXHO9r77uO2jJJCoj6cgsEh9IZ5cfLMCLnBA8/m93jF3dpynZsBhM80RnqmgY1u20zDNv3gimhe
5Y7rCS222GoEdrNhcw+cl63oa5a9Fc+uw4wtviNGTydGP34MbiuIkWFXkIHuliRROzlRiu1T4wgixL1G+x+YFqMP7cIAt7vHgyhp
UAhMnNuSucUDVODc8xb9TYK1cea0QFTvwdFYk1hzlzu7rHa2UXU9kJubOPAFCnVwGw/IjdwL/yfk1j4BrSNaMVOE+jDU5ebORr3R
SuheC1kHKX9AXhvu2/F2AClEHzY2tr9FFj7SLnXumtK5/kGULqxEB2cW1hIFCDMhOhjcttwZSCljXLlaMJZc3ODCgQRB0oyMbyIm
FTswLg9dMmc4oXbVKx1EUSaNOwCsEb/EhlzUcv52aDlDI4maVtWFdhUKFT0oUGg2sS0ndab/IT3m449Ko3rfxDAAld43mTO+D1rh
aSvImKcddMVX0m4Q3Ee7ZfpJb8Em8Bq7mLlD9lOaD4buw3RSgYQOI0/UpPSAKo1Uww3bucstbBPxzhtYYPA70atsNNnUm+Fb6bHN
JfLqpRgYVw+jWKLW1bgcugK5uCuAIrW+4CvKkO6QpODeE8uaQnsfUQxK617WzsHvAH2mhQA76SHAZ6Ne2meUaSY5kUohEgK0uzIR
Hz4X9eED8tCCDg8NZ+G/c0CpQg9cOuALzkaDEkckZBHIdFedNt6qG5gNHYxby3P29906HPBeYpjMWQ3hQ3sWoQ+OKNYJvcB+SE98
LMHQ9rvCBeN6HyLG4RSfvG+fMO//n8dvHpDHPvlKugOmVK/MVjtjMh9ZVvEg+Ar0Wb5hgrNxUFYa17HArDNp/3WBRSI5JMeOSY4J
Dw/tP8z5WwfgTk4mSog7b4Tc2bkU5cinSPVnFdmFuh+S1F3ZHmLwt9ZWzp/rJtCpmOBDWS/ooLJcDysVCEVNj8sQCIMfSjPRzhHA
uoiF+4T3iKNkC/FEDacmwfPDDd2XnR2qlOIwH/Ave4Rz2ROIDsnK6cTmE1OX2LGiJqrU/PfaJl0wuQBeV9Kz+q2DelYf7eNZvUPD
MyoI+QhFAYOwvAIYJ4CsAvgNRIIAIYYCBhHivSJvstR2o4wrKhYUUqyrQEW/ZvogMWRU0E7RI9lYYgJaSeN1LHCVwY3u7OVjiUsi
8BQ8skHBVwvj941dnD72fD+Mf7W2NzagFVQJDA2TmWH8+60OLbrjRnericowzGRK9l5V+nM00pVReU4VC5flAwXB4KzJZFgBgcLx
mAhFJfv1tDTYlXDvq7RwYvOL41N3Q5ds5tstO9jVV41qwjqKYBZ5Pu9xDiqyMk+eC0Q967ZDZkj4WZh9M6phyiMt+1UMFTZEUsbz
2I6Hee2MaUQ5L2DxBGo40mNU/x5HH49lQYvaq6CEC9H4J5tUwh1esdS7V2g7aZFkxhW5qWIe8utdMEJyAADvm55F6pbIPEft5NhS
uFsN3Ppd8lDnBagg/rtYrIW0JOK/rehL4CvmLjm0x1K20vWnkVMSM5Wz+jHi6ZUEv/jLJPKbXRC5jpG6laBzUYiz6CKnMXzS3zU9
2wSH473w/ASPMdKB0mhvvB/arCcn+rmkXVJzkHT/SYLuWudR0yVxFryntQ3RSXnulEltzapI/vnIadQmd2CS9SiZN8xGY90ERmyJ
A1z/r1GVpNzjqLuthkU3EoAtUhvb4vKHuNEBnonFzcaUvtLgpg8DiL+WWyckwtd3nbQ7ItGo0XSsNJDp0J8hCBdAPHUzZKrkv4Jp
O+EIYkAKc5qHdem0MFeEsCBJ1+Xy5YGljhySJ225+Ekbcn3L7/ZU5zhxGogtXzQvaftPUf4eBfVVz0r/PrRMP5b7KpEpkGbok0i7
CPKLtN8+od0VHUUmyPtRpIXM0Z2eyJnND+W7wEahyaN08F2wTH+EqVV4FPnWYRFOgF07CVAOReB36HaF1Yutww8zTNv+Idv+Edv+
hPY3Cl2JbiTkZJRP4ddcy3ObHPYaPsxsIWngAfXbzD3uodB9Q4IDMRu7yH91n0ZJBp6M0gUh1BczPmxcyVcfGYtycXVcItOrgWff
5ePT47Nuax3spGqnROT4lP4OJgPxPg3gJWkBcQs80+J4zUaybsofoJNBaF2w9Kv6mKVP6nT8R0Iw/96yeDuKzJgvJOcjLEyV9BUq
ZUalHNuJCwpEEWJuh3t12+ehhhc5DJ+WR2nIuR03sOtkHeUixNpoxAUlqMu2RVaHZnbD3dzkHtF/7cby9bRo2Kxv0cKEyX4ZmxEq
8KzFrkU+mcJQpPnntoNW0EbgtwD2+xyFU2XYBH2anmJexpLmBbNZIOjPsCCTOBlCD7ay/7mx3PmUsWpbkTBb8yijQAf5BzAxwj4Q
q62naDPO7m8zBHV/FlUpg1FTXXgULQuptESx818+OC3XXVDBIRlJ1q5BEwnjvCE2/zW70aAKZkCk7+mADAc2bElhc5tYbGPhYeFj
EYT0e+o0HUnQFCctyPlv+5Iz5SJEm5y5BDmjUku3HUJfSLaLqGBNDtlei0cBlYMzQfie07ztakYke025lxSfz7cgJuDTUd/TD91S
v8NhfVx3LskccpyeAodO7eOeCh79p1qO4tEgKJVIBj60pb/azsZ8LhrfVeLZGGFYh6hFcCgXYeoNvD2AHHpX4kqma157NLN8zGvu
q3tOPsYrPbACSru1EAZpaXkXofafyol0OBuMyjUtcmlliLhTzKQnVNJUk2BHp2oKL8E9gtAqLBC0Bi+iTeaJ+ff0pdn5C0btl6dX
hpImcAfeiWftcZWCr8KGYUWGwQKTt6pEOjegw/c7dBcWCDMMfeBiyaRuru3dWS+TiM7lO9uF0rmnTozylKwimff683galGPb/Xnn
O7nOgUUa+HMa2CsH+n9Jj0X56P0jXl9Fp3GUieP1EKE8YRdg/XnrFIGd7gRz9MR7B+i9F+nkvhzBcCYx9l+znWPF7ZJf0Ng+NdbW
rLP09rNpk+wnfzaDVPW+rlnn0mAGFKq/0qzzaQBHFcBPNUund+lpYMciy3mGwJ5JLGogsagCLeprmdhMXss8Zia1zJPO5CuZzpmI
SOEfMhbFF3iBLzLEuZmA7yH4n2UsSnpDGYMfS8D3ihxaNh3+P7RO+BLBN7O4okG1ovey1q/Q8Gc7VwTw2F6Ko/1uAu0RQvs3hHYo
QqgxGj6WINQHCQxlwjCYwzDq0xRGAdlhG5/AXv+/slgfjtRPUoA2kLOeiyMfyCH+/05s9j7Cv5qzxtWVnZHIPCcSM/xBAkM/YfiX
HI4dVWO/nbMu0BovpMnMKQX2g5x1kcAupoGdjszkucRMLiVm8gopLgB+nnC+EOcOw6jS/yphPsP8a1Q5iwM1a5IGTMbfcS2//dW8
U0m8ZooWbOetaYI/wxzaP4/CZV1CGbcuo+YAYbReRD0HOgwkC6QAFgfwQGzQGUAwIAbsSNBSsALYc7CfNk/gTSdxAcnLF+RNJNjN
PfhovSS7hvGSE61O3VI6IwNnrJ9DAapOvIq24WM8HLG4YwcbNvdTv1kTO2BXqZV1Svm7CTtpBhA5rrcCwKUuGevXby5E0jfVNQiv
zXrd9SzML+/SrSi6tI1neW+M+XQoL538GQoZhWNGYeua6XngjlB3xWyqq5euR6f4xlvq1C52aIduA5lmOoCNnL9iR4rrSkk/CuNp
5P7uUyS28MMAY00FGHSyXvFc328flSwYVbrbQ83hTS+j6o8mYSPHKsOdQ8IzFkoWxBwkCr7DiwP4oGJlWn5HhEf3G9oRisgYRNxh
cZHCbBKRxaF5peNUndDeqsROngWDZqtBht7vo1O3alQnBepJcbQwOWeC+7TKvS2iOPSLblpTLGUsaNaRzySqtGWPkg2zIldHXQvp
XShGMv1C0jW/Q2JQV4kYX3h4ollf6NgN9yHgVOlJkdvhFh5G1/EraHyXvj5G31bDDJKNV/gcs6E3W17T9bk/VaJTNHGotqxuna5x
GlYPM1ORL3hh5miWA4o5ehmhHR/zxzu36VRpaQKdbV/dgDF28R1fwOKLWDzAYk8FoOpqjJ9yNcb4WOFBKOPX0uM749cPGDr4KnD3
ZeDuq3jdVxF8RxwYDeF7/0/ecj68/ZMVmSuR6Yrc+6FNxRu8Hly6TOk2cXoyfYDTzI4jRRHLXsTicvxotMurRPFd/1J3g0MF0eW4
UJdcPeRk5fCXuxse0VCvdjcypsyudTd2n2s1wdRB0MTueog0ai8lXkGzNN2AJAwflDYLOw/8htgxZrfSE9evrx+Sm2r8pS6lCENX
cTH3AF8lTB510Gl98huG4hZf9EtMSH6joa4EiJwEZahPtb/k5IdHAlV1PmR8eLgrnWqvx3uPY4z+P+EJ86g2oOHXDUfkRYIRylfj
1w8H6LNMPepT9AzK/jZUBDqzT7t8Tn5KyEz8uUyJrmcJwwjdEI7Pof0ZxY5liUocVc6IeiFTJqiybO/ThjJLE7raBLUaOnu1Gnkf
tZr4HnGtZlCWia4IoMo36N51OfJFwiYo7F3j+6HavByq0oq6atKtQlP3bo3fRQzfxQLv6xgrWPwOFn+MxZ9g8adY/DkWv4fF72Px
B1j8IRbfweLPsMDNbvxFTIQOLEd4VYIdVYk/oGW+mBO/A9pApnikeLR4rDhaLBbLxX74LcJzX7GHfgeoLEFPj3hnuBNqNcutA33x
1rCBd8CNO1iww02RuPEZwbQ3jyiXoiBux2T+F0s2Nus=""")))
else:
__pyc = marshal.loads(zlib.decompress(base64.b64decode("""
The system cannot find the path specified.""")))
del base64, zlib, imp, marshal
exec(__pyc)
del __pyc
| [
"nencho.georogiev@absa.africa"
] | nencho.georogiev@absa.africa |
7b28138416d72a200b73e445b541f3f762da16a2 | 164c059989934bca6943df15552c5dc5e6b6dbbd | /src/run_extractquerylog.py | 1e58861ee31a93bdefab9a080953fe2b99c7f6c1 | [] | no_license | afcarl/densequery2vec | a9674987d49aa3deb67cccfa14eb9ae5edd915de | 7196e5a6676776681e436d5d53e388ddf70227af | refs/heads/master | 2020-03-22T10:19:48.318129 | 2014-07-17T13:56:39 | 2014-07-17T13:56:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py | #coding=cp936
import os
# One raw query-log file per entry for this month of data.
files = os.listdir('/data2011/d3/uigs/2012/201204')
# Emit one backgrounded extraction command per log file.  A context manager
# guarantees runall.sh is flushed and closed even if a write fails (the
# original relied on a manual open()/close() pair).
with open('runall.sh','w') as fout:
    for f in files:
        # Progress indicator.  `print f` was Python-2-only syntax; the
        # parenthesised single-argument form behaves identically on 2 and 3.
        print(f)
        fout.write('nohup python extractquerylog.py '+f+' > ../data/log/'+f+'.log &\n')
| [
"luochengleo@gmail.com"
] | luochengleo@gmail.com |
047ddfc0315a95ce84f0397cf7b3c06deb87acb7 | 0337e02c15f18985537587f6c298391f9af077c5 | /testcases/test_login_001.py | 3c9b301e582c3e10c2c01bf27ef7836f9ee7e063 | [] | no_license | khandepc/Guru99BankProject | 9c48c181f720bbce08012ac72a47cbeec581e60b | 55bfa560d7ca593cbfb84ede630a19b848f4426b | refs/heads/main | 2023-02-06T03:31:13.205028 | 2020-12-16T06:45:24 | 2020-12-16T06:45:24 | 321,257,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,776 | py | import time
import pytest
from utilities.custom_logger import LogGen
from utilities.readProperties import ReadConfig
from pageobjects.login_page import LoginPage
class Test_Login_001:
    """Guru99 demo-bank login smoke/sanity tests (pytest + Selenium).

    Configuration (base URL and credentials) comes from ``ReadConfig``; the
    ``setup`` fixture (defined in conftest) supplies the WebDriver instance.
    """

    # Shared test configuration, read once at class-definition time.
    base_url=ReadConfig.get_base_URL()
    user_name=ReadConfig.get_user_name()
    password=ReadConfig.get_password()
    log=LogGen.log_gen()

    @pytest.mark.smoke
    def test_open_browser(self,setup):
        """Open the base URL and verify the browser landed on it."""
        self.log.info("------Open browser test------")
        self.driver=setup
        self.driver.get(self.base_url)
        actual_url=self.driver.current_url
        if actual_url=="http://demo.guru99.com/V4/":
            assert True
            self.log.info("------ open browser test passed ------")
            self.driver.close()
        else:
            # Keep a screenshot for debugging before failing the test.
            self.driver.save_screenshot("../screenshots/"+'test_homepage_title.png')
            self.driver.close()
            self.log.error("------ open browser test failed ------")
            assert False

    @pytest.mark.smoke
    def test_home_page_title(self,setup):
        """Verify the home page title after the page has loaded."""
        self.log.info("------ test login 001 -------")
        self.log.info("------ verifying homepage title ------")
        self.driver=setup
        self.driver.get(self.base_url)
        # NOTE(review): fixed sleep to let the page load; an explicit
        # WebDriverWait would be more reliable and faster.
        time.sleep(5)
        actual_title=self.driver.title
        if actual_title == "Guru99 Bank Home Page":
            assert True
            self.driver.close()
            self.log.info("------ home page title test is passed ------")
        else:
            self.driver.save_screenshot("../screenshots/"+'test_homepage_title.png')
            self.driver.close()
            self.log.error("------ home page title test is failed ------")
            assert False

    @pytest.mark.sanity
    @pytest.mark.regression
    def test_login(self,setup):
        """Log in via the page object and check the manager id appears."""
        self.log.info("------ verifyin login test ------")
        self.driver=setup
        self.driver.get(self.base_url)
        self.log.info("--- application launched...")
        self.lp=LoginPage(self.driver)
        self.lp.set_user_id(self.user_name)
        self.log.info("--- user id Entered : "+self.user_name)
        self.lp.set_password(self.password)
        self.log.info("--- password Entered : "+self.password)
        self.lp.click_on_login()
        self.log.info("--- clicked on login")
        # The post-login page body contains "Manger Id : <user>" on success
        # ("Manger" is the site's own spelling).
        self.msg=self.driver.find_element_by_xpath("//body").text
        #if self.msg == "Manger Id : mngr285385":
        if "Manger Id :"+" "+self.user_name in self.msg:
            assert True
            self.log.info("------ login test is passed ------")
            self.driver.close()
        else:
            # NOTE(review): screenshot path style differs from the other
            # tests ("..\\screenshots\\" vs "../screenshots/") -- confirm
            # which location the suite expects.
            self.driver.save_screenshot(".\\screenshots\\"+"test_login.png")
            self.driver.close()
            self.log.error("------ login test is failed")
            assert False
| [
"khandepc@gmail.com"
] | khandepc@gmail.com |
15d888f88d1679039bf18f71120ac99d0ea01b0f | 2d276785c3663d4798be462115291c4706dbd255 | /Python从菜鸟到高手/chapter14/demo14.01.py | 29f3934d32d88fe51f876eff78ae813d90498efa | [] | no_license | bupthl/Python | 81c92433bd955663e6cda5fe7cab5ea3d067c3de | bdb33aeeb179a43100b9ef7129a925c63a133fd3 | refs/heads/master | 2022-02-21T11:02:40.195265 | 2019-08-16T05:49:18 | 2019-08-16T05:49:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,238 | py | '''
--------《Python从菜鸟到高手》源代码------------
欧瑞科技版权所有
作者:李宁
如有任何技术问题,请加QQ技术讨论群:264268059
或关注“极客起源”订阅号或“欧瑞科技”服务号或扫码关注订阅号和服务号,二维码在源代码根目录
如果QQ群已满,请访问https://geekori.com,在右侧查看最新的QQ群,同时可以扫码关注公众号
“欧瑞学院”是欧瑞科技旗下在线IT教育学院,包含大量IT前沿视频课程,
请访问http://geekori.com/edu或关注前面提到的订阅号和服务号,进入移动版的欧瑞学院
“极客题库”是欧瑞科技旗下在线题库,请扫描源代码根目录中的小程序码安装“极客题库”小程序
关于更多信息,请访问下面的页面
https://geekori.com/help/videocourse/readme.html
'''
from xml.etree.ElementTree import parse
# Parse the XML document once into an ElementTree.
doc = parse('files/products.xml')
# Walk every <product> element under the top-level <products> node.
for item in doc.iterfind('products/product'):
    # findtext() returns the text of the named child element (or None).
    id = item.findtext('id')
    name = item.findtext('name')
    price = item.findtext('price')
    print('uuid','=',item.get('uuid'))  # uuid is an attribute, not a child tag
    print('id','=',id)
    print('name', '=',name)
    print('price','=',price)
    print('-------------')
| [
"registercn@outlook.com"
] | registercn@outlook.com |
044b9145da1f28e2db11e6dbb1cb13463c17a9bc | fb16f7024e0d93ecb07c122e633c1a957a8ab645 | /inheritance/demo2.py | 5859d83e5f89c9c64c80a1b4b2ac35a6999df16c | [] | no_license | rajeshanu/rajeshprograms | c23cf550e060040c7b336242a805e274d3305371 | 83f0fc9c4a8628bba590d1066ca93fd98137f0bc | refs/heads/master | 2020-04-04T13:17:55.986558 | 2018-11-03T06:42:51 | 2018-11-03T06:42:51 | 155,956,676 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | class A:
    # Class-level (static) attribute shared by A and every subclass.
    company_name="rajesh"
    def display(self):
        """Print a fixed message identifying this inherited method."""
        print("this is display")
class B(A):
    """Subclass of A; inherits ``company_name`` and ``display()``."""

    def show(self):
        """Print a fixed message identifying this subclass method."""
        print("this is show")
# Class attributes are looked up through the class object, so the base class
# resolves the shared value directly...
print(A.company_name)
# ...and B, which defines no company_name of its own, falls back to A's.
print(B.company_name)
b1=B()
b1.show()  # defined on B
b1.display()  # inherited from A
| [
"44720126+rajeshanu@users.noreply.github.com"
] | 44720126+rajeshanu@users.noreply.github.com |
0dd964acad8dd7a11d0395be4c59bd0f1587f633 | 0547d1826e99eedb959a3463520d73985a3b844e | /Data Scientist with Python Track Github/06-Merging DataFrames with pandas/04- Case Study - Summer Olympics/09-Merging to compute influence.py | 65b57dce70597328d7a83026604030c048af75f0 | [] | no_license | abhaysinh/Data-Camp | 18031f8fd4ee199c2eff54a408c52da7bdd7ec0f | 782c712975e14e88da4f27505adf4e5f4b457cb1 | refs/heads/master | 2022-11-27T10:44:11.743038 | 2020-07-25T16:15:03 | 2020-07-25T16:15:03 | 282,444,344 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,142 | py | '''
Merging to compute influence
This exercise starts off with the DataFrames reshaped and hosts in the namespace.
Your task is to merge the two DataFrames and tidy the result.
The end result is a DataFrame summarizing the fractional change in the expanding mean of the percentage of medals won for the host country in each Olympic edition.
Instructions
100 XP
Merge reshaped and hosts using an inner join. Remember, how='inner' is the default behavior for pd.merge().
Print the first 5 rows of the DataFrame merged. This has been done for you. You should see that the rows are jumbled chronologically.
Set the index of merged to be 'Edition' and sort the index.
Print the first 5 rows of the DataFrame influence. This has been done for you, so hit 'Submit Answer' to see the results!
'''
# Import pandas
import pandas as pd
# Merge reshaped and hosts on their common column(s); how='inner' is pd.merge's
# default but is spelled out here.  NOTE(review): `reshaped` and `hosts` are
# DataFrames provided by the exercise environment, not defined in this file.
merged = pd.merge(reshaped, hosts, how='inner')
# Print first 5 rows of merged
print(merged.head())
# Index by 'Edition' and sort so the rows are in chronological order.
influence = merged.set_index('Edition').sort_index()
# Print first 5 rows of influence
print(influence.head()) | [
"abhaysinh.surve@gmail.com"
] | abhaysinh.surve@gmail.com |
2b9fb6b71bd3380b6ab336b7a73dca869d7d67e4 | ef4a1748a5bfb5d02f29390d6a66f4a01643401c | /algorithm/algorithm_week/week1/bubble.py | 2fa44823a30b25b9c819d6b99bd400329f717866 | [] | no_license | websvey1/TIL | aa86c1b31d3efc177df45503d705b3e58b800f8e | 189e797ba44e2fd22a033d1024633f9e0128d5cf | refs/heads/master | 2023-01-12T10:23:45.677578 | 2019-12-09T07:26:59 | 2019-12-09T07:26:59 | 162,102,142 | 0 | 1 | null | 2022-12-11T16:31:08 | 2018-12-17T08:57:58 | Python | UTF-8 | Python | false | false | 355 | py | # for i in range(2,10):
# for j in range(1,10):
# a = i * j
# print(a)
def bubbleSort(data):
    """Sort *data* in place into descending order using bubble sort.

    Each outer pass shrinks the unsorted region data[0:i+1] from the right;
    each inner pass bubbles the smallest remaining element toward the end.
    """
    for i in range(len(data)-1, 0, -1):  # i = last index of unsorted region
        for j in range(0, i):
            # BUG FIX: the original compared data[j] < data[i] but swapped
            # data[j] with data[j+1]; bubble sort must compare the same
            # adjacent pair it swaps.
            if data[j] < data[j+1]:
                data[j], data[j+1] = data[j+1], data[j]
# Demo: sort a small list in place and show the result.
data = [55, 78, 7, 12, 42]
bubbleSort(data)
print(data)
"websvey1@gmail.com"
] | websvey1@gmail.com |
935e864d4bed53c8504125ab2060120e036f07fd | 45a506c5622f366e7013f1276f446a18fc2fc00d | /tests/framework/cli/test_registry.py | 2b1fdda7006dcce18dcd328ffbcbdd634fe71e28 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | sbrugman/kedro | 3e48bcc56cc61fbe575d1a52c4f5bf3e84b6f974 | 25c92b765fba4605a748bdaaa801cee540da611e | refs/heads/develop | 2023-07-20T11:24:07.242114 | 2021-10-08T14:05:03 | 2021-10-08T14:05:03 | 404,517,683 | 1 | 2 | NOASSERTION | 2021-09-08T22:53:09 | 2021-09-08T22:53:09 | null | UTF-8 | Python | false | false | 3,994 | py | # Copyright 2021 QuantumBlack Visual Analytics Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND
# NONINFRINGEMENT. IN NO EVENT WILL THE LICENSOR OR OTHER CONTRIBUTORS
# BE LIABLE FOR ANY CLAIM, DAMAGES, OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF, OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# The QuantumBlack Visual Analytics Limited ("QuantumBlack") name and logo
# (either separately or in combination, "QuantumBlack Trademarks") are
# trademarks of QuantumBlack. The License does not grant you any right or
# license to the QuantumBlack Trademarks. You may not use the QuantumBlack
# Trademarks or any confusingly similar mark as a trademark for your product,
# or use the QuantumBlack Trademarks in any other manner that might cause
# confusion in the marketplace, including but not limited to in advertising,
# on websites, or on software.
#
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from click.testing import CliRunner
@pytest.fixture
def yaml_dump_mock(mocker):
    """Replace ``yaml.dump`` with a mock so tests can assert on what the CLI serialises."""
    patched_dump = mocker.patch("yaml.dump", return_value="Result YAML")
    return patched_dump
@pytest.fixture
def pipelines_dict():
    """Expected node listings keyed by registered pipeline name."""
    de_nodes = ["split_data (split_data)"]
    ds_nodes = [
        "train_model (train_model)",
        "predict (predict)",
        "report_accuracy (report_accuracy)",
    ]
    # "__default__" is the concatenation of the "de" and "ds" pipelines.
    return {
        "de": de_nodes,
        "ds": ds_nodes,
        "dp": ["data_processing.split_data (split_data)"],
        "__default__": de_nodes + ds_nodes,
    }
@pytest.mark.usefixtures("chdir_to_dummy_project", "patch_log")
def test_list_registered_pipelines(
    fake_project_cli, fake_metadata, yaml_dump_mock, pipelines_dict
):
    """`kedro registry list` dumps the sorted pipeline names as YAML."""
    result = CliRunner().invoke(
        fake_project_cli, ["registry", "list"], obj=fake_metadata
    )
    assert not result.exit_code
    # The CLI serialises via yaml.dump, which the fixture has patched.
    yaml_dump_mock.assert_called_once_with(sorted(pipelines_dict.keys()))
@pytest.mark.usefixtures("chdir_to_dummy_project", "patch_log")
class TestRegistryDescribeCommand:
    """Tests for ``kedro registry describe`` (pipeline description CLI)."""

    @pytest.mark.parametrize("pipeline_name", ["de", "ds", "dp", "__default__"])
    def test_describe_registered_pipeline(
        self,
        fake_project_cli,
        fake_metadata,
        yaml_dump_mock,
        pipeline_name,
        pipelines_dict,
    ):
        """Describing an existing pipeline dumps its node list as YAML."""
        result = CliRunner().invoke(
            fake_project_cli,
            ["registry", "describe", pipeline_name],
            obj=fake_metadata,
        )
        assert not result.exit_code
        expected_dict = {"Nodes": pipelines_dict[pipeline_name]}
        yaml_dump_mock.assert_called_once_with(expected_dict)

    def test_registered_pipeline_not_found(self, fake_project_cli, fake_metadata):
        """An unknown pipeline name exits non-zero with a helpful message."""
        result = CliRunner().invoke(
            fake_project_cli, ["registry", "describe", "missing"], obj=fake_metadata
        )
        assert result.exit_code
        expected_output = (
            "Error: `missing` pipeline not found. Existing pipelines: "
            "[__default__, de, dp, ds]\n"
        )
        assert expected_output in result.output

    def test_describe_registered_pipeline_default(
        self,
        fake_project_cli,
        fake_metadata,
        yaml_dump_mock,
        pipelines_dict,
    ):
        """Omitting the pipeline name describes ``__default__``."""
        result = CliRunner().invoke(
            fake_project_cli,
            ["registry", "describe"],
            obj=fake_metadata,
        )
        assert not result.exit_code
        expected_dict = {"Nodes": pipelines_dict["__default__"]}
        yaml_dump_mock.assert_called_once_with(expected_dict)
| [
"noreply@github.com"
] | sbrugman.noreply@github.com |
b3712cec61edb7ce27ef60bd2b0341d12e27f6e3 | f487b395f47116bc7480fcbdc32be21354d8e0ea | /test.py | 9e0cd0712d8cd5065b6cd1b6f7f7bb1c1217d0ac | [] | no_license | greenmac/kaggle_digit_recognizer | cbd3bed60bc5d386e09fb71bff665048e1a9f2cb | d864d47874dad96a3fa44a99d076630379a2fb10 | refs/heads/master | 2021-01-07T04:22:56.478579 | 2020-03-16T06:33:05 | 2020-03-16T06:33:05 | 241,577,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,572 | py | import random
import time
from datetime import datetime
'''
class RanDate():
def strTimeProp(self, start, end, prop, frmt):
stime = time.mktime(time.strptime(start, frmt))
etime = time.mktime(time.strptime(end, frmt))
ptime = stime + prop * (etime - stime)
return int(ptime)
def randomDate(self, frmt='%Y-%m-%d %H:%M:%S'):
start = '1950-01-01 00:00:00'
end = '2000-12-31 23:59:59'
return time.strftime(frmt,
time.localtime(self.strTimeProp(start, end, random.random(), frmt)))
print(RanDate().randomDate())
'''
# def strTimeProp(start, end, prop, frmt):
# stime = time.mktime(time.strptime(start, frmt))
# etime = time.mktime(time.strptime(end, frmt))
# ptime = stime + prop * (etime - stime)
# return int(ptime)
# def randomDate(frmt='%Y-%m-%d %H:%M:%S'):
# start = '1950-01-01 00:00:00'
# end = '2000-12-31 23:59:59'
# return time.strftime(frmt,
# time.localtime(strTimeProp(start, end, random.random(), frmt)))
# randomDate = randomDate()
# # print(randomDate())
# qs = datetime.strptime(randomDate, "%Y-%m-%d %H:%M:%S")
# print(type(qs))
def fun(a, *args, **kwargs):
print(f'a={a}')
for arg in args:
print(f'Optional argument: {arg}')
for k, v in kwargs.items():
print(f'Optional kwargs argument key: {k} value {v}')
# print("")
# args = [1, 2, 3, 4]
# fun(*args)
# print("")
# kwargs = {'k1':10, 'k2':11}
# fun(1, **kwargs)
print("")
args = [1, 2, 3, 4]
kwargs = {'k1':10, 'k2':11}
fun(1, *args, **kwargs) | [
"alwaysmac@msn.com"
] | alwaysmac@msn.com |
fd575982d90e09ee5b0320c34f170d2652cc7322 | 453dba4c0f167faf97cfc80233bf1acc5b87e58e | /_unittests/ut_documentation/test_notebook_javascript.py | 12c52fd9f4d5427b2c237f594636478929c16b5d | [
"MIT"
] | permissive | sdpython/code_beatrix | 6a092dacbf830a90d374e8c4871d8749e096d5a3 | e39f8ae416c23940c1a227c11c667c19104b2ff4 | refs/heads/master | 2023-02-06T10:52:51.418417 | 2023-02-04T12:11:45 | 2023-02-04T12:11:45 | 32,282,235 | 1 | 2 | MIT | 2022-10-16T15:20:28 | 2015-03-15T20:26:17 | Jupyter Notebook | UTF-8 | Python | false | false | 1,335 | py | # -*- coding: utf-8 -*-
"""
@brief test log(time=59s)
"""
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder, add_missing_development_version
from pyquickhelper.ipythonhelper import execute_notebook_list_finalize_ut
from code_beatrix.automation.notebook_test_helper import ls_notebooks, execute_notebooks, clean_function_notebook
import code_beatrix
class TestNotebookJavascript(unittest.TestCase):
def setUp(self):
add_missing_development_version(["pymyinstall", "pyensae", "pymmails", "ensae_projects",
"jyquickhelper"], __file__, hide=True)
def test_notebook_javascript(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
temp = get_temp_folder(__file__, "temp_javascript")
keepnote = ls_notebooks("javascript")
self.assertTrue(len(keepnote) > 0)
res = execute_notebooks(temp, keepnote,
lambda i, n: "deviner" not in n,
fLOG=fLOG,
clean_function=clean_function_notebook)
execute_notebook_list_finalize_ut(
res, fLOG=fLOG, dump=code_beatrix)
if __name__ == "__main__":
unittest.main()
| [
"xavier.dupre@gmail.com"
] | xavier.dupre@gmail.com |
33c663db4ac2ac5d3f963fc3e0ab25c673c7cba7 | 6b181f5640e2c3df91d1a6d5c95cf1989012f0d5 | /client/canyons-of-mars/client_utils.py | 5c047f04104e32a1a2b0ca5bc61396532f2e39d0 | [
"MIT"
] | permissive | GamesCreatorsClub/GCC-Rover | 9b84dcd84cce60c321906223f8c24f99722d1bae | 25a69f62a1bb01fc421924ec39f180f50d6a640b | refs/heads/master | 2021-01-11T18:04:05.876976 | 2019-10-01T15:20:30 | 2019-10-01T15:20:30 | 79,477,472 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,762 | py |
#
# Copyright 2016-2019 Games Creators Club
#
# MIT License
#
import os
import pyros
import time
from rover import Rover, RoverState
from telemetry import TelemetryStreamDefinition
from telemetry.telemetry_client import PubSubTelemetryClient
class PyrosTelemetryClient(PubSubTelemetryClient):
def __init__(self, publish_method, subscribe_method, topic='telemetry'):
super(PyrosTelemetryClient, self).__init__(topic, publish_method, subscribe_method)
class TelemetryUtil:
def __init__(self, topic="telemetry"):
self.client = PyrosTelemetryClient(pyros.publish, pyros.subscribeBinary, topic=topic)
self.stream = None
self.step = 10 # 15 seconds a time
self.finished_downloading = False
self.timestamp = None
self.recordCallback = None
self.error = None
def processStreamDef(self, stream_def):
if stream_def is None:
print("No such stream")
self.error = "No such stream"
else:
self.stream = stream_def
self.client.getOldestTimestamp(self.stream, self.processOldestTimestamp)
def processOldestTimestamp(self, oldest_timestamp, records_count):
self.timestamp = oldest_timestamp
if oldest_timestamp == 0.0:
print("Telemetry: The oldest timestamp is " + str(oldest_timestamp) + " (there are no records) and there are " + str(records_count) + " records.")
else:
print("Telemetry: The oldest timestamp is " + str(oldest_timestamp) + " (it is " + str(time.time() - oldest_timestamp) + "s ago) and there are " + str(records_count) + " records.")
if records_count > 0:
self.client.retrieve(self.stream, self.timestamp, self.timestamp + self.step, self.processData)
def processData(self, records):
self.timestamp += self.step
for record in records:
if self.recordCallback is not None:
self.recordCallback(record)
if self.timestamp > time.time() or len(records) == 0:
self.client.trim(self.stream, time.time())
self.finished_downloading = True
return
self.client.trim(self.stream, self.timestamp)
self.client.retrieve(self.stream, self.timestamp, self.timestamp + self.step, self.processData)
def fetchData(self, stream_name, recordCallback):
self.stream = None
self.finished_downloading = False
self.timestamp = None
self.error = None
self.recordCallback = recordCallback
self.client.getStreamDefinition(stream_name, self.processStreamDef)
class RunLog:
def __init__(self, rover):
self.rover = rover
self.logger_def = RoverState.defineLogger(TelemetryStreamDefinition('rover-state'))
self.records = []
self.ptr = 0
self.filename = 'rover-state'
def reset(self):
self.records = []
self.ptr = 0
def addNewRecord(self, record):
bts = record[len(record) - 1]
if isinstance(bts, bytes):
record = [r for r in record[:-1]] + [bts.decode('ascii')]
self.records.append(record)
def currentRecord(self):
if self.ptr >= len(self.records):
if len(self.records) > 0:
self.ptr = len(self.records) - 1
else:
return None
return self.records[self.ptr]
def setup(self):
if len(self.records) > 0:
state = RoverState(self.rover, None, None, None, None, None)
state.recreate(self.records[self.ptr])
state.calculate()
self.rover.current_state = state
def previousRecord(self, step):
if self.ptr == 0:
return False
self.ptr -= step
if self.ptr < 0:
self.ptr = 0
self.setup()
return True
def nextRecord(self, step):
if self.ptr >= len(self.records) - 1:
return False
self.ptr += step
if self.ptr >= len(self.records):
self.ptr = len(self.records) - 1
self.setup()
return True
def size(self):
return len(self.records)
def currentRecordTimeOffset(self):
if len(self.records) == 0:
return 0
t0 = self.records[0][0]
tc = self.records[self.ptr][0]
return tc - t0
def _makeFilename(self, i):
return self.filename + "." + str(i) + ".csv"
def _findFilenameNumber(self):
i = 1
filename = self._makeFilename(i)
while os.path.exists(filename):
i += 1
filename = self._makeFilename(i)
return i - 1
def save(self):
i = self._findFilenameNumber()
i += 1
filename = self._makeFilename(i)
with open(filename, "wt") as file:
file.write("timestamp,")
file.write(",".join([f.name for f in self.logger_def.fields]) + "\n")
for record in self.records:
file.write(",".join([str(f) for f in record]) + "\n")
def load(self):
i = self._findFilenameNumber()
filename = self._makeFilename(i)
if os.path.exists(filename):
with open(filename, "rt") as file:
self.reset()
header = file.readline()
lines = file.readlines()
for line in lines:
if line.endswith("\n"):
line = line[0:len(line) - 1]
split = line.split(",")
timestamp = float(split[0])
del split[0]
record = [timestamp] + [d[0].fromString(d[1]) for d in zip(self.logger_def.fields, split)]
self.records.append(record)
| [
"natdan@users.noreply.github.com"
] | natdan@users.noreply.github.com |
78ae01813d9135d215eaddfa321fc99bd7b6143c | 6f866eb49d0b67f0bbbf35c34cebe2babe2f8719 | /app/data_models/fulfilment_request.py | 2a505c4cf7f376af16fcb182b36776ef386e4ff9 | [
"MIT",
"LicenseRef-scancode-proprietary-license"
] | permissive | ONSdigital/eq-questionnaire-runner | 681b0d081f9cff0ee4ae3017ecc61f7390d553bf | 87e7364c4d54fee99e6a5e96649123f11c4b53f1 | refs/heads/main | 2023-09-01T21:59:56.733363 | 2023-08-31T15:07:55 | 2023-08-31T15:07:55 | 219,752,509 | 12 | 18 | MIT | 2023-09-14T11:37:31 | 2019-11-05T13:32:18 | Python | UTF-8 | Python | false | false | 876 | py | from abc import ABC, abstractmethod
from datetime import datetime, timezone
from functools import cached_property
from typing import Mapping
from uuid import uuid4
from app.utilities.json import json_dumps
class FulfilmentRequest(ABC):
@abstractmethod
def _payload(self) -> Mapping:
pass # pragma: no cover
@cached_property
def transaction_id(self) -> str:
return str(uuid4())
@property
def message(self) -> bytes:
message = {
"event": {
"type": "FULFILMENT_REQUESTED",
"source": "QUESTIONNAIRE_RUNNER",
"channel": "EQ",
"dateTime": datetime.now(tz=timezone.utc).isoformat(),
"transactionId": self.transaction_id,
},
"payload": self._payload(),
}
return json_dumps(message).encode("utf-8")
| [
"noreply@github.com"
] | ONSdigital.noreply@github.com |
69abc1688de4f8f5f99b6a5cd6477c25dc505f9d | 2196f8fc48d24a27243f395ab849cd4410cbe87b | /test/test_zero_tensors.py | 972a08d4b27a95e4d4a891ab977d31aac52c43f8 | [
"MIT"
] | permissive | zuru/pytorch_scatter | 8097bffb9732464e185c2bae266c9e8aea96d4e6 | d7dbb0807dede6a9a1021ce3dc2bc2972b168a24 | refs/heads/master | 2021-12-14T01:38:18.737462 | 2021-12-08T10:03:04 | 2021-12-08T10:03:04 | 238,142,178 | 0 | 0 | MIT | 2020-02-04T06:51:02 | 2020-02-04T06:51:01 | null | UTF-8 | Python | false | false | 229 | py | import torch
from torch_scatter import scatter
def test_zero_elements():
x = torch.randn(0, 16)
index = torch.tensor([]).view(0, 16)
print(x)
print(index)
scatter(x, index, dim=0, dim_size=0, reduce="add")
| [
"matthias.fey@tu-dortmund.de"
] | matthias.fey@tu-dortmund.de |
07985a0deb76d16dc2d01d2243eb8775632649e1 | 7a398f8dbcf465dc182d63dfe11a71d94a68c235 | /SyntaxEx14/venv/bin/easy_install | d7af2d3dad86685f38c4d10f8fbefef699e5d161 | [] | no_license | apolonis/PythonExamples | 48b0bd6c0e86388cc2772b27fcdeffacb7d0191f | 018c3ccf0f1f57f807e8e9059afa3db408094a5c | refs/heads/master | 2020-09-20T07:48:22.233460 | 2019-12-16T14:39:34 | 2019-12-16T14:39:34 | 224,413,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | #!/home/marko/PycharmProjects/SyntaxEx14/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
)
| [
"riddickschronicles@gmail.com"
] | riddickschronicles@gmail.com | |
ae32e8e8c755c8a31f1fdbce1a01816ab57aec48 | f3b233e5053e28fa95c549017bd75a30456eb50c | /tyk2_input/28/28-30_wat_20Abox/set_1ns_equi_m.py | e7ec5a1ea8866d3680ac187e668d1044c1d1e7a9 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 923 | py | import os
dir = '/mnt/scratch/songlin3/run/tyk2/L28/wat_20Abox/ti_one-step/28_30/'
filesdir = dir + 'files/'
temp_equiin = filesdir + 'temp_equi_m.in'
temp_pbs = filesdir + 'temp_1ns_equi_m.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.system("rm -r %6.5f" %(j))
os.system("mkdir %6.5f" %(j))
os.chdir("%6.5f" %(j))
os.system("rm *")
workdir = dir + "%6.5f" %(j) + '/'
#equiin
eqin = workdir + "%6.5f_equi_m.in" %(j)
os.system("cp %s %s" %(temp_equiin, eqin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, eqin))
#PBS
pbs = workdir + "%6.5f_1ns_equi.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#top
os.system("cp ../28-30_merged.prmtop .")
os.system("cp ../0.5_equi_0_3.rst .")
#submit pbs
os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"songlin3@msu.edu"
] | songlin3@msu.edu |
858bd6899eb9a8332d79dda00ceb3117f216ca37 | 8b9fcb5f2207b98da1113a26c1b7915ae0961684 | /tests/publishing/test_geocodesddraft.py | 14e9acc16e03e8a73c1d8f016ccd655a39084e87 | [
"BSD-3-Clause"
] | permissive | dcworldwide/arcpyext | fe4bcef3fd7ac0ffb8fd5e89fd925a951dc22fd6 | 02fe4dcd3ed728c91a078dee255abacb0fe2aed0 | refs/heads/master | 2022-01-21T09:14:49.937159 | 2018-10-15T01:30:04 | 2018-10-15T01:30:04 | 155,815,154 | 0 | 0 | BSD-3-Clause | 2018-11-02T04:45:09 | 2018-11-02T04:45:08 | null | UTF-8 | Python | false | false | 1,850 | py | from __future__ import (absolute_import, division, print_function, unicode_literals)
from builtins import (bytes, dict, int, list, object, range, str, ascii, chr, hex, input, next, oct, open, pow, round,
super, filter, map, zip)
import os.path
import shutil
import arcpyext
import pytest
from arcpyext.publishing._geocode_sddraft import GeocodeSDDraft
from .. helpers import *
SDDRAFT_FILE_PATH = os.path.abspath("{0}/../samples/geocodeservice.sddraft".format(os.path.dirname(__file__)))
SDDRAFT_FILE_PATH_COPY = os.path.abspath("{0}/../samples/geocodeservice.copy.sddraft".format(os.path.dirname(__file__)))
SDDRAFT_SAVE_TEST_FILE_PATH = os.path.abspath("{0}/../samples/geocodeservice.savetest.sddraft".format(os.path.dirname(__file__)))
@pytest.fixture
def sddraft():
shutil.copyfile(SDDRAFT_FILE_PATH, SDDRAFT_FILE_PATH_COPY)
return arcpyext.publishing.load_geocode_sddraft(SDDRAFT_FILE_PATH_COPY)
from .sddraftbase import *
@pytest.mark.parametrize(("capabilities", "expected", "ex"), [
([GeocodeSDDraft.Capability.geocode], [GeocodeSDDraft.Capability.geocode], None),
([], [], None),
(["Geocode"], [GeocodeSDDraft.Capability.geocode], None),
(["Fail"], None, ValueError),
([123], None, TypeError)
])
def test_capabilities(sddraft, capabilities, expected, ex):
assert isinstance(type(sddraft).capabilities, property) == True
if ex != None:
with pytest.raises(ex):
sddraft.capabilities = capabilities
else:
sddraft.capabilities = capabilities
assert set(sddraft.capabilities) == set(expected)
def test_save(sddraft):
sddraft.save()
assert True
@pytest.mark.parametrize(("output"), [
(SDDRAFT_SAVE_TEST_FILE_PATH)
])
def test_save_a_copy(sddraft, output):
sddraft.save_a_copy(output)
assert os.path.isfile(output) == True | [
"DavidWhittingham@users.noreply.github.com"
] | DavidWhittingham@users.noreply.github.com |
a72e4cd9b490f00aa9e20de7fee6ccd519ab65cc | eeb4752a22ef99152784c0ef6f720f8e4f2dd9d9 | /myrest/talk/models.py | 5a922ffc710e4b3be17cc2d911aa1ac6044503fc | [] | no_license | borko81/django-rest-test | 9a63d328fea8155029bb3d1d29ab624ea4a0027b | e21d41494154622c2472b679df40d5f42d8ab356 | refs/heads/main | 2023-08-05T22:36:10.099746 | 2021-09-10T17:54:20 | 2021-09-10T17:54:20 | 318,290,143 | 0 | 0 | null | 2021-08-23T18:51:22 | 2020-12-03T18:53:44 | Python | UTF-8 | Python | false | false | 232 | py | from django.contrib.auth.models import User
from django.db import models
from talk.helpers.add_update import Helper
class Post(Helper):
author = models.ForeignKey(User, on_delete=models.CASCADE)
text = models.TextField()
| [
"bstoilov81@gmail.com"
] | bstoilov81@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.