blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2e8e22eccf577adb8f50383b8185e493ecca6916 | eea70db78a214217ba41801d870aba127ba56c56 | /Code/Tested TD Policy-Iteration/ARL_package/CodeFramework/main.py | 713e3872ac1424be5f414871c9f0572523da5a91 | [] | no_license | 356255531/poppyProject | 191b9a9e29817e3d6ce8c85dd5c0702982dd7157 | 678044afffa6390fac8cb402099bd32ae72d8a33 | refs/heads/master | 2021-01-21T14:32:47.373344 | 2016-07-12T19:42:25 | 2016-07-12T19:42:25 | 58,334,432 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,842 | py | __author__ = 'erik'
from . import Actor, LearningAlgorithm, Reward, StateActionSpace, StateObserver
def run_learning(actor, learning_algorithm, reward, state_observer):
    """Drive the act/observe/reward loop until the observer reports a falsy state.

    All four collaborators are duck-typed; the superclass isinstance checks
    were intentionally left disabled in this framework.
    """
    state = state_observer.get_current_state()
    while state:
        action = learning_algorithm.get_next_action(state)
        actor.perform_action(action)
        successor = state_observer.get_current_state()
        reward_given = reward.get_rewards(state, action, successor)
        learning_algorithm.receive_reward(state, action, successor, reward_given)
        state = successor
def run_episode(actor, learning_algorithm, reward, state_observer, state_action_space, max_num_iterations = 1000000):
    """Run one learning episode and report how many iterations it took.

    The loop ends when a terminal state is reached (a final reward with the
    no-op action ``(0, 0)`` is then delivered) or after ``max_num_iterations``.
    Collaborators are duck-typed; the original superclass checks stay disabled.
    """
    actor.initialise_episode(state_action_space)
    current_state = state_observer.get_current_state()
    current_iter = 0
    while current_iter < max_num_iterations:
        next_action = learning_algorithm.get_next_action(current_state)
        actor.perform_action(next_action)
        next_state = state_observer.get_current_state()
        reward_given = reward.get_rewards(current_state, next_action, next_state)
        learning_algorithm.receive_reward(current_state, next_action, next_state, reward_given)
        current_state = next_state
        current_iter += 1
        if state_action_space.is_terminal_state(current_state):
            # Terminal bookkeeping: one last reward with the no-op action.
            # At this point current_state == next_state by construction.
            reward_given = reward.get_rewards(current_state, (0, 0), next_state)
            learning_algorithm.receive_reward(current_state, (0, 0), next_state, reward_given)
            break
    learning_algorithm.finalise_episode()
    # print() with a single pre-built string works identically on Python 2 and 3
    # (the original used a Python-2-only print statement).
    print("run_episode: Episode ended after " + str(current_iter) + " iterations.")
# Smoke test: wire run_learning to the dummy framework classes and run once.
if __name__ == '__main__':
    from dummy_classes import *
    dummy_states_actions = DummyStateActionSpace()
    dummy_states_actions.states[0] = 2 #because I defined the loop to run while current state wasn't 0/false
    dummy_actor = DummyActor()
    dummy_observer = DummyObserver(dummy_states_actions, dummy_actor)
    dummy_learner = DummyLearner(dummy_states_actions)
    dummy_reward = DummyReward(dummy_states_actions)
    run_learning(dummy_actor, dummy_learner, dummy_reward, dummy_observer)
print "Values: " + str(dummy_learner.values) | [
"hanzw356255531@icloud.com"
] | hanzw356255531@icloud.com |
f8b3978cb753019e9ee6fec4c16e624de7034c9b | d047fed56a7d1de1d7c32ce83b8d62646fa7d19e | /leapyearQuestion.py | 6b7641183cdad87321077e16411144ded6c16051 | [] | no_license | shantinavgurukul/listQuestions | 508b6bd489731d5b8a9ba1a27e5b88b1bb27341a | 21f413f65b374e5fa63e0366591895757146d7c7 | refs/heads/master | 2022-11-23T07:56:15.392836 | 2020-08-02T03:51:46 | 2020-08-02T03:51:46 | 284,384,225 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | year=int (input("enter the year="))
if(year%4==0 and year%100!=0):
print(year,"it's leap year.")
elif(year%400==0):
print(year,"it's leap year-")
else:
print(year, "it's not leap year") | [
"you@example.com"
] | you@example.com |
83a1fbfb23b45c442e115f2747f1d9c39fafda6f | 86177bf66a3d4f6ffcd8972a1eb7305eb25d5301 | /lesson_10_2.py | e919169a0b0c65293af3e625cb67b05a6f8bef07 | [] | no_license | Mameluke8888/QA_Automation_Lesson_10_2 | 941dd99c5fb65d98407124514e146e6b51f528c1 | 175858d4b4d189926633d5323f4ac724e73f52c7 | refs/heads/main | 2023-05-07T01:36:35.742796 | 2021-05-24T12:15:35 | 2021-05-24T12:15:35 | 370,340,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,236 | py | from selenium.webdriver.common.by import By
import time
from browser import Browser
from UIElement import UIElement as Element
from dropdown import Dropdown
from header import Header
from right_menu import RightMenu
from login_page import LoginPage
from registration_page import RegistrationPage
URL = "https://techskillacademy.net/brainbucket"
# May 23rd, 2021
# student Evgeny Abdulin
def test_registration_through_dropdown():
    """Register a new account reached via the Account dropdown and
    assert the success page appears.

    NOTE(review): uses hard-coded personal data and a fixed email, so a
    second run against the same backend will likely fail on a duplicate
    account — presumably the shop is reset between runs; confirm.
    """
    browser = Browser(URL, "Firefox")
    driver = browser.get_driver()  # NOTE(review): `driver` is never used here
    login_page = LoginPage(browser)
    login_page.open_registration_from_account_dropdown()
    registration_form = RegistrationPage(browser)
    assert registration_form.get_form_title() == 'Register Account'
    # Fill every field of the registration form.
    registration_form.enter_first_name("Svetlana")
    registration_form.enter_last_name("Match")
    registration_form.enter_email("svetlana.match2@gmail.com")
    registration_form.enter_telephone("3123405555")
    registration_form.enter_first_line_address("175 W Jackson St")
    registration_form.enter_city("Chicago")
    registration_form.select_state("Illinois")
    registration_form.enter_password("qwerty123")
    registration_form.confirm_password("qwerty123")
    registration_form.subscribe_to_newsletters()
    registration_form.agree_to_privacy_policy()
    registration_form.submit_form()
    # Verify the confirmation page content.
    successful_registration_title = Element(browser, By.XPATH, "//*[@id='content']/h1")
    assert successful_registration_title.get_text() == 'Your Account Has Been Created!'
    successful_registration_subtitle = Element(browser, By.XPATH, "//*[@id='content']/p")
    assert successful_registration_subtitle.get_text() == 'Congratulations! ' \
                                                          'Your new account has been successfully created!'
    time.sleep(5)
    browser.shutdown()
def test_registration_from_right_menu():
    """Register a new account reached via the right-hand menu and
    assert the success page appears.

    NOTE(review): duplicates test_registration_through_dropdown except for
    the navigation path — a shared fill-form helper would remove the copy.
    """
    browser = Browser(URL, "Firefox")
    driver = browser.get_driver()  # NOTE(review): `driver` is never used here
    # in Account dropdown select Login option
    header = Header(browser)
    header.open_login_page()
    # click on Register btn in the right menu
    right_menu = RightMenu(browser)
    right_menu.click_registration()
    registration_form = RegistrationPage(browser)
    assert registration_form.get_form_title() == 'Register Account'
    # Fill every field of the registration form.
    registration_form.enter_first_name("Svetlana")
    registration_form.enter_last_name("Match")
    registration_form.enter_email("svetlana.match2@gmail.com")
    registration_form.enter_telephone("3123405555")
    registration_form.enter_first_line_address("175 W Jackson St")
    registration_form.enter_city("Chicago")
    registration_form.select_state("Illinois")
    registration_form.enter_password("qwerty123")
    registration_form.confirm_password("qwerty123")
    registration_form.subscribe_to_newsletters()
    registration_form.agree_to_privacy_policy()
    registration_form.submit_form()
    # Verify the confirmation page content.
    successful_registration_title = Element(browser, By.XPATH, "//*[@id='content']/h1")
    assert successful_registration_title.get_text() == 'Your Account Has Been Created!'
    successful_registration_subtitle = Element(browser, By.XPATH, "//*[@id='content']/p")
    assert successful_registration_subtitle.get_text() == 'Congratulations! ' \
                                                          'Your new account has been successfully created!'
    time.sleep(5)
    browser.shutdown()
def test_header():
    """Exercise header widgets: search and currency switch.

    NOTE(review): no assertions — this only checks the calls don't raise.
    """
    browser = Browser(URL, "Firefox")
    driver = browser.get_driver()  # NOTE(review): `driver` is never used here
    # in Account dropdown select Login option
    header = Header(browser)
    # header.open_wishlist()
    header.search_for('laptop')
    header.change_currency("eur")
    time.sleep(3)
    browser.shutdown()
def test_login():
    """Log in with the account created by the registration tests.

    NOTE(review): no assertion on the post-login page — this only checks
    the flow does not raise. Depends on a prior registration run having
    created this user.
    """
    browser = Browser(URL, "Firefox")
    driver = browser.get_driver()  # NOTE(review): `driver` is never used here
    header = Header(browser)
    header.open_login_page()
    login_page = LoginPage(browser)
    login_page.email_input_type("svetlana.match2@gmail.com")
    login_page.password_input_type("qwerty123")
    time.sleep(1)
    login_page.login_btn.click()
    time.sleep(3)
    browser.shutdown()
# Manual runner: uncomment the scenario(s) to execute outside pytest.
if __name__ == "__main__":
    # test_registration_through_dropdown()
    # test_registration_from_right_menu()
    # test_header()
    test_login()
| [
"evgenyabdulin@Evgenys-Mac-mini.local"
] | evgenyabdulin@Evgenys-Mac-mini.local |
1607d69f94ab941b55a795973d3de6ce2af8080b | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /mDuDhhMWrdHJSGdtm_11.py | d5fda998b13ee4c0e40bf595960ffe6cd51bc475 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py |
def is_exactly_three(n):
    """Return True when ``n`` has exactly three positive divisors.

    Only numbers of the form p**2 with p prime qualify (divisors 1, p, p**2).
    Uses exact integer arithmetic (math.isqrt) instead of the previous float
    ``n ** 0.5`` test, which loses precision for large n; also fixes the old
    behaviour of returning True for 0.
    """
    from math import isqrt

    if n < 4:  # 4 = 2**2 is the smallest number with three divisors
        return False
    root = isqrt(n)
    if root * root != n:  # n must be a perfect square
        return False
    # The square root must itself be prime: trial-divide up to sqrt(root).
    return all(root % i for i in range(2, isqrt(root) + 1))
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
78619db4bd98a279210cb7f4d217a84cfbeb1701 | 5d0fe4a9e026234fe15e6c4380355061bb4dac64 | /tests/browser/pages/invest/hpo.py | d5ba9be3f4f278923f41b32b53e671e09ebc9516 | [
"MIT"
] | permissive | uktrade/directory-tests | 37e243862da8ac594cf1ea06ade714db5e1aba03 | 39ec6c26203580238e65566a472cbd80916e6726 | refs/heads/master | 2022-08-09T16:58:56.248982 | 2022-08-01T12:25:10 | 2022-08-01T12:25:10 | 71,367,747 | 4 | 3 | MIT | 2022-08-01T12:26:09 | 2016-10-19T14:48:57 | Python | UTF-8 | Python | false | false | 8,015 | py | # -*- coding: utf-8 -*-
"""Invest in Great - HPO Page Object."""
import logging
from typing import List
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webdriver import WebDriver
from directory_tests_shared import URLs
from directory_tests_shared.enums import PageType, Service
from pages import ElementType, common_selectors
from pages.common_actions import (
Selector,
assertion_msg,
check_for_sections,
check_if_element_is_not_visible,
check_url,
find_element,
get_selectors,
go_to_url,
scroll_to,
)
NAME = "HPO"
NAMES = [
"Aquaculture",
"High productivity food production",
"Lightweight structures",
"Photonics and microelectronics",
"Rail infrastructure",
"Space",
"Sustainable packaging",
]
SERVICE = Service.INVEST
TYPE = PageType.HPO
URL = URLs.INVEST_HPO.absolute
PAGE_TITLE = "high potential"
SubURLs = {
"aquaculture": URLs.INVEST_HPO_AQUACULTURE.absolute,
"high productivity food production": URLs.INVEST_HPO_HIGH_PRODUCTIVITY_FOOD.absolute,
"lightweight structures": URLs.INVEST_HPO_LIGHTWEIGHT.absolute,
"photonics and microelectronics": URLs.INVEST_HPO_PHOTONICS.absolute,
"rail infrastructure": URLs.INVEST_HPO_RAIL.absolute,
"space": URLs.INVEST_HPO_SPACE.absolute,
"sustainable packaging": URLs.INVEST_HPO_SUSTAINABLE_PACKAGING.absolute,
}
SELECTORS = {
"hero": {
"self": Selector(By.ID, "hero"),
"heading": Selector(By.CSS_SELECTOR, "#hero h1"),
},
"contact us": {
"self": Selector(By.ID, "contact-section"),
"heading": Selector(By.CSS_SELECTOR, "#contact-section h2"),
"get in touch": Selector(By.CSS_SELECTOR, "#contact-section a"),
},
"proposition one": {
"self": Selector(By.ID, "proposition-one"),
"heading": Selector(By.CSS_SELECTOR, "#proposition-one h2"),
"view video transcript": Selector(
By.CSS_SELECTOR, "#proposition-one details summary", type=ElementType.BUTTON
),
"video transcript": Selector(By.CSS_SELECTOR, "#proposition-one details p"),
},
"opportunity list": {"self": Selector(By.ID, "opportunity-list")},
"proposition two": {
"self": Selector(By.ID, "proposition-two"),
"heading": Selector(By.CSS_SELECTOR, "#proposition-two div:nth-child(1) h2"),
"list of propositions": Selector(By.CSS_SELECTOR, "#proposition-two ul"),
},
"competitive advantages": {
"self": Selector(By.ID, "competitive-advantages"),
"first - icon": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(1) img"
),
"first - heading": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(1) div ~ div > h3"
),
"first - list": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(1) div ~ div > ul"
),
"second - icon": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(2) img"
),
"second - heading": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(2) div ~ div > h3"
),
"second - list": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(2) div ~ div > ul"
),
"third - icon": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(3) img"
),
"third - heading": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(3) div ~ div > h3"
),
"third - list": Selector(
By.CSS_SELECTOR, "#competitive-advantages li:nth-child(3) div ~ div > ul"
),
},
"testimonial": {
"self": Selector(By.ID, "testimonial"),
"quote": Selector(By.CSS_SELECTOR, "#testimonial p"),
},
"company list": {
"self": Selector(By.ID, "company-list"),
"heading": Selector(By.CSS_SELECTOR, "#company-list p"),
"list": Selector(By.CSS_SELECTOR, "#company-list ul"),
"images": Selector(By.CSS_SELECTOR, "#company-list ul img"),
},
"case studies": {
"self": Selector(By.ID, "case-studies"),
"heading": Selector(By.CSS_SELECTOR, "#case-studies h2"),
"first case study": Selector(
By.CSS_SELECTOR,
"#case-studies details:nth-child(1)",
type=ElementType.BUTTON,
),
"first - heading": Selector(
By.CSS_SELECTOR, "#case-studies details:nth-child(1) h3"
),
"first - text": Selector(
By.CSS_SELECTOR, "#case-studies details:nth-child(1) p"
),
"second case study": Selector(
By.CSS_SELECTOR,
"#case-studies details:nth-child(2)",
type=ElementType.BUTTON,
),
"second - heading": Selector(
By.CSS_SELECTOR, "#case-studies details:nth-child(2) h3"
),
"second - text": Selector(
By.CSS_SELECTOR, "#case-studies details:nth-child(2) p"
),
"third case study": Selector(
By.CSS_SELECTOR,
"#case-studies details:nth-child(3)",
type=ElementType.BUTTON,
),
"third - heading": Selector(
By.CSS_SELECTOR, "#case-studies details:nth-child(3) h3"
),
"third - text": Selector(
By.CSS_SELECTOR, "#case-studies details:nth-child(3) p"
),
},
"other opportunities": {
"self": Selector(By.ID, "other-opportunities"),
"first opportunity": Selector(
By.CSS_SELECTOR, "#other-opportunities div:nth-child(1) > div > a"
),
"second opportunity": Selector(
By.CSS_SELECTOR, "#other-opportunities div:nth-child(2) > div > a"
),
},
}
SELECTORS.update(common_selectors.INTERNATIONAL_HEADER_WO_LANGUAGE_SELECTOR)
SELECTORS.update(common_selectors.BETA_BAR)
SELECTORS.update(common_selectors.ERROR_REPORTING)
SELECTORS.update(common_selectors.INTERNATIONAL_FOOTER)
UNEXPECTED_ELEMENTS = {
"breadcrumbs": {"itself": Selector(By.CSS_SELECTOR, "div.breadcrumbs")}
}
def visit(driver: WebDriver, *, page_name: str = None):
    """Open the HPO landing page, or the named sub-page, in the browser."""
    if page_name:
        destination = SubURLs[page_name]
    else:
        destination = URL
    go_to_url(driver, destination, page_name or NAME)
def should_be_here(driver: WebDriver, *, page_name: str = None):
    """Assert the browser is on the HPO landing page or *page_name* sub-page.

    Only the URL is verified here; element visibility checks live in
    should_see_sections(). ``page_name`` now defaults to None for
    consistency with visit() — the body already handled a falsy value.
    """
    url = SubURLs[page_name] if page_name else URL
    check_url(driver, url)
    # The old message claimed "all expected elements are visible", but no
    # element check happens here — log what was actually verified.
    logging.debug("Got expected URL '%s' for '%s' page", url, page_name or NAME)
def should_see_sections(driver: WebDriver, names: List[str]):
    """Assert that every named section from SELECTORS is present on the page."""
    check_for_sections(driver, all_sections=SELECTORS, sought_sections=names)
def clean_name(name: str) -> str:
    """Return the part after the first " - " in a "Prefix - Name" style title.

    Falls back to the whole (stripped) string when no " - " separator is
    present, instead of raising IndexError as the previous version did.
    """
    parts = name.split(" - ")
    return parts[1].strip() if len(parts) > 1 else name.strip()
def should_see_content_for(driver: WebDriver, hpo_name: str):
    """Assert the HPO name (sans its "Prefix - " part) appears in the page source.

    The comparison is case-insensitive on both sides.
    """
    source = driver.page_source
    hpo_name = clean_name(hpo_name)
    logging.debug("Looking for: {}".format(hpo_name))
    with assertion_msg(
        "Expected to find term '%s' in the source of the page %s",
        hpo_name,
        driver.current_url,
    ):
        assert hpo_name.lower() in source.lower()
def should_not_see_section(driver: WebDriver, name: str):
    """Check that no element of the named unexpected section is visible."""
    selectors = UNEXPECTED_ELEMENTS[name.lower()]
    for element_name, selector in selectors.items():
        check_if_element_is_not_visible(
            driver,
            selector,
            element_name=element_name,
            wait_for_it=False,
        )
def unfold_elements_in_section(driver: WebDriver, section_name: str):
    """Expand every collapsible (BUTTON-typed) element in the given section.

    Elements already carrying the "open" attribute are left alone; others
    are scrolled into view and clicked.
    """
    section_selectors = SELECTORS[section_name]
    folded_elements = get_selectors(section_selectors, ElementType.BUTTON)
    logging.debug(f"Found {len(folded_elements)} selectors for elements to unfold")
    for name, selector in folded_elements.items():
        element = find_element(driver, selector, element_name=name)
        scroll_to(driver, element)
        if element.get_attribute("open"):
            logging.debug(f"Element: '{name}' is already unfolded")
        else:
            logging.debug(f"Unfolding closed element: {name}")
            element.click()
| [
"kowalczykjanusz@gmail.com"
] | kowalczykjanusz@gmail.com |
9d74f6b7a1e32fffedd71a210f2a9009d0e57aa4 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/D/drj/multi.py | f1d9aa4aa940a0aa681026328186957517059a79 | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | import scraperwiki
import random
import time
n = random.choice(range(100))
i=0
while True:
print n, i
i += 1
time.sleep(1.4)import scraperwiki
import random
import time
n = random.choice(range(100))
i=0
while True:
print n, i
i += 1
time.sleep(1.4) | [
"pallih@kaninka.net"
] | pallih@kaninka.net |
8fe1a907db67146e05e04ef7aaa745788c55b202 | 5f25f45b7fac762b989563ca1270d37d31b749e8 | /tests/regnet_tests/conftest.py | b654d49b7f598cb10f8e5d08de01ab4861cf089a | [
"MIT"
] | permissive | thomaseizinger/cryptonote-library | 587c18d8e6662cc2920a896efbeab5fa49ee38b5 | c39d956847e5ba2b46c2c4c90ab289ae1c04fe03 | refs/heads/master | 2023-04-02T21:29:39.423776 | 2021-04-02T17:51:40 | 2021-04-02T17:51:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,785 | py | # Types.
from typing import Dict, List, Tuple, Any
# urandom standard function.
from os import urandom
# sleep standard function.
from time import sleep
# JSON standard lib.
import json
# pytest lib.
import pytest
# Transaction/Block classes.
from cryptonote.classes.blockchain import OutputIndex
# Crypto classes.
from cryptonote.crypto.monero_crypto import InputState, OutputInfo, MoneroCrypto
from cryptonote.crypto.monero_payment_id_crypto import MoneroPaymentIDCrypto
# Address and Wallet classes.
from cryptonote.classes.wallet.address import Address
from cryptonote.classes.wallet.wallet import Wallet, WatchWallet
# RPC classes.
from cryptonote.rpc.monero_rpc import RPC, MoneroRPC
# 1 XMR.
ATOMIC_XMR: int = 1000000000000
class Harness:
    # Regnet test harness: a Wallet/WatchWallet pair wired to a local Monero
    # RPC node, plus a mirror of the WatchWallet's inputs for cross-checking.
    def __init__(self):
        """Construct a new test environment."""
        self.rpc: RPC = MoneroRPC("127.0.0.1", 18081)
        self.crypto: MoneroCrypto = MoneroCrypto()
        self.crypto.oldest_txo_property = 1
        key: bytes = urandom(32)
        self.wallet: Wallet = Wallet(self.crypto, key)
        self.watch: WatchWallet = WatchWallet(
            self.crypto,
            self.rpc,
            self.wallet.private_view_key,
            self.wallet.public_spend_key,
            1,
        )
        self.inputs: Dict[OutputIndex, OutputInfo] = {}
        # Mine 100 blocks to our own address so the harness has spendable funds.
        self.rpc.generate_blocks(100, self.watch.new_address((0, 0)).address)
    def verify_inputs(self):
        """Verify our inputs is the same as the WatchWallet's."""
        assert self.inputs == self.watch.inputs
    def poll_blocks(self):
        """Update our WatchWallet with the latest inputs."""
        # Update with the newly found inputs.
        self.inputs = {**self.inputs, **self.watch.poll_blocks()}
        # Verify inputs.
        self.verify_inputs()
    def wait_for_unlock(self):
        """Wait for any new Transactions to unlock."""
        # Brief pause so the node registers the transaction before mining.
        sleep(2)
        self.rpc.generate_blocks(
            self.crypto.confirmations + 1, self.watch.new_address((0, 0)).address
        )
        self.poll_blocks()
    def send(self, address: Address, amount: int) -> bytes:
        """Provide the specified address with the specified amount.

        Returns the hash of the published transaction. Fee is fixed at
        ATOMIC_XMR // 10.
        """
        # Update the available inputs.
        self.poll_blocks()
        # Prepare the spend.
        context: Dict[str, Any] = self.watch.prepare_send(
            address, amount, ATOMIC_XMR // 10
        )
        # Mark the spent inputs as spent in our copy of inputs.
        for input_i in context["inputs"]:
            self.inputs[
                OutputIndex(bytes.fromhex(input_i["hash"]), input_i["index"])
            ].state = InputState.Spent
        # Sign it.
        # The json dumps/loads round-trips deep-copy the context so signing
        # cannot mutate the original (and mimic a wire serialization).
        publishable: List[str] = json.loads(
            json.dumps(self.wallet.sign(json.loads(json.dumps(context))))
        )
        # Publish it.
        self.watch.finalize_send(True, context, publishable[1])
        # Verify the WatchWallet's inputs equals our list.
        self.verify_inputs()
        # Wait for the outputs to unlock.
        self.wait_for_unlock()
        # Return the hash.
        return bytes.fromhex(publishable[0])
    def return_funds(
        self, test_wallet: Wallet, test_watch: WatchWallet, amount: int
    ) -> None:
        """Return sent funds back to the master wallet."""
        # NOTE(review): when amount == 0 no transaction is built, yet
        # `publishable` is referenced unconditionally below — that path
        # would raise NameError. Presumably callers never pass 0; confirm.
        if amount != 0:
            context: Dict[str, Any] = test_watch.prepare_send(
                self.watch.new_address((0, 0)),
                amount - (ATOMIC_XMR // 10),
                (ATOMIC_XMR // 10) - 1,
            )
            publishable: List[str] = test_wallet.sign(json.loads(json.dumps(context)))
            test_watch.finalize_send(True, context, publishable[1])
        # Wait for the return TXs to unlock.
        self.wait_for_unlock()
        # Verify we can spend the returned funds.
        returned: Tuple[
            List[bytes], Dict[OutputIndex, OutputInfo]
        ] = self.watch.can_spend(
            self.rpc.get_transaction(bytes.fromhex(publishable[0]))
        )
        assert not returned[0]
        assert len(returned[1]) == 1
        assert list(returned[1].keys())[0].tx_hash == bytes.fromhex(publishable[0])
        assert returned[1][list(returned[1].keys())[0]].amount == amount - (
            ATOMIC_XMR // 10
        )
        # Poll the blockchain.
        # This gets us the miner Transactions and change outputs.
        # Since inputs are stored as a Dict, this will not create duplicates.
        self.poll_blocks()
        # Verify inputs.
        self.verify_inputs()
@pytest.fixture(scope="session")
def harness() -> Harness:
    """Session-wide regnet harness (node setup + 100 mined blocks is costly)."""
    return Harness()
@pytest.fixture
def monero_payment_id_crypto() -> MoneroPaymentIDCrypto:
    """Fresh payment-ID crypto helper per test."""
    return MoneroPaymentIDCrypto()
| [
"lukeparker5132@gmail.com"
] | lukeparker5132@gmail.com |
6ea8213e82dd3532a7da3312a90f6e1c64f353d4 | 63248ee10e29c11acebc9d3d977fe145cdfc156b | /tests/test_checker/test_noqa.py | 53763cb8d3b8fb977b42a21944992f06bc461240 | [
"MIT"
] | permissive | bekemaydin/wemake-python-styleguide | 68e6517d6cb70ad34c11706760a7042b2b84877d | fad6a1d2b66012d623fe0e0bba9b5561622deeb0 | refs/heads/master | 2020-04-03T02:07:08.558327 | 2018-10-27T12:09:35 | 2018-10-27T12:09:35 | 154,947,414 | 0 | 0 | MIT | 2018-10-27T12:09:12 | 2018-10-27T09:31:34 | Python | UTF-8 | Python | false | false | 1,919 | py | # -*- coding: utf-8 -*-
import re
import subprocess
from collections import Counter
ERROR_PATTERN = re.compile(r'(Z\d{3})')
def _assert_errors_count_in_output(output, errors):
found_errors = Counter((
match.group(0) for match in ERROR_PATTERN.finditer(output)
))
for found_error, found_count in found_errors.items():
assert found_error in errors, 'Error without a noqa count'
assert found_count == errors.pop(found_error)
assert len(errors) == 0
def test_noqa_fixture_disabled(absolute_path):
    """End-to-End test to check that all violations are present."""
    # Expected violation code -> occurrence count in the noqa.py fixture.
    errors = {
        'Z110': 2,
        'Z111': 1,
        'Z112': 1,
        'Z113': 1,
        'Z220': 1,
        'Z224': 1,
        'Z300': 1,
        'Z302': 1,
        'Z303': 1,
        'Z304': 1,
        'Z305': 1,
        'Z306': 2,
        'Z307': 1,
        'Z308': 1,
        'Z309': 1,
        'Z310': 4,
        'Z312': 1,
        'Z410': 1,
        'Z420': 1,
        'Z421': 1,
        'Z422': 1,
        'Z423': 1,
        'Z430': 1,
        'Z431': 2,
        'Z432': 2,
        'Z433': 2,
        'Z434': 1,
        'Z435': 1,
        'Z436': 1,
        'Z437': 1,
    }

    # subprocess.run replaces the manual Popen/communicate pair and waits
    # for the process to finish before returning.
    completed = subprocess.run(
        [
            'flake8',
            '--disable-noqa',
            '--select',
            'Z',
            absolute_path('fixtures', 'noqa.py'),
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    _assert_errors_count_in_output(completed.stdout.decode('utf8'), errors)
def test_noqa_fixture(absolute_path):
    """End-to-End test to check that `noqa` works."""
    # subprocess.run replaces the manual Popen/communicate pair.
    completed = subprocess.run(
        ['flake8', '--select', 'Z', absolute_path('fixtures', 'noqa.py')],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # With noqa honoured, no Z-violation codes may appear in the output.
    assert completed.stdout.count(b'Z') == 0
| [
"mail@sobolevn.me"
] | mail@sobolevn.me |
83da29e85a3f4fb8c21958106bea5955ac87b29c | 03d99657f557e37694f7aa4eeb6dc90a918ea647 | /Cyberbook/XBlocks/simstudent-xblock/simstudent/simstudent.py | fb1fd2cce4a51c0b2285d3599a2cd015f5602985 | [] | no_license | pawans1994/QUADL | 5c3c824f2c813fd287d7135e878f5f962797bc2e | 3f03eabb54e295ceaa9d4add1329ddc5eeafdec3 | refs/heads/master | 2023-05-11T21:25:07.046794 | 2020-03-18T16:41:15 | 2020-03-18T16:41:15 | 248,282,752 | 1 | 0 | null | 2023-05-01T20:19:11 | 2020-03-18T16:20:11 | JavaScript | UTF-8 | Python | false | false | 4,077 | py | """TO-DO: Write a description of what this XBlock is."""
import pkg_resources
from web_fragments.fragment import Fragment
from xblock.core import XBlock
from xblock.fields import Integer, Scope, String
class SimStudentXBlock(XBlock):
    """
    XBlock that embeds a SimStudent tutor: renders static HTML/JS pointing
    at a SimStudentServlet instance configured via the fields below, and
    persists those settings through the studio editing view.
    """
    # Fields are defined on the class. You can access them in your code as
    # self.<fieldname>.
    href = String(help="URL for SimStudent HTML rendring",
        default="",
        scope=Scope.content)
    display_name = String(help="Name of the component in the edxPlatform",
        default="Watson Tutor",
        scope=Scope.settings)
    name = String(help="Name of the brd file to run",
        default="if_p_or_q_then_r.brd",
        scope=Scope.content)
    host = String(help="Ip Address or the domain name of the host",
        default="localhost",
        scope=Scope.content)
    port = String(help="Port where the SimStudentServlet is running",
        default="8080",
        scope=Scope.content)
    folder_name = String(help="Name of the folder containing the bundle",
        default="informallogic",
        scope=Scope.content)
    problem_name = String(help="Name of the problem file (wme)",
        default="if_p_or_q_then_r",
        scope=Scope.content)
    value_type_checker_name = String(help="Class containing the valueTypeChecker",
        default="informallogic.MyFeaturePredicate.valueTypeChecker",
        scope=Scope.content)
    backend_name = String(help="Backend Class to use",
        default="interaction.ModelTracerBackend",
        scope=Scope.content)
    def resource_string(self, path):
        """Handy helper for getting resources from our kit."""
        data = pkg_resources.resource_string(__name__, path)
        return data.decode("utf8")
    # TO-DO: change this view to display your data your own way.
    def student_view(self, context=None):
        """
        The primary view of the SimStudentXBlock, shown to students
        when viewing courses.
        """
        html = self.resource_string("static/html/simstudent.html")
        frag = Fragment(html.format(self=self))
        frag.add_css(self.resource_string("static/css/simstudent.css"))
        frag.add_javascript(self.resource_string("static/js/src/simstudent.js"))
        frag.initialize_js('SimStudentXBlock')
        return frag
    def studio_view(self, context=None):
        """
        The editing view (simstudent_edit.html) shown to course authors
        in Studio; its form posts back via save_simstudent.
        """
        html = self.resource_string("static/html/simstudent_edit.html")
        frag = Fragment(html.format(self=self))
        frag.add_javascript(self.resource_string("static/js/src/simstudent_edit.js"))
        # NOTE(review): initialized with 'simstudentXBlock' (lower-case s)
        # while student_view uses 'SimStudentXBlock' — confirm this matches
        # the function name in simstudent_edit.js.
        frag.initialize_js('simstudentXBlock')
        return frag
    @XBlock.json_handler
    def save_simstudent(self, data, suffix=''):
        """
        JSON handler that persists the settings submitted by the
        studio editing form.
        """
        self.href = data['href']
        self.host = data['host']
        self.port = data['port']
        self.name = data['name']
        self.folder_name = data['folder_name']
        self.problem_name = data['problem_name']
        self.value_type_checker_name = data['value_type_checker_name']
        self.backend_name = data['backend_name']
    # TO-DO: change this to create the scenarios you'd like to see in the
    # workbench while developing your XBlock.
    @staticmethod
    def workbench_scenarios():
        """A canned scenario for display in the workbench."""
        return [
            ("SimStudentXBlock",
             """<simstudent/>
             """),
            ("Multiple SimStudentXBlock",
             """<vertical_demo>
                <simstudent/>
                <simstudent/>
                <simstudent/>
                </vertical_demo>
             """),
        ]
| [
"nobody@ncsu.edu"
] | nobody@ncsu.edu |
a04894d893ca8889368bac7bf6f3d839ea9fb93e | 5ea1216c24b62c6beab3c6d9d2e2e06a9c58c796 | /剑指Offer/39.数组中出现超过一半的数字.py | e639b88b4d8662393e9bf94769eb2228f051f053 | [] | no_license | xiami2019/LeetCode | 596de2f093d52b58cf80421f67de03757578cd5f | 8d09a56672553ecee4af731796980b2c61c52df2 | refs/heads/master | 2021-01-02T15:15:13.984178 | 2020-07-08T01:20:15 | 2020-07-08T01:20:15 | 239,675,873 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 365 | py | class Solution:
def majorityElement(self, nums: List[int]) -> int:
preNum = nums[0]
count = 1
for i in range(1, len(nums)):
if nums[i] == preNum:
count += 1
else:
count -= 1
if count < 0:
preNum = nums[i]
count = 1
return preNum | [
"435350193@qq.com"
] | 435350193@qq.com |
dffe57932f7e1c84cc32acf781fca7f7715216ff | 84b05857cbe74d190bdbee18d442d0c720b1b84d | /Coderbyte_algorithms/Hard/OptimalAssignments/test_OptimalAssignments.py | c35bb39e583b90f4477373996093898a9f6f1e86 | [] | no_license | JakubKazimierski/PythonPortfolio | 1c8c7e7b0f1358fc42a2295b807d0afafd8e88a3 | 3aa62ad36c3b06b2a3b05f1f8e2a9e21d68b371f | refs/heads/master | 2023-06-01T01:16:22.897097 | 2023-05-15T01:05:22 | 2023-05-15T01:05:22 | 311,473,524 | 9 | 1 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | '''
Unittests for OptimalAssignments.py
December 2020 Jakub Kazimierski
'''
import unittest
import OptimalAssignments
class test_OptimalAssignments(unittest.TestCase):
    '''
    Class with unittests for OptimalAssignments.py
    '''
    # region Unittests
    def test_ExpectedOutput(self):
        '''
        Checks if returned output is as expected for a 3x3 cost matrix:
        the optimal machine-to-task assignment string.
        '''
        output = OptimalAssignments.OptimalAssignments(["(5,4,2)","(12,4,3)","(3,4,13)"])
        self.assertEqual(output, "(1-3)(2-2)(3-1)")
    # endregion
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    '''
    Main method for test cases.
    '''
    unittest.main()
"j.m.kazimierski@gmail.com"
] | j.m.kazimierski@gmail.com |
961ada8bd15c4bbc33270ba8fdeae8e08efdfdff | 15a739dc7f2f83d3b85d7b355a81d62a0c9386ad | /src/measure/07_calc_pale_para_time_cases.py | 6cd253560e1a7f9adbf6de1082e92acbff671a52 | [] | no_license | Allen517/half | 4d232563cb85ec73b78c263b8e6df04549bcecac | 18e6b5c2b69e1b90656328a6ae840fe7f7071398 | refs/heads/master | 2020-05-16T17:29:40.304766 | 2019-09-25T15:16:52 | 2019-09-25T15:16:52 | 183,195,625 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,863 | py | import sys,os
import random
import time
import numpy as np
from concurrent.futures import ThreadPoolExecutor
from concurrent import futures
def read_code(filename):
    """Parse a "<key>\\t<v1,v2,...>" per-line embedding file.

    Returns {key: numpy float vector}. Lines with fewer than two
    tab-separated fields are skipped.
    """
    code_map = dict()
    with open(filename, 'r') as fin:
        for ln in fin:
            elems = ln.strip().split('\t')
            if len(elems) < 2:
                continue
            # A list comprehension keeps this correct on Python 3, where the
            # old np.array(map(...)) wrapped a lazy map object into a useless
            # 0-d object array.
            code_map[elems[0]] = np.array([float(v) for v in elems[1].split(',')])
    return code_map
# def match_num(src_code, target_code):
# xor_res = int(src_code, 2)^int(target_code, 2)
# one_cnt = 0
# while xor_res != 0:
# s = xor_res%2
# if s==1:
# one_cnt += 1
# xor_res /= 2
# return one_cnt
def find_matched(code, target_code_map):
    """Return (distance, key) of the target code nearest to ``code``.

    Fixes the original bug of returning the loop variable ``target_key``
    (whatever key happened to be iterated last) instead of the tracked best
    match ``min_key``. Also starts from +inf instead of 1000 so matches
    farther than the old arbitrary cutoff are still found, and uses
    .items() so the code runs on Python 3 as well. Ties keep the
    later-iterated key, as before.
    """
    min_dist = float('inf')
    min_key = None
    for target_key, target_code in target_code_map.items():
        dist = geo_distance(code, target_code)
        if dist <= min_dist:
            min_dist = dist
            min_key = target_key
    return min_dist, min_key


def geo_distance(vec1, vec2):
    """Half squared Euclidean distance between two numpy vectors."""
    return .5 * np.sum((vec1 - vec2) ** 2)
def main_proc(num):
    # Benchmark worker (Python 2 syntax): match `num` randomly chosen douban
    # embeddings against the whole weibo embedding map, printing each result.
    # Relies on the module-level douban_code_map / weibo_code_map globals
    # created in the __main__ block below.
    print num
    for i in range(num):
        douban_keys = douban_code_map.keys()
        d_key = douban_keys[random.randint(0,len(douban_keys)-1)]
        # NOTE(review): find_matched returns (distance, key) but this unpacks
        # as (key, distance) -- the printed columns are swapped; confirm.
        target_key, target_dist = find_matched(douban_code_map[d_key], weibo_code_map)
        print '{},{}:{}'.format(d_key, target_key,target_dist)
if __name__=='__main__':
    # Load the two embedding files, then time `overall` matching operations
    # fanned out over a thread pool of `worker` threads (Python 2 script).
    douban_code_map = read_code('dhl-alp.case.model.code.pale.f')
    weibo_code_map = read_code('dhl-alp.case.model.code.pale.g')
    overall = 100
    worker = 8
    t1 = time.time()
    with ThreadPoolExecutor(max_workers=worker) as executor:
        # Maps each submitted future -> the batch size it was asked to run.
        # NOTE(review): the dict comprehension submits main_proc(num) where
        # num is the loop index (0..overall/worker-1), not a batch size of
        # overall/worker -- confirm the intended work split.
        future_to_proc = {executor.submit(main_proc, num)
                        :overall/worker if num+overall/worker<overall else overall-num for num in range(int(overall/worker))}
        for future in futures.as_completed(future_to_proc):
            print 'Finish processing %d'%(future_to_proc[future])
            if future.exception() is not None:
                print future.exception()
    t2 = time.time()
    # Total wall-clock time for the whole run.
    print t2-t1
"wangyongqing.casia@gmail.com"
] | wangyongqing.casia@gmail.com |
6973d3f90df8ecd1028285b111baa55b7a9d0f6b | 2e3e064ec2388f7a57428c32b77207280a0a9be1 | /Sect-A/source/sect04_control/s443.py | 496623c5b1ff4cc2b7b813f48f232d8d1636308f | [] | no_license | bigpycraft/sba19-seoulit | 1857a399d8f23c4fb8f98448139be8eb968d35ee | 2cf11a1ce9a04043911006531470a3e523cbf5f0 | refs/heads/master | 2020-07-02T22:48:52.681018 | 2019-09-19T03:08:59 | 2019-09-19T03:08:59 | 201,693,283 | 5 | 7 | null | null | null | null | UTF-8 | Python | false | false | 247 | py | #pass 문 예시
# Demonstration of the `pass` statement: it is a no-op placeholder, so the
# 'yellow' branch below does nothing and both prints always run.
signals = 'blue', 'yellow', 'red'
# enumerate() is the idiomatic replacement for range(len(...)) indexing;
# the printed output is identical.
for x, signal in enumerate(signals):
    print(x, signal, '루프 시작!')
    if signal == 'yellow':
        pass
    print(x, signal, '루프 종료!!')
print('프로그램 종료!!!')
| [
"bluenine52@gmail.com"
] | bluenine52@gmail.com |
f2531dc8aa8f27aa3b6227b4feb0a00c64c8816d | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/sets_20200609191039.py | 8317547fb031c680266d7b47826445c4e751bb94 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | import json
def Strings(str):
    """Merge "key:value" entries, summing the values of repeated keys.

    Example: ["Z:1","B:3","C:3","Z:4","B:2"] -> "B:5,C:3,Z:5"
    (keys sorted alphabetically, joined with commas).

    Bug fix: the original body had an `if` statement whose entire body was
    commented out, which is a SyntaxError; this implements the intent shown
    by that commented-out code and the expected-output comment below.
    """
    totals = {}
    for entry in str:
        key, value = entry.split(":")
        totals[key] = totals.get(key, 0) + int(value)
    return ",".join("{}:{}".format(k, totals[k]) for k in sorted(totals))
Strings(["Z:1","B:3","C:3","Z:4","B:2"])
# "B:5,C:3,Z:5"
| [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
4dccf5e24c984abaa126f1ab46d14c24b4630c86 | df19b2ff22f0b27685cdc72d0d50b6b9dfe3fa5a | /cpuApp/urls.py | 0045b3558ce4f2b345a1a63fe6076491e4e97993 | [] | no_license | fadiarmoush/LinuxProject | 7ca01efe06f87bb13a0232327aa1a7d9d1c4bc0b | 4a7a5330e3b2af97fc8bc58e3483fda6d3cee42f | refs/heads/master | 2020-06-18T06:32:39.077572 | 2019-07-10T12:02:37 | 2019-07-10T12:02:37 | 196,196,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | from django.urls import path
from django.conf.urls import url
from .views import ListSongsView
from .views import ListSongsView2
from .views import ListSongsView3
from . import views
from .views import currentsView
# URL table: three class-based system-metric views plus a regex-matched
# "currents" endpoint. NOTE(review): mixes new-style path() with the
# deprecated url() helper -- consider re_path() on newer Django.
urlpatterns = [
    path('cpu/', ListSongsView.as_view()),
    path('mem/', ListSongsView2.as_view()),
    path('disk/', ListSongsView3.as_view()),
    url(r'currents', currentsView.as_view()),
]
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
ec07f0da2e432113a1bac9c65ed9483a3c4b4cab | d3e31f6b8da5c1a7310b543bbf2adc76091b5571 | /Day26/mini_prj1/app.py | ff17127e9abd7fbf909d7cb7e72a85916cbb5fa1 | [] | no_license | pytutorial/py2103 | 224a5a7133dbe03fc4f798408694bf664be10613 | adbd9eb5a32eb1d28b747dcfbe90ab8a3470e5de | refs/heads/main | 2023-07-14T06:31:18.918778 | 2021-08-12T14:29:16 | 2021-08-12T14:29:16 | 355,163,185 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,794 | py | from pymongo import MongoClient
from bson.objectid import ObjectId
# SECURITY(review): database host and credentials are hard-coded in source
# control; move them to environment variables or a config file.
db_host = '34.70.69.231'
db_user = 'mongo'
db_pass = 'abc@123'
client = MongoClient(db_host, username=db_user, password=db_pass)
# Default application database used by all route handlers below.
db = client.db0001
import time
from flask import Flask, request, render_template, redirect, jsonify
app = Flask(__name__)
@app.route('/update-product/<pid>', methods=['GET', 'POST'])
def updateProduct(pid):
    """Render the edit form (GET) or persist the edited product (POST)."""
    if request.method == 'GET':
        product = db.product.find_one({'_id': ObjectId(pid)})
        return render_template('form.html', product=product)
    else:
        # Implements the former "TODO: Update DB", mirroring the field
        # handling used by createProduct below.
        fields = ['code', 'name', 'price', 'qty', 'description']
        product = {f: request.form[f] for f in fields}
        product['price'] = int(product['price'] or 0)
        product['qty'] = int(product['qty'] or 0)
        file = request.files.get('image')
        if file is not None and file.filename != '':
            filepath = f'/static/images/{str(time.time())}.jpg'
            file.save(app.root_path + filepath)
            product['image'] = filepath
        db.product.update_one({'_id': ObjectId(pid)}, {'$set': product})
        return redirect('/')
@app.route('/create-product', methods=['GET', 'POST'])
def createProduct():
    """Render an empty form (GET) or insert a new product (POST)."""
    if request.method == 'GET':
        return render_template('form.html', product={})
    else:
        fields = ['code', 'name', 'price', 'qty', 'description']
        product = {f : request.form[f] for f in fields}
        # Empty strings fall back to 0 before the int conversion.
        product['price'] = int(product['price'] or 0)
        product['qty'] = int(product['qty'] or 0)
        file = request.files.get('image')
        if file is not None and file.filename != '':
            # Timestamp-based name avoids collisions between uploads.
            filepath = f'/static/images/{str(time.time())}.jpg'
            file.save(app.root_path + filepath)
            product['image'] = filepath
        # insert_one replaces Collection.insert, which was removed in
        # PyMongo 4; behavior here is identical (the return value is unused).
        db.product.insert_one(product)
        return redirect('/')
@app.route('/delete-product/<pid>', methods=['DELETE'])
def deleteProduct(pid):
    """Delete the product with the given id and return a JSON ack."""
    # delete_one replaces Collection.remove, which was removed in PyMongo 4.
    db.product.delete_one({'_id': ObjectId(pid)})
    return jsonify({'success': True})
@app.route('/')
def index():
    """Product list page, optionally filtered by a case-insensitive
    substring search on the product name."""
    import re  # local import: only used to sanitise the search keyword
    keyword = request.args.get('keyword', '')
    # Security fix: re.escape prevents user input from being interpreted as
    # a regular expression inside $regex (regex injection / ReDoS); the
    # search behaves as a literal, case-insensitive substring match.
    productList = list(db.product.find({
        'name': {'$regex': re.escape(keyword), '$options': 'i'}
    }))
    return render_template('index.html', productList=productList, keyword=keyword)
# SECURITY(review): debug=True exposes the Werkzeug debugger; disable it
# outside local development.
app.run(debug=True)
"duongthanhtungvn01@gmail.com"
] | duongthanhtungvn01@gmail.com |
f0d78e0407e9b493b7e09a592fbe281f65786de9 | fe47a536f3938a8f0bf0e52fa6e3284d1b35e356 | /backend/users/migrations/0002_auto_20200614_2037.py | 5976489efb44e03f267c41dda3aa73207d6f4113 | [] | no_license | crowdbotics-apps/i-do-weddings-18101 | 2d0e473b2b0f312c049c07c6e28d53c6c506ebea | aabc1b4d442378a32643365ce8a0eac30d2836b1 | refs/heads/master | 2022-10-17T02:13:55.759254 | 2020-06-14T20:38:14 | 2020-06-14T20:38:14 | 272,279,169 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | # Generated by Django 2.2.13 on 2020-06-14 20:37
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: relaxes User.email and User.name so that
    # blank/null values are allowed. Do not edit by hand once applied.
    dependencies = [
        ('users', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='email',
            field=models.EmailField(blank=True, max_length=254, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
957b3ad7e784e2bb7d619623c8109f9100e498a2 | 64d1211404c89da4e09d77d859f2cdf6609a057e | /models/official/nlp/configs/bert.py | 47c0b26e436a76c0b31492d5397149af5ab66172 | [
"Apache-2.0"
] | permissive | Nerfertili/Deep_learning_learning_udemy | f375209e0675ab8f4da9551d8a5bdee4f2948ed8 | 0fe6c1f36019b29151acb17a1f248b34d6089aeb | refs/heads/master | 2023-02-17T10:10:52.536426 | 2021-01-19T02:48:23 | 2021-01-19T02:48:23 | 330,823,730 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,566 | py | # Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Multi-head BERT encoder network with classification heads.
Includes configurations and instantiation methods.
"""
from typing import List, Optional, Text
import dataclasses
from official.modeling.hyperparams import base_config
from official.nlp.configs import encoders
@dataclasses.dataclass
class ClsHeadConfig(base_config.Config):
  # Configuration for one classification head attached to the encoder.
  # inner_dim: width of the head's inner projection (presumably 0 = skip
  # the projection -- confirm against the head implementation).
  inner_dim: int = 0
  # num_classes: number of output classes for this head.
  num_classes: int = 2
  # activation: nonlinearity name for the inner layer; None disables it.
  activation: Optional[Text] = "tanh"
  dropout_rate: float = 0.0
  # cls_token_idx: sequence position used as the pooled representation.
  cls_token_idx: int = 0
  # name: optional identifier for this head.
  name: Optional[Text] = None
@dataclasses.dataclass
class PretrainerConfig(base_config.Config):
  """Pretrainer configuration: encoder plus optional classification heads
  and masked-language-model (MLM) settings."""
  encoder: encoders.EncoderConfig = encoders.EncoderConfig()
  # Zero or more classification heads attached next to the MLM head.
  cls_heads: List[ClsHeadConfig] = dataclasses.field(default_factory=list)
  # Activation and initializer range for the MLM prediction layer.
  mlm_activation: str = "gelu"
  mlm_initializer_range: float = 0.02
| [
"leal.afonso@outlook.com"
] | leal.afonso@outlook.com |
8275002ef69810e28d7fee9ec873749fced30050 | 08f61d5432b6cf14bb6fc3448259a6c445b26780 | /dapodik/sekolah/base.py | 4da825ffab37d6c94b7bad8cb9f88d0073438da1 | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | nainaidaa/dapodik | 9ecfabc6f262c24a4cd0288b3b1a7116b3a09199 | d89c0fb899c89e866527f6b7b57f741abd6444ea | refs/heads/master | 2023-07-22T23:37:16.694675 | 2021-09-08T15:04:23 | 2021-09-08T15:04:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,687 | py | from dapodik.base import BaseDapodik
from typing import List
from . import AkreditasiSp
from . import BlockGrant
from . import JurusanSp
from . import Kepanitiaan
from . import ProgramInklusi
from . import Sanitasi
from . import SekolahLongitudinal
from . import SekolahPaud
from . import Sekolah
from . import Semester
from . import Yayasan
class BaseSekolah(BaseDapodik):
    """School-related Dapodik REST endpoints, one getter per resource.

    Every method delegates to ``self._get_rows`` (provided by BaseDapodik),
    which fetches the given "/rest/..." endpoint and parses the rows into
    the requested dataclass type.
    """
    def akreditasi_sp(self) -> List[AkreditasiSp]:
        return self._get_rows("/rest/AkreditasiSp", List[AkreditasiSp])
    def blockgrant(self) -> List[BlockGrant]:
        return self._get_rows("/rest/BlockGrant", List[BlockGrant])
    def jurusan_sp(self) -> List[JurusanSp]:
        return self._get_rows("/rest/JurusanSp", List[JurusanSp])
    def kepanitiaan(self) -> List[Kepanitiaan]:
        return self._get_rows("/rest/Kepanitiaan", List[Kepanitiaan])
    def program_inklusi(self) -> List[ProgramInklusi]:
        return self._get_rows("/rest/ProgramInklusi", List[ProgramInklusi])
    def sanitasi(self) -> List[Sanitasi]:
        return self._get_rows("/rest/Sanitasi", List[Sanitasi])
    def sekolah_longitudinal(self) -> List[SekolahLongitudinal]:
        return self._get_rows("/rest/SekolahLongitudinal", List[SekolahLongitudinal])
    def sekolah_paud(self) -> List[SekolahPaud]:
        return self._get_rows("/rest/SekolahPaud", List[SekolahPaud])
    def sekolah(self, index: int = 0) -> Sekolah:
        # Returns a single school record; `index` selects among the rows
        # returned by the endpoint (raises IndexError if out of range).
        return self._get_rows("/rest/Sekolah", List[Sekolah])[index]
    def semester(self) -> List[Semester]:
        return self._get_rows("/rest/Semester", List[Semester])
    def yayasan(self) -> List[Yayasan]:
        return self._get_rows("/rest/Yayasan", List[Yayasan])
| [
"revolusi147id@gmail.com"
] | revolusi147id@gmail.com |
1de781d634fab52bcefa1bc7e3d2b231a9ea18d3 | 7013527a33a756c0f08a0672515e2c20df58faca | /clean_stripes.py | 9b47390310d9b6bf5de62b3688848299b2e63441 | [] | no_license | tukiains/stripe-filter-test | b1b88c9129f714c0e3cb47c6f62567a3b7eaec5a | 069d8a480628c9b1ae79de648ceddeeb264a6bcc | refs/heads/main | 2023-06-01T21:21:28.537687 | 2021-06-15T08:37:09 | 2021-06-15T08:37:09 | 369,167,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,872 | py | import os.path
import requests
from cloudnetpy.categorize import generate_categorize
from cloudnetpy.plotting import generate_figure
# Site -> tuple of dates to process. Presumably each date is a day whose
# radar data shows the stripe artefacts being tested -- confirm selection.
test_cases = {
    'munich': ('2021-05-16', '2021-05-05', '2021-05-08'),
    'hyytiala': ('2021-05-09', '2021-05-10'),
    'palaiseau': ('2021-03-05', ),
    'granada': ('2021-05-11', '2021-05-07'),
    'norunda': ('2021-03-05', ),
    'bucharest': ('2021-03-05', )
}
def _download(site: str, date: str, product: str = None) -> str:
    """Download one Cloudnet file and return its local filename.

    With `product` given, queries the instrument-file API; otherwise the
    model-file API. The first matching file is downloaded unless it is
    already present on disk.

    Raises:
        RuntimeError: if the API returns no matching files.
    """
    payload = {'site': site, 'date': date}
    if product is not None:
        payload['product'] = product
        url = 'https://cloudnet.fmi.fi/api/files'
    else:
        url = 'https://cloudnet.fmi.fi/api/model-files'
    metadata = requests.get(url, payload).json()
    if not metadata:
        # Give callers/logs something to go on instead of a bare RuntimeError.
        raise RuntimeError(f'No files found for {site} {date}')
    metadata = metadata[0]
    filename = metadata['filename']
    if not os.path.isfile(filename):
        # Bug fix: the original f-string had no placeholder and always
        # printed "downloading (unknown) ..."; report the actual file.
        print(f'downloading {filename} ...')
        res = requests.get(metadata['downloadUrl'])
        with open(filename, 'wb') as f:
            f.write(res.content)
    return filename
def main():
    """For each test site/date, downloads the input files and produces a
    categorize file plus a diagnostic figure via cloudnetpy."""
    for site, dates in test_cases.items():
        input_files = {}
        for date in dates:
            input_files['model'] = _download(site, date)
            for file_type in ('radar', 'lidar'):
                input_files[file_type] = _download(site, date, file_type)
            try:
                input_files['mwr'] = _download(site, date, 'mwr')
            except RuntimeError:
                # No microwave radiometer data: fall back to the radar file.
                input_files['mwr'] = _download(site, date, 'radar')
            # categorize.nc is overwritten for every site/date pair; only
            # the per-date figure is kept.
            generate_categorize(input_files, 'categorize.nc')
            generate_figure('categorize.nc',
                            ['ldr', 'Z', 'v', 'v_sigma'],
                            show=False,
                            image_name=f'images/{date}_{site}_filtered')
if __name__ == "__main__":
    main()
| [
"simo.tukiainen@fmi.fi"
] | simo.tukiainen@fmi.fi |
d099475d44c7f9479ce738eb7e71bd3820b37e50 | 8eab8ab725c2132bb8d090cdb2d23a5f71945249 | /virt/Lib/site-packages/openpyxl/chart/print_settings.py | 6a8fc2fab6ce1792aa3048c69834f29af9bb563d | [
"MIT"
] | permissive | JoaoSevergnini/metalpy | 6c88a413a82bc25edd9308b8490a76fae8dd76ca | c2d0098a309b6ce8c756ff840bfb53fb291747b6 | refs/heads/main | 2023-04-18T17:25:26.474485 | 2022-09-18T20:44:45 | 2022-09-18T20:44:45 | 474,773,752 | 3 | 1 | MIT | 2022-11-03T20:07:50 | 2022-03-27T22:21:01 | Python | UTF-8 | Python | false | false | 1,454 | py | # Copyright (c) 2010-2022 openpyxl
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.descriptors import (
Float,
Typed,
Alias,
)
from openpyxl.worksheet.page import PrintPageSetup
from openpyxl.worksheet.header_footer import HeaderFooter
class PageMargins(Serialisable):
    """
    Identical to openpyxl.worksheet.page.Pagemargins but element names are different :-/

    The single-letter descriptors (l, r, t, b) mirror the XML attribute
    names; the spelled-out names (left, right, top, bottom) are aliases
    for the same values.
    """
    tagname = "pageMargins"
    l = Float()
    left = Alias('l')
    r = Float()
    right = Alias('r')
    t = Float()
    top = Alias('t')
    b = Float()
    bottom = Alias('b')
    header = Float()
    footer = Float()
    def __init__(self, l=0.75, r=0.75, t=1, b=1, header=0.5, footer=0.5):
        self.l = l
        self.r = r
        self.t = t
        self.b = b
        self.header = header
        self.footer = footer
class PrintSettings(Serialisable):
    """Chart print settings: header/footer, page margins and page setup."""

    tagname = "printSettings"
    headerFooter = Typed(expected_type=HeaderFooter, allow_none=True)
    pageMargins = Typed(expected_type=PageMargins, allow_none=True)
    pageSetup = Typed(expected_type=PrintPageSetup, allow_none=True)
    # Bug fix: the tuple previously listed "pageMargins" twice and omitted
    # "pageSetup", so the pageSetup child was never (de)serialised.
    __elements__ = ("headerFooter", "pageMargins", "pageSetup")
    def __init__(self,
                 headerFooter=None,
                 pageMargins=None,
                 pageSetup=None,
                ):
        self.headerFooter = headerFooter
        self.pageMargins = pageMargins
        self.pageSetup = pageSetup
| [
"joao.a.severgnini@gmail.com"
] | joao.a.severgnini@gmail.com |
5656a471d8a4d6fc40c39d07f5d22a52839f396b | 65cc1193afeced475cec02184dbc8043738a5a2e | /src/uvc/content/__init__.py | 125e0485fdbeb4068503d8f46244109a20d10daa | [] | no_license | novareto/uvc.content | e39b088b5d98b83f0e8836adc9c2188cbe501d50 | dc1b28f8f755dee26eb7721ecb2651761db03c16 | refs/heads/master | 2021-01-21T10:13:18.943078 | 2014-10-02T16:17:43 | 2014-10-02T16:17:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | # -*- coding: utf-8 -*-
from .interfaces import IContent, IDescriptiveSchema
from .directives import schema, Fields
from .utils import bootstrap_component, schematic_bootstrap
| [
"trollfot@gmail.com"
] | trollfot@gmail.com |
1f09508e58003a0f4fdffde95018f92964965c1a | fee4adc0f8a74801d728f1e3f1278110622a74be | /app/auth/views.py | 33d7919d5c4c4f1fcf292985b741777a515ef57c | [] | no_license | HuuBaa/todolist | 02957fef9c8690eaadf1f9e49f9505ae69c4c614 | eef38a3e1aa26f0ff8f9112c34587b46e6752e8a | refs/heads/master | 2021-09-13T03:19:17.414242 | 2018-04-24T13:15:04 | 2018-04-24T13:15:04 | 107,774,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,978 | py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
from . import auth
from ..models import User
from flask import request,redirect,url_for,render_template,flash
from flask_login import login_user,login_required,logout_user,current_user
from .forms import RegisterForm,CPasswdForm
from app import db
@auth.route('/login',methods=['POST','GET'])
def login():
    # GET renders the login form; POST checks the credentials against the
    # User table and starts a session on success.
    if request.method=='POST':
        email=request.form.get('email')
        password=request.form.get('password')
        user=User.query.filter_by(user_email=email).first()
        if user is not None and user.verify_password(password):
            # Second argument enables Flask-Login's "remember me" cookie.
            login_user(user,True)
            flash('登录成功!')
            return redirect(url_for('todo.mytasks'))
        else:
            flash('账号、密码错误!')
            return redirect(url_for('auth.login'))
    return render_template('auth/login.html')
@auth.route('/register',methods=['POST','GET'])
def register():
    # WTForms-backed registration; validate_on_submit() is False for GET,
    # so the form is simply rendered in that case.
    form=RegisterForm()
    if form.validate_on_submit():
        user = User(user_email=form.email.data, user_name=form.username.data, password=form.password.data)
        db.session.add(user)
        db.session.commit()
        flash('注册成功')
        return redirect(url_for('auth.login'))
    return render_template('auth/register.html',form=form)
@auth.route('/logout')
@login_required
def logout():
    """Log the current user out and return to the todo index page.

    Bug fix: the route decorator must be outermost. In the original order
    (@login_required above @auth.route) Flask registered the *unwrapped*
    view function, so the login check never ran for this endpoint.
    """
    logout_user()
    return redirect(url_for('todo.index'))
@auth.route('/cpasswd',methods=['POST','GET'])
@login_required
def cpasswd():
    # Change-password view: verifies the old password, stores the new one,
    # then logs the user out so they must re-authenticate.
    form=CPasswdForm()
    if form.validate_on_submit():
        if current_user.verify_password(form.oldpasswd.data):
            current_user.password=form.newpasswd.data
            db.session.add(current_user)
            db.session.commit()
            logout_user()
            flash('修改密码成功,请重新登录')
            return redirect(url_for('auth.login'))
        else:
            # Wrong old password: fall through and re-render the form.
            flash('旧密码错误')
    return render_template('auth/cpasswd.html',form=form)
"742790905@qq.com"
] | 742790905@qq.com |
633db023bda8914aa62fcc73e17d56ffcf002b45 | 107e62a03254c9ebe2e1830977a47861633b0d33 | /TCPserver.py | 7ca995beb699d6e30bf8c95905f51a341c16e072 | [] | no_license | prasanna-ranganathan/mypython | bb798c0782cfb79a27b0730e924921b802da2a44 | 25fa93602e2465ec6ccb0c3ff30a2bbf90da96e4 | refs/heads/master | 2021-06-03T18:35:31.129399 | 2016-08-28T14:21:44 | 2016-08-28T14:22:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | #!/usr/bin/env python
import SocketServer
class EchoHandler(SocketServer
serverAddr = ("0.0.0.0",90000)
server = SocketServer.TCPServer(serverAddr,EchoHandler)
server.serve_forver()
| [
"prassanna.mit@gmail.com"
] | prassanna.mit@gmail.com |
92d84d5e4a383d5e858345bc045ba5c2c7d536d2 | 353def93fa77384ee3a5e3de98cfed318c480634 | /.history/week01/homework02/maoyanspiders/maoyanspiders/pipelines_20200628013036.py | 0eefa73f2f51dc3dddf0fec4bc0d05238f5652f1 | [] | no_license | ydbB/Python001-class01 | d680abc3ea1ccaeb610751e3488421417d381156 | ad80037ccfc68d39125fa94d2747ab7394ac1be8 | refs/heads/master | 2022-11-25T11:27:45.077139 | 2020-07-19T12:35:12 | 2020-07-19T12:35:12 | 272,783,233 | 0 | 0 | null | 2020-06-16T18:28:15 | 2020-06-16T18:28:15 | null | UTF-8 | Python | false | false | 672 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
class MaoyanspidersPipeline(object):
    """Appends each scraped film item to a local pipe-delimited text file."""

    def process_item(self, item, spider):
        """Format one film record, append it to the output file, and return
        the item unchanged so later pipelines can process it."""
        films_name = item['films_name']
        films_type = item['films_type']
        release_time = item['release_time']
        output = f'|{films_name}|\t|{films_type}|\t|{release_time}|\n\n'
        # Bug fix: in the original non-raw string, '\t' in '...\top10...'
        # was interpreted as a TAB character, producing a broken path.
        # NOTE(review): the doubled 'top10.csvtop10.csv' is kept verbatim --
        # confirm the intended file name.
        path = r'D:\py\Python001-class01\week01\homework02\top10.csvtop10.csv'
        with open(path, 'a+', encoding='utf-8') as article:
            article.write(output)
            # No explicit close(): the with-statement closes the file.
        return item
| [
"31039587+ydbB@users.noreply.github.com"
] | 31039587+ydbB@users.noreply.github.com |
e84fbbd512417f983e287bc02ddae6f48acc8762 | b55a8edf12b5298c63966de429b61e97c61c9d46 | /src/generate_random_matrix.py | 19a2a2d4e0ccd7c26dfa385d092676a51f9b0e5e | [] | no_license | tmoldwin/DBasisHDA | c9bb5ba9d90e2526de5c0a78fe3d6cf34031a2d7 | 00063da2c27fa12470aa54d9d0321f95e350de8a | refs/heads/master | 2020-05-30T09:54:01.754277 | 2013-08-30T07:58:05 | 2013-08-30T07:58:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 867 | py | #!/usr/bin/env python
#
"""
Author: Ulrich Norbisrath
This creates on stdout a matrix which can be fed to DBasisHDA
"""
import random
import sys
import getopt
__doc__ = """call: generate_random_matrix height width number_of_ones"""
def main():
    # Python 2 script: prints a random 0/1 matrix of the requested size to
    # stdout in the format expected by DBasisHDA (height, width, then rows).
    # parse command line options
    options = sys.argv[1:]
    if len(options) != 3:
        print __doc__
        sys.exit(0)
    height = int(sys.argv[1])
    width = int(sys.argv[2])
    ones = int(sys.argv[3])
    dimension = height*width
    # NOTE(review): assert is stripped under `python -O`; raise instead if
    # this validation must always run.
    assert ones<=dimension, "Too many ones requested. Dimension is smaller."
    # Build the requested number of "1"s padded with "0"s, then shuffle.
    mysequence = ["1"]*ones +["0"]*(dimension-ones)
    random.shuffle(mysequence)
    # write output
    print height
    print width
    for line in range(height):
        print " ".join(mysequence[line*width:(line+1)*width])
if __name__ == "__main__":
    main()
"devel@mail.ulno.net"
] | devel@mail.ulno.net |
9af4d770117e900fdc4f395633ef86a68d710ca6 | 9bf7d7ace42a61991970fd967c19071a50609b9e | /ipython/Matplotlib(2f)UnfilledHistograms.py | be423b643ffae045db7075154fed13fc75d8295f | [] | no_license | ParsonsRD/SciPy-CookBook | 29b68eace76962ae00735039bc3d488f31714e50 | 52f70a7aa4bd4fd11217a13fc8dd5e277f2388ea | refs/heads/master | 2020-03-17T17:33:28.827269 | 2013-05-17T06:56:54 | 2013-05-17T06:56:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,599 | py | # <markdowncell>
# Here's some template code for plotting histograms that don't look like
# bar charts, but instead have only outlines (like IDL creates).
#
# First define a function that does the bulk of the heavy lifting.
#
# <codecell>
import numpy as np
def histOutline(dataIn, *args, **kwargs):
    """Compute step-style (outline) histogram coordinates for `dataIn`.

    Extra positional/keyword arguments are forwarded to ``np.histogram``.
    Returns ``(bins, data)`` arrays that, when passed to ``plot(bins, data)``,
    draw an unfilled histogram outline (IDL-style) instead of filled bars.
    Both arrays start and end at zero height so the outline is closed.
    """
    (histIn, binsIn) = np.histogram(dataIn, *args, **kwargs)

    # Assumes uniform bin widths, which np.histogram produces unless
    # explicit non-uniform edges are passed in.
    stepSize = binsIn[1] - binsIn[0]

    # Bug fix: ``float`` replaces ``np.float``, an alias removed in
    # NumPy >= 1.24 (it raised AttributeError there).
    bins = np.zeros(len(binsIn)*2 + 2, dtype=float)
    data = np.zeros(len(binsIn)*2 + 2, dtype=float)
    for bb in range(len(binsIn)):
        # Each bin edge is emitted twice to form the vertical step.
        bins[2*bb + 1] = binsIn[bb]
        bins[2*bb + 2] = binsIn[bb] + stepSize
        if bb < len(histIn):
            data[2*bb + 1] = histIn[bb]
            data[2*bb + 2] = histIn[bb]
    # Close the outline: flat at the outer edges, zero height at both ends.
    bins[0] = bins[1]
    bins[-1] = bins[-2]
    data[0] = 0
    data[-1] = 0
    return (bins, data)
# <markdowncell>
# Now we can make plots:
#
# <codecell>
# Make some data to plot
# NOTE(review): randn/figure/clf/subplot/hist/plot/axis come from a
# pylab-style star import in the notebook environment; this cell is not
# standalone Python.
data = randn(500)
figure(2, figsize=(10, 5))
clf()
##########
#
# First make a normal histogram
#
##########
subplot(1, 2, 1)
(n, bins, patches) = hist(data)
# Boundaries
xlo = -max(abs(bins))
xhi = max(abs(bins))
ylo = 0
yhi = max(n) * 1.1
axis([xlo, xhi, ylo, yhi])
##########
#
# Now make a histogram in outline format
#
##########
(bins, n) = histOutline(data)
subplot(1, 2, 2)
plot(bins, n, 'k-')
# Reuse the same axis limits so the two panels are directly comparable.
axis([xlo, xhi, ylo, yhi])
# <markdowncell>
# Here you can find this functionality packaged up into
# <UnfilledHistograms_attachments/histOutline.py>
#
# ![histogram outline output](UnfilledHistograms_attachments/hist_outline.png)
#
"matti.pastell@helsinki.fi"
] | matti.pastell@helsinki.fi |
d809e87594386c61ac619c02129311ca823d2635 | 19af2e1dfe389afc71e26bebaadf7008251e04e2 | /android_test/tensorflow-master/tensorflow/examples/saved_model/integration_tests/export_mnist_cnn.py | 1d36bc234aea7c51e9f284c7ea92aad0b392f6fc | [
"Apache-2.0"
] | permissive | simi48/Ef-If_Jassen | 6c4975216bb4ae4514fe94a8395a5da5c8e8fb2d | 6076839492bff591cf9b457e949999e9167903e6 | refs/heads/master | 2022-10-15T15:36:35.023506 | 2020-12-02T10:38:13 | 2020-12-02T10:38:13 | 173,759,247 | 4 | 0 | Apache-2.0 | 2022-10-04T23:51:35 | 2019-03-04T14:22:28 | PureBasic | UTF-8 | Python | false | false | 8,104 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exports a convolutional feature extractor for MNIST in SavedModel format.
The feature extractor is a convolutional neural network plus a hidden layer
that gets trained as part of an MNIST classifier and then written to a
SavedModel (without the classification layer). From there, use_mnist_cnn.py
picks it up for transfer learning.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import tensorflow.compat.v2 as tf
from tensorflow.examples.saved_model.integration_tests import mnist_util
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
FLAGS = flags.FLAGS
flags.DEFINE_string(
'export_dir', None,
'Directory of exported SavedModel.')
flags.DEFINE_integer(
'epochs', 10,
'Number of epochs to train.')
flags.DEFINE_bool(
'fast_test_mode', False,
'Shortcut training for running in unit tests.')
flags.DEFINE_bool(
'export_print_hparams', False,
'If true, the exported function will print its effective hparams.')
def make_feature_extractor(l2_strength, dropout_rate):
  """Returns a Keras Model to compute a feature vector from MNIST images.

  Two regularized conv layers, max-pooling, dropout, then a 10-unit dense
  layer; layer names are fixed so they can be addressed later.
  """
  def new_regularizer():
    return tf.keras.regularizers.l2(l2_strength)

  layers = tf.keras.layers
  inputs = tf.keras.Input(mnist_util.INPUT_SHAPE)
  x = layers.Conv2D(32, (3, 3), activation='relu', name='conv1',
                    kernel_regularizer=new_regularizer())(inputs)
  x = layers.Conv2D(64, (3, 3), activation='relu', name='conv2',
                    kernel_regularizer=new_regularizer())(x)
  x = layers.MaxPooling2D(pool_size=(2, 2), name='pool1')(x)
  x = layers.Dropout(dropout_rate, name='dropout1')(x)
  x = layers.Flatten(name='flatten')(x)
  x = layers.Dense(10, activation='relu', name='dense1',
                   kernel_regularizer=new_regularizer())(x)
  return tf.keras.Model(inputs=inputs, outputs=x)
def set_feature_extractor_hparams(model, dropout_rate):
  """Sets the dropout rate on the feature extractor's 'dropout1' layer."""
  model.get_layer('dropout1').rate = dropout_rate
def make_classifier(feature_extractor, l2_strength, dropout_rate=0.5):
  """Returns a Keras Model to classify MNIST using feature_extractor."""
  inputs = tf.keras.Input(mnist_util.INPUT_SHAPE)
  features = feature_extractor(inputs)
  dropped = tf.keras.layers.Dropout(dropout_rate)(features)
  probabilities = tf.keras.layers.Dense(
      mnist_util.NUM_CLASSES, activation='softmax',
      kernel_regularizer=tf.keras.regularizers.l2(l2_strength))(dropped)
  return tf.keras.Model(inputs=inputs, outputs=probabilities)
def wrap_keras_model_for_export(model, batch_input_shape,
                                set_hparams, default_hparams):
  """Wraps `model` for saving and loading as SavedModel.

  Args:
    model: the Keras Model to export (weights are shared, not copied).
    batch_input_shape: full input shape including the (possibly None) batch
      dimension, used to build the tracing input signature.
    set_hparams: callable `set_hparams(model, **hparams)` applying
      tensor-valued hyperparameters to `model` before a call.
    default_hparams: dict of hparam name -> default value, or None.

  Returns:
    A tf.train.Checkpoint with `__call__`, variable lists and
    `regularization_losses` attached, ready for tf.saved_model.save().
  """
  if default_hparams is None: default_hparams = {}
  hparam_keys = list(default_hparams.keys())
  hparam_defaults = tuple(default_hparams.values())
  # The goal is to save a function with this argspec...
  argspec = tf_inspect.FullArgSpec(
      args=(['inputs', 'training'] + hparam_keys),
      defaults=((False,) + hparam_defaults),
      varargs=None, varkw=None,
      kwonlyargs=[], kwonlydefaults=None,
      annotations={})
  # ...and this behavior:
  def call_fn(inputs, training, *args):
    if FLAGS.export_print_hparams:
      args = [tf.keras.backend.print_tensor(args[i], 'training=%s and %s='
              % (training, hparam_keys[i]))
             for i in range(len(args))]
    kwargs = dict(zip(hparam_keys, args))
    if kwargs: set_hparams(model, **kwargs)
    return model(inputs, training=training)
  # We cannot spell out `args` in def statement for call_fn, but since
  # tf.function uses tf_inspect, we can use tf_decorator to wrap it with
  # the desired argspec.
  def wrapped(*args, **kwargs):  # TODO(arnoegw): Can we use call_fn itself?
    return call_fn(*args, **kwargs)
  traced_call_fn = tf.function(autograph=False)(
      tf_decorator.make_decorator(call_fn, wrapped, decorator_argspec=argspec))
  # Now we need to trigger traces for
  # - training set to Python values True or False (hence two traces),
  # - tensor inputs of the expected nesting, shape and dtype,
  # - tensor-valued kwargs for hparams, with caller-side defaults.
  # Tracing with partially determined shapes requires an input signature,
  # so we initiate tracing from a helper function with only tensor inputs.
  @tf.function(autograph=False)
  def trigger_traces(inputs, **kwargs):
    return tuple(traced_call_fn(inputs, training=training, **kwargs)
                 for training in (True, False))
  inputs_spec = tf.TensorSpec(shape=batch_input_shape, dtype=tf.float32)
  hparams_spec = {name: tf.TensorSpec.from_tensor(tf.constant(value))
                  for name, value in default_hparams.items()}
  _ = trigger_traces.get_concrete_function(inputs_spec, **hparams_spec)
  # Assemble the output object.
  obj = tf.train.Checkpoint()
  obj.__call__ = traced_call_fn
  obj.trainable_variables = model.trainable_variables
  obj.variables = model.trainable_variables + model.non_trainable_variables
  obj.regularization_losses = [_get_traced_loss(model, i)
                               for i in range(len(model.losses))]
  return obj
def _get_traced_loss(model, i):
  """Returns tf.function for model.losses[i] with a trace for zero args.

  The intended usage is
    [_get_traced_loss(model, i) for i in range(len(model.losses))]
  This is better than
    [tf.function(lambda: model.losses[i], input_signature=[]) for i ...]
  because it avoids capturing a loop index in a lambda, and removes any
  chance of deferring the trace.

  Args:
    model: a Keras Model.
    i: an integer between from 0 up to but to len(model.losses).

  Returns:
    A zero-argument tf.function with one pre-built concrete trace.
  """
  f = tf.function(lambda: model.losses[i])
  _ = f.get_concrete_function()
  return f
def main(argv):
  """Trains the MNIST classifier and exports its feature extractor."""
  del argv

  # Build a complete classifier model using a feature extractor.
  default_hparams = dict(dropout_rate=0.25)
  l2_strength = 0.01  # Not a hparam for inputs -> outputs.
  feature_extractor = make_feature_extractor(l2_strength=l2_strength,
                                             **default_hparams)
  classifier = make_classifier(feature_extractor, l2_strength=l2_strength)

  # Train the complete model.
  (x_train, y_train), (x_test, y_test) = mnist_util.load_reshaped_data(
      fake_tiny_data=FLAGS.fast_test_mode)
  classifier.compile(loss=tf.keras.losses.categorical_crossentropy,
                     optimizer=tf.keras.optimizers.SGD(),
                     metrics=['accuracy'])
  classifier.fit(x_train, y_train,
                 batch_size=128,
                 epochs=FLAGS.epochs,
                 verbose=1,
                 validation_data=(x_test, y_test))

  # Save the feature extractor to a framework-agnostic SavedModel for reuse.
  # Note that the feature_extractor object has not been compiled or fitted,
  # so it does not contain an optimizer and related state.
  exportable = wrap_keras_model_for_export(feature_extractor,
                                           (None,) + mnist_util.INPUT_SHAPE,
                                           set_feature_extractor_hparams,
                                           default_hparams)
  tf.saved_model.save(exportable, FLAGS.export_dir)
if __name__ == '__main__':
  app.run(main)
| [
"TheSiebi@users.noreply.github.com"
] | TheSiebi@users.noreply.github.com |
938bf1cf1edfa71a44cf21caf52bffa5190185a1 | b77cc1448ae2c68589c5ee24e1a0b1e53499e606 | /recruitment/models.py | d7d2e7e6ae37db258fa3fa7f13aad1e057ab63fb | [] | no_license | PregTech-c/Hrp_system | a5514cf6b4c778bf7cc58e8a6e8120ac7048a0a7 | 11d8dd3221497c536dd7df9028b9991632055b21 | refs/heads/master | 2022-10-09T07:54:49.538270 | 2018-08-21T11:12:04 | 2018-08-21T11:12:04 | 145,424,954 | 1 | 1 | null | 2022-10-01T09:48:53 | 2018-08-20T13:58:31 | JavaScript | UTF-8 | Python | false | false | 4,915 | py | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from payroll.models.employee_models import (ServiceLine, Position,
EducationLevel, EmployeeProfile)
# Module-level gender choices (M/F only). Note that Vacancy defines its own
# GENDER_CHOICES that additionally includes 'A' = Any.
GENDER_CHOICES = (
    ('M', 'Male'),
    ('F', 'Female')
)
class District(models.Model):
    """Administrative district; used e.g. as a Vacancy duty station."""
    short_name = models.CharField(max_length=4)
    name = models.CharField(max_length=32)
    def __str__(self):
        return self.name
class Vacancy(models.Model):
    """An open job position that candidates can apply for."""
    VACANCY_TYPE_CHOICES = (
        ('PUB', 'Public'),
        ('INT', 'Internal'),
    )
    GENDER_CHOICES = (
        ('A', 'Any'), ('M', 'Male'), ('F', 'Female')
    )
    class Meta:
        ordering = ['-due_date']  # newest deadline first
    job_title = models.ForeignKey(Position, related_name='vacancies')
    service_line = models.ForeignKey(ServiceLine, null=True, blank=True)
    reports_to = models.ForeignKey(Position, null=True, blank=True)
    due_date = models.DateField()
    minimum_education = models.CharField(max_length=16,
        choices=EducationLevel.EDUC_LEVEL_CHOICES, null=True, blank=True)
    fields_of_education = models.TextField(null=True,
        blank=True, help_text="Separate with semi-colon (;)")
    skills = models.TextField(max_length=256, null=True, blank=True)
    number_of_positions = models.IntegerField(default=1)
    job_description = models.TextField()
    # Bug fix: the default used to be VACANCY_TYPE_CHOICES[0][1] ('Public'),
    # i.e. the display label rather than the stored value, so the default
    # never matched `choices`. Use the stored value 'PUB' instead.
    # (Requires a new migration; existing rows are unaffected.)
    vacancy_type = models.CharField(max_length=8, choices=VACANCY_TYPE_CHOICES,
        default=VACANCY_TYPE_CHOICES[0][0])
    duty_station = models.ForeignKey(District, null=True, blank=True)
    min_age = models.IntegerField(null=True, blank=True)
    max_age = models.IntegerField(null=True, blank=True)
    experience_years = models.IntegerField(null=True, blank=True)
    # NOTE(review): default 'M' looks unintentional given the 'A' = Any
    # choice -- confirm the desired default.
    required_gender = models.CharField(max_length=1, choices=GENDER_CHOICES, default='M')
    def __str__(self):
        return self.job_title.name
class EvaluationStage(models.Model):
    """A named step (e.g. shortlisting, interview) in a vacancy's hiring pipeline."""
    STATUS_CHOICES = (('OPEN', 'Open'), ('CLOSED', 'Closed'))

    class Meta:
        # A stage name may appear only once per vacancy.
        unique_together = ('vacancy', 'name')

    vacancy = models.ForeignKey(Vacancy, related_name='evaluation_stages')
    name = models.CharField(max_length=32)
    description = models.CharField(max_length=64)
    # Defaults to the stored code 'OPEN' (choice tuple's first element).
    status = models.CharField(max_length=8, choices=STATUS_CHOICES,
        default=STATUS_CHOICES[0][0], blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    created_by = models.ForeignKey(EmployeeProfile)

    def __str__(self):
        return self.name
class JobApplication(models.Model):
    """A candidate's application to a Vacancy; may link to an existing employee."""
    APPLICATION_SOURCE_CHOICES = (
        ('NP', 'Newspaper'),
        ('REF', 'Referral'),
        ('WEB', 'Website')
    )
    QUALIFICATION_CHOICES = (
        ('PRIM', 'Primary Level'),
        ('OLEV', 'Secondary Level'),
        ('ALEV', 'Advanced Level'),
        ('DEG', 'Bachelors Degree'),
        ('DIPL', 'Diploma'),
        ('MAST', 'Masters Degree'),
        ('PHD', 'Phd'),
    )

    class Meta:
        ordering = ['-id']  # newest applications first

    # Set only for internal applicants who already have an employee profile.
    employee = models.ForeignKey(EmployeeProfile,
        related_name="job_applications", null=True, blank=True)
    vacancy = models.ForeignKey(Vacancy, related_name='applications')
    first_name = models.CharField(max_length=32)
    surname = models.CharField(max_length=32)
    other_names = models.CharField(max_length=64, null=True, blank=True)
    date_of_birth = models.DateField()
    gender = models.CharField(max_length=1, choices=GENDER_CHOICES, default='M')
    # source = models.CharField(max_length=16, choices=APPLICATION_SOURCE_CHOICES,
    #     default='NP')
    email = models.EmailField()
    tel_number = models.CharField(max_length=16)
    qualification = models.CharField(max_length=16,
        choices=QUALIFICATION_CHOICES, null=True, blank=True)
    experience_years = models.DecimalField(decimal_places=1, max_digits=3)
    education_fields = models.TextField()
    remarks = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    evaluation_stage = models.ForeignKey(EvaluationStage, null=True, blank=True)

    @property
    def age_of_applicant(self):
        """Age in whole years; 365.2425 is the mean Gregorian year length."""
        td = timezone.now().date() - self.date_of_birth
        return int(td.days/365.2425)

    def get_fullname(self):
        """Return 'surname first_name other_names'.

        Missing parts become '' (note: this leaves extra spaces when a
        middle part is empty).
        """
        return "{} {} {}".format(
            self.surname,
            self.first_name if self.first_name else '',
            self.other_names if self.other_names else ''
        )
def upload_directory(instance, filename):
    """Return the storage path for a recruitment upload.

    ``instance`` is required by Django's ``upload_to`` callable signature
    but is not used here.
    """
    subdir = 'recruitment'
    return 'payroll/static/uploads/{0}/{1}'.format(subdir, filename)
class JobApplicationDocument(models.Model):
    """A file (CV, certificate, ...) attached to a JobApplication."""
    job_application = models.ForeignKey(JobApplication,
        related_name='documents')
    document = models.FileField(upload_to=upload_directory, #'uploads/recrutment/%Y/%m/%d/',
        null=True, blank=True)
    description = models.CharField(max_length=32, blank=True)

    def filename(self):
        """Return just the basename of the stored file (no directory part)."""
        import os
        return os.path.basename(self.document.name)
| [
"imugabi64@yahoo.com"
] | imugabi64@yahoo.com |
75d0a5df0a742b764667caa2921eb78e951f0352 | 625a2b46caf94a8bfb1afdbc1b933cd19aaafc33 | /googoo2.py | 1d6ed3712b44ce201725c1f455955a635684e8da | [] | no_license | smilu97/SoftwareStartClass | 4118e5c80d0f8253785aa2ecb72db0935c457fcb | df41a2e93a315ab155e0995132a66a9e4dc86ef4 | refs/heads/master | 2020-12-24T21:21:22.779636 | 2016-05-04T07:42:35 | 2016-05-04T07:42:35 | 58,031,445 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,333 | py | import random
# NOTE: Python 2 script (print statements, raw_input); input() evaluates the
# typed expression, so a_until is expected to be an integer literal.
a_until = input()
b_until = 10

# Easter egg 1: the user guesses a random number with too-big/too-small hints.
if a_until == 123456 :
    print 'You found a easteregg'
    print 'Try to guess my number!'
    gn = random.randint(1,10000)
    un = -1
    while gn != un :
        un = input('number? : ')
        if(un > gn) :
            print 'Too big'
        elif(un < gn) :
            print 'Too small'
        elif(un == gn) :
            print 'Yes!!'
    exit()

# Easter egg 2: the program guesses the user's number -- first doubling an
# upper bound from the user's 'big'/'small'/'correct' replies, then binary
# searching inside [min, max].
if a_until == 654321 :
    print 'You found a easteregg'
    print 'I will guess your number'
    # NOTE(review): min/max shadow the builtins; harmless here, but rename
    # if this code grows.
    min = 0
    max = 5
    findmax = True
    finded = False
    # Phase 1: grow 'max' until it overshoots (or is exactly) the number.
    while findmax == True :
        if finded == True :
            break;
        print str(max) + '?'
        inputvalue = raw_input()
        if inputvalue == 'small' :
            findmax = False;
        elif inputvalue == 'big' :
            max = max * 2
        elif inputvalue == 'correct' :
            print 'Wow...'
            finded = True
    # Phase 2: classic binary search between min and max.
    while finded == False :
        mid = int((min+max)/2)
        print str(mid) + '?'
        inputvalue = raw_input()
        if inputvalue == 'small' :
            max = mid
        elif inputvalue == 'big' :
            min = mid
        elif inputvalue == 'correct' :
            print 'Yes!!'
            finded = True
    exit()
def print_googoo(first, second) :
    # Print multiplication tables: rows 1..first-1 by columns 1..second-1.
    # Both upper bounds are exclusive, matching the while-loops below.
    for i in range(1,first) :
        for j in range(1, second) :
            print str(i)+'*'+str(j)+'='+str(i*j)
# print_googoo(a_until,b_until)
# While-loop version of the same tables: dan a goes 1..a_until-1,
# multiplier b goes 1..b_until-1 (exclusive, like print_googoo above).
a = 1
while a<a_until :
    b = 1
    print '*'*10 + str(a) + 'dan' + '*'*10
    while b<b_until :
        print str(a)+'*'+str(b)+'='+str(a*b)
        b = b + 1
    a = a + 1
print '*'*24 | [
"smilup2244@gmail.com"
] | smilup2244@gmail.com |
54718fced5aa9042bbc2155ec342bb32c5c94923 | a5e6ce10ff98539a94a5f29abbc053de9b957cc6 | /competition/20191019/d.py | 7532b8635b4abeaabced44c4e2eea7583d904bb3 | [] | no_license | shimaw28/atcoder_practice | 5097a8ec636a9c2e9d6c417dda5c6a515f1abd9c | 808cdc0f2c1519036908118c418c8a6da7ae513e | refs/heads/master | 2020-07-26T10:59:51.927217 | 2020-06-13T11:53:19 | 2020-06-13T11:53:19 | 208,622,939 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | N = int(input())
import bisect

# Second input line: the N stick lengths; sort them so bisect applies.
sides = sorted(int(tok) for tok in input().split())

# Count triangles: fix the two shorter sides (indices i < j); every index
# k > j with sides[k] < sides[i] + sides[j] closes a valid triangle.
ans = 0
for i in range(N - 2):
    for j in range(i + 1, N - 1):
        cutoff = bisect.bisect_left(sides, sides[i] + sides[j])
        ans += cutoff - (j + 1)
print(ans) | [
"shima.w28@gmail.com"
] | shima.w28@gmail.com |
b923bd02e0ee500e12d3202dc483f6915cc19651 | de4d88db6ea32d20020c169f734edd4b95c3092d | /aiotdlib/api/functions/get_login_url.py | 56c3af78020b2f5c644a8f4e0f93ebbde87ab0d3 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | thiagosm/aiotdlib | 5cc790a5645f7e4cc61bbd0791433ed182d69062 | 4528fcfca7c5c69b54a878ce6ce60e934a2dcc73 | refs/heads/main | 2023-08-15T05:16:28.436803 | 2021-10-18T20:41:27 | 2021-10-18T20:41:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,574 | py | # =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetLoginUrl(BaseObject):
    """
    Returns an HTTP URL which can be used to automatically authorize the user on a website after clicking an inline button of type inlineKeyboardButtonTypeLoginUrl. Use the method getLoginUrlInfo to find whether a prior user confirmation is needed. If an error is returned, then the button must be handled as an ordinary URL button
    
    :param chat_id: Chat identifier of the message with the button
    :type chat_id: :class:`int`
    
    :param message_id: Message identifier of the message with the button
    :type message_id: :class:`int`
    
    :param button_id: Button identifier
    :type button_id: :class:`int`
    
    :param allow_write_access: True, if the user allowed the bot to send them messages
    :type allow_write_access: :class:`bool`
    
    """

    # '@type' discriminator expected by the TDLib JSON protocol.
    ID: str = Field("getLoginUrl", alias="@type")
    chat_id: int
    message_id: int
    button_id: int
    allow_write_access: bool

    @staticmethod
    def read(q: dict) -> GetLoginUrl:
        # construct() bypasses pydantic validation -- the payload comes
        # straight from TDLib and is treated as already well-formed.
        return GetLoginUrl.construct(**q)
| [
"pylakey@protonmail.com"
] | pylakey@protonmail.com |
ce2896ad06aa191757ad56558bbe1b5d4d665cbc | a9e60d0e5b3b5062a81da96be2d9c748a96ffca7 | /configurations/lab44-config/scripts/BeamlineI06/Users/diffcalc_i06_4circle.py | 3752a2e626b109217f99c1116b9d145d772f50f8 | [] | no_license | openGDA/gda-diamond | 3736718596f47607335ada470d06148d7b57526e | bbb64dcfd581c30eddb210c647db5b5864b59166 | refs/heads/master | 2023-08-16T08:01:11.075927 | 2023-08-15T16:01:52 | 2023-08-15T16:01:52 | 121,757,699 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | # See notes in: http://wiki.diamond.ac.uk/Wiki/Wiki.jsp?page=I06_Diffcalc
try:
import diffcalc
except ImportError:
from gda.configuration.properties import LocalProperties
import sys
diffcalc_path = LocalProperties.get("gda.install.git.loc") + '/diffcalc.git'
sys.path = [diffcalc_path] + sys.path
print diffcalc_path + ' added to GDA Jython path.'
import diffcalc
from gdascripts.pd.dummy_pds import DummyPD
from diffcalc.gdasupport.factory import create_objects, add_objects_to_namespace
CREATE_DUMMY_AXES = False
if CREATE_DUMMY_AXES:
print "!!! Staring dummy diffcalc with tth, th, chi, phi and en."
tth = DummyPD('tth')
th = DummyPD('th')
en = DummyPD('en')
en(1500)
diffcalc_energy=en
else:
print "!!! Staring LIVE diffcalc with th(dd2th), th(ddth), chi(dummy), phi(dummy) and denergy."
tth = dd2th
th = ddth
diffcalc_energy=denergy
chi = DummyPD('chi')
phi = DummyPD('phi')
diffcalcObjects = create_objects(
axis_scannable_list = [tth, th, chi, phi],
energy_scannable = diffcalc_energy,
energy_scannable_multiplier_to_get_KeV = .001,
geometry = 'fourc',
hklverbose_virtual_angles_to_report=('2theta','Bin','Bout','azimuth')
)
#demoCommands = []
#diffcalcObjects['diffcalcdemo'].commands = demoCommands
add_objects_to_namespace(diffcalcObjects, globals())
hkl.level = 6
| [
"fajin.yuan@diamond.ac.uk"
] | fajin.yuan@diamond.ac.uk |
81c5de514378a57338ac86399710a2a2b9fa1e95 | 17331ee8285a1f19e4ca1abd89dac64da381959d | /03-accessing-web-data/extracting_data_xml.py | 208cf4bf87e729149483bec35278bbab8946d4bc | [] | no_license | chaochaocodes/PY4E | 3681367ce548fe9a423adb895fe76efda60521bb | 09930f6187c3388b61903680bcd4a1533b0b4f82 | refs/heads/main | 2023-03-28T11:29:09.209120 | 2021-04-01T02:34:58 | 2021-04-01T02:34:58 | 333,506,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,472 | py | '''
Extracting Data from XML
In this assignment you will write a Python program somewhat similar to http://www.py4e.com/code3/geoxml.py. The program will prompt for a URL, read the XML data from that URL using urllib and then parse and extract the comment counts from the XML data, compute the sum of the numbers in the file.
<comment>
<name>Matthias</name>
<count>97</count>
</comment>
We provide two files for this assignment. One is a sample file where we give you the sum for your testing and the other is the actual data you need to process for the assignment.
Sample data: http://py4e-data.dr-chuck.net/comments_42.xml (Sum=2553)
Actual data: http://py4e-data.dr-chuck.net/comments_1144983.xml (Sum ends with 3)
'''
import urllib.request, urllib.parse, urllib.error
import xml.etree.ElementTree as ET
import ssl

# Ignore SSL certificate errors (course exercise; do not do this in production).
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

# Prompt for a URL, fetch the XML, and sum all <comment><count> values.
while True:
    url = input('Enter location: ')
    if len(url) < 1: break
    print('Retrieving', url)
    xml = urllib.request.urlopen(url, context=ctx)
    data = xml.read()
    print('Retrieved', len(data), 'characters')
    tree = ET.fromstring(data)
    comments = tree.findall('.//comment')
    counts = tree.findall('.//count')
    print('Count:', len(counts))
    total = 0
    for count in comments:
        # Each <comment> holds one <count> child with an integer value.
        count = count.find('count').text
        total += int(count)
    print('Sum:', total)
break | [
"57464564+chaochaocodes@users.noreply.github.com"
] | 57464564+chaochaocodes@users.noreply.github.com |
b2d570a63a13b28c4dd3ba8334a77947e844fd44 | 5492859d43da5a8e292777c31eace71e0a57dedf | /user/admin.py | f807881a567d5d1c59dc38216d12dd9d167794b3 | [
"MIT"
] | permissive | akindele214/181hub_2 | 93ad21dc6d899b6c56fbe200354b1678bb843705 | 48b8814b5f66ad87f9a54721506076ddf70fe9bc | refs/heads/master | 2022-12-13T01:15:07.925556 | 2020-05-19T09:39:57 | 2020-05-19T09:39:57 | 196,470,605 | 1 | 1 | MIT | 2022-12-08T01:22:55 | 2019-07-11T22:04:42 | Python | UTF-8 | Python | false | false | 724 | py | from django.contrib import admin
from .models import Profile, Monetization, UserEmailRequest, Suggestion_Report
# Register your models here.
class UserAdmin(admin.ModelAdmin):
    # Columns shown on the Profile change list.
    list_display = ('user', 'id', 'user_id')
class MonAdmin(admin.ModelAdmin):
    # Columns shown on the Monetization change list.
    list_display = ('user', 'amount', 'views', 'status', 'account_name', 'bank')
class SuggestAdmin(admin.ModelAdmin):
    # Columns shown on the Suggestion_Report change list.
    list_display = ('date', 'content')
class EmailRequestAdmin(admin.ModelAdmin):
    # Columns shown on the UserEmailRequest change list.
    list_display = ('sender', 'receiver', 'ref_code','date_added')
# Wire each model to its admin configuration above.
admin.site.register(Profile, UserAdmin)
admin.site.register(Monetization, MonAdmin)
admin.site.register(UserEmailRequest, EmailRequestAdmin)
admin.site.register(Suggestion_Report, SuggestAdmin)
| [
"yaomingnakel@gmail.com"
] | yaomingnakel@gmail.com |
2a9e0963993064c76e3f5f9de8b5a7bb88ef5f92 | 549270020f6c8724e2ef1b12e38d11b025579f8d | /recipes/logr/0.1.0/conanfile.py | 967c8e88b68f965f50bf1e673113d8e6341a57de | [
"MIT"
] | permissive | conan-io/conan-center-index | 1bcec065ccd65aa38b1fed93fbd94d9d5fe6bc43 | 3b17e69bb4e5601a850b6e006e44775e690bac33 | refs/heads/master | 2023-08-31T11:34:45.403978 | 2023-08-31T11:13:23 | 2023-08-31T11:13:23 | 204,671,232 | 844 | 1,820 | MIT | 2023-09-14T21:22:42 | 2019-08-27T09:43:58 | Python | UTF-8 | Python | false | false | 4,011 | py | from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.microsoft import check_min_vs, is_msvc_static_runtime, is_msvc
from conan.tools.files import apply_conandata_patches, export_conandata_patches, get, copy, rmdir
from conan.tools.build import check_min_cppstd
from conan.tools.scm import Version
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
import os
required_conan_version = ">=1.53.0"
class LogrConan(ConanFile):
    """Conan recipe for the header-only 'logr' logging frontend."""
    name = "logr"
    description = "Logger frontend substitution for spdlog, glog, etc for server/desktop applications"
    license = "BSD-3-Clause"
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/ngrodzitski/logr"
    topics = ("logger", "development", "util", "utils")
    package_type = "header-library"
    settings = "os", "arch", "compiler", "build_type"
    # Which logging backend logr fronts; None builds without a backend.
    options = {
        "backend": ["spdlog", "glog", "log4cplus", "log4cplus-unicode", None],
    }
    default_options = {
        "backend": "spdlog",
    }

    @property
    def _min_cppstd(self):
        # logr requires C++17.
        return 17

    @property
    def _compilers_minimum_version(self):
        # Oldest compiler versions with sufficient C++17 support.
        return {
            "gcc": "7",
            "clang": "7",
            "apple-clang": "10",
            "Visual Studio": "16",
            "msvc": "192",
        }

    def export_sources(self):
        export_conandata_patches(self)

    def layout(self):
        cmake_layout(self, src_folder="src")

    def requirements(self):
        # fmt is always required; the backend library depends on the option.
        self.requires("fmt/9.1.0")
        if self.options.backend == "spdlog":
            self.requires("spdlog/1.11.0")
        elif self.options.backend == "glog":
            self.requires("glog/0.6.0")
        elif self.options.backend in ["log4cplus", "log4cplus-unicode"]:
            self.requires("log4cplus/2.0.5")

    def package_id(self):
        # Header-only: the package is settings/options independent.
        self.info.clear()

    def validate(self):
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, self._min_cppstd)
        minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
        if minimum_version and Version(self.settings.compiler.version) < minimum_version:
            raise ConanInvalidConfiguration(
                f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
            )
        # The two log4cplus backend flavours must match log4cplus's own
        # 'unicode' option.
        if self.options.backend == "log4cplus" and self.options["log4cplus"].unicode:
            raise ConanInvalidConfiguration("backend='log4cplus' requires log4cplus:unicode=False")
        elif self.options.backend == "log4cplus-unicode" and not self.options["log4cplus"].unicode:
            raise ConanInvalidConfiguration("backend='log4cplus-unicode' requires log4cplus:unicode=True")

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        tc = CMakeToolchain(self)
        tc.cache_variables["LOGR_WITH_SPDLOG_BACKEND"] = self.options.backend == "spdlog"
        tc.cache_variables["LOGR_WITH_GLOG_BACKEND"] = self.options.backend == "glog"
        tc.cache_variables["LOGR_WITH_LOG4CPLUS_BACKEND"] = self.options.backend in ["log4cplus", "log4cplus-unicode"]
        tc.cache_variables["LOGR_INSTALL"] = True
        tc.cache_variables["LOGR_BUILD_TESTS"] = False
        tc.cache_variables["LOGR_BUILD_EXAMPLES"] = False
        tc.cache_variables["LOGR_BUILD_BENCHMARKS"] = False
        tc.generate()
        # NOTE(review): 'dpes' is a typo for 'deps' (harmless, local name only).
        dpes = CMakeDeps(self)
        dpes.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def package(self):
        copy(self, pattern="LICENSE", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
        cmake = CMake(self)
        cmake.install()
        # Header-only package: drop the empty lib dir the install step creates.
        rmdir(self, os.path.join(self.package_folder, "lib"))

    def package_info(self):
        # No binaries or libraries are shipped, only headers.
        self.cpp_info.bindirs = []
        self.cpp_info.libdirs = []
| [
"noreply@github.com"
] | conan-io.noreply@github.com |
1636eedda11d84791b0a596bc3cca51a8acf751e | b5cc6d7b5f7ccea36fce4eab961979404414f8b0 | /kent-report/py/matrix_scaling.py | 5aec834ea62a3306e75f6bd37e8e54ef70ea1205 | [] | no_license | MiroK/cutFEM-beam | adf0c925dbe64b370dab48e82335617450675f5d | 2fb3686804e836d4031fbf231a36a0f9ac8a3012 | refs/heads/master | 2021-01-21T23:54:32.868307 | 2015-02-14T13:14:59 | 2015-02-14T13:14:59 | 25,625,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,097 | py | from eigen_poisson import mass_matrix as Meig1d
from eigen_poisson import laplacian_matrix as Aeig1d
from shen_poisson import mass_matrix as Mshen1d
from shen_poisson import laplacian_matrix as Ashen1d
import numpy as np
from numpy.linalg import cond
# Build 2d matrices
def Meig2d(n):
    """2d mass matrix w.r.t. the Eigen basis.

    Built as the Kronecker square of the 1d mass matrix.
    """
    mass_1d = Meig1d(n)
    return np.kron(mass_1d, mass_1d)
def Aeig2d(n):
    """2d stiffness matrix w.r.t. the Eigen basis: kron(A, M) + kron(M, A)."""
    stiff_1d = Aeig1d(n)
    mass_1d = Meig1d(n)
    return np.kron(stiff_1d, mass_1d) + np.kron(mass_1d, stiff_1d)
def Mshen2d(n):
    """2d mass matrix w.r.t. the Shen basis.

    Built as the Kronecker square of the 1d mass matrix.
    """
    mass_1d = Mshen1d(n)
    return np.kron(mass_1d, mass_1d)
def Ashen2d(n):
    """2d stiffness matrix w.r.t. the Shen basis: kron(A, M) + kron(M, A)."""
    stiff_1d = Ashen1d(n)
    mass_1d = Mshen1d(n)
    return np.kron(stiff_1d, mass_1d) + np.kron(mass_1d, stiff_1d)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
    from matplotlib import rc
    rc('text', usetex=True)
    rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
    import matplotlib.pyplot as plt

    ns = np.arange(2, 20)
    # Plot 2d condition numbers (flip the 'if False' to plot the 1d ones).
    if False:
        condMe = np.array([cond(Meig1d(n)) for n in ns])
        condAe = np.array([cond(Aeig1d(n)) for n in ns])
        condMs = np.array([cond(Mshen1d(n)) for n in ns])
        condAs = np.array([cond(Ashen1d(n)) for n in ns])
    else:
        condMe = np.array([cond(Meig2d(n)) for n in ns])
        condAe = np.array([cond(Aeig2d(n)) for n in ns])
        condMs = np.array([cond(Mshen2d(n)) for n in ns])
        condAs = np.array([cond(Ashen2d(n)) for n in ns])

    # Common marker == common basis, common color == common matrix
    plt.figure()
    plt.loglog(ns, condMe, label='$M_E$', color='b', marker='s', linestyle='--')
    plt.loglog(ns, condAe, label='$A_E$', color='g', marker='s', linestyle='--')
    plt.loglog(ns, condMs, label='$M_S$', color='b', marker='o', linestyle='--')
    plt.loglog(ns, condAs, label='$A_S$', color='g', marker='o', linestyle='--')
    plt.legend(loc='best')
    plt.xlabel('$n$')
    plt.ylabel('$\kappa$')
    plt.savefig('matrix_scaling_2d.pdf')
    plt.show()
| [
"miroslav.kuchta@gmail.com"
] | miroslav.kuchta@gmail.com |
69c00bce760359ee971f98708caf1821b7d4f64d | 81e6391b9db249296ec84f6524093cf41b581f31 | /단계별로 풀어보기/16. 수학3/[2609] 최대공약수와 최소공배수.py | fb8c37e1e3d2c077f89732f90ea2d7ba29967031 | [] | no_license | jaeehooon/baekjoon_python | e991be4b510d642f72f625b898d20451dc920d7c | 295776309a883338bfbf51c33caf6dc6629493ca | refs/heads/master | 2023-04-15T14:22:21.281930 | 2021-04-26T02:15:09 | 2021-04-26T02:15:09 | 294,137,750 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 434 | py | import sys
# Find the greatest common divisor with the Euclidean algorithm
def gcd(a, b):
    """Return the greatest common divisor of two positive integers.

    Uses the remainder-based Euclidean algorithm, which takes
    O(log(min(a, b))) steps.  The previous subtraction-based loop needed
    O(max(a, b) / min(a, b)) iterations -- e.g. gcd(1, 10**9) performed
    roughly a billion subtractions.
    """
    while b:
        a, b = b, a % b
    return a
def lcm(c, d):
    """Return the least common multiple of two positive integers.

    Since lcm(c, d) * gcd(c, d) == c * d, a single division suffices; the
    previous gcd * (c // gcd) * (d // gcd) computed the same value with
    extra divisions.
    """
    return c * d // gcd(c, d)
if __name__ == '__main__':
    # Input format (Baekjoon 2609): two integers on one line, e.g. "24 18".
    num_a, num_b = map(int, sys.stdin.readline().rstrip('\n').split())
    print(gcd(num_a, num_b))
    print(lcm(num_a, num_b))
| [
"qlenfr0922@gmail.com"
] | qlenfr0922@gmail.com |
bff9007a8afa686e5c49c170d58711ff0baa3f89 | 0740320ac36a5943a96a8ff005a604c1ae354bed | /leave_serration/main3/main.py | aec1ca6d60d7f84244ec5ae03be8604c98d705f4 | [] | no_license | dezhili/leaves-image-processing | 0a4d4083b530bbb74b3f6d02c8370ccb61841127 | 62e2122e2aae39235c4e60b6526798270b1b1ed3 | refs/heads/master | 2021-01-21T12:31:19.568032 | 2017-09-01T06:01:48 | 2017-09-01T06:01:48 | 102,078,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,120 | py | import cv2
import matplotlib.pyplot as plt
import get_leave_boundary
from get_leave_top_bottom import get_leave_top_bottom
from get_leave_top_bottom import search_points
import math
image = '14.jpg'
# image = '442_100_1.jpg'
img = cv2.imread(image, 0)
contours = get_leave_boundary.get_leave_boundary(image)
p1_t, p2_b, k1, k2 = get_leave_top_bottom(image, contours)
p3_c, p4_r, p5_l = search_points(p1_t, p2_b, k2, contours, search_heng=True)
p6_c, p7_r, p8_l = search_points(p1_t, p3_c, k2, contours, search_heng=True)
p9_c, p10_r, p11_l = search_points(p2_b, p3_c, k2, contours, search_heng=True)
p12_c, p13_t, p14_b = search_points(p5_l, p3_c, k1, contours, search_heng=False)
p15_c, p16_t, p17_b = search_points(p4_r, p3_c, k1, contours, search_heng=False)
# 计算长度 宽度
len_c = math.sqrt((p1_t[0]-p2_b[0])**2 + (p1_t[1]-p2_b[1])**2) / 118.11
len_l = math.sqrt((p13_t[0]-p14_b[0])**2 + (p13_t[1]-p14_b[1])**2) / 118.11
len_r = math.sqrt((p16_t[0]-p17_b[0])**2 + (p16_t[1]-p17_b[1])**2) / 118.11
wid_c = math.sqrt((p5_l[0]-p4_r[0])**2 + (p5_l[1]-p4_r[1])**2) / 118.11
wid_t = math.sqrt((p8_l[0]-p7_r[0])**2 + (p8_l[1]-p7_r[1])**2) / 118.11
wid_b = math.sqrt((p11_l[0]-p10_r[0])**2 + (p11_l[1]-p10_r[1])**2) / 118.11
print('中间的长度为:\t', len_c)
print('左边的长度为:\t', len_l)
print('右边的长度为:\t', len_r)
print('中间的宽度为:\t', wid_c)
print('上边的宽度为:\t', wid_t)
print('下边的宽度为:\t', wid_b)
# plot
fig, axes = plt.subplots(1, 2, figsize=(8, 8))
ax0, ax1 = axes.ravel()
ax0.imshow(img, plt.cm.gray)
ax0.set_title('original image')
rows, cols = img.shape
ax1.axis([0, rows, cols, 0])
ax1.plot(contours[:, 0], contours[:, 1], linewidth=2)
ax1.axis('image')
ax1.set_title('boundary and lines')
ax1.plot([p1_t[0], p2_b[0]], [p1_t[1], p2_b[1]], 'r-')
ax1.plot([p4_r[0], p5_l[0]], [p4_r[1], p5_l[1]], 'g-')
ax1.plot([p7_r[0], p8_l[0]], [p7_r[1], p8_l[1]], 'g-')
ax1.plot([p10_r[0], p11_l[0]], [p10_r[1], p11_l[1]], 'g-')
ax1.plot([p13_t[0], p14_b[0]], [p13_t[1], p14_b[1]], 'r-')
ax1.plot([p16_t[0], p17_b[0]], [p16_t[1], p17_b[1]], 'r-')
plt.show() | [
"599709657@qq.com"
] | 599709657@qq.com |
99d8c7c2a932fc594c79ee725fbc898cb2e020c1 | 371f94ee0f44feeaf68d740f24bd9bb27f83c8aa | /app/migrations/0002_auto_20161110_1920.py | 9061620da8349eb3f87829559618cfd001f3f03d | [] | no_license | cjredmond/final_proto | f69d62da97428af3d52020054e9a327abbff8d15 | 28d73c7f25a7e8fae07d78b12964cdefeb3103de | refs/heads/master | 2020-09-12T04:57:59.279571 | 2016-11-11T22:44:47 | 2016-11-11T22:44:47 | 73,517,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-10 19:20
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: relax Team.league to allow NULL/blank values."""

    dependencies = [
        ('app', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='team',
            name='league',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='app.League'),
        ),
    ]
| [
"connor.redmond@gmail.com"
] | connor.redmond@gmail.com |
8f143b6444dd282b2bd0c6c3b9dcf9efc2353a8a | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/services/services/batch_job_service/transports/base.py | 7c0a5731d78ea08d03580092102147c29aebb834 | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,647 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
import google.auth # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.ads.googleads.v8.resources.types import batch_job
from google.ads.googleads.v8.services.types import batch_job_service
from google.longrunning import operations_pb2 # type: ignore
# Report the installed google-ads version in the user-agent; fall back to a
# version-less ClientInfo when the distribution is not installed.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-ads',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class BatchJobServiceTransport(metaclass=abc.ABCMeta):
    """Abstract transport class for BatchJobService."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/adwords',
    )

    def __init__(
            self, *,
            host: str = 'googleads.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials is None:
            credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)

        # Save the credentials.
        self._credentials = credentials

        # Lifted into its own function so it can be stubbed out during tests.
        self._prep_wrapped_messages(client_info)

    def _prep_wrapped_messages(self, client_info):
        # Precomputed wrapped methods: each abstract RPC property is wrapped
        # with gapic retry/timeout/user-agent handling once, up front.
        self._wrapped_methods = {
            self.mutate_batch_job: gapic_v1.method.wrap_method(
                self.mutate_batch_job,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_batch_job: gapic_v1.method.wrap_method(
                self.get_batch_job,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_batch_job_results: gapic_v1.method.wrap_method(
                self.list_batch_job_results,
                default_timeout=None,
                client_info=client_info,
            ),
            self.run_batch_job: gapic_v1.method.wrap_method(
                self.run_batch_job,
                default_timeout=None,
                client_info=client_info,
            ),
            self.add_batch_job_operations: gapic_v1.method.wrap_method(
                self.add_batch_job_operations,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Return the client designed to process long-running operations."""
        raise NotImplementedError

    # The properties below are implemented by concrete transports (e.g. gRPC)
    # and return the callable for each RPC.

    @property
    def mutate_batch_job(self) -> typing.Callable[
            [batch_job_service.MutateBatchJobRequest],
            batch_job_service.MutateBatchJobResponse]:
        raise NotImplementedError

    @property
    def get_batch_job(self) -> typing.Callable[
            [batch_job_service.GetBatchJobRequest],
            batch_job.BatchJob]:
        raise NotImplementedError

    @property
    def list_batch_job_results(self) -> typing.Callable[
            [batch_job_service.ListBatchJobResultsRequest],
            batch_job_service.ListBatchJobResultsResponse]:
        raise NotImplementedError

    @property
    def run_batch_job(self) -> typing.Callable[
            [batch_job_service.RunBatchJobRequest],
            operations_pb2.Operation]:
        raise NotImplementedError

    @property
    def add_batch_job_operations(self) -> typing.Callable[
            [batch_job_service.AddBatchJobOperationsRequest],
            batch_job_service.AddBatchJobOperationsResponse]:
        raise NotImplementedError
# Public API of this module.
__all__ = (
    'BatchJobServiceTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
3fcd539d30d02cd5409abaeb8516fa34204dfe44 | 548c26cc8e68c3116cecaf7e5cd9aadca7608318 | /feedback/migrations/0004_auto__add_field_feedback_type__chg_field_feedback_email__chg_field_fee.py | 82b0d78830f50c8b2e808681d8cdc1c75dc68e31 | [] | no_license | Morphnus-IT-Solutions/riba | b69ecebf110b91b699947b904873e9870385e481 | 90ff42dfe9c693265998d3182b0d672667de5123 | refs/heads/master | 2021-01-13T02:18:42.248642 | 2012-09-06T18:20:26 | 2012-09-06T18:20:26 | 4,067,896 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,758 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add Feedback.type and make email/phone/name nullable."""
        # Adding field 'Feedback.type'
        db.add_column('feedback_feedback', 'type', self.gf('django.db.models.fields.CharField')(default='feedback', max_length=25), keep_default=False)

        # Changing field 'Feedback.email'
        db.alter_column('feedback_feedback', 'email', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True))

        # Changing field 'Feedback.phone'
        db.alter_column('feedback_feedback', 'phone', self.gf('django.db.models.fields.CharField')(max_length=15, null=True, blank=True))

        # Changing field 'Feedback.name'
        db.alter_column('feedback_feedback', 'name', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True))
def backwards(self, orm):
# Deleting field 'Feedback.type'
db.delete_column('feedback_feedback', 'type')
# Changing field 'Feedback.email'
db.alter_column('feedback_feedback', 'email', self.gf('django.db.models.fields.EmailField')(max_length=75))
# Changing field 'Feedback.phone'
db.alter_column('feedback_feedback', 'phone', self.gf('django.db.models.fields.CharField')(max_length=15))
# Changing field 'Feedback.name'
db.alter_column('feedback_feedback', 'name', self.gf('django.db.models.fields.CharField')(max_length=50))
models = {
'accounts.client': {
'Meta': {'object_name': 'Client'},
'confirmed_order_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> order@chaupaati.com'", 'max_length': '500'}),
'confirmed_order_helpline': ('django.db.models.fields.CharField', [], {'default': "'0-922-222-1947'", 'max_length': '25'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'noreply_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> noreply@chaupaati.com'", 'max_length': '200'}),
'pending_order_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> lead@chaupaati.com'", 'max_length': '500'}),
'pending_order_helpline': ('django.db.models.fields.CharField', [], {'default': "'0-922-222-1947'", 'max_length': '25'}),
'share_product_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> share@chaupaati.com'", 'max_length': '500'}),
'signature': ('django.db.models.fields.TextField', [], {}),
'sms_mask': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'feedback.feedback': {
'Meta': {'object_name': 'Feedback'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Client']", 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'feedback': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'feedback'", 'max_length': '25'})
}
}
complete_apps = ['feedback']
| [
"dalal.saumil@gmail.com"
] | dalal.saumil@gmail.com |
198bc43024f973401ae0dec802d22885249ca303 | 1a114943c92a5db40034470ff31a79bcf8ddfc37 | /stdlib_exam/socketserver-example-1.py | bcde479b54c4867b59ab85012d4fe7549633e7e7 | [] | no_license | renwl/mylinux | 1924918599efd6766c266231d66b2a7ed6f6cdd1 | 0602fc6d2b0d254a8503e57310f848fc3e1a73b4 | refs/heads/master | 2020-07-10T22:12:03.259349 | 2017-01-02T12:32:04 | 2017-01-02T12:32:04 | 66,467,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | import socketserver
import time
# user-accessible port
PORT = 8037
# reference time: seconds between 1900-01-01 (RFC 868 epoch) and 1970-01-01
TIME1970 = 2208988800
class TimeRequestHandler(socketserver.StreamRequestHandler):
    """RFC 868 Time-protocol handler: sends the current time as a 32-bit
    unsigned big-endian integer of seconds since 1900."""
    def handle(self):
        print("connection from", self.client_address)
        t = int(time.time()) + TIME1970
        # BUG FIX: the original concatenated chr() values into a *str*;
        # wfile.write() requires bytes under Python 3 (this module is
        # py3-only given `import socketserver`), so it raised TypeError.
        # Encode the 32-bit value explicitly instead.
        self.wfile.write((t & 0xFFFFFFFF).to_bytes(4, "big"))
# Bind the time service on all interfaces and serve until interrupted.
server = socketserver.TCPServer(("", PORT), TimeRequestHandler)
print("listening on port", PORT)
server.serve_forever()
## connection from ('127.0.0.1', 1488)
## connection from ('127.0.0.1', 1489)
## ...
| [
"wenliang.ren@quanray.com"
] | wenliang.ren@quanray.com |
6aaeb6f165fa71b2e82277ab1c4666f025d32a49 | 2772b58d09f3cc8fad2b4354dee7a06c481b7d23 | /forum/forms.py | 46319a0016cf611d0227b3061a330a88652e0231 | [
"MIT"
] | permissive | shmilyoo/ggxxBBS | 33cef10ac2284010028556c6d946566de05537da | cef6408e533bd0b0f57c3e2f5da4e93ea07c4331 | refs/heads/master | 2020-04-25T02:56:02.116161 | 2019-02-25T07:57:02 | 2019-02-25T07:57:02 | 172,458,753 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,294 | py | # -*- coding: utf-8 -*-
import datetime
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.core.validators import RegexValidator
from django.utils.html import strip_tags, remove_tags
from django.utils.safestring import mark_safe
from forum import models as forumModels
from forum.helper import check_empty
from ggxxBBS import config
from ckeditor.widgets import CKEditorWidget
form_tag_regex = RegexValidator(r'^[a-zA-Z][a-zA-Z0-9]+$')
class ForumForm(forms.ModelForm):
    """Form for creating/editing a Forum (board): validates permission
    ordering, icon size, parent selection and tag format."""
    class Meta:
        model = forumModels.Forum
        fields = ['belong', 'parent_id', 'name', 'tag', 'descr', 'content', 'allow_topic', 'icon', 'topic_credit',
                  'post_credit', 'visit_level', 'topic_level', 'post_level']
        widgets = {
            # 'parent_id':forms.Select(),
            'content': CKEditorWidget(config_name='basic'),
        }
        # initial = {
        #     'parent_id':[('0' * 32, u'首页')]
        # }
        error_messages = {
            NON_FIELD_ERRORS: {
                # (belong, name) and (belong, tag) must each be unique.
                'unique_together': u"所属论坛与版块名,所属论坛与标签,必须唯一",
            }
        }
    def clean(self):
        # Cross-field rule: required level to post a topic >= to reply >= to visit.
        data = super(ForumForm, self).clean()
        visit_level = data.get('visit_level')
        topic_level = data.get('topic_level')
        post_level = data.get('post_level')
        if not topic_level >= post_level >= visit_level:
            self.add_error('visit_level', u'允许发帖需要权限≥回帖≥访问')
    def clean_icon(self):
        # Reject icons larger than the configured avatar byte limit.
        icon = self.cleaned_data.get('icon')
        if icon and icon.size > config.max_avatar_size:
            self.add_error('icon', u'上传图标不能超过' + str(config.max_avatar_size) + u'字节')
        return icon
    def clean_belong(self):
        # '0' is the placeholder "choose a forum" option.
        belong = self.cleaned_data.get('belong')
        if belong == '0':
            self.add_error('belong', u'请选择论坛')
        return belong
    def clean_tag(self):
        # Delegate to the module-level RegexValidator; any failure maps to
        # one user-facing message.
        tag = self.cleaned_data.get('tag')
        try:
            form_tag_regex(tag)
        except:
            self.add_error('tag', u'标签由1个及以上的字母或数字组成,由字母开头')
        return tag
class TopicForm(forms.ModelForm):
    """Form for creating/editing a Topic; rejects empty rich-text content."""
    class Meta:
        model = forumModels.Topic
        fields = ['read_level', 'title', 'content', 'title_bold', 'is_hide', 'is_poll']
        widgets = {
            'content': CKEditorWidget(config_name='default'),
            'subject': forms.Select(),
        }
    def clean_title(self):
        # Currently a pass-through; kept as an explicit hook for future rules.
        title = self.cleaned_data.get('title')
        return title
    def clean_content(self):
        # check_empty() treats HTML that renders as blank as empty.
        content = self.cleaned_data.get('content')
        if check_empty(content):
            self.add_error('content', u'帖子内容不能为空')
        return content
class PostFormSimple(forms.ModelForm):
    """Reply form with the reduced ('basic') editor toolbar."""
    class Meta:
        model = forumModels.Post
        fields = ['content']
        widgets = {
            'content': CKEditorWidget(config_name='basic')
        }
    def clean_content(self):
        # Same emptiness rule as TopicForm.clean_content.
        content = self.cleaned_data.get('content')
        if check_empty(content):
            self.add_error('content', u'帖子内容不能为空')
        return content
class PostFormFull(forms.ModelForm):
    """Reply form with the full ('default') editor toolbar."""
    class Meta:
        model = forumModels.Post
        fields = ['content']
        widgets = {
            'content': CKEditorWidget(config_name='default')
        }
    def clean_content(self):
        # Same emptiness rule as TopicForm.clean_content.
        content = self.cleaned_data.get('content')
        if check_empty(content):
            self.add_error('content', u'帖子内容不能为空')
        return content
class AttachmentForm(forms.ModelForm):
    """Form for uploading a post attachment with a download permission level."""
    class Meta:
        model = forumModels.Attachment
        fields = ['file', 'file_name', 'download_level']
class PollForm(forms.ModelForm):
    """Form for attaching a poll to a topic; normalizes the expiry field."""
    class Meta:
        model = forumModels.Poll
        fields = ['descr', 'is_multi', 'is_visible', 'max_choices', 'expiry']
    def clean_expiry(self):
        # Parse 'YYYY-MM-DD HH:MM'; anything unparsable falls back to one
        # week from now.  The normalized value is returned as a unicode
        # string (Python 2 codebase).
        expiry = self.cleaned_data.get('expiry')
        try:
            poll_expire_time = datetime.datetime.strptime(expiry, '%Y-%m-%d %H:%M')
        except Exception as e:
            poll_expire_time = datetime.datetime.now() + datetime.timedelta(days=7)
        return unicode(poll_expire_time.strftime('%Y-%m-%d %H:%M'))
| [
"fighter_yy@qq.com"
] | fighter_yy@qq.com |
28362fceebf77532f2fe2a04d4d1ea54fd38a7e1 | be50b4dd0b5b8c3813b8c3158332b1154fe8fe62 | /Math/Python/FizzBuzz.py | 7ba15c495b1f34a12eaa2eada55c1970b97ecf14 | [] | no_license | Zimmermann25/InterviewBit | a8d89e090068d9644e28085625963c8ce75d3dff | 6d2138e740bd5ba8eab992d9bf090977e077bfc5 | refs/heads/main | 2023-03-24T18:12:48.244950 | 2021-03-24T14:36:48 | 2021-03-24T14:36:48 | 350,835,917 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 434 | py | class Solution:
# @param A : integer
# @return a list of strings
def fizzBuzz(self, A):
arr = [i+1 for i in range(A)]
for i in range(1,A+1):
if i % 5==0 and i %3==0:
arr[i-1] = 'FizzBuzz'
elif i % 5==0:
arr[i-1] = 'Buzz'
elif i%3==0:
arr[i-1] = 'Fizz'
return arr
| [
"noreply@github.com"
] | Zimmermann25.noreply@github.com |
5dd937a052275fa1dbf2c1a3cb885c1a790e34ba | 226b1c73a706f4734834196d18305d4d2c873589 | /verification_libs/axiMaster.py | ec5de04702fc9521260d68cd88cb730c51288816 | [] | no_license | ocakgun/vlsistuff | 43b4b07ae186b8d2360d11c57cd10b861e96bcbe | 776c07f5d0c40fe7d410b5c85e7381017d4dab64 | refs/heads/master | 2022-06-13T14:40:22.641310 | 2020-05-08T11:09:00 | 2020-05-08T11:09:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,722 | py |
# Minimal usage example, kept as an importable string.
DEMO = '''
axi = axiMaster.axiMasterClass('tb',Monitors)
axi.makeRead(1,16,0x100,4)
axi.makeWrite(1,16,0x100,4)
axi.wait(100)
'''
import os,sys,string,types
import logs
import veri
class axiMasterClass:
    """Bus-functional AXI master for Verilog co-simulation (Python 2).

    Transactions queued by makeRead/makeWrite are dispatched to per-channel
    AR/AW/W queues and driven into the DUT one beat per run() call, using the
    co-simulation helpers `veri` (force/peek) and `logs`.
    """
    def __init__(self,Path,Monitors):
        # Path: hierarchical prefix of the DUT's AXI signals.
        # Monitors: testbench callback list; registering self makes run()
        # get invoked every simulation cycle.
        self.Path = Path
        Monitors.append(self)
        self.Queue=[]        # pending (channel, command) pairs
        self.arQueue=[]      # read-address channel commands
        self.awQueue=[]      # write-address channel commands
        self.wQueue=[]       # write-data channel commands
        self.Rid = 1         # next transaction id (shared by reads/writes)
        self.waiting=0       # cycles left of an explicit 'wait'
        self.datawidth = 0   # rdata width in bits, discovered lazily
        self.readAction = False   # optional callback(rid, rlast, rdata)
        self.READS=[]
        # rready throttling: assert for rreadyOnes beats, then deny for
        # rreadyDenys beats, to exercise back-pressure.
        self.rreadyCount = 0
        self.rreadyOnes = 3
        self.rreadyDeny = 0
        self.rreadyDenys = 10
        self.renames={}      # explicit signal-name overrides
        self.prefix=''
        self.suffix=''
    def rename(self,Sig):
        # Map a logical AXI signal name to the DUT's actual name.
        if Sig in self.renames:
            return self.renames[Sig]
        if self.prefix:
            Sig = self.prefix + Sig
        if self.suffix:
            Sig = Sig + self.suffix
        return Sig
    def peek(self,Sig):
        # Read a signal value (as int) via the logs helper.
        Sig = self.rename(Sig)
        return logs.peek('%s.%s'%(self.Path,Sig))
    def bpeek(self,Sig):
        # Read a signal's raw binary string (used to learn the data width).
        Sig = self.rename(Sig)
        return veri.peek('%s.%s'%(self.Path,Sig))
    def force(self,Sig,Val):
        # Drive a signal on the DUT.
        Sig = self.rename(Sig)
        veri.force('%s.%s'%(self.Path,Sig),str(Val))
    def makeRead(self,Burst,Len,Address,Size=4):
        # Queue one AR beat asserting arvalid, followed by a deasserting beat.
        self.Queue.append(('ar','force arvalid=1 arburst=%s arlen=%s araddr=%s arsize=%s arid=%s'%(Burst,Len-1,Address,Size,self.Rid)))
        if self.readAction:
            self.READS.append((Len,Address,self.Rid))
        self.Queue.append(('ar','force arvalid=0 arburst=0 arlen=0 araddr=0 arsize=0 arid=0'))
        self.Rid += 1
    def makeWriteWstrb(self,Burst,Len,Address,Size=4,Wstrb='auto',Wdatas=[]):
        # Like makeWrite but with an explicit write-strobe value.
        # NOTE(review): Wdatas=[] is a shared mutable default, and the
        # StringType branch below pops Wdatas a second time -- confirm.
        if Wstrb == 'auto':
            self.makeWrite(Burst,Len,Address,Size,Wdatas)
            return
        self.Queue.append(('aw','force awvalid=1 awburst=%s awlen=%s awaddr=%s awsize=%s awid=%s'%(Burst,Len-1,Address,Size,self.Rid)))
        self.Queue.append(('aw','force awvalid=0 awburst=0 awlen=0 awaddr=0 awsize=0 awid=0'))
        self.Rid += 1
        for ii in range(Len):
            if len(Wdatas)==0:
                # No data supplied: synthesize a recognizable pattern.
                Wdata = '0x%08x%08x%08x%08x'%(self.Rid+0x1000*ii,0x100+self.Rid+0x1000*ii,0x200+self.Rid+0x1000*ii,0x300+self.Rid+0x1000*ii)
            else:
                Wdata = Wdatas.pop(0)
                if type(Wdata)!=types.StringType:
                    Wdata = hex(Wdatas.pop(0))
            if ii==(Len-1):
                Wlast=1
            else:
                Wlast = 0
            self.Queue.append(('w','force wvalid=1 wdata=%s wstrb=0x%x wlast=%d'%(Wdata,Wstrb,Wlast)))
            self.Queue.append(('w','force wvalid=0 wdata=0 wstrb=0 wlast=0'))
    def makeWrite(self,Burst,Len,Address,Size=4,Wdatas=[]):
        # Queue a full write burst: AW beat, then Len W beats (wstrb derived
        # from Size).  NOTE(review): Wdatas=[] shared mutable default.
        self.Queue.append(('aw','force awvalid=1 awburst=%s awlen=%s awaddr=%s awsize=%s awid=%s'%(Burst,Len-1,Address,Size,self.Rid)))
        self.Queue.append(('aw','force awvalid=0 awburst=0 awlen=0 awaddr=0 awsize=0 awid=0'))
        logs.log_info('makeWrite >>>>> %x size=%s'%(Address,Size))
        self.Rid += 1
        for ii in range(Len):
            if len(Wdatas)==0:
                Wdata = '0x%08x%08x%08x%08x'%(self.Rid+0x1000*ii,0x100+self.Rid+0x1000*ii,0x200+self.Rid+0x1000*ii,0x300+self.Rid+0x1000*ii)
            elif (type(Wdatas[0])==types.StringType):
                Wdata = Wdatas.pop(0)
            else:
                Wdata = hex(Wdatas.pop(0))
            if ii==(Len-1):
                Wlast=1
            else:
                Wlast = 0
            # All byte lanes enabled for a beat of 2**Size bytes.
            Wstrb = (1<<(1<<Size))-1
            self.Queue.append(('w','force wvalid=1 wdata=%s wstrb=0x%x wlast=%d'%(Wdata,Wstrb,Wlast)))
            self.Queue.append(('w','force wvalid=0 wdata=0 wstrb=0 wlast=0'))
    def wait(self,Many):
        # Insert an idle period of Many cycles into the command stream.
        self.Queue.append(('this','wait %d'%Many))
    def finish(self,Many):
        # Wait Many cycles, then end the simulation.
        self.Queue.append(('this','wait %d'%Many))
        self.Queue.append(('this','finish'))
    def run(self):
        # Called once per clock by the testbench scheduler.
        # logs.log_info('runn lenaw=%d lenar=%d lenq=%d lenw=%d'%(len(self.awQueue),len(self.arQueue),len(self.Queue),len(self.wQueue)))
        self.runResponce()
        self.runAw()
        self.runAr()
        self.runW()
        if self.waiting>0:
            self.waiting -= 1
            return
        self.runQueue()
    def manageRready(self,What):
        # Drive rready with a duty cycle (rreadyOnes on / rreadyDenys off)
        # to exercise read-channel back-pressure.
        if What==1:
            print '>>>',What,self.rreadyCount,self.rreadyDeny,self.peek('rvalid')
        if What==0:
            self.force('rready',0)
            self.rreadyCount=0
            return
        if self.rreadyDeny>0:
            self.force('rready',0)
            self.rreadyDeny += 1
            if self.rreadyDeny > self.rreadyDenys:
                self.rreadyDeny=0
                self.rreadyCount=0
        elif self.rreadyCount==self.rreadyOnes:
            self.force('rready',0)
            self.rreadyDeny = 1
            self.rreadyCount=0
        else:
            self.force('rready',1)
            self.rreadyCount += 1
    def runResponce(self):
        # Sample the read-data channel; log each beat and invoke the
        # optional readAction callback.
        if self.peek('rvalid')==1:
            self.manageRready(1)
            rdata = self.peek('rdata')
            if self.datawidth==0:
                # Learn the bus width (in bits) from the raw binary string.
                rrr = self.bpeek('rdata')
                self.datawidth = len(rrr)
            rid = self.peek('rid')
            rlast = self.peek('rlast')
            rdatax = '%032x'%rdata
            # Keep only datawidth/4 hex digits (py2 integer division).
            msb = (self.datawidth/4)
            rdatax = rdatax[-msb:]
            logs.log_info('axi responce rid=%x rlast=%d rdata=%s %s'%(rid,rlast,rdatax,self.Path))
            if self.readAction:
                self.readAction(rid,rlast,rdatax)
        else:
            self.manageRready(0)
    def runQueue(self):
        # Dispatch queued commands to their channel queues; a 'this' command
        # (wait/finish) is executed immediately and stops dispatching.
        while self.Queue!=[]:
            Dst,Cmd = self.Queue.pop(0)
            if Dst=='aw':
                self.awQueue.append(Cmd)
            elif Dst=='ar':
                self.arQueue.append(Cmd)
            elif Dst=='w':
                self.wQueue.append(Cmd)
            else:
                self.runThis(Cmd)
                return
    def runThis(self,Cmd):
        # Execute a control command: 'wait N', 'finish', or 'force a=b ...'.
        # NOTE(review): values go through eval(); commands are trusted,
        # testbench-internal strings only.
        wrds = string.split(Cmd)
        if wrds==[]:
            pass
        elif (wrds[0]=='wait'):
            self.waiting = int(wrds[1])
        elif (wrds[0]=='finish'):
            logs.log_info('veri finish from axi Master')
            veri.finish()
            sys.exit()
        elif (wrds[0]=='force'):
            for wrd in wrds[1:]:
                ww = string.split(wrd,'=')
                Var = ww[0]
                Val = eval(ww[1])
                self.force(Var,Val)
    def runW(self):
        # Advance the write-data channel when the DUT asserts wready.
        if self.peek('wready')==0: return
        if self.wQueue==[]:
            self.force('wvalid',0)
            return
        Cmd = self.wQueue.pop(0)
        wrds = string.split(Cmd)
        if wrds==[]:
            pass
        elif (wrds[0]=='force'):
            self.forces(wrds[1:])
    def forces(self,wrds):
        # Apply a list of 'sig=value' assignments (values eval'ed).
        for wrd in wrds:
            ww = string.split(wrd,'=')
            Var = ww[0]
            Val = eval(ww[1])
            self.force(Var,Val)
    def runAw(self):
        # Advance the write-address channel when the DUT asserts awready.
        if self.peek('awready')==0: return
        if self.awQueue==[]:
            self.force('awvalid',0)
            return
        Cmd = self.awQueue.pop(0)
        wrds = string.split(Cmd)
        if wrds==[]:
            pass
        elif (wrds[0]=='force'):
            self.forces(wrds[1:])
    def runAr(self):
        # Advance the read-address channel when the DUT asserts arready.
        if self.peek('arready')==0: return
        if self.arQueue==[]:
            self.force('arvalid',0)
            return
        Cmd = self.arQueue.pop(0)
        wrds = string.split(Cmd)
        if wrds==[]:
            pass
        elif (wrds[0]=='force'):
            self.forces(wrds[1:])
| [
"greenblat@mac.com"
] | greenblat@mac.com |
6ce81949e3f2282a9f151be94fc7093a54818ac8 | e1afe8d3f5d776c2d80be502de8380cd786a3fb9 | /pointgate/__init__.py | d8b174c2f1e216b821605ea846e1d9708e473ad1 | [] | no_license | mbr/pointgate | 5566b1929d8f1a93a1a2183b78287173e41bf82b | 44527722e797222b35ba92a6538ae72c4f980fc4 | refs/heads/master | 2023-06-06T20:42:22.766233 | 2014-04-18T13:21:34 | 2014-04-18T13:21:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | from flask import Flask
from flask.ext.appconfig import HerokuConfig
from pointgate.dyndnsapi import dyndnsapi
def create_app(configfile=None):
    """Application factory: build and configure the pointgate Flask app.

    HerokuConfig loads settings from the environment (and the optional
    *configfile*); sensible PointDNS defaults are filled in afterwards.
    """
    app = Flask(__name__)
    HerokuConfig(app, configfile)
    app.config.setdefault('POINTDNS_API_URL', 'https://pointhq.com')
    app.config.setdefault('POINTDNS_RECORD_TTL', 60)
    app.register_blueprint(dyndnsapi)
    return app
| [
"git@marcbrinkmann.de"
] | git@marcbrinkmann.de |
63d99f5a3aff98c8202c811567e3db5295057bbb | 8126d1bc2afe0925a24fce039d0f02a3bd7acbae | /tests/test_grid.py | c48893de2ba516734e6e3f92675639e9c74bab2c | [
"BSD-2-Clause"
] | permissive | rafwiewiora/pytraj | 54fb6fe07a754f65b865dd161f64c7af15fc3926 | 91a019ea406081ccf0043170cc64c48b4a5ea04a | refs/heads/master | 2021-01-20T17:33:05.974254 | 2016-03-11T21:25:32 | 2016-03-11T21:25:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,992 | py | #!/usr/bin/env python
from __future__ import print_function
import unittest
import os
import numpy as np
import pytraj as pt
from pytraj.utils import eq, aa_eq
from pytraj.testing import cpptraj_test_dir
class TestGrid(unittest.TestCase):
    """Unit tests for the low-level pytraj.math.Grid container."""
    def test_0(self):
        from pytraj.math import Grid
        import numpy as np
        nx = ny = nz = 3
        g = Grid(nx, ny, nz)
        # A cubic grid exposes size == nx*ny*nz and its per-axis dims.
        assert g.size == nx**3
        assert g.nx == g.ny == g.nz == nx
        value = 1000.
        # __setitem__/__getitem__ and the _element accessor must agree.
        g[0, 0, 0] = value
        assert g[0, 0, 0] == value
        assert g._element(0, 0, 0) == value
class TestGridAction(unittest.TestCase):
    """Tests for grid-based cpptraj actions exposed through pytraj."""
    def test_action_grid(self):
        from pytraj.all_actions import calc_grid
        traj = pt.load_sample_data("tz2")[:]
        traj.autoimage()
        traj.rmsfit(mask=':1-13')
        # Smoke test: calc_grid accepts both a Trajectory and an iterator.
        d = calc_grid(traj, " 20 0.5 20 0.5 20 0.5 :WAT@O")
        d = calc_grid(traj(), " 20 0.5 20 0.5 20 0.5 :WAT@O", top=traj.top)
    def test_action_bounds(self):
        # create mutable trajectory
        traj = pt.load('data/tz2.ortho.nc', 'data/tz2.ortho.parm7')
        pt.autoimage(traj)
        pt.superpose(traj, ref=0, mask=':1-13&!@H=', mass=True)
        grid_data = pt._grid(traj, mask=':1-13', grid_spacing=[0.5, 0., 0.])
        # Reference result produced by running the equivalent cpptraj script.
        text = '''
        parm data/tz2.ortho.parm7
        trajin data/tz2.ortho.nc
        autoimage
        rms first :1-13&!@H= mass
        bounds :1-13 dx .5 name MyGrid
        '''
        state = pt.load_cpptraj_state(text)
        state.run()
        cpp_grid = state.data['MyGrid'].values
        aa_eq(cpp_grid, grid_data)
    def test_just_run_state(self):
        # Smoke test: non-orthorhombic grid with a boxed reference runs cleanly.
        txt = '''
        parm data/tz2.truncoct.parm7
        trajin data/tz2.truncoct.nc
        reference data/tz2.truncoct.nc [REF]
        autoimage triclinic
        grid nonortho.dx boxref [REF] 50 50 50 :WAT@O pdb output/test.pdb
        '''
        state = pt.load_cpptraj_state(txt)
        state.run()
if __name__ == "__main__":
unittest.main()
| [
"hainm.comp@gmail.com"
] | hainm.comp@gmail.com |
3a73b821bf69bc409fc73a51db492275552b09a3 | 1581f1d66d6835b2c271295e3251c2dde239fec8 | /expense/admin.py | 569aba91027a1072734efcf4d5e35e26be0b2b1e | [] | no_license | abinash-kumar/pythod | 527659e3bdd161f9abcaaa9182dfe58044b3ff66 | 1469dc0cd9d6d72b2fe2e69f99542e470bea807b | refs/heads/master | 2023-01-30T02:54:10.729606 | 2020-02-24T07:18:51 | 2020-02-24T07:18:51 | 242,670,715 | 0 | 0 | null | 2023-01-25T13:57:52 | 2020-02-24T07:16:02 | Python | UTF-8 | Python | false | false | 902 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import Channel
from .models import ExpenceDetails
from .models import ABDoc
from django.contrib.auth.models import User
admin.site.register(Channel)
class ExpenceDetailsView(admin.ModelAdmin):
    """Admin for ExpenceDetails: stamps the editing user on every save."""
    list_display = ["remark", "channel", "expense_date", "amount", "expense_by", "created_date"]
    # updated_by is set automatically in save_model, never hand-edited.
    readonly_fields = ('updated_by',)
    model = ExpenceDetails
    filter_horizontal = ['bill']
    search_fields = ["expense_by", "remark", "channel"]
    def save_model(self, request, obj, form, change):
        # Record which admin user last touched this expense.
        obj.updated_by = User.objects.get(id=request.user.id)
        obj.save()
admin.site.register(ExpenceDetails, ExpenceDetailsView)
class ABDocView(admin.ModelAdmin):
    """Admin for uploaded ABDoc documents."""
    list_display = ["title", "file"]
    search_fields = ["title", "file"]
admin.site.register(ABDoc, ABDocView)
| [
"abinashlv@AbinashSymboMac.local"
] | abinashlv@AbinashSymboMac.local |
6101ffbcf6bdcad6190dbf061d007a7f56793e4a | bfcd8f1f6ac8590df321f23a422eca0370a25b8f | /myenv/lib/python3.7/site-packages/layoutmargin/_margin_layout.py | 3c7b7f4b634a6c45e59c56c7b890314835cd6c0d | [] | no_license | Stephen-Tipa-Augustine/KMC_ventilator | 0567fa0b72d41fb0de11cd72c62567bed973d9f5 | 456e88ae4fff3984d5456517ba8787f9d5762745 | refs/heads/master | 2022-12-26T08:00:41.102890 | 2020-09-07T11:00:13 | 2020-09-07T11:00:13 | 293,500,282 | 0 | 1 | null | 2022-12-19T04:33:28 | 2020-09-07T10:49:08 | Python | UTF-8 | Python | false | false | 5,591 | py | from typing import NamedTuple
from kivy.properties import ObjectProperty
class AddMargin:
    """Mixin giving a Kivy widget a `margin` property.

    `margin` is (left, top, right, bottom); MarginLayout reads it and the
    `_last_*` bookkeeping attributes when positioning the widget.
    """
    margin = ObjectProperty()
    # Last applied geometry/margins; None means "never laid out yet".
    _last_X = None
    _last_Y = None
    _last_Width = None
    _last_Height = None
    _last_MarginX = None
    _last_MarginY = None
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.margin = (0, 0, 0, 0)
class _Sides(NamedTuple):
    """Names of the pos_hint anchors along one axis."""
    primary: str    # the axis origin side (left / bottom)
    center: str
    inverted: str   # the opposite side (right / top)
_X_SIDES = _Sides("left", "center_x", "right")
_Y_SIDES = _Sides("bottom", "center_y", "top")
class MarginLayout:
    """Layout mixin that applies AddMargin children's margins after layout."""
    def add_widget(self, widget, index=0):
        if isinstance(widget, AddMargin):
            # Re-apply margins whenever the child's margin property changes.
            widget.fbind("margin", self._apply_Margins)
        return super().add_widget(widget, index)
    def remove_widget(self, widget):
        if isinstance(widget, AddMargin):
            widget.funbind("margin", self._apply_Margins)
        return super().remove_widget(widget)
    def do_layout(self, *args):
        super().do_layout(*args)
        for child in [x for x in self.children if isinstance(x, AddMargin)]:
            self._apply_Margins(child, child.margin)
        # Cancel the re-layout scheduled by the geometry writes above,
        # otherwise applying margins would re-trigger layout indefinitely.
        self._trigger_layout.cancel()
    def _apply_Margins(self, widget, margins):
        # margins is (left, top, right, bottom); numbers are pixels, strings
        # like "10%" are relative to the widget's own width/height.
        def get_MarginValue(i, value):
            # Resolve one margin entry; only "%"-strings are accepted as text.
            if isinstance(value, str):
                if "%" in value:
                    maxSizes = [
                        widget.width, widget.height,
                        widget.width, widget.height
                    ]
                    percentage = float(value.replace("%", "").strip()) / 100
                    value = maxSizes[i] * percentage
                else:
                    raise ValueError(
                        f"'{widget.__class__.__name__}' invalid margin value."
                    )
            return value
        def get_MarginValues(margin):
            # Lazily resolve all four entries.
            return (get_MarginValue(i, x) for i, x in enumerate(margin))
        def get_Initial_Position(margin, position, size):
            # First-time placement: inset by the near margin, shrink by both.
            position += margin[0]
            size -= sum(margin)
            return position, size
        def update_Widget(key, value):
            # Write the new geometry and remember it in the _last_* mirror.
            if key == "_last_X":
                widget.x = widget._last_X = value
            elif key == "_last_Y":
                widget.y = widget._last_Y = value
            elif key == "_last_Width":
                widget.width = widget._last_Width = value
            elif key == "_last_Height":
                widget.height = widget._last_Height = value
            elif key == "_last_MarginX":
                widget._last_MarginX = value
            elif key == "_last_MarginY":
                widget._last_MarginY = value
        def update_SizeHint_Widget(
                margin, position, lastPosition_Key, size, lastSize_Key):
            # Size-hinted widgets are re-inset from scratch on every pass.
            position, size = get_Initial_Position(margin, position, size)
            update_Widget(lastPosition_Key, position)
            update_Widget(lastSize_Key, size)
        def update_Sized_Widget(
            sides, margin, lastMargin, lastMargin_Key,
            position, position_Hint, lastPosition, lastPosition_Key,
            size, size_Hint, lastSize, lastSize_Key,
        ):
            # Fixed-size widgets: only compensate for margin/size deltas so
            # repeated passes don't accumulate offsets.
            if lastSize is None:
                position, size = get_Initial_Position(margin, position, size)
            else:
                if(margin != lastMargin) and (position_Hint == sides.primary):
                    difference = (lastMargin[0] - margin[0])
                    position -= difference
                if(size != lastSize) and (position_Hint == sides.inverted):
                    difference = size - lastSize
                    position -= (difference + (margin[1] - difference))
                elif(position_Hint == sides.inverted):
                    position -= margin[1]
            update_Widget(lastPosition_Key, position)
            update_Widget(lastSize_Key, size)
            update_Widget(lastMargin_Key, margin)
        def apply_Margins(
            sides, margin, lastMargin, lastMargin_Key,
            position, position_Hint, lastPosition, lastPosition_Key,
            size, size_Hint, lastSize, lastSize_Key,
        ):
            # Dispatch on whether this axis is size-hinted.
            if(size_Hint):
                update_SizeHint_Widget(
                    margin, position, lastPosition_Key, size, lastSize_Key)
            else:
                update_Sized_Widget(
                    sides, margin, lastMargin, lastMargin_Key,
                    position, position_Hint, lastPosition, lastPosition_Key,
                    size, size_Hint, lastSize, lastSize_Key,
                )
        left, top, right, bottom = get_MarginValues(margins)
        x_Margin, y_Margin = ((left, right), (bottom, top))
        # NOTE(review): pos_hint is a dict in Kivy; unpacking it yields its
        # keys, so this presumably relies on a two-key hint dict -- confirm.
        x_Hint, y_Hint = widget.pos_hint if(widget.pos_hint) else(None, None)
        w_Hint, h_Hint = widget.size_hint
        apply_Margins(
            sides=_X_SIDES, margin=x_Margin,
            lastMargin=widget._last_MarginX, lastMargin_Key="_last_MarginX",
            position=widget.x, position_Hint=x_Hint,
            lastPosition=widget._last_X, lastPosition_Key="_last_X",
            size=widget.width, size_Hint=w_Hint,
            lastSize=widget._last_Width, lastSize_Key="_last_Width",
        )
        apply_Margins(
            sides=_Y_SIDES, margin=y_Margin,
            lastMargin=widget._last_MarginY, lastMargin_Key="_last_MarginY",
            position=widget.y, position_Hint=y_Hint,
            lastPosition=widget._last_Y, lastPosition_Key="_last_Y",
            size=widget.height, size_Hint=h_Hint,
            lastSize=widget._last_Height, lastSize_Key="_last_Height",
        )
| [
"tipastep5@gmail.com"
] | tipastep5@gmail.com |
a409824bbeb2bee142944ddfef80720e93e45871 | ec0b8bfe19b03e9c3bb13d9cfa9bd328fb9ca3f1 | /res/packages/scripts/scripts/client/gui/Scaleform/daapi/view/lobby/components/CalendarComponent.py | 468107eb74fdcf3dd1a24f4654b1b61c2cb03022 | [] | no_license | webiumsk/WOT-0.9.20.0 | de3d7441c5d442f085c47a89fa58a83f1cd783f2 | 811cb4e1bca271372a1d837a268b6e0e915368bc | refs/heads/master | 2021-01-20T22:11:45.505844 | 2017-08-29T20:11:38 | 2017-08-29T20:11:38 | 101,803,045 | 0 | 1 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,193 | py | # 2017.08.29 21:46:49 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/components/CalendarComponent.py
from debug_utils import LOG_DEBUG
import BigWorld
from Event import Event, EventManager
from gui.Scaleform.daapi.view.meta.CalendarMeta import CalendarMeta
class CalendarComponent(CalendarMeta):
    """Lobby calendar widget: relays Flash (DAAPI) month/date events to
    Python-side Event objects."""
    def __init__(self):
        super(CalendarComponent, self).__init__()
        # One EventManager owns both events so _dispose can clear them at once.
        self.__em = EventManager()
        self.onMonthChangedEvent = Event(self.__em)
        self.onDateSelectedEvent = Event(self.__em)
    def onMonthChanged(self, timestamp):
        # Called from Flash when the displayed month changes.
        self.onMonthChangedEvent(timestamp)
    def onDateSelected(self, timestamp):
        # Called from Flash when the user picks a date.
        self.onDateSelectedEvent(timestamp)
    def formatYMHeader(self, rawDate):
        # Localized "year month" header string for the calendar title.
        return BigWorld.wg_getYMDateFormat(rawDate)
    def _dispose(self):
        # Drop all subscribers before tearing down the view.
        self.__em.clear()
        super(CalendarComponent, self)._dispose()
# okay decompyling c:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\daapi\view\lobby\components\CalendarComponent.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.08.29 21:46:49 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
9461eef63e4da2652e875cd5b3f59a30f042a584 | b554dbc85ca470c4598d412ff34f516a8ab324dc | /azure-mgmt-compute/azure/mgmt/compute/v2018_10_01/models/virtual_machine_scale_set_py3.py | 1db294839a1ba61549769cd0b97f6bbb0b5bbcc8 | [
"MIT"
] | permissive | bgsky/azure-sdk-for-python | 62bd7b12b0b407690707cbaf6ad4322ed58d1d3b | ec18d0b25be10fddbde416b901b905dfb0896430 | refs/heads/master | 2021-04-12T01:59:34.293330 | 2018-11-27T21:57:19 | 2018-11-27T21:57:19 | 125,912,205 | 0 | 0 | MIT | 2018-03-19T19:50:37 | 2018-03-19T19:50:37 | null | UTF-8 | Python | false | false | 5,519 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_py3 import Resource
class VirtualMachineScaleSet(Resource):
    """Describes a Virtual Machine Scale Set.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id
    :vartype id: str
    :ivar name: Resource name
    :vartype name: str
    :ivar type: Resource type
    :vartype type: str
    :param location: Required. Resource location
    :type location: str
    :param tags: Resource tags
    :type tags: dict[str, str]
    :param sku: The virtual machine scale set sku.
    :type sku: ~azure.mgmt.compute.v2018_10_01.models.Sku
    :param plan: Specifies information about the marketplace image used to
     create the virtual machine. This element is only used for marketplace
     images. Before you can use a marketplace image from an API, you must
     enable the image for programmatic use.  In the Azure portal, find the
     marketplace image that you want to use and then click **Want to deploy
     programmatically, Get Started ->**. Enter any required information and
     then click **Save**.
    :type plan: ~azure.mgmt.compute.v2018_10_01.models.Plan
    :param upgrade_policy: The upgrade policy.
    :type upgrade_policy: ~azure.mgmt.compute.v2018_10_01.models.UpgradePolicy
    :param virtual_machine_profile: The virtual machine profile.
    :type virtual_machine_profile:
     ~azure.mgmt.compute.v2018_10_01.models.VirtualMachineScaleSetVMProfile
    :ivar provisioning_state: The provisioning state, which only appears in
     the response.
    :vartype provisioning_state: str
    :param overprovision: Specifies whether the Virtual Machine Scale Set
     should be overprovisioned.
    :type overprovision: bool
    :ivar unique_id: Specifies the ID which uniquely identifies a Virtual
     Machine Scale Set.
    :vartype unique_id: str
    :param single_placement_group: When true this limits the scale set to a
     single placement group, of max size 100 virtual machines.
    :type single_placement_group: bool
    :param zone_balance: Whether to force stictly even Virtual Machine
     distribution cross x-zones in case there is zone outage.
    :type zone_balance: bool
    :param platform_fault_domain_count: Fault Domain count for each placement
     group.
    :type platform_fault_domain_count: int
    :param identity: The identity of the virtual machine scale set, if
     configured.
    :type identity:
     ~azure.mgmt.compute.v2018_10_01.models.VirtualMachineScaleSetIdentity
    :param zones: The virtual machine scale set zones.
    :type zones: list[str]
    """

    # Server-populated (readonly) and required fields, enforced by msrest.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'provisioning_state': {'readonly': True},
        'unique_id': {'readonly': True},
    }

    # Maps Python attribute names to their REST wire names/types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'plan': {'key': 'plan', 'type': 'Plan'},
        'upgrade_policy': {'key': 'properties.upgradePolicy', 'type': 'UpgradePolicy'},
        'virtual_machine_profile': {'key': 'properties.virtualMachineProfile', 'type': 'VirtualMachineScaleSetVMProfile'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'overprovision': {'key': 'properties.overprovision', 'type': 'bool'},
        'unique_id': {'key': 'properties.uniqueId', 'type': 'str'},
        'single_placement_group': {'key': 'properties.singlePlacementGroup', 'type': 'bool'},
        'zone_balance': {'key': 'properties.zoneBalance', 'type': 'bool'},
        'platform_fault_domain_count': {'key': 'properties.platformFaultDomainCount', 'type': 'int'},
        'identity': {'key': 'identity', 'type': 'VirtualMachineScaleSetIdentity'},
        'zones': {'key': 'zones', 'type': '[str]'},
    }

    def __init__(self, *, location: str, tags=None, sku=None, plan=None, upgrade_policy=None, virtual_machine_profile=None, overprovision: bool=None, single_placement_group: bool=None, zone_balance: bool=None, platform_fault_domain_count: int=None, identity=None, zones=None, **kwargs) -> None:
        super(VirtualMachineScaleSet, self).__init__(location=location, tags=tags, **kwargs)
        self.sku = sku
        self.plan = plan
        self.upgrade_policy = upgrade_policy
        self.virtual_machine_profile = virtual_machine_profile
        # Readonly fields start as None and are filled in by the service.
        self.provisioning_state = None
        self.overprovision = overprovision
        self.unique_id = None
        self.single_placement_group = single_placement_group
        self.zone_balance = zone_balance
        self.platform_fault_domain_count = platform_fault_domain_count
        self.identity = identity
        self.zones = zones
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
1d3fe4cf1c880b8a628afd2b1e092450fb946243 | 7950c4faf15ec1dc217391d839ddc21efd174ede | /problems/0236.0_Lowest_Common_Ancestor_of_a_Binary_Tree.py | dd4df406dc9baf8ebc67905db08cce531e2aaf01 | [] | no_license | lixiang2017/leetcode | f462ecd269c7157aa4f5854f8c1da97ca5375e39 | f93380721b8383817fe2b0d728deca1321c9ef45 | refs/heads/master | 2023-08-25T02:56:58.918792 | 2023-08-22T16:43:36 | 2023-08-22T16:43:36 | 153,090,613 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | '''
DFS
Runtime: 114 ms, faster than 46.58% of Python3 online submissions for Lowest Common Ancestor of a Binary Tree.
Memory Usage: 26.3 MB, less than 30.85% of Python3 online submissions for Lowest Common Ancestor of a Binary Tree.
'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
        """Bottom-up DFS for the lowest common ancestor of p and q.

        Each subtree reports upward either p, q, the already-found LCA, or
        None.  The first node whose left and right subtrees both report a
        hit is the split point, i.e. the LCA.
        """
        # Base case: an empty subtree reports nothing; hitting p or q
        # reports that node itself.
        if not root or root == p or root == q:
            return root
        hit_left = self.lowestCommonAncestor(root.left, p, q)
        hit_right = self.lowestCommonAncestor(root.right, p, q)
        if hit_left and hit_right:
            # p and q live in different subtrees, so root separates them.
            return root
        # Otherwise propagate whichever side (if any) found something.
        return hit_left if hit_left else hit_right
'''
bottom-up
Your input
[3,5,1,6,2,0,8,null,null,7,4]
7
8
stdout
l: 7 root: 2
r: 7 root: 5
r: 8 root: 1
l: 7 root: 3
r: 8 root: 3
Output
3
Expected
3
'''
| [
"838255715@qq.com"
] | 838255715@qq.com |
6376fb9392d11160868a6f7f6fcd88553812000f | c1a7e00efbf7ebb2a60063c62eb2642631a49fd8 | /backend/src/passwordreset/admin.py | f7e0157a57cddc8951fc543b9e6532b644466ad1 | [] | no_license | rajesh241/GandhiCollectedWorks | a8557c3367a69be9e13606cef0bb74d11e8bc747 | efb68310a1362791e4cda09e7bb54ae91e407db3 | refs/heads/master | 2023-01-21T10:19:19.465420 | 2021-02-21T11:10:33 | 2021-02-21T11:10:33 | 194,344,659 | 2 | 0 | null | 2023-01-07T15:00:15 | 2019-06-28T23:40:41 | HTML | UTF-8 | Python | false | false | 299 | py | """ contains basic admin views for MultiToken """
from django.contrib import admin
from passwordreset.models import ResetPasswordToken
@admin.register(ResetPasswordToken)
class ResetPasswordTokenAdmin(admin.ModelAdmin):
    """Django admin configuration for ResetPasswordToken objects."""

    # Columns shown in the admin changelist for password-reset tokens.
    list_display = ('user', 'key', 'created_at', 'ip_address', 'user_agent')
| [
"togoli@gmail.com"
] | togoli@gmail.com |
01b29bd4c0ab79ad15650e94fa06bb276c2daa04 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5752104073297920_0/Python/festony/2014_r1A_C.py | 6c2ad6f11d19adb3199ff6afe0482c43aab22da6 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,432 | py | '''
CodeJam Practice
Created on 2012-12-20
@author: festony
'''
from cj_lib import *
from properties import *
import math
import fractions
#curr_file_name = 'C-large'
curr_file_name = 'C-small-attempt1'
#curr_file_name = 'test'
# map(int, input_lines.pop(0).split(' '))
def input_dividing_func(input_lines):
    """Split raw input lines into per-case ``[N, permutation]`` pairs.

    Consumes ``input_lines`` in place: the first line holds the number
    of cases, then each case is two lines (N, then N space-separated
    integers, left as a ``map`` object exactly like the original).
    """
    case_count = int(input_lines.pop(0))
    cases = []
    while len(cases) < case_count:
        n = int(input_lines.pop(0))
        perm = map(int, input_lines.pop(0).split(' '))
        cases.append([n, perm])
    return cases
def stay_same_p(N):
    """Return, as an exact Fraction, the sum

        sum_{i=0}^{N-1} (N-1)! / (N-1-i)! * (N-i-1)^(N-i-1) / N^N

    The original used ``int(math.pow(...))`` for the powers, which loses
    precision and raises OverflowError for moderately large N (e.g. the
    commented-out ``math.pow(1000, 1000)`` below); integer ``**`` keeps
    the arithmetic exact while producing identical results for small N.
    """
    P = fractions.Fraction(0)
    for i in range(N):
        term = fractions.Fraction(math.factorial(N - 1),
                                  math.factorial(N - 1 - i))
        # Note: 0 ** 0 == 1 in Python, matching the old math.pow(0, 0).
        term *= (N - i - 1) ** (N - i - 1)
        term /= N ** N
        P += term
    return P
def process_func(func_input):
    """Classify one case as 'BAD' or 'GOOD'.

    ``func_input`` is ``[N, permutation]``; the scan counts fixed points
    (positions where ``perm[i] == i``) and answers 'BAD' as soon as the
    running count reaches ``N/2 - 1`` (true division, as before).
    """
    size, perm = func_input
    threshold = size / 2 - 1  # loop-invariant, hoisted
    fixed_points = 0
    for idx in range(size):
        if idx == perm[idx]:
            fixed_points += 1
        if fixed_points >= threshold:
            return 'BAD'
    return 'GOOD'
run_proc(process_func, input_dividing_func, curr_working_folder, curr_file_name)
#print (stay_same_p(4))
#print float(stay_same_p(4))
##print math.pow(1000,1000)
#def gen_good_perm(N):
# if N == 1:
# return [[1]]
# ps = []
# sps = gen_good_perm(N-1)
# for i in range(N):
# p = []
# for sp in sps:
# p = sp[:]
# p.insert(i, N)
# ps.append(p)
# return ps
#
#print gen_good_perm(4)
#
#def gen_bad_perm(N):
# bp = range(1, N+1)
# ps = []
# for i in range(N):
# p = bp[:]
# if i > 0:
# p[0], p[i] = p[i], p[0]
# ps.append(p)
# tps = []
# for i in range(1, N):
# for b in ps:
# for i in range(N):
# p = b[:]
# if i > 0:
# p[0], p[i] = p[i], p[0]
# tps.append(p)
# ps = tps
# tps = []
# return ps
#
#x = gen_bad_perm(4)
#y = []
#for xx in x:
# y.append(tuple(xx))
#print y
#
#d = dict()
#for yy in y:
# if d.has_key(yy):
# d[yy] += 1
# else:
# d[yy] = 1
#z = []
#for k in d.keys():
# #print k, d[k]
# z.append([list(k), d[k]])
#z.sort(cmp=None, key=lambda x:x[1], reverse=False)
#print z
#for zz in z:
# print zz
#print sum(map(lambda x:x[1], z))
| [
"eewestman@gmail.com"
] | eewestman@gmail.com |
009e681b99ae5412a43a6b8aaa5ec64851a1afb1 | 7030259044f2afd4164f2f0507bdb9f091708067 | /project/settings/dev.py | f5a58a8568aa08a174cd82beca88bf9f92851863 | [] | no_license | m2candre/django-vue-template | f64fa5c28e08abeab7f10dcb2d3a93132d5a969a | 79517dbcaf740f07bad6fc19a18ba36e1e9fdd24 | refs/heads/master | 2020-03-25T23:35:29.759950 | 2018-08-10T06:19:21 | 2018-08-10T06:19:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,016 | py | """
Django settings for project project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
SETTINGS_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(SETTINGS_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- acceptable for a
# dev settings module, but production must load it from the environment.
SECRET_KEY = 'z-+$tyr)mif-dsjx)vd#pkay86u_((ut^8(_0)283#bus5k&he'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'whitenoise.runserver_nostatic', # < Per Whitenoise, to disable built in
    'django.contrib.staticfiles',
    'rest_framework',
    'project.api',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Add dist to
        'DIRS': ['dist'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
# When Vue Builds, path will be `/static/css/...` so we will have Django Serve
# In Production, it's recommended use an alternative approach such as:
# http://whitenoise.evans.io/en/stable/django.html?highlight=django
STATIC_URL = '/static/'
# Serve `dist` as is, built by webpack
STATIC_ROOT = os.path.join(BASE_DIR, 'dist', 'static')
STATICFILES_DIRS = []
##########
# STATIC #
##########
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Insert Whitenoise Middleware at top but below Security Middleware
# MIDDLEWARE.insert(1, 'whitenoise.middleware.WhiteNoiseMiddleware',)
# http://whitenoise.evans.io/en/stable/django.html#make-sure-staticfiles-is-configured-correctly
| [
"gtalarico@gmail.com"
] | gtalarico@gmail.com |
8d8aff77d83973f585f201fd0852388ae2b83750 | 5077fc5d82caa3b3ed5ce0e062bfe75cd4037ebc | /杂项/豆瓣2.0.py | ca3069d5209891cdea35ff7b96a964dff22a719e | [] | no_license | asswecanfat/git_place | ee10e1057d8307d3c72f57291b5bcb6d0579017e | 244ff0de11ffbe1aa9f20308e43af39486507f6f | refs/heads/master | 2021-07-18T23:06:14.324164 | 2020-09-02T12:15:27 | 2020-09-02T12:15:27 | 210,833,462 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,177 | py | import requests
from bs4 import BeautifulSoup
import re
class Douban_login:
def login(self):
def post_data(data):#发送data并判断是否登陆成功
reponse = s.post(url, data = data, headers = headers)
if str(reponse.url) == final_url:
print('登陆成功!')
else:
print('登陆失败,请再次重试!!')
soup = BeautifulSoup(reponse.text, 'html.parser')
soup1 = soup.find_all('span')
user_name = soup1[0].string.replace('的帐号', '')
print('登陆的账号为:' + user_name)
def get_txt(url_data_text):#获取网页源文件
with open('C:\\Users\\10248\\Desktop\\url_data.txt', 'w', encoding = 'utf-8') as f:
f.write(url_data_text)
def get_pit(pit_url):#获取验证码
pit_data = s.get(pit_url)
with open('C:\\Users\\10248\\Desktop\\yzm.jpg', 'wb') as f:
f.write(pit_data.content)
def get_url_data(url):#获取页面数据
url_data = s.get(url, headers = headers)
return url_data.text
def deal_url_data(url_data_text):#处理网页并找出和下载验证码
global yzm
soup = BeautifulSoup(url_data_text, 'html.parser')
soup1 = soup.find_all('img', id = 'captcha_image')
if int(len(soup1)) == 0:
yzm = 0
print('无需验证码')
else:
yzm = 1
pit_url = soup1[0].get('src')
get_pit(pit_url)
return pit_url
def get_data(pit_url):#处理captcha:id并放入data中
if yzm == 0:
data={
'source' : 'index_nav',
'form_email' : user_ac,
'form_password' : user_password,
'user_login' : '登录'
}
return data
else:
p = r'.+id=(.+)&size=s'
data_list = re.findall(p, pit_url)#列表
data = {
'source' : 'index_nav',
'form_email' : user_ac,
'form_password' : user_password,
'captcha-solution' : user_getin,
'captcha-id' : data_list[0],
'user_login' : '登录'
}
return data
url = 'https://www.douban.com/accounts/login'
final_url = 'https://www.douban.com/'
user_ac = str(input('请输入账号:'))
user_password = str(input('请输入密码:'))
s = requests.Session()
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',\
'Referer':'https://www.douban.com/'}
url_data_text = get_url_data(url)
pit_url = deal_url_data(url_data_text)
if yzm == 1:
user_getin = str(input('请输入验证码:'))
data = get_data(pit_url)
post_data(data)
| [
"1024847824@qq.com"
] | 1024847824@qq.com |
998b1ddc40d9341734d6d53772d4c1c0015de1a3 | 876de904572c611b8cbad21f50877cdc812f2946 | /Leetcode/103. 二叉树的锯齿形层次遍历.py | 77492d7c29f409cbabe69df893d7f7c2079e571c | [
"MIT"
] | permissive | QDylan/Learning- | 66a33de0e15f26672fb63c0b393866721def27ae | f09e0aa3de081883b4a7ebfe4d31b5f86f24b64f | refs/heads/master | 2023-02-08T02:34:26.616116 | 2020-12-25T05:02:32 | 2020-12-25T05:02:32 | 263,805,536 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,330 | py | # -*- coding: utf-8 -*-
"""
@Time : 2020/7/14 8:28
@Author : QDY
@FileName: 103. 二叉树的锯齿形层次遍历.py
给定一个二叉树,返回其节点值的锯齿形层次遍历。(即先从左往右,再从右往左进行下一层遍历,以此类推,层与层之间交替进行)。
例如:
给定二叉树 [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
返回锯齿形层次遍历如下:
[
[3],
[20,9],
[15,7]
]
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def zigzagLevelOrder(self, root):
        """Return node values level by level, alternating left-to-right and
        right-to-left (zigzag order).

        BFS over whole levels at a time; the original popped from the head
        of a list (``queue.pop(0)``, O(n) each) making every level O(n^2) --
        rebuilding the child list per level is linear.
        """
        if not root:
            return []
        res = []
        level = [root]
        reverse = False  # first level is emitted left-to-right
        while level:
            vals = [node.val for node in level]
            if reverse:
                vals.reverse()
            res.append(vals)
            reverse = not reverse
            # Children of the current level, in left-to-right order.
            level = [child for node in level
                     for child in (node.left, node.right) if child]
        return res
| [
"qdy960411@outlook.com"
] | qdy960411@outlook.com |
306a3bd19cae6afd4c07662217dfa4233cc55817 | 4e808ecca7a94a70a63c59b4c91a15cd61bada6e | /natuurpunt_imp_members/__init__.py | aa961a1fb03e95eeccdde06ef8b561394c9cab51 | [] | no_license | smart-solution/natuurpunt-crm | 79b98cfc8c69027dc82afa3779b65616141e6779 | 0bd247e78c01e79ec54b90c0d0bcaca38742f04d | refs/heads/master | 2021-05-22T05:43:42.913854 | 2020-12-02T08:06:05 | 2020-12-02T08:06:05 | 39,186,027 | 0 | 0 | null | 2020-12-02T08:06:06 | 2015-07-16T08:36:27 | Python | UTF-8 | Python | false | false | 228 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
#
##############################################################################
import natuurpunt_imp_members
| [
"fabian.semal@smartsolution.be"
] | fabian.semal@smartsolution.be |
221069477e0d8b653578b817b9402d37916c81ea | 2e5dd9e5c634c1c2163b96a3607f9806a9598d39 | /rastervision2/pytorch_learner/semantic_segmentation_learner.py | 38fec3f11bd120f09443b73f6423fae3662d50e7 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | jpolchlo/raster-vision | 8fc60c8bfa2d6319810c3414606400abd63ff633 | 8aa9601d168de2c6bbc00d7f5ba2b70f1d5f7f13 | refs/heads/master | 2022-10-29T07:39:15.551708 | 2020-04-28T19:20:42 | 2020-04-28T19:20:42 | 259,924,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,656 | py | import warnings
warnings.filterwarnings('ignore') # noqa
from os.path import join, isdir, basename
import logging
import glob
import numpy as np
import matplotlib
matplotlib.use('Agg') # noqa
import torch
from torch.utils.data import Dataset, ConcatDataset
import torch.nn.functional as F
from torchvision import models
from PIL import Image
from rastervision2.pytorch_learner.learner import Learner
from rastervision2.pytorch_learner.utils import (
compute_conf_mat_metrics, compute_conf_mat, color_to_triple)
log = logging.getLogger(__name__)
class SemanticSegmentationDataset(Dataset):
    """Dataset of (image, label-mask) pairs read from a chip directory.

    Expects PNG chips under ``<data_dir>/img`` and same-named label masks
    under ``<data_dir>/labels``.
    """
    def __init__(self, data_dir, transform=None):
        # transform: optional albumentations-style callable -- called with
        # image=/mask= kwargs and expected to return a dict (see __getitem__).
        self.data_dir = data_dir
        self.img_paths = glob.glob(join(data_dir, 'img', '*.png'))
        self.transform = transform
    def __getitem__(self, ind):
        """Return ``(x, y)``: CHW float image scaled to [0, 1] and long mask."""
        img_path = self.img_paths[ind]
        # Label mask shares the image's filename, in the labels/ sibling dir.
        label_path = join(self.data_dir, 'labels', basename(img_path))
        x = Image.open(img_path)
        y = Image.open(label_path)
        x = np.array(x)
        y = np.array(y)
        if self.transform is not None:
            out = self.transform(image=x, mask=y)
            x = out['image']
            y = out['mask']
        # HWC pixel values -> CHW float in [0, 1]; labels to int64 class ids.
        x = torch.tensor(x).permute(2, 0, 1).float() / 255.0
        y = torch.tensor(y).long()
        return (x, y)
    def __len__(self):
        return len(self.img_paths)
class SemanticSegmentationLearner(Learner):
    """Learner subclass wiring segmentation data, model, loss and metrics."""
    def build_model(self):
        """Build a DeepLabV3 model for the configured backbone/class count."""
        # TODO support FCN option
        model = models.segmentation.segmentation._segm_resnet(
            'deeplabv3',
            self.cfg.model.get_backbone_str(),
            len(self.cfg.data.class_names),
            False,
            pretrained_backbone=True)
        return model
    def get_datasets(self):
        """Return (train, valid, test) ConcatDatasets from unzipped data dirs.

        The valid/ directory doubles as the test split.  In overfit mode the
        training split uses the plain transform instead of the augmenting one.
        """
        cfg = self.cfg
        data_dirs = self.unzip_data()
        transform, aug_transform = self.get_data_transforms()
        train_ds, valid_ds, test_ds = [], [], []
        for data_dir in data_dirs:
            train_dir = join(data_dir, 'train')
            valid_dir = join(data_dir, 'valid')
            if isdir(train_dir):
                if cfg.overfit_mode:
                    train_ds.append(
                        SemanticSegmentationDataset(
                            train_dir, transform=transform))
                else:
                    train_ds.append(
                        SemanticSegmentationDataset(
                            train_dir, transform=aug_transform))
            if isdir(valid_dir):
                valid_ds.append(
                    SemanticSegmentationDataset(
                        valid_dir, transform=transform))
                test_ds.append(
                    SemanticSegmentationDataset(
                        valid_dir, transform=transform))
        train_ds, valid_ds, test_ds = \
            ConcatDataset(train_ds), ConcatDataset(valid_ds), ConcatDataset(test_ds)
        return train_ds, valid_ds, test_ds
    def train_step(self, batch, batch_ind):
        """Return the cross-entropy training loss for one batch."""
        x, y = batch
        out = self.post_forward(self.model(x))
        return {'train_loss': F.cross_entropy(out, y)}
    def validate_step(self, batch, batch_ind):
        """Return per-batch validation loss and confusion matrix."""
        x, y = batch
        out = self.post_forward(self.model(x))
        val_loss = F.cross_entropy(out, y)
        num_labels = len(self.cfg.data.class_names)
        # Flatten predictions and targets to compute a pixelwise conf matrix.
        y = y.view(-1)
        out = self.prob_to_pred(out).view(-1)
        conf_mat = compute_conf_mat(out, y, num_labels)
        return {'val_loss': val_loss, 'conf_mat': conf_mat}
    def validate_end(self, outputs, num_samples):
        """Aggregate per-batch outputs into epoch-level validation metrics."""
        conf_mat = sum([o['conf_mat'] for o in outputs])
        val_loss = torch.stack([o['val_loss']
                                for o in outputs]).sum() / num_samples
        conf_mat_metrics = compute_conf_mat_metrics(conf_mat,
                                                    self.cfg.data.class_names)
        metrics = {'val_loss': val_loss.item()}
        metrics.update(conf_mat_metrics)
        return metrics
    def post_forward(self, x):
        # torchvision segmentation models return a dict; take the 'out' head.
        return x['out']
    def prob_to_pred(self, x):
        # Class scores -> predicted class ids (argmax over the channel dim).
        return x.argmax(1)
    def plot_xyz(self, ax, x, y, z=None):
        """Plot image x with labels overlaid (z predictions if given, else y)."""
        x = x.permute(1, 2, 0)
        # Single-channel image: replicate to 3 channels for display.
        if x.shape[2] == 1:
            x = torch.cat([x for _ in range(3)], dim=2)
        ax.imshow(x)
        ax.axis('off')
        labels = z if z is not None else y
        colors = [color_to_triple(c) for c in self.cfg.data.class_colors]
        colors = [tuple([_c / 255 for _c in c]) for c in colors]
        cmap = matplotlib.colors.ListedColormap(colors)
        labels = labels.numpy()
        ax.imshow(labels, alpha=0.4, vmin=0, vmax=len(colors), cmap=cmap)
| [
"lewfish@gmail.com"
] | lewfish@gmail.com |
16e691f5d9996ec589acc9303e1a1b44345a49f2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03209/s412514584.py | 8a91d7202b2f680aa7cfc95494723a5132f74cba | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,472 | py | from collections import defaultdict, deque, Counter
from heapq import heappush, heappop, heapify
from itertools import permutations, accumulate, combinations, combinations_with_replacement
from math import sqrt, ceil, floor, factorial
from bisect import bisect_left, bisect_right, insort_left, insort_right
from copy import deepcopy
from operator import itemgetter
from functools import reduce
from fractions import gcd
import sys
# Competitive-programming stdin shims (note: `input` shadows the builtin).
def input(): return sys.stdin.readline().rstrip()
def I(): return int(input())                              # one int
def Is(): return map(int, input().split())                # ints (lazy map)
def LI(): return list(map(int, input().split()))          # list of ints
def TI(): return tuple(map(int, input().split()))         # tuple of ints
def IR(n): return [I() for _ in range(n)]                 # n lines of one int
def LIR(n): return [LI() for _ in range(n)]               # n lines of int lists
def TIR(n): return [TI() for _ in range(n)]               # n lines of int tuples
def S(): return input()                                   # one string
def Ss(): return input().split()                          # split string
def LS(): return list(input())                            # list of chars
def SR(n): return [S() for _ in range(n)]                 # n strings
def SsR(n): return [Ss() for _ in range(n)]               # n split strings
def LSR(n): return [LS() for _ in range(n)]               # n char lists
sys.setrecursionlimit(10**6)
MOD = 10**9+7
INF = 10**18
# ----------------------------------------------------------- #
# n, x: problem inputs (burger level and layer count, per burger() below).
n, x = Is()
# Blen[k]: total layers in a level-k burger; P[k]: total patties in it.
# Only indices 0..n-1 are filled (index n stays 0 and is never read).
Blen = [0]*(n+1)
P = [0]*(n+1)
Blen[0] = 1
P[0] = 1
for i in range(1, n):
    Blen[i] = Blen[i-1]*2 + 3
    P[i] = P[i-1]*2 + 1
def burger(i, y):
    # Number of patties in the bottom y layers of a level-i burger.
    # Relies on the module-level tables above: Blen[k] = total layers of a
    # level-k burger, P[k] = total patties in it.  The recurrences there
    # imply a level-i burger is: bun, level-(i-1), patty, level-(i-1), bun.
    if i == 0:
        # Level 0 is a single patty layer.
        return 0 if y <= 0 else 1
    if y <= Blen[i-1] + 1:
        # Still inside the bottom bun plus the lower sub-burger.
        return burger(i-1, y-1)
    else:
        # Past the middle patty: all lower-half patties plus the upper half.
        return P[i-1] + 1 + burger(i-1, y-Blen[i-1]-2)
print(burger(n, x))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
807db398dd1cd2e6f9f343554b314b6cf2edaaa3 | 21bbc3fbeb7a1616dbd6993b66dc44d9b30df3e7 | /PycharmProjects/samp_proj1/day_201118/assign20_4.py | ae6872a76db98685e7dd0950125cf38c9062df25 | [] | no_license | PoornimaDevii/python_training | 6124640608d8bf14289ae61b2b28e0db3b473b6f | 42b535590a6a244a91bd48b4451b74a29c1aaa80 | refs/heads/master | 2020-04-05T19:55:49.723114 | 2018-12-04T11:49:59 | 2018-12-04T11:49:59 | 157,157,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py |
# see expandtabs()
def gen_f(fname):
with open(fname) as file:
file_lines = file.readlines()
for each in file_lines:
if each == '\n':
a = each.strip()
yield a
j = 0
for i in gen_f('/home/cisco/PycharmProjects/samp_proj1/day_201118/para.txt'):
print(i)
j += 1
print("No of paras",j)
# lis = list(map(lambda x: x ,open('para.txt').readlines()))
# print(lis)
# lis1 =0
# i =0
# count=0
#
# while i<len(lis):
# count=0
# if i+2 < len ( lis ) and lis[i]!='\n' and lis[i+2]=='\n' :
# lis1+=1
# elif i+2 > len ( lis ) and lis[ i ]:
# lis1+=1
# i+=1
#
# print("The number of paras",lis1)
#
| [
"poornimadevi.rama@gmail.com"
] | poornimadevi.rama@gmail.com |
8495a3c6af74d2c3e28bea856fdf2fc0cbc300d6 | 50914176887f9f21a3489a9407195ba14831354c | /insert_delete_get_random.py | 7d4822ccfec6abae0608b190b89b3b2f2688234c | [] | no_license | nkukarl/leetcode | e8cfc2a31e64b68222ad7af631277f1f66d277bc | b1dbe37e8ca1c88714f91643085625ccced76e07 | refs/heads/master | 2021-01-10T05:42:04.022807 | 2018-02-24T03:55:24 | 2018-02-24T03:55:24 | 43,725,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,395 | py | import random
class RandomizedSet(object):
def __init__(self):
# Use self.vals to store all the elements
self.vals = []
# Use element:index as key:value pairs to store position info
self.pos = {}
def insert(self, val):
"""
Inserts a value to the set.
Returns true if the set did not already contain the specified element.
"""
if val not in self.pos:
self.vals.append(val)
self.pos[val] = len(self.vals) - 1
return True
return False
def remove(self, val):
"""
Removes a value from the set.
Returns true if the set contained the specified element.
"""
if val in self.pos:
# Find the index of val
index = self.pos[val]
# Update position info before swapping elements
self.pos[self.vals[-1]] = index
# Swap val with the last element of self.vals
self.vals[index], self.vals[-1] = self.vals[-1], self.vals[index]
# Remove the last element of self.vals
self.vals.pop()
# Remove position info from self.pos
del self.pos[val]
return True
return False
def get_random(self):
"""
Get a random element from the set.
"""
return random.choice(self.vals)
| [
"kai.wang.nankai@gmail.com"
] | kai.wang.nankai@gmail.com |
9ab7d64d30b2d113f53f28daec9949ecff89d635 | 60f5b2e7916bfb2686f5e0da2a5d9031a097ce05 | /test/hlt/pytest/python/com/huawei/iotplatform/client/dto/QueryDevicesInfoInDTO.py | 651c76e969fba7bc6266b6f6f356c18de0700aae | [
"BSD-3-Clause"
] | permissive | SuYai/LiteOS_Lab | 65f1a73026331f2a78567f07955e43ee82df265f | 3c9a579cd17d730e21814efc15076351a365e5f8 | refs/heads/iot_link | 2020-11-28T18:39:02.989946 | 2020-03-02T02:09:51 | 2020-03-02T02:09:51 | 229,894,455 | 1 | 0 | BSD-3-Clause | 2020-03-05T07:49:17 | 2019-12-24T07:32:24 | C | UTF-8 | Python | false | false | 498 | py | class QueryDevicesInfoInDTO(object):
    def __init__(self):
        # All fields start unset; callers populate them via the setters below.
        self.deviceId = None
        self.select = None
        self.appId = None
    # Java-style accessors (kept for backward compatibility with callers).
    def getDeviceId(self):
        return self.deviceId
    def setDeviceId(self, deviceId):
        self.deviceId = deviceId
    def getSelect(self):
        return self.select
    def setSelect(self, select):
        self.select = select
    def getAppId(self):
        return self.appId
    def setAppId(self, appId):
        self.appId = appId
| [
"xu_liqun@hoperun.com"
] | xu_liqun@hoperun.com |
1488329995fbcb36ea381a45fa7fe9f6cb83e31d | ffd5e689f88c49ab7af3554c22dc0c36301084fa | /count_the_smiley_faces.py | d6531cdadb8fdc7555b1e23aa72d792f0cf282e4 | [] | no_license | ellismckenzielee/codewars-python | 1710e6f0499047139479de386927c7dbd5f1cdf6 | af3f4b4534798a58115d0565730aae28ce87437e | refs/heads/master | 2023-08-09T13:38:40.964141 | 2023-08-01T14:45:22 | 2023-08-01T14:45:22 | 168,981,376 | 45 | 18 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | #Count the smiley faces! kata
#https://www.codewars.com/kata/583203e6eb35d7980400002a
def count_smileys(arr):
    """Count valid smiley faces in arr.

    A valid face is eyes (':' or ';'), an optional nose ('-' or '~'),
    and a mouth (')' or 'D').  Any other length is ignored.
    """
    eyes, noses, mouths = (':', ';'), ('-', '~'), (')', 'D')
    total = 0
    for face in arr:
        if len(face) == 2:
            valid = face[0] in eyes and face[1] in mouths
        elif len(face) == 3:
            valid = face[0] in eyes and face[1] in noses and face[2] in mouths
        else:
            continue
        total += valid  # bool counts as 0/1
    return total
"ellismckenzielee@gmail.com"
] | ellismckenzielee@gmail.com |
157b8295a211d8eb2ee9ad809fd14457e720b6fe | 6c151b3952a0c84765b78d82f2db64de92dcbb71 | /tests/test_save_multiple.py | 7a856cb6ed8fd387768c90d677fb147f60779b1d | [
"MIT"
] | permissive | SmirkCao/jupytext | 1e02daf54e24f600bef2ddd4f4d2b91f1c11aa79 | 3d8c5e8bffe35289dd46df04cd3529f0b8716383 | refs/heads/master | 2020-08-10T20:17:31.462179 | 2019-09-29T07:56:11 | 2019-09-29T07:56:11 | 214,413,393 | 1 | 0 | MIT | 2019-10-11T10:58:02 | 2019-10-11T10:58:02 | null | UTF-8 | Python | false | false | 3,510 | py | import os
import pytest
from nbformat.v4.nbbase import new_notebook
from nbformat.validator import NotebookValidationError
from tornado.web import HTTPError
import jupytext
from jupytext.contentsmanager import TextFileContentsManager
from jupytext.compare import compare_notebooks
from .utils import list_notebooks
@pytest.mark.parametrize('nb_file', list_notebooks(skip='66'))
def test_rmd_is_ok(nb_file, tmpdir):
    """Saving an .ipynb paired with Rmd also writes a matching .Rmd twin."""
    nb = jupytext.read(nb_file)
    tmp_ipynb = 'notebook.ipynb'
    tmp_rmd = 'notebook.Rmd'
    # Pair the notebook with the Rmd format through its jupytext metadata.
    nb.metadata.setdefault('jupytext', {})['formats'] = 'ipynb,Rmd'
    cm = TextFileContentsManager()
    cm.root_dir = str(tmpdir)
    cm.save(model=dict(type='notebook', content=nb), path=tmp_ipynb)
    # The paired Rmd file must exist and round-trip to the same notebook.
    nb2 = jupytext.read(str(tmpdir.join(tmp_rmd)))
    compare_notebooks(nb, nb2, 'Rmd')
@pytest.mark.parametrize('nb_file', list_notebooks('Rmd'))
def test_ipynb_is_ok(nb_file, tmpdir):
    """Saving an .Rmd with paired formats also creates the .ipynb twin."""
    notebook = jupytext.read(nb_file)
    manager = TextFileContentsManager()
    manager.root_dir = str(tmpdir)
    manager.default_jupytext_formats = 'ipynb,Rmd'
    manager.save(model=dict(type='notebook', content=notebook), path='notebook.Rmd')
    round_trip = jupytext.read(str(tmpdir.join('notebook.ipynb')))
    compare_notebooks(notebook, round_trip)
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_py', skip='66'))
def test_all_files_created(nb_file, tmpdir):
    """With three paired formats, saving creates both the .Rmd and .py twins."""
    nb = jupytext.read(nb_file)
    tmp_ipynb = 'notebook.ipynb'
    tmp_rmd = 'notebook.Rmd'
    tmp_py = 'notebook.py'
    nb.metadata['jupytext'] = {'formats': 'ipynb,Rmd,py'}
    cm = TextFileContentsManager()
    cm.root_dir = str(tmpdir)
    cm.save(model=dict(type='notebook', content=nb), path=tmp_ipynb)
    # Each paired representation must round-trip to the original notebook.
    nb2 = jupytext.read(str(tmpdir.join(tmp_py)))
    compare_notebooks(nb, nb2)
    nb3 = jupytext.read(str(tmpdir.join(tmp_rmd)))
    compare_notebooks(nb, nb3, 'Rmd')
def test_no_files_created_on_no_format(tmpdir):
    """With an empty default format list, saving writes no paired text files."""
    manager = TextFileContentsManager()
    manager.root_dir = str(tmpdir)
    manager.default_jupytext_formats = ''
    model = dict(type='notebook', content=new_notebook(nbformat=4, metadata=dict()))
    manager.save(model=model, path='notebook.ipynb')
    for twin in ('notebook.py', 'notebook.Rmd'):
        assert not os.path.isfile(str(tmpdir.join(twin)))
def test_raise_on_wrong_format(tmpdir):
    """Saving with an unknown paired format ('.doc') raises an HTTPError."""
    tmp_ipynb = str(tmpdir.join('notebook.ipynb'))
    cm = TextFileContentsManager()
    cm.root_dir = str(tmpdir)
    with pytest.raises(HTTPError):
        cm.save(path=tmp_ipynb, model=dict(
            type='notebook',
            content=new_notebook(nbformat=4, metadata=dict(jupytext_formats=['.doc']))))
def test_no_rmd_on_not_notebook(tmpdir):
    """Non-notebook models are rejected and no paired Rmd file is written."""
    tmp_ipynb = 'notebook.ipynb'
    tmp_rmd = 'notebook.Rmd'
    cm = TextFileContentsManager()
    cm.root_dir = str(tmpdir)
    cm.default_jupytext_formats = 'ipynb,Rmd'
    with pytest.raises(HTTPError):
        cm.save(model=dict(type='not notebook', content=new_notebook()), path=tmp_ipynb)
    assert not os.path.isfile(str(tmpdir.join(tmp_rmd)))
def test_no_rmd_on_not_v4(tmpdir):
    """Notebooks not in nbformat v4 fail validation and nothing is written."""
    tmp_ipynb = 'notebook.ipynb'
    tmp_rmd = 'notebook.Rmd'
    cm = TextFileContentsManager()
    cm.root_dir = str(tmpdir)
    cm.default_jupytext_formats = 'ipynb,Rmd'
    with pytest.raises(NotebookValidationError):
        cm.save(model=dict(type='notebook', content=new_notebook(nbformat=3)), path=tmp_rmd)
    assert not os.path.isfile(str(tmpdir.join(tmp_ipynb)))
| [
"marc.wouts@gmail.com"
] | marc.wouts@gmail.com |
079f18d1faa74d780d07da79b90885ac6a046b56 | 1d928c3f90d4a0a9a3919a804597aa0a4aab19a3 | /python/sentry/2016/4/apikey.py | 6815b0fec307ab6cbbaf52e803d90027c59c261b | [] | no_license | rosoareslv/SED99 | d8b2ff5811e7f0ffc59be066a5a0349a92cbb845 | a062c118f12b93172e31e8ca115ce3f871b64461 | refs/heads/main | 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null | UTF-8 | Python | false | false | 3,384 | py | """
sentry.models.apikey
~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import six
from bitfield import BitField
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from uuid import uuid4
from sentry.db.models import (
Model, BaseManager, BoundedPositiveIntegerField, FlexibleForeignKey,
sane_repr
)
# TODO(dcramer): pull in enum library
class ApiKeyStatus(object):
    """Integer status values stored in ApiKey.status."""
    ACTIVE = 0
    INACTIVE = 1
class ApiKey(Model):
    """Organization-scoped API key whose permissions are a bitfield of scopes.

    The key string is generated on first save if not supplied.  Note this is
    Python 2 era code (``__unicode__``, ``iteritems``); kept as-is.
    """
    organization = FlexibleForeignKey('sentry.Organization', related_name='key_set')
    label = models.CharField(max_length=64, blank=True, default='Default')
    key = models.CharField(max_length=32, unique=True)
    # One bit per scope; flag name and label are identical.
    scopes = BitField(flags=(
        ('project:read', 'project:read'),
        ('project:write', 'project:write'),
        ('project:delete', 'project:delete'),
        ('team:read', 'team:read'),
        ('team:write', 'team:write'),
        ('team:delete', 'team:delete'),
        ('event:read', 'event:read'),
        ('event:write', 'event:write'),
        ('event:delete', 'event:delete'),
        ('org:read', 'org:read'),
        ('org:write', 'org:write'),
        ('org:delete', 'org:delete'),
        ('member:read', 'member:read'),
        ('member:write', 'member:write'),
        ('member:delete', 'member:delete'),
    ))
    status = BoundedPositiveIntegerField(default=0, choices=(
        (ApiKeyStatus.ACTIVE, _('Active')),
        (ApiKeyStatus.INACTIVE, _('Inactive')),
    ), db_index=True)
    date_added = models.DateTimeField(default=timezone.now)
    # Newline-separated origin list; empty/None means no origin restriction
    # recorded (see get_allowed_origins).
    allowed_origins = models.TextField(blank=True, null=True)
    objects = BaseManager(cache_fields=(
        'key',
    ))
    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_apikey'
    __repr__ = sane_repr('organization_id', 'key')
    def __unicode__(self):
        return six.text_type(self.key)
    @classmethod
    def generate_api_key(cls):
        """Return a fresh random 32-char hex key."""
        return uuid4().hex
    @property
    def is_active(self):
        return self.status == ApiKeyStatus.ACTIVE
    def save(self, *args, **kwargs):
        # Lazily generate the key on first save.
        if not self.key:
            self.key = ApiKey.generate_api_key()
        super(ApiKey, self).save(*args, **kwargs)
    def get_allowed_origins(self):
        """Return the non-empty origin entries (a filter of the split lines)."""
        if not self.allowed_origins:
            return []
        return filter(bool, self.allowed_origins.split('\n'))
    def get_audit_log_data(self):
        """Return a dict of key attributes for audit-log entries."""
        return {
            'label': self.label,
            'key': self.key,
            'scopes': int(self.scopes),
            'status': self.status,
        }
    def get_scopes(self):
        # Names of all scope bits currently set.
        return [k for k, v in self.scopes.iteritems() if v]
    def has_scope(self, scope):
        return scope in self.scopes
class SystemKey(object):
    """Stand-in key with unrestricted access, exposed below as ROOT_KEY.

    Duck-types the parts of the ApiKey interface used by callers
    (is_active, organization, get_* and has_scope).
    """
    is_active = True
    organization = None
    def get_allowed_origins(self):
        # The system key is never origin-restricted.
        return []
    def get_audit_log_data(self):
        return {
            'label': 'System',
            'key': '<system>',
            'scopes': -1,
            'status': ApiKeyStatus.ACTIVE
        }
    def get_scopes(self):
        # All scopes!
        # NOTE(review): this returns the ApiKey.scopes class attribute (a
        # BitField), not a list of names like ApiKey.get_scopes() does --
        # confirm callers only rely on has_scope()/truth-testing here.
        return ApiKey.scopes
    def has_scope(self, scope):
        return True
ROOT_KEY = SystemKey()
| [
"rodrigosoaresilva@gmail.com"
] | rodrigosoaresilva@gmail.com |
07f0aff0ee04c49fe79e03dbf6a3da55ffd35cd2 | 7d096568677660790479d87c22b47aae838ef96b | /examples/RFSG/RFSGGettingStartedIQDevice.py | 784cee721d32221af1028ea131db6d96d4acdb74 | [
"MIT"
] | permissive | NISystemsEngineering/rfmx-pythonnet | 30adbdd5660b0d755957f35b68a4c2f60065800c | cd4f90a88a37ed043df880972cb55dfe18883bb7 | refs/heads/master | 2023-02-04T00:39:41.107043 | 2023-02-01T21:58:50 | 2023-02-01T21:58:50 | 191,603,578 | 7 | 5 | MIT | 2023-02-01T21:58:52 | 2019-06-12T16:02:32 | Python | UTF-8 | Python | false | false | 4,025 | py | import clr
import sys
import time
import os
import argparse
import csv
import numpy as np
# Argparse section
parser = argparse.ArgumentParser()
parser.add_argument('--resource', \
    help="enter instrument resource name")
parser.add_argument('--trigger', default="none", \
    help="enable trigger on requested terminal")
parser.add_argument('--iqrate', default=1e6, type=float, \
    help="enter IQ rate")
args = parser.parse_args()
# Location of assemblies
# NOTE(review): hard-coded 32-bit IVI install path and driver version 17.1.0 --
# confirm both match the target machine before running.
dotNetFWDirectory = r"C:\Program Files (x86)\IVI Foundation\IVI\Microsoft.NET\Framework32"
dotNetClassLibrary = r'v4.0.30319\NationalInstruments.ModularInstruments.NIRfsg 17.1.0'
assy_path = os.path.join(dotNetFWDirectory, dotNetClassLibrary)
print(".NET Library: " + dotNetClassLibrary)
sys.path.append(assy_path)
# clr comes from pythonnet (imported at the top of this file); AddReference
# loads the NI-RFSG .NET assemblies so they can be imported like modules below.
clr.AddReference("NationalInstruments.ModularInstruments.NIRfsg.Fx40")
clr.AddReference("NationalInstruments.Common")
# Import .NET drivers
from NationalInstruments.ModularInstruments.NIRfsg import *
from NationalInstruments import PrecisionTimeSpan
from NationalInstruments import ComplexDouble
# Instrument Settings
ResourceName = args.resource # Instrument alias in MAX
IQOutCarrierFrequency = 1000.0 # FPGA DSP Frequencyshift
IQOutPortLevel = 0.5
IQOutIQRate = args.iqrate
IQOutTriggerDestination = args.trigger
# Initialize Instrument
instrSession = NIRfsg(ResourceName, True, False)
# Configure Instrument
# Each property below is printed before and after assignment so the console
# log shows the driver default next to the configured value.
print("Reference Clock Source: " + instrSession.FrequencyReference.Source.ToString())
instrSession.FrequencyReference.Configure(RfsgFrequencyReferenceSource.PxiClock, 10e6)
print("Reference Clock Source: " + instrSession.FrequencyReference.Source.ToString())
print("IQ Out Output Port: " + str(instrSession.Arb.OutputPort))
instrSession.Arb.OutputPort = RfsgOutputPort.IQOut
print("IQ Out Output Port: " + str(instrSession.Arb.OutputPort))
print("IQ Out Carrier Frequency: " + str(instrSession.IQOutPort.CarrierFrequency))
instrSession.IQOutPort.CarrierFrequency = IQOutCarrierFrequency
print("IQ Out Carrier Frequency: " + str(instrSession.IQOutPort.CarrierFrequency))
print("IQ Out Port Level: " + str(instrSession.IQOutPort[""].Level))
instrSession.IQOutPort[""].Level = IQOutPortLevel
print("IQ Out Port Level: " + str(instrSession.IQOutPort[""].Level))
print("IQ Out Generation Mode: " + str(instrSession.Arb.GenerationMode))
instrSession.Arb.GenerationMode = RfsgWaveformGenerationMode.ArbitraryWaveform
print("IQ Out Generation Mode: " + str(instrSession.Arb.GenerationMode))
print("IQ Out Power Level Type: " + str(instrSession.RF.PowerLevelType))
instrSession.RF.PowerLevelType = RfsgRFPowerLevelType.PeakPower
print("IQ Out Power Level Type: " + str(instrSession.RF.PowerLevelType))
print("IQ Out IQ Rate: " + str(instrSession.Arb.IQRate))
instrSession.Arb.IQRate = IQOutIQRate
print("IQ Out IQ Rate: " + str(instrSession.Arb.IQRate))
print("IQ Out isWaveformRepeatCountFinite: " + str(instrSession.Arb.IsWaveformRepeatCountFinite))
instrSession.Arb.IsWaveformRepeatCountFinite = False
print("IQ Out isWaveformRepeatCountFinite: " + str(instrSession.Arb.IsWaveformRepeatCountFinite))
print("IQ Out WaveformRepeatCount: " + str(instrSession.Arb.WaveformRepeatCount))
instrSession.Arb.WaveformRepeatCount = 1
print("IQ Out WaveformRepeatCount: " + str(instrSession.Arb.WaveformRepeatCount))
# Write DC values to I
# 1000-sample constant waveform: I = 1.0 (DC level), Q = 0.0.
iData = np.ones(1000)
qData = np.zeros(1000)
instrSession.Arb.WriteWaveform("wfm0", iData, qData)
# Export the start trigger only when the user requested a terminal (the
# argparse default is the literal string "none").
# Bug fix: the original used `is not "none"`, an identity comparison with a
# string literal that only works by CPython interning accident (and raises
# SyntaxWarning on Python 3.8+); string equality is the correct test.
if args.trigger != "none":
    print("IQ Out Export Start Trigger: " +
          str(instrSession.Triggers.StartTrigger.ExportedOutputTerminal))
    instrSession.Triggers.StartTrigger.ExportedOutputTerminal = \
        RfsgStartTriggerExportedOutputTerminal.FromString(IQOutTriggerDestination)
    print("IQ Out Export Start Trigger: " +
          str(instrSession.Triggers.StartTrigger.ExportedOutputTerminal))
# Start Generation
instrSession.Initiate()
# Wait for user to stop script
input("Press Enter to continue...")
# Abort Generation
instrSession.Abort()
# Close Instrument
instrSession.Close()
| [
"sean.moore@ni.com"
] | sean.moore@ni.com |
6fc1c089abc9dba76e6b8a4df05edc5f6e2817f3 | a12bd9ee16dd864756829f2ff98f9e7ca59e322a | /07. raspberrypi/python/objectdetection-ex/tf/examples/obj-detect-api-ex/ex02.py | aa6248d3e51ee81ca5b24c2d31758f7650e2a698 | [] | no_license | hongjy127/TIL | 0bda9250f850a21dc27597a8b6288cf7ecb9e470 | a1760aba50bb3b77ab6576f5b5dcb16f0d9f5c5a | refs/heads/master | 2023-06-17T17:04:21.873930 | 2021-07-18T07:54:26 | 2021-07-18T07:54:26 | 325,415,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 908 | py | import cv2
from video import Video
from objdetect import ObjDetectApi
import time
# COCO label map and pretrained SSD-MobileNet checkpoint used by the detector.
PATH_TO_LABELS = 'data/mscoco_label_map.pbtxt'
MODEL_NAME = 'ssd_mobilenet_v1_coco_2017_11_17'
# Shared detector instance used for every frame in this script.
api = ObjDetectApi(MODEL_NAME, PATH_TO_LABELS)
def detect(frame):
    """Run object detection on one BGR frame, display it, and report whether to continue.

    Returns False when the user presses ESC (key code 27), True otherwise.
    """
    rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    # The detection result dict is also what would be sent to a client.
    detections = api.inference_image(rgb)
    annotated = api.visualize(rgb, detections)
    cv2.imshow('frame', cv2.cvtColor(annotated, cv2.COLOR_RGB2BGR))
    pressed = cv2.waitKey(1)
    return pressed != 27
# Iterate frames from a sample video, run detection on each, and report the
# per-frame processing rate; detect() returns False when ESC is pressed.
with Video(file="vtest.avi") as v:
# with Video(device=0) as v:
    for image in v:
        start_time = time.time()
        if not detect(image): break
        end_time = time.time()
        # Frames per second for this single frame (detection + display time).
        fps = 1/(end_time-start_time)
        print(f'fps: {fps}')
"hhhong127@gmail.com"
] | hhhong127@gmail.com |
4c4ee08f8d7ff033d096bc6f764a9a81fd99e6ce | f425902cfa5dba2058b0885ffa103f7156b6be08 | /supervised_learning/0x0A-object_detection/5-yolo.py | 53410b498a3dbc4ae2422f5c29928baaebc8730b | [] | no_license | PilarPinto/holbertonschool-machine_learning | 2a23af14c851bd3e5d1926e644525bf0ab45df84 | 74213384b0998f65e123adc146ea5e91c4d77b37 | refs/heads/master | 2022-12-22T16:29:03.960474 | 2020-10-03T22:47:51 | 2020-10-03T22:47:51 | 279,387,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,488 | py | #!/usr/bin/env python3
'''
algorithm to perform object detection
File in YOLO dataset
'''
import tensorflow.keras as K
import numpy as np
import glob
import cv2
class Yolo:
    '''
    Yolo v3 object detector: wraps a trained Keras model together with box
    decoding, score filtering and non-max suppression post-processing.
    '''
    def __init__(self, model_path, classes_path, class_t, nms_t, anchors):
        '''uses the Yolo v3 algorithm to perform object detection

        model_path: path of the saved Keras model
        classes_path: text file with one class name per line
        class_t: score threshold used by filter_boxes
        nms_t: IoU threshold used during non-max suppression
        anchors: anchor sizes, indexed as anchors[output_idx, anchor_idx, 0:2]
        '''
        self.model = K.models.load_model(model_path)
        with open(classes_path, 'r') as f:
            self.class_names = f.read().splitlines()
        self.class_t = class_t
        self.nms_t = nms_t
        self.anchors = anchors
    def sigmoid(self, x):
        '''Sigmoid function (element-wise on numpy arrays)'''
        return(1/(1 + np.exp(-x)))
    def process_outputs(self, outputs, image_size):
        '''Find and return the tuple of outputs in the detection process

        NOTE: mutates `outputs` in place -- the first four channels of each
        output are overwritten with absolute (x1, y1, x2, y2) corners scaled
        to the original image size.
        Assumes each outputs[ind] has shape
        (grid_h, grid_w, anchor_boxes, 4 + 1 + num_classes) -- inferred from
        the indexing below; confirm against the model's output layers.
        '''
        boxes, box_confidences, box_class_probs = [], [], []
        img_h, img_w = image_size
        for ind in range(len(outputs)):
            # input sizes
            input_weight = self.model.input_shape[1]
            input_height = self.model.input_shape[2]
            grid_he = outputs[ind].shape[0]
            grid_we = outputs[ind].shape[1]
            anchor_boxes = outputs[ind].shape[2]
            # Raw network predictions: center offsets (tx, ty) and log-scale
            # width/height (tw, th) per anchor cell.
            tx = outputs[ind][..., 0]
            ty = outputs[ind][..., 1]
            tw = outputs[ind][..., 2]
            th = outputs[ind][..., 3]
            # Broadcast grid-cell coordinates across all anchors.
            c = np.zeros((grid_he, grid_we, anchor_boxes))
            idx_y = np.arange(grid_he)
            idx_y = idx_y.reshape(grid_he, 1, 1)
            idx_x = np.arange(grid_we)
            idx_x = idx_x.reshape(1, grid_we, 1)
            cx = c + idx_x
            cy = c + idx_y
            # Anchor priors for this output scale.
            p_w = self.anchors[ind, :, 0]
            p_h = self.anchors[ind, :, 1]
            # Using cx (width) cy (height) for cbounding
            b_x = self.sigmoid(tx) + cx
            b_y = self.sigmoid(ty) + cy
            b_w = p_w * np.exp(tw)
            b_h = p_h * np.exp(th)
            # normalizing
            bx = b_x / grid_we
            by = b_y / grid_he
            bw = b_w / input_weight
            bh = b_h / input_height
            # Convert center/size to corner coordinates.
            bx1 = bx - bw / 2
            by1 = by - bh / 2
            bx2 = bx + bw / 2
            by2 = by + bh / 2
            # Scale to the original image and write back in place.
            outputs[ind][..., 0] = bx1 * img_w
            outputs[ind][..., 1] = by1 * img_h
            outputs[ind][..., 2] = bx2 * img_w
            outputs[ind][..., 3] = by2 * img_h
            boxes.append(outputs[ind][..., 0:4])
            box_confidences.append(self.sigmoid(outputs[ind][..., 4:5]))
            box_class_probs.append(self.sigmoid(outputs[ind][..., 5:]))
        return(boxes, box_confidences, box_class_probs)
    def filter_boxes(self, boxes, box_confidences, box_class_probs):
        '''Filter boxes over the threshold

        Keeps only boxes whose best class score (confidence * class prob)
        reaches self.class_t; returns flattened arrays of boxes, class ids
        and scores.
        '''
        scores = []
        for i in range(len(boxes)):
            scores.append(box_confidences[i] * box_class_probs[i])
        filter_boxes = [box.reshape(-1, 4) for box in boxes]
        filter_boxes = np.concatenate(filter_boxes)
        # Best class per box and its score, flattened across all scales.
        class_m = [np.argmax(box, -1) for box in scores]
        class_m = [box.reshape(-1) for box in class_m]
        class_m = np.concatenate(class_m)
        class_scores = [np.max(box, -1) for box in scores]
        class_scores = [box.reshape(-1) for box in class_scores]
        class_scores = np.concatenate(class_scores)
        f_mask = np.where(class_scores >= self.class_t)
        filtered_boxes = filter_boxes[f_mask]
        box_classes = class_m[f_mask]
        box_scores = class_scores[f_mask]
        return(filtered_boxes, box_classes, box_scores)
    def non_max_suppression(self, filtered_boxes, box_classes, box_scores):
        '''non max suppression

        NMS is applied independently per class; surviving boxes from all
        classes are concatenated in class order.
        '''
        box_pred = []
        box_classes_pred = []
        box_scores_pred = []
        u_classes = np.unique(box_classes)
        for ucls in u_classes:
            idx = np.where(box_classes == ucls)
            bfilters = filtered_boxes[idx]
            bscores = box_scores[idx]
            bclasses = box_classes[idx]
            pick = self._intersectionou(bfilters, self.nms_t, bscores)
            filters = bfilters[pick]
            scores = bscores[pick]
            classes = bclasses[pick]
            box_pred.append(filters)
            box_classes_pred.append(classes)
            box_scores_pred.append(scores)
        filtered_boxes = np.concatenate(box_pred, axis=0)
        box_classes = np.concatenate(box_classes_pred, axis=0)
        box_scores = np.concatenate(box_scores_pred, axis=0)
        return (filtered_boxes, box_classes, box_scores)
    def _intersectionou(self, filtered_boxes, thresh, scores):
        '''Compute intersection

        Greedy IoU suppression: repeatedly keep the highest-scoring box and
        drop all remaining boxes overlapping it by more than self.nms_t.
        NOTE(review): the `thresh` parameter is accepted but unused -- the
        method reads self.nms_t instead.
        '''
        x1 = filtered_boxes[:, 0]
        y1 = filtered_boxes[:, 1]
        x2 = filtered_boxes[:, 2]
        y2 = filtered_boxes[:, 3]
        # +1 convention treats coordinates as inclusive pixel indices.
        area = (x2 - x1 + 1) * (y2 - y1 + 1)
        idxs = scores.argsort()[::-1]
        pick = []
        while idxs.size > 0:
            i = idxs[0]
            pick.append(i)
            xx1 = np.maximum(x1[i], x1[idxs[1:]])
            yy1 = np.maximum(y1[i], y1[idxs[1:]])
            xx2 = np.minimum(x2[i], x2[idxs[1:]])
            yy2 = np.minimum(y2[i], y2[idxs[1:]])
            w = np.maximum(0, xx2 - xx1 + 1)
            h = np.maximum(0, yy2 - yy1 + 1)
            inter = (w * h)
            overlap = inter / (area[i] + area[idxs[1:]] - inter)
            ind = np.where(overlap <= self.nms_t)[0]
            idxs = idxs[ind + 1]
        return pick
    @staticmethod
    def load_images(folder_path):
        '''Load a little set of images

        Reads every *.jpg under folder_path with cv2 (BGR arrays) and
        returns (images, images_paths) in glob order.
        '''
        images = []
        images_paths = []
        for filename in glob.glob(folder_path + '/*.jpg'):
            images.append(cv2.imread(filename))
            images_paths.append(filename)
        return(images, images_paths)
    def preprocess_images(self, images):
        '''Change the size of the image

        Resizes each image to the model's input size, scales pixels to
        [0, 1], and also returns the original (height, width) shapes.
        '''
        rescale_lst, image_shapes = [], []
        input_weight = self.model.input_shape[1]
        input_height = self.model.input_shape[2]
        for image in images:
            resized = cv2.resize(image, (input_weight,
                                 input_height),
                                 interpolation=cv2.INTER_CUBIC)
            rescale_lst.append(resized / 255)
            image_shapes.append(image.shape[:2])
        p_images = np.array(rescale_lst)
        image_shapes = np.array(image_shapes)
        return(p_images, image_shapes)
| [
"piapintoch@unal.edu.co"
] | piapintoch@unal.edu.co |
88895b7e721bf818a7dc07867690fd85b1bb77e8 | 6eea60bcbf206dafc5fe578b996267ce2bc9ae6e | /UVA/11953 - Battleships.py | 0d6e814baa4fed1595d51143f5b702dbd8328d1f | [] | no_license | SueAli/cs-problems | 491fef79f3e352d7712cd622d3b80ec15d38642b | b321116d135f868d88bd849b5ea7172feb74fb4c | refs/heads/master | 2023-08-31T10:46:30.374394 | 2023-08-24T20:14:04 | 2023-08-24T20:14:04 | 95,930,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,073 | py | # Time Complexity is O( n * n )
# Space Complexity is O( n * n )
dir_ = [(0, -1), (0, 1), (-1, 0), (1, 0)]  # left, right, up, down


def dfsFloodFill(i, j, grid, x_cnt):
    """Flood-fill one connected ship component, counting its 'x' cells.

    Visited cells are overwritten with '#'; x_cnt is a one-element list used
    as a mutable counter for the 'x' cells seen in this component.
    """
    out_of_bounds = i < 0 or j < 0 or i == len(grid) or j == len(grid[0])
    if out_of_bounds or grid[i][j] in ('.', '#'):
        return
    if grid[i][j] == 'x':
        x_cnt[0] += 1
    grid[i][j] = '#'
    for di, dj in dir_:
        dfsFloodFill(i + di, j + dj, grid, x_cnt)
# Read test cases until input is exhausted: for each n x n grid, count the
# connected components that still contain at least one intact ('x') cell.
while True:
    try:
        T = int(input())
        for t in range(T):
            n = int(input())
            g = []
            for r in range(n):
                g.append(list(str(input())))
            ships_cnt = 0
            for ii in range(n):
                for jj in range(n):
                    if g[ii][jj] in ['x','@']:
                        x_cnt = [0]
                        dfsFloodFill(ii,jj,g, x_cnt)
                        # A component only counts as a surviving ship if it
                        # had at least one un-hit 'x' cell.
                        if x_cnt[0] >0:
                            ships_cnt += 1
            print ("Case "+str(t+1)+": "+str(ships_cnt))
    except:
        # Bare except is deliberate: int(input()) raises at EOF, which ends
        # the read loop (competitive-programming idiom).
        break
| [
"souad.hassanien@gmail.com"
] | souad.hassanien@gmail.com |
9def12bb10c280f6d218ab0e4f4e022178c42c79 | 3c934c97bd5748237ac8963c8be779a7d77be629 | /NumArray.py | 7b83af17fd371ac49d4747ee093cd73e1eb4ba28 | [] | no_license | Franktian/leetcode | 2b0d0280d18e3401b9f337f027c5d70f26237f02 | 98e7852ba144cefbdb02f705651b1519155ee4d6 | refs/heads/master | 2021-06-12T15:23:09.733650 | 2020-06-17T23:09:18 | 2020-06-17T23:09:18 | 128,710,359 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | def generateSumArray(lst):
res = [0 for _ in range(len(lst) + 1)]
for i in range(len(lst)):
res[i + 1] = res[i] + lst[i]
return res | [
"tianyawen201209@hotmail.com"
] | tianyawen201209@hotmail.com |
43ec620f796bd7583adf61a5d35f884a7ed0a131 | 13c5b9fc590954a4a25b9d38e8140eb83a63c9a1 | /src/bxutils/encoding/json_encoder.py | f19ef44071daf049b5ca02db5826cf4b49fe4c99 | [
"MIT"
] | permissive | aspin/bxcommon | f746c405c693f4efb8af815cf4f9408284299e50 | 325a0844e3fc16176e90ea574eb45fff1177c527 | refs/heads/master | 2020-09-10T16:26:55.814270 | 2019-11-07T21:53:23 | 2019-11-07T21:53:23 | 221,758,675 | 0 | 0 | null | 2019-11-14T18:08:11 | 2019-11-14T18:08:10 | null | UTF-8 | Python | false | false | 2,974 | py | import json
import os
import traceback
from datetime import date, time, datetime
from enum import Enum
from inspect import istraceback
from typing import Union, Any, Iterable, Collection, Optional, Dict
from bxutils import logging
logger = logging.get_logger(__name__)
# Dict view objects iterate lazily but also satisfy Collection, so they need
# an explicit allow-list to be treated as "iterable, not a collection".
SPECIAL_ITERABLE_TYPES = (type(dict().values()), type(dict().keys()),)


def is_iterable_no_collection(o):
    """Return True for objects that can be iterated but are not full collections."""
    if isinstance(o, SPECIAL_ITERABLE_TYPES):
        return True
    return isinstance(o, Iterable) and not isinstance(o, Collection)
def to_json(obj: Any, remove_nulls: bool = False) -> str:
    """Serialize *obj* to a JSON string using EnhancedJSONEncoder.

    When *remove_nulls* is True, attributes whose value is None are dropped
    from obj.__dict__ before serialization. (Bug fix: previously any falsy
    value -- 0, False, "", [] -- was dropped as well, which contradicted the
    flag's name; only genuine nulls are filtered now.)

    :param obj: the object to serialize; must expose __dict__ when
        remove_nulls is True.
    :param remove_nulls: drop None-valued attributes before dumping.
    :return: the JSON representation as a string.
    """
    if remove_nulls:
        clean_dict = {
            key: value for key, value in obj.__dict__.items() if value is not None
        }
        return json.dumps(clean_dict, cls=EnhancedJSONEncoder)
    return json.dumps(obj, cls=EnhancedJSONEncoder)
def to_dict(obj: Any) -> Dict[str, Any]:
    """Recursively convert *obj* into plain dict/list/scalar values.

    Delegates to EnhancedJSONEncoder.as_dict, so the result matches what
    to_json() would serialize, without producing a JSON string.
    """
    return EnhancedJSONEncoder().as_dict(obj)
def load_json_from_file(json_file_path: str) -> Optional[Union[list, dict]]:
    """Load and parse a JSON file, returning its content.

    Returns None when the file exists but cannot be read or parsed (the
    failure is logged at debug level).

    :param json_file_path: path of the JSON file to read.
    :return: the parsed JSON document (list or dict), or None on read/parse
        failure.
    :raises ValueError: if json_file_path is not an existing file.
    """
    if not os.path.isfile(json_file_path):
        # Bug fix: the path used to be passed logging-style as a second
        # exception argument and was never interpolated into the message.
        raise ValueError("Could not locate json file: %s" % json_file_path)
    node_json = None
    try:
        with open(json_file_path) as json_file:
            node_json = json.load(json_file)
    except ValueError as e:
        logger.debug("Failed to parse json: %s", e)
    except OSError as e:
        logger.debug("Failed trying to check for a json file: %s", e)
    return node_json
class EnhancedJSONEncoder(json.JSONEncoder):
    """JSON encoder that accepts the project's richer runtime types.

    Handles enums, objects with __dict__, date/time values, bytes-likes,
    hash objects, tracebacks and lazy iterables in addition to the types the
    stock encoder knows about. Branch order in default() is significant.
    """
    def default(self, o: Any) -> Any:
        """Convert one non-JSON-native value into something serializable."""
        # Materialize lazy iterables (generators, dict views) first so the
        # later branches see a concrete list.
        if is_iterable_no_collection(o):
            o = list(o)
        elif isinstance(o, (bytearray, memoryview)):
            o = bytes(o)
        if isinstance(o, Enum):
            return str(o)
        # Arbitrary objects serialize as their attribute dict when available.
        if hasattr(o, "__dict__"):
            if isinstance(o.__dict__, dict):
                return o.__dict__
            else:
                return str(o)
        if isinstance(o, (date, datetime, time)):
            return o.isoformat()  # pyre-ignore
        if isinstance(o, bytes):
            try:
                return o.decode("utf-8")
            except UnicodeDecodeError:
                # Non-UTF-8 payloads fall back to their repr-style string.
                return str(o)
        # Hash objects (hashlib-style) and objects exposing hex_string()
        # serialize as their hex form.
        if hasattr(o, "hexdigest"):
            return o.hexdigest()  # pyre-ignore
        if hasattr(o, "hex_string"):
            return o.hex_string()  # pyre-ignore
        if istraceback(o):
            return "".join(traceback.format_tb(o)).strip()
        # Unlike the stdlib default() (which raises TypeError), anything else
        # is returned unchanged.
        return o
    def _encode(self, obj):
        """Recursively apply default() through dicts, lists and sets."""
        obj = self.default(obj)
        if isinstance(obj, dict):
            # Keys are run through default() once more after recursion so
            # non-string keys are normalized as well.
            return {self.default(self._encode(k)): self._encode(v) for k, v in obj.items()}
        elif isinstance(obj, list) or isinstance(obj, set):
            return [self._encode(l) for l in obj]
        else:
            return obj
    def encode(self, o) -> str:
        """Serialize *o* after the recursive pre-conversion pass."""
        return super(EnhancedJSONEncoder, self).encode(self._encode(o))
    def as_dict(self, obj) -> Dict[str, Any]:
        """Return the recursively converted structure without JSON-stringifying it."""
        return self._encode(obj)
| [
"vc.shane@gmail.com"
] | vc.shane@gmail.com |
d34717fa89b9334c16bbce45114f375f90df6212 | 5f86944bdf1b810a84c63adc6ed01bbb48d2c59a | /kubernetes/client/models/v1_host_path_volume_source.py | 3c20e047e90416302ea98ba790825974c4a38243 | [
"Apache-2.0"
] | permissive | m4ttshaw/client-python | 384c721ba57b7ccc824d5eca25834d0288b211e2 | 4eac56a8b65d56eb23d738ceb90d3afb6dbd96c1 | refs/heads/master | 2021-01-13T06:05:51.564765 | 2017-06-21T08:31:03 | 2017-06-21T08:31:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,264 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1HostPathVolumeSource(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, path=None):
"""
V1HostPathVolumeSource - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'path': 'str'
}
self.attribute_map = {
'path': 'path'
}
self._path = path
@property
def path(self):
"""
Gets the path of this V1HostPathVolumeSource.
Path of the directory on the host. More info: http://kubernetes.io/docs/user-guide/volumes#hostpath
:return: The path of this V1HostPathVolumeSource.
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""
Sets the path of this V1HostPathVolumeSource.
Path of the directory on the host. More info: http://kubernetes.io/docs/user-guide/volumes#hostpath
:param path: The path of this V1HostPathVolumeSource.
:type: str
"""
if path is None:
raise ValueError("Invalid value for `path`, must not be `None`")
self._path = path
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1HostPathVolumeSource):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"mehdy@google.com"
] | mehdy@google.com |
337555d0ffdb156debb715b400abbd43a9f4ed40 | 9c5f36b72323090b9f0254938923a04b436fd3be | /main/collect_data_X.py | a37f4e8d9f31c29e69b2f9b8a93a7076b5e83221 | [] | no_license | mdlaskey/IL_ROS_HSR | 7c7233905e6a1fc8388661236bade3862da0fc90 | d12f8397249acea4fae71d12c74074314a8a005e | refs/heads/master | 2021-01-20T18:30:11.815581 | 2018-04-07T01:14:41 | 2018-04-07T01:14:41 | 90,918,344 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,777 | py | from hsrb_interface import geometry
import hsrb_interface
from geometry_msgs.msg import PoseStamped, Point, WrenchStamped
import geometry_msgs
import controller_manager_msgs.srv
import cv2
from cv_bridge import CvBridge, CvBridgeError
import IPython
from numpy.random import normal
import time
#import listener
from geometry_msgs.msg import Twist
from sensor_msgs.msg import Joy
from il_ros_hsr.core.sensors import RGBD, Gripper_Torque, Joint_Positions
from il_ros_hsr.core.joystick_X import JoyStick_X
import matplotlib.pyplot as plt
import numpy as np
import numpy.linalg as LA
from tf import TransformListener
import rospy
from il_ros_hsr.p_pi.safe_corl.com import Safe_COM as COM
from il_ros_hsr.p_pi.safe_corl.bottle_detector import Bottle_Detect
class Collect_Demos():
    """Teleoperated demonstration recorder for the Toyota HSR robot (Python 2).

    A joystick drives the robot while synchronized RGB-D frames, joystick
    actions and end-effector poses are accumulated into a trajectory that can
    be saved for imitation learning.
    """
    def __init__(self,user_name = None,inject_noise = False,noise_scale = 1.0):
        """Connect to robot hardware, camera, bottle detector and joystick.

        user_name: demo storage sub-directory under the options root
            (None keeps the default location).
        inject_noise / noise_scale: optionally perturb joystick commands to
            diversify the collected data.
        """
        self.robot = hsrb_interface.Robot()
        self.noise = 0.1
        self.omni_base = self.robot.get('omni_base')
        self.whole_body = self.robot.get('whole_body')
        self.gripper = self.robot.get('gripper')
        self.tl = TransformListener()
        self.cam = RGBD()
        # Give the camera subscribers time to start streaming.
        time.sleep(5)
        self.b_d = Bottle_Detect(self.cam.read_info_data())
        self.start_recording = False
        self.stop_recording = False
        self.com = COM()
        if(not user_name == None):
            self.com.Options.setup(self.com.Options.root_dir,user_name)
        #self.com.go_to_initial_state(self.whole_body,self.gripper)
        #self.whole_body.move_to_joint_positions({'head_tilt_joint':-0.3})
        self.joystick = JoyStick_X(self.com,inject_noise = inject_noise,noise_scale = noise_scale)
        self.torque = Gripper_Torque()
        self.joints = Joint_Positions()
    def proess_state(self):
        """Capture one sample (images, action, pose) into the trajectory.

        NOTE(review): the method name keeps its historical typo ("proess");
        renaming would break callers such as run().
        """
        img_rgb = self.cam.read_color_data()
        img_depth = self.cam.read_depth_data()
        cur_action,noise_action,time_pub = self.joystick.apply_control()
        state = self.com.format_data(img_rgb,img_depth)
        #cv2.imwrite('frame_'+str(self.count)+'.png',state[0])
        #Save all data
        data = {}
        data['action'] = cur_action
        data['color_img'] = state[0]
        data['depth_img'] = state[1]
        data['noise_action'] = noise_action
        pose = self.whole_body.get_end_effector_pose().pos
        pose = np.array([pose.x,pose.y,pose.z])
        data['robot_pose'] = pose
        # data['action_time'] = time_pub
        # data['image_time'] = self.cam.color_time_stamped
        print "ACTION AT COUNT ",self.count
        print cur_action
        self.count += 1
        self.trajectory.append(data)
        # if(LA.norm(cur_action) > 1e-3):
        #   print "GOT ACCEPTED"
        #   self.trajectory.append(data)
    def check_success(self):
        """Run the bottle detector on the current frame; returns its result."""
        img_rgb = self.cam.read_color_data()
        img_depth = self.cam.read_depth_data()
        success = self.b_d.detect_bottle(img_rgb,img_depth)
        print "BOTTLE FOUND ",success
        return success
    def run(self):
        """Poll the joystick once; record a full demo when recording is armed.

        A large negative value on record axis 0 starts recording, axis 1
        stops it; after recording, the user is prompted whether to save.
        """
        self.joystick.apply_control()
        cur_recording = self.joystick.get_record_actions_passive()
        if(cur_recording[0] < -0.1):
            print "BEGIN DATA COLLECTION"
            self.start_recording = True
            count = 0
        if(self.start_recording):
            self.count = 0
            self.trajectory = []
            while not self.stop_recording:
            #while count < 20:
                # Only sample when the camera has produced a fresh frame.
                if(self.cam.is_updated):
                    self.proess_state()
                    self.cam.is_updated = False
                cur_recording = self.joystick.get_record_actions()
                if(cur_recording[1] < -0.1):
                    print "END DATA COLLECTION"
                    self.stop_recording = True
                count += 1
            self.check_success()
            q = input('SAVE DATA: y or n: ')
            if(q == 'y'):
                self.com.save_recording(self.trajectory)
                self.start_recording = False
                self.stop_recording = False
if __name__=='__main__':
    # Collection session parameters: store demos under this user directory
    # and inject scaled joystick noise into the recorded actions.
    user_name = 'corl_anne_n1/'
    noise_scale = 4.0
    inject_noise = True
    cd = Collect_Demos(user_name,inject_noise=inject_noise,noise_scale = noise_scale)
    time.sleep(5)
    # Poll forever; each run() call may record one full demonstration.
    while True:
        cd.run()
"mdlaskey@umich.edu"
] | mdlaskey@umich.edu |
a458fcba14a9b526fea72c863cbaf95925bb15fd | 5c5b34f6f598a43ddfbd473228737a27c26d1d8e | /contest/第 16 场双周赛/5153. 层数最深叶子节点的和.py | 9c625b10b34a8d937c7c013c9da14a932474e7f8 | [] | no_license | lovehhf/LeetCode | 34a1bc140b10dc83a32ef9a70f9c73176948a9c4 | 5d3574ccd282d0146c83c286ae28d8baaabd4910 | refs/heads/master | 2021-11-04T04:52:34.518621 | 2021-10-26T15:34:47 | 2021-10-26T15:34:47 | 173,673,492 | 0 | 0 | null | 2020-03-03T14:54:09 | 2019-03-04T04:26:15 | Python | UTF-8 | Python | false | false | 2,252 | py | # -*- coding:utf-8 -*-
"""
给你一棵二叉树,请你返回层数最深的叶子节点的和。
示例:
输入:root = [1,2,3,4,5,null,6,7,null,null,null,null,8]
输出:15
提示:
树中节点数目在 1 到 10^4 之间。
每个节点的值在 1 到 100 之间。
mid
dl的dfs解法:
class Solution {
public:
int md, res;
void dfs(TreeNode* x , int dep) {
if (x == NULL) return;
if (x->left == NULL && x->right == NULL) {
if (dep > md) {
res = 0;
md = dep;
}
if (dep == md) {
res += x->val;
}
} else {
dfs(x->left, dep + 1);
dfs(x->right, dep + 1);
}
}
int deepestLeavesSum(TreeNode* root) {
md = -1;
res = 0;
dfs(root , 0);
return res;
}
};
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
from utils.TreeNode import TreeNode
class Solution:
def deepestLeavesSum(self, root: TreeNode) -> int:
"""
bfs层次遍历
:param root:
:return:
"""
s = [0] * 10010
q = [(root, 0)]
level = 0
while q:
cur, level = q.pop(0)
if cur.left:
q.append((cur.left, level + 1))
if cur.right:
q.append((cur.right, level + 1))
if not cur.left and not cur.right:
s[level] += cur.val
return s[level]
class DFS_Solution:
"""
dfs 解法
"""
def __init__(self):
self.max_depth = 0
self.res = 0
def dfs(self, node, depth):
if not node:
return
if not node.left and not node.right:
if depth == self.max_depth:
self.res += node.val
else:
if depth + 1 > self.max_depth:
self.max_depth = depth + 1
self.res = 0
self.dfs(node.left, depth + 1)
self.dfs(node.right, depth + 1)
def deepestLeavesSum(self, root: TreeNode) -> int:
self.dfs(root, 0)
return self.res
| [
"853885165@qq.com"
] | 853885165@qq.com |
6ccb5f9dd768379c7dbc65f8f3782ebb2724ba65 | ecd4b06d5d5368b71fd72a1c2191510a03b728fd | /10 - python-data-science-toolbox-part-2/13 - write your own generator expressions.py | d2c273692fdfcb03da16650e1b4cafb4b11fe05a | [
"MIT"
] | permissive | Baidaly/datacamp-samples | 86055db5e326b59bfdce732729c80d76bf44629e | 37b4f78a967a429e0abca4a568da0eb9d58e4dff | refs/heads/master | 2022-07-27T01:18:00.700386 | 2022-07-18T19:27:23 | 2022-07-18T19:27:23 | 123,827,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,029 | py | '''
You are familiar with what generators and generator expressions are, as well as its difference from list comprehensions. In this exercise, you will practice building generator expressions on your own.
Recall that generator expressions basically have the same syntax as list comprehensions, except that it uses parentheses () instead of brackets []; this should make things feel familiar! Furthermore, if you have ever iterated over a dictionary with .items(), or used the range() function, for example, you have already encountered and used generators before, without knowing it! When you use these functions, Python creates generators for you behind the scenes.
Now, you will start simple by creating a generator object that produces numeric values.
'''
# Create generator object: result
result = (num for num in range(31))
# Print the first 5 values
print(next(result))
print(next(result))
print(next(result))
print(next(result))
print(next(result))
# Print the rest of the values
for value in result:
print(value)
| [
"daulet.urazalinov@uptake.com"
] | daulet.urazalinov@uptake.com |
1f1c0fa57670131ce843fb8fd1fff22ae434970c | 8cc862aa51d3fec95d094dc4bd3151e1155d240a | /PythonSpider/toutiao/jiepai.py | 6112f808f2392f33cd7d22605d53420e6247c8a4 | [] | no_license | activehuahua/python | bcbf3a2190025e2315399bfd0c725f598211632b | cc36a93c01c53f856426ccf2724848142524d9c0 | refs/heads/master | 2023-04-14T10:23:21.590765 | 2019-08-12T06:52:15 | 2019-08-12T06:52:15 | 160,277,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,174 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
'''
@Author : zhaojianghua
@File : jiepai.py
@Time : 2019/1/2 10:45
@desc :
街拍网址: https://www.toutiao.com/search/?keyword=%E8%A1%97%E6%8B%8D
'''
import requests
from urllib.parse import urlencode
import pprint,os
from hashlib import md5
from multiprocessing import Pool
def get_page(offset):
params={
'offset' :offset,
'format' : 'json',
'keyword' :'街拍',
'autoload' : 'true',
'count' :20 ,
'cur_tab': 1
}
url = 'https://www.toutiao.com/search_content/?' + urlencode(params)
try:
response = requests.get(url)
if response.status_code ==200:
return response.json()
except requests.ConnectionError :
return None
def get_images(json):
if json.get('data'):
for item in json.get('data'):
title= item.get('title')
images=item.get('image_list')
if images:
for image in images :
yield{
'image' : 'http:'+image.get('url'),
'title':title
}
def save_image(item):
dir= os.path.join(os.getcwd()+'/img/'+item.get('title'))
print(dir)
try:
if not os.path.exists(dir) :
os.makedirs(dir)
response =requests.get(item.get('image'))
if response.status_code == 200 :
file_path ='{0}/{1}.{2}'.format(dir,md5(response.content).hexdigest(),'jpg')
if not os.path.exists(file_path):
with open(file_path,'wb') as f :
f.write(response.content)
else :
print('Already Download ',file_path)
except requests.ConnectionError:
print('Failed to save Image')
except Exception :
pass
def main(offset):
json=get_page(offset)
for item in get_images(json):
print(item)
save_image(item)
GROUP_START =1
GROUP_END=20
if __name__ == '__main__':
pool=Pool()
groups =([x * 20 for x in range(GROUP_START,GROUP_END+1)])
pool.map(main,groups)
pool.close()
pool.join()
| [
"zhaojianghua@pretang.com"
] | zhaojianghua@pretang.com |
8c77c556534fee53c2d8b3f8323b07fa4aa34f7a | 17b9f098d783b58a65a2f4a2d51c7d1ae19285cf | /Mayordomo.py | 88dc5fde4a0f1885bfec2efdae1dc685064bf827 | [
"MIT"
] | permissive | elenajimenezm/Mayordomo | ea17a3168f25f4648910a71aece478155dffabd3 | da5e8746ee41906eb60c8626b5de2db8e111ad83 | refs/heads/master | 2021-07-25T23:32:15.382348 | 2017-11-09T22:51:38 | 2017-11-09T22:51:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,650 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import paho.mqtt.client as paho
import json
import time
import uuid
import Queue
import subprocess
import unicodedata
# MQTT broker address and one topic per subsystem the butler talks to.
MQTT_SERVER = 'localhost'
MAYORDOMO_TOPIC = 'rcr/Mayordomo'
SPEAK_TOPIC = 'rcr/Speak'
PLAYER_TOPIC = 'rcr/MusicPlayer'
DRONE_TOPIC = 'rcr/RollerSpider'
DHT22_TOPIC = 'rcr/DHT22'
MINDSET_TOPIC = 'rcr/MindSet'
MAX7219_TOPIC = 'rcr/Max7219'
NOISE_TOPIC = 'rcr/Ruido'
S2_TOPIC = 'rcr/S2'
# Size-1 queue: only the most recent unprocessed command is kept.
messages = Queue.Queue( 1 )
# Latest raw payload received on the DHT22 topic (None until first reading).
data_dht22 = None
def _publish( topic, msg ):
    """Publish *msg* to *topic* on the shared MQTT client.

    Single helper backing every sendTo* wrapper below, so the publish call
    lives in one place. The per-function ``global`` declarations of the
    original wrappers were redundant: the names are only read, never rebound.
    """
    mqtt_client.publish( topic, msg )

def sendToSpeak( msg ):
    """Send *msg* to the text-to-speech service."""
    _publish( SPEAK_TOPIC, msg )

def sendToMusicPlayer( msg ):
    """Send a player command (play/stop/pause/next/previous)."""
    _publish( PLAYER_TOPIC, msg )

def sendToDrone( msg ):
    """Send *msg* to the Rolling Spider drone controller."""
    _publish( DRONE_TOPIC, msg )

def sendToMindSet( msg ):
    """Send *msg* to the MindSet headset service."""
    _publish( MINDSET_TOPIC, msg )

def sendToMax7219( msg ):
    """Send *msg* to the MAX7219 LED display service."""
    _publish( MAX7219_TOPIC, msg )

def sendToNoise( msg ):
    """Send *msg* to the noise-sensor service."""
    _publish( NOISE_TOPIC, msg )

def sendToS2( msg ):
    """Send *msg* to the S2 robot service."""
    _publish( S2_TOPIC, msg )
def mqtt_on_message( client, userdata, message ):
    """MQTT callback: stash DHT22 readings, queue every other command.

    Only the newest command survives: any unprocessed message is discarded
    before the fresh one is enqueued in the size-1 queue.
    """
    global messages, data_dht22
    # Sensor readings from the DHT22 are stored directly, never queued.
    if message.topic == DHT22_TOPIC:
        data_dht22 = message.payload
        return
    # Drop the pending command (if any) so the queue never blocks...
    try:
        messages.get_nowait()
    except Queue.Empty:
        pass
    # ...then enqueue the fresh command for the main loop.
    try:
        messages.put_nowait( message )
    except Queue.Full:
        pass
def mqtt_on_connect( client, arg1, arg2, arg3 ):
    """MQTT callback: subscribe to the command and sensor topics on connect."""
    for topic in (MAYORDOMO_TOPIC, DHT22_TOPIC):
        client.subscribe( topic )
    print( "[Mayordomo] Esperando en %s - %s" % ( MQTT_SERVER, MAYORDOMO_TOPIC ) )
def main():
    """Butler main loop: launch helper services, connect to MQTT, then
    dispatch normalized voice commands until 'finaliza sistema' arrives."""
    global mqtt_client, MQTT_SERVER, messages, data_dht22
    print( '[Mayordomo] Iniciando sistema' )
    # Launch the companion services (speech synthesis and music player).
    subprocess.Popen( '/bin/sh ./Speak.sh', shell=True )
    subprocess.Popen( '/usr/bin/python ./MusicPlayer/MusicPlayer.py', shell=True )
    # Connect with a unique client id and run the network loop in background.
    mqtt_client = paho.Client( 'Mayordomo-' + uuid.uuid4().hex )
    mqtt_client.on_connect = mqtt_on_connect
    mqtt_client.on_message = mqtt_on_message
    mqtt_client.connect( MQTT_SERVER, 1883 )
    mqtt_client.loop_start()
    time.sleep( 2 )
    sendToSpeak( ' ' )
    sendToSpeak( ' Sistema inicializado' )
    abort = False
    while( not abort ):
        message = messages.get()
        # Normalize the payload (assumed UTF-8): lower-case it and strip
        # accents/diacritics via NFD decomposition so spoken variants match
        # the literal command strings below.
        cmd = message.payload.decode('utf-8').lower()
        cmd = ''.join((c for c in unicodedata.normalize('NFD', cmd) if unicodedata.category(c) != 'Mn'))
        # Canonicalize common transcription variants and verb forms.
        cmd = cmd.replace( 'mary', 'mari' )
        cmd = cmd.replace( 'detener', 'deten' )
        cmd = cmd.replace( 'tocar', 'toca' )
        cmd = cmd.replace( 'pausar', 'pausa' )
        cmd = cmd.replace( 'iniciar', 'inicia' )
        cmd = cmd.replace( 'finalizar', 'finaliza' )
        cmd = cmd.replace( 'mostrar', 'muestra' )
        cmd = cmd.replace( 'robots', 'robot' )
        cmd = cmd.replace( 'conectar', 'conecta' )
        cmd = cmd.replace( 'desconectar', 'desconecta' )
        print( "[Mayordomo] Mensaje recibido:", message.payload, "<<" + cmd + ">>" )
        # Built-in commands
        if( cmd == 'finaliza sistema' ):
            abort = True
        elif( cmd == 'mari' ):
            sendToSpeak( 'Dime Padre' )
        elif( cmd == 'que hora es' ):
            now = time.localtime()
            sendToSpeak( 'son las %d horas con %d minutos' % (now.tm_hour, now.tm_min) )
        elif( cmd == 'conversemos' ):
            now = time.localtime()
            sendToSpeak( 'de que deseas conversar?' )
        # Music player commands
        elif( cmd == 'toca musica' ):
            sendToMusicPlayer( 'play' )
        elif( cmd == 'deten musica' ):
            sendToMusicPlayer( 'stop' )
        elif( cmd == 'pausa musica' ):
            sendToMusicPlayer( 'pause' )
        elif( cmd == 'tema siguiente' ):
            sendToMusicPlayer( 'next' )
        elif( cmd == 'tema anterior' ):
            sendToMusicPlayer( 'previous' )
        elif( cmd == 'quien canta' ):
            sendToMusicPlayer( 'songtitle' )
        # Rolling Spider drone commands
        elif( cmd == 'inicia spider' ):
            subprocess.Popen( '/usr/bin/python ./DroneRollerSpider/DroneRollerSpider.py', shell=True )
        elif( cmd == 'finaliza spider' ):
            sendToDrone( 'exit' )
        elif( cmd == 'conecta spider' ):
            sendToDrone( 'connect' )
        # NOTE(review): 'desconectar' was already rewritten to 'desconecta'
        # above, so the second comparison below can never match.
        elif( cmd == 'desconecta spider' or cmd =='desconectar spyder' ):
            sendToDrone( 'disconnect' )
        elif( cmd == 'sube spider' ):
            sendToDrone( 'takeoff' )
        elif( cmd == 'baja spider' ):
            sendToDrone( 'land' )
        elif( cmd == 'gira spider' ):
            # Ten small left turns, 100 ms apart.
            for i in range( 10 ):
                sendToDrone( 'turn_left' )
                time.sleep( 0.100 )
        # MindSet EEG headset commands
        elif( cmd == 'inicia sensor neuronal' ):
            subprocess.Popen( '/usr/bin/python ./MindSet/MindSetPub.py', shell=True )
            subprocess.Popen( '/usr/bin/python ./MindSet/MindSetGraphics.py', shell=True )
            subprocess.Popen( '/usr/bin/python ./MindSet/MindSetMusic.py', shell=True )
        elif( cmd == 'finaliza sensor neuronal' ):
            sendToMindSet( 'exit' )
        # DHT22 temperature/humidity readings (cached by mqtt_on_message)
        elif( cmd == 'temperatura' ):
            if( data_dht22 == None ):
                sendToSpeak( 'No tengo datos de temperatura' )
            else:
                d = data_dht22
                d = json.loads( d )
                sendToSpeak( 'La Temperatura es de %3.1f grados' % ( d["temperatura"] ) )
        elif( cmd == 'humedad' ):
            if( data_dht22 == None ):
                sendToSpeak( 'No tengo datos de humedad' )
            else:
                d = data_dht22
                d = json.loads( d )
                sendToSpeak( 'La humedad es de un %3.1f por ciento' % ( d["humedad"] ) )
        # MAX7219 LED matrix: 'muestra <digit>' (single digits only)
        elif( cmd.startswith( 'muestra ' ) and len( cmd ) == 9 ):
            try:
                digit = int( cmd[8] )
                sendToSpeak( "Mostrando un %d en la matriz" % digit )
                sendToMax7219( str( digit ) )
            except Exception as e:
                # Non-numeric argument: silently ignore the command.
                pass
        # Noise analysis
        elif( cmd == 'inicia analisis de ruido' ):
            subprocess.Popen( '/usr/bin/python ./Noise/NoiseGraphics.py', shell=True )
        elif( cmd == 'finaliza analisis de ruido' ):
            sendToNoise( 'exit' )
        # S2 robot control
        elif( cmd == 'inicia control de robot' ):
            subprocess.Popen( '/usr/bin/python ./S2/S2.py', shell=True )
        elif( cmd == 'nombre de robot' ):
            sendToS2( 'name' )
        elif( cmd == 'robot izquierda' ):
            sendToS2( 'left 1' )
        elif( cmd == 'robot derecha' ):
            sendToS2( 'right 1' )
        elif( cmd == 'robot avanza' ):
            sendToS2( 'forward 5' )
        elif( cmd == 'robot retrocede' ):
            sendToS2( 'backward 5' )
        elif( cmd == 'finaliza control de robot' ):
            sendToS2( 'exit' )
    sendToSpeak( 'Sistema finalizado' )
    time.sleep( 2 )
    mqtt_client.loop_stop()
    print( '[Mayordomo] Sistema finalizado' )
#-- Script entry point: run the butler's main loop.
main()
| [
"titos.carrasco@gmail.com"
] | titos.carrasco@gmail.com |
f8849681bf0f73b561cd28da56bec644274b35b2 | 0e99d2efff685a66869d5a7cd4a68de8955f498c | /baseproblems/maxSerise.py | 59e500dd6acc33c4ffc189b1fcf29faf76b97e71 | [] | no_license | supercp3/code_leetcode | f303109c70ccdd0baa711cf606d402158b212525 | 1dc6260e229a012111ec4d5e60071c2458ce5002 | refs/heads/master | 2020-03-26T11:33:28.741405 | 2018-10-15T02:18:24 | 2018-10-15T02:18:24 | 144,848,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | '''
最大连续子序列的和
'''
def maxlist(data):
    """Return the maximum sum of any contiguous subsequence of *data*.

    Uses Kadane's algorithm, replacing the original O(n^2) double loop with
    a single O(n) pass.  Behavior is unchanged: the empty subsequence is
    allowed, so the result is never negative (0 for an empty or
    all-negative input, exactly as before).
    """
    best = 0
    current = 0
    for value in data:
        # Either extend the running subsequence or restart from empty.
        current = max(0, current + value)
        best = max(best, current)
    return best
if __name__=="__main__":
    # Demo: the best contiguous run is 1+2+3-2+3 = 7.
    data=[1,2,3,-2,3,-10,3]
    res=maxlist(data)
print(res) | [
"13281099@bjtu.edu.cn"
] | 13281099@bjtu.edu.cn |
6ba0d69d629dad7ff9c362d2aaa88f0ed544dfe5 | 2fe1cc0cca927276a1f936e57c6427aa4210265a | /flasky/app/__init__.py | 33d1d993450feb757a5d81cf9576e679b99e4a20 | [] | no_license | D-Mbithi/flask-web-development-code | 57b29488f87ff76a0775f16965f8df906d517b5f | 98d24e498372be74a17b7451b46ed1bb22093a8d | refs/heads/master | 2022-12-12T17:13:11.337107 | 2019-12-02T16:42:47 | 2019-12-02T16:42:47 | 225,421,531 | 0 | 0 | null | 2022-09-16T18:14:07 | 2019-12-02T16:36:23 | JavaScript | UTF-8 | Python | false | false | 974 | py | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_debugtoolbar import DebugToolbarExtension
from config import config
# Extension singletons, created unbound; each is attached to a concrete
# application inside create_app() via init_app().
bootstrap = Bootstrap()
login_manager = LoginManager()
# Endpoint users are redirected to when a login-protected view is hit.
login_manager.login_view = 'auth.login'
moment = Moment()
db = SQLAlchemy()
mail = Mail()
toolbar = DebugToolbarExtension()
def create_app(config_name):
    """Application factory: build and wire up a Flask app for *config_name*."""
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    # Bind every extension singleton to this application instance.
    for extension in (bootstrap, login_manager, mail, moment, db, toolbar):
        extension.init_app(app)
    # Blueprints are imported here to avoid circular imports at module load.
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    from .auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint, url_prefix='/auth')
    return app
| [
"jonas@devlieghere.com"
] | jonas@devlieghere.com |
e47367f80b9ab84975e9b2f5f3ecdfcd1a28d9e8 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_135/562.py | 3640e88ff78b2a491484cb555a0a0c269267b59a | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,695 | py | from sys import *
from heapq import *
from time import time
from multiprocessing import Pool
from collections import *
import itertools
from copy import deepcopy
from bisect import *
setrecursionlimit(10000)
from math import *
def readint():
    # Read one line from the global input file and parse it as a single int.
    return int(fi.readline())
def readints():
    # Read one whitespace-separated line of ints from the global input file.
    return [int(X) for X in fi.readline().split()]
def readstr():
    # Read one line from the global input file, stripping trailing whitespace.
    return fi.readline().rstrip()
def read_case():
    """Read one test case: two (chosen row, 4x4 grid) pairs from the input."""
    first_answer = readint()
    first_grid = [readints() for _ in range(4)]
    second_answer = readint()
    second_grid = [readints() for _ in range(4)]
    return (first_answer, first_grid, second_answer, second_grid)
def solve_case(a1, g1, a2, g2):
    """Resolve one magic-trick case.

    The card must appear in the chosen row of both arrangements: an empty
    intersection means the volunteer lied, more than one candidate means the
    magician cannot know the card, and a single candidate is the answer.
    """
    candidates = set(g1[a1 - 1]) & set(g2[a2 - 1])
    if not candidates:
        return "Volunteer cheated!"
    if len(candidates) > 1:
        return "Bad magician!"
    return candidates.pop()
def print_solution(case):
val = solve_case(*case[1])
msg = "Case #{}: {}".format(case[0], val)
print msg
return msg
t0 = time()
# Initialisation here
t1 = time()
# NOTE(review): "Intialisation" typo is in a runtime string, left as-is here.
print "Intialisation took %f seconds" % (t1 - t0)
# raw_input("Press enter when the input file has been downloaded: ")
if __name__ == '__main__':
    # Input/output filenames are built from three command-line parts,
    # e.g. "B-small-attempt0.in" / "B-small-attempt0.out".
    fni = "%s-%s-%s.in" % (argv[1], argv[2], argv[3])
    fno = "%s-%s-%s.out" % (argv[1], argv[2], argv[3])
    fi = open(fni, 'r')
    fo = open(fno, 'w')
    numcases = readint()
    # Pair every parsed case with its 1-based case number.
    cases = [(I, read_case()) for I in range(1,1+numcases)]
    # Flip to True to solve cases on a worker pool instead of sequentially.
    mt = False
    if mt:
        print "Running multi-threaded"
        p = Pool(8)
        fo.write('\n'.join(p.map(print_solution, cases)))
    else:
        print "Running single-threaded"
        fo.write('\n'.join(map(print_solution, cases)))
    print "Elapsed time %f seconds " % (time() - t1)
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
2f6984f86dbb761041f432d70be05ec86d3e84f6 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_12_01/aio/operations/_domain_registration_provider_operations.py | 1c663f674b0cbfd5b8e1d5930de10fb86e0d22d9 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 5,710 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._domain_registration_provider_operations import build_list_operations_request
from .._vendor import MixinABC
# Type alias for the optional response-transform callback accepted as 'cls'.
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DomainRegistrationProviderOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.
        Instead, you should access the following operations through
        :class:`~azure.mgmt.web.v2020_12_01.aio.WebSiteManagementClient`'s
        :attr:`domain_registration_provider` attribute.
    """
    # Generated model namespace, exposed for callers that need the types.
    models = _models
    def __init__(self, *args, **kwargs) -> None:
        # The generated client injects pipeline client, config, serializer and
        # deserializer either positionally or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace
    def list_operations(
        self,
        **kwargs: Any
    ) -> AsyncIterable[_models.CsmOperationCollection]:
        """Implements Csm operations Api to exposes the list of available Csm Apis under the resource
        provider.
        Implements Csm operations Api to exposes the list of available Csm Apis under the resource
        provider.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CsmOperationCollection or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.CsmOperationCollection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01")) # type: str
        cls = kwargs.pop('cls', None) # type: ClsType[_models.CsmOperationCollection]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}) or {})
        # Build the request for either the first page or a continuation link.
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_operations_request(
                    api_version=api_version,
                    template_url=self.list_operations.metadata['url'],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url) # type: ignore
            else:
                request = build_list_operations_request(
                    api_version=api_version,
                    template_url=next_link,
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url) # type: ignore
                request.method = "GET"
            return request
        # Deserialize one page: (continuation link, async list of elements).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize("CsmOperationCollection", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Fetch one page through the HTTP pipeline, mapping error statuses.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
                request,
                stream=False,
                **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    # REST endpoint template used by prepare_request for the first page.
    list_operations.metadata = {'url': "/providers/Microsoft.DomainRegistration/operations"} # type: ignore
| [
"noreply@github.com"
] | kurtzeborn.noreply@github.com |
de6926e063452533074ff6429ef970fc7829e000 | 035730cf12c43f59b76d9809e444b9070c3e5732 | /BOJ_16197.py | 8b217b46d4491a74e861471fd3f2c11ac628f378 | [] | no_license | kimhaggie/Coding_practice | e18153838425874b80a683094369a6dfb8836c93 | a4f2732e5d7a63adae990226073333b88324765a | refs/heads/master | 2023-08-01T11:33:54.071564 | 2021-09-07T14:40:56 | 2021-09-07T14:40:56 | 310,264,349 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,333 | py | #16197
import sys
from collections import deque
import math
# NOTE(review): dx/dy are never referenced anywhere in this script; move()
# spells out its own direction handling instead.
dx = [1,-1,0,0]
dy = [0,0,1,-1]
def move(direction,ball1,ball2,m):
    """Shift both balls one cell in *direction* on board *m*.

    direction: 0=right, 1=left, 2=up, 3=down.  Balls are [row, col] pairs.
    A ball moves one cell unless the target is a wall ('#'), in which case
    it stays put; '.' and 'o' cells are passable.  If a ball leaves the
    board its "fell off" flag is set (its returned coordinate is then the
    out-of-range cell, matching the original behavior).

    Returns [ball1_fell, ball2_fell, [new_ball1, new_ball2]].

    This replaces four copy-pasted ~19-line direction branches with one
    table-driven implementation; behavior is identical.
    """
    height = len(m)
    width = len(m[0])
    # (d_row, d_col) per direction index.
    deltas = ((0, 1), (0, -1), (-1, 0), (1, 0))
    d_row, d_col = deltas[direction]
    def _step(ball):
        # Move a single ball; returns (fell_off, [row, col]).
        ny = ball[0] + d_row
        nx = ball[1] + d_col
        if not (0 <= ny < height and 0 <= nx < width):
            return True, [ny, nx]
        if m[ny][nx] == '#':
            # Blocked by a wall: stay on the original cell.
            return False, [ball[0], ball[1]]
        return False, [ny, nx]
    flag1, pos1 = _step(ball1)
    flag2, pos2 = _step(ball2)
    return [flag1, flag2, [pos1, pos2]]
def BFS(ball1,ball2,m):
    """Breadth-first search over (ball1, ball2) pairs.

    Prints the minimum number of moves after which exactly one ball falls
    off the board and exits; prints -1 and exits once depth 10 is exceeded.
    Returns normally only if the frontier empties out first.
    """
    frontier = [[ball1, ball2]]
    step = 1
    while frontier:
        if step == 11:
            # More than ten moves would be needed: report failure.
            print(-1)
            sys.exit()
        next_frontier = []
        for first, second in frontier:
            for direction in range(4):
                fell1, fell2, pair = move(direction, first, second, m)
                if fell1 != fell2:
                    # Exactly one ball dropped: this depth is the answer.
                    print(step)
                    sys.exit()
                if not fell1 and pair[0] != pair[1] and pair not in next_frontier:
                    # Both balls survived on distinct cells: explore further.
                    next_frontier.append(pair)
        frontier = next_frontier
        step += 1
# Read board dimensions and the board itself from stdin.
N,M = map(int,sys.stdin.readline().rstrip('\n').split(' '))
m = []
for _ in range(N):
    m.append(list(sys.stdin.readline().rstrip('\n')))
# Locate the two balls, marked with 'o' on the board.
ball1 = []
ball2 = []
for i in range(N):
    for j in range(M):
        if m[i][j] == 'o':
            if len(ball1)==0:
                ball1 = [i,j]
            else:
                ball2 = [i,j]
BFS(ball1,ball2,m)
"kimhaggie@gmail.com"
] | kimhaggie@gmail.com |
3f2d538e2a917016702fee2504b2099156eb05df | eeeb3e85de712e71417630035417e6cf0d3a1da4 | /LSTM-GRU/gen-queries.py | df92915b1371e5e6b9078642d04d5215b7a98769 | [] | no_license | VNGResearch/RNN---QA | 21029647940cac97d628b8d5a25ae68dcbd226b7 | dc54c5d99e2a56cc981d53d89e0fcdaf6804dba9 | refs/heads/master | 2020-04-05T10:39:58.896752 | 2017-07-21T01:27:24 | 2017-07-21T01:27:24 | 81,531,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 776 | py | from argparse import ArgumentParser
import json
import numpy as np
# Command line: how many queries to sample and where to write them.
parser = ArgumentParser()
parser.add_argument('-l', '--length', help='The number of generated queries. Defaults to 100',
                    dest='len', default=100, type=int)
parser.add_argument('-o', help='The output file. Defaults to queries.txt',
                    dest='outfile', default='queries.txt')
args = parser.parse_args()
# Load the Q/A corpus and keep only the question strings.
with open("data/nfL6.json", 'r') as source:
    corpus = json.load(source)
questions = [entry['question'] for entry in corpus]
# Shuffle and keep the requested number of questions.
questions = np.random.permutation(questions)[:args.len]
print('Questions generated:\n')
print('\n'.join(questions))
with open(args.outfile, 'wt') as sink:
    sink.write('\n'.join(questions))
| [
"phan.ngoclan58@gmail.com"
] | phan.ngoclan58@gmail.com |
462cf61c1760e9be0261bc1c37b152eabaa6e850 | a66460a46611483dfbdc94c7996893f427e60d97 | /ansible/my_env/lib/python2.7/site-packages/ansible/module_utils/network/onyx/onyx.py | ad667e74dfd188880633b7c22c46ebaf4554c496 | [
"MIT"
] | permissive | otus-devops-2019-02/yyashkin_infra | 06b57807dde26f94f501828c07503d6bf1d70816 | 0cd0c003884155ac922e3e301305ac202de7028c | refs/heads/master | 2020-04-29T02:42:22.056724 | 2019-05-15T16:24:35 | 2019-05-15T16:24:35 | 175,780,718 | 0 | 0 | MIT | 2019-05-15T16:24:36 | 2019-03-15T08:37:35 | HCL | UTF-8 | Python | false | false | 6,984 | py | # -*- coding: utf-8 -*-
#
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.module_utils.network.common.utils import to_list, EntityCollection
# Per-process cache of device configurations (not populated in this chunk).
_DEVICE_CONFIGS = {}
# Lazily created, process-wide Connection handle (see get_connection).
_CONNECTION = None
# Spec used to normalize CLI commands: 'command' is the key field, with
# optional interactive 'prompt'/'answer' pairs.
_COMMAND_SPEC = {
    'command': dict(key=True),
    'prompt': dict(),
    'answer': dict()
}
def get_connection(module):
    """Return the process-wide device Connection, creating it on first use."""
    global _CONNECTION
    if not _CONNECTION:
        _CONNECTION = Connection(module._socket_path)
    return _CONNECTION
def to_commands(module, commands):
    """Validate and normalize a list of raw commands against _COMMAND_SPEC."""
    if not isinstance(commands, list):
        raise AssertionError('argument must be of type <list>')
    transform = EntityCollection(module, _COMMAND_SPEC)
    normalized = transform(commands)
    return normalized
def run_commands(module, commands, check_rc=True):
    """Run each command on the device and return the decoded responses."""
    connection = get_connection(module)
    normalized = to_commands(module, to_list(commands))
    return [
        to_text(connection.get(**cmd), errors='surrogate_then_replace')
        for cmd in normalized
    ]
def get_config(module, source='running'):
    """Fetch the device configuration (default: running config) as text."""
    raw = get_connection(module).get_config(source)
    return to_text(raw, errors='surrogate_then_replace').strip()
def load_config(module, config):
    """Push *config* onto the device, failing the module on connection errors."""
    try:
        get_connection(module).edit_config(config)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc))
def _parse_json_output(out):
out_list = out.split('\n')
first_index = 0
opening_char = None
lines_count = len(out_list)
while first_index < lines_count:
first_line = out_list[first_index].strip()
if not first_line or first_line[0] not in ("[", "{"):
first_index += 1
continue
opening_char = first_line[0]
break
if not opening_char:
return "null"
closing_char = ']' if opening_char == '[' else '}'
last_index = lines_count - 1
found = False
while last_index > first_index:
last_line = out_list[last_index].strip()
if not last_line or last_line[0] != closing_char:
last_index -= 1
continue
found = True
break
if not found:
return opening_char + closing_char
return "".join(out_list[first_index:last_index + 1])
def show_cmd(module, cmd, json_fmt=True, fail_on_error=True):
    """Run a single 'show' command, optionally parsing its JSON output.

    With json_fmt, '| json-print' is appended and the payload is parsed;
    invalid JSON fails the module.  With fail_on_error False, connection
    errors yield None instead of propagating.
    """
    if json_fmt:
        cmd += " | json-print"
    conn = get_connection(module)
    command_obj = to_commands(module, to_list(cmd))[0]
    try:
        out = conn.get(**command_obj)
    except ConnectionError:
        if fail_on_error:
            raise
        return None
    if not json_fmt:
        return to_text(out, errors='surrogate_then_replace').strip()
    out = _parse_json_output(out)
    try:
        return json.loads(out)
    except ValueError:
        module.fail_json(
            msg="got invalid json",
            stderr=to_text(out, errors='surrogate_then_replace'))
def get_interfaces_config(module, interface_type, flags=None, json_fmt=True):
    """Return 'show interfaces <type> [flags]' output from the device."""
    cmd = "show interfaces %s" % interface_type
    if flags:
        cmd = " ".join((cmd, flags))
    return show_cmd(module, cmd, json_fmt)
def show_version(module):
    """Return the parsed (JSON) output of 'show version'."""
    cmd = "show version"
    return show_cmd(module, cmd)
def get_bgp_summary(module):
    """Return raw 'show running-config protocol bgp' text, or None on error."""
    return show_cmd(module, "show running-config protocol bgp",
                    json_fmt=False, fail_on_error=False)
class BaseOnyxModule(object):
    """Base class for Onyx configuration modules.

    Subclasses override the init/load/generate hooks; run() drives the
    common lifecycle: read required config, read current config, compute
    the command delta, apply it (unless check mode) and report results.
    """
    ONYX_API_VERSION = "3.6.6000"
    def __init__(self):
        self._module = None          # AnsibleModule, set by init_module()
        self._commands = list()      # commands accumulated by generate_commands()
        self._current_config = None
        self._required_config = None
        self._os_version = None
    def init_module(self):
        # Hook: subclasses construct self._module here.
        pass
    def load_current_config(self):
        # Hook: subclasses populate self._current_config here.
        pass
    def get_required_config(self):
        # Hook: subclasses populate self._required_config here.
        pass
    def _get_os_version(self):
        # Extract the "Product release" field from 'show version' output.
        version_data = show_version(self._module)
        return self.get_config_attr(
            version_data, "Product release")
    # pylint: disable=unused-argument
    def check_declarative_intent_params(self, result):
        # Hook: return failed-condition descriptions, or None when satisfied.
        return None
    def _validate_key(self, param, key):
        # Dispatch to a validate_<key> method; raises AttributeError if absent.
        validator = getattr(self, 'validate_%s' % key)
        if callable(validator):
            validator(param.get(key))
    def validate_param_values(self, obj, param=None):
        if param is None:
            param = self._module.params
        for key in obj:
            # validate the param value (if a validate_<key> method exists)
            try:
                self._validate_key(param, key)
            except AttributeError:
                pass
    @classmethod
    def get_config_attr(cls, item, arg):
        # Simple dict-style lookup on a parsed config item.
        return item.get(arg)
    @classmethod
    def get_mtu(cls, item):
        # The MTU field looks like "1500 ..."; keep the leading integer.
        mtu = cls.get_config_attr(item, "MTU")
        mtu_parts = mtu.split()
        try:
            return int(mtu_parts[0])
        except ValueError:
            return None
    def _validate_range(self, attr_name, min_val, max_val, value):
        # None means "not supplied" and is accepted as-is.
        if value is None:
            return True
        if not min_val <= int(value) <= max_val:
            msg = '%s must be between %s and %s' % (
                attr_name, min_val, max_val)
            self._module.fail_json(msg=msg)
    def validate_mtu(self, value):
        self._validate_range('mtu', 1500, 9612, value)
    def generate_commands(self):
        # Hook: subclasses append device commands to self._commands here.
        pass
    def run(self):
        """Execute the module lifecycle and exit via AnsibleModule."""
        self.init_module()
        result = {'changed': False}
        self.get_required_config()
        self.load_current_config()
        self.generate_commands()
        result['commands'] = self._commands
        if self._commands:
            # Only push the changes when not running in check mode.
            if not self._module.check_mode:
                load_config(self._module, self._commands)
            result['changed'] = True
        failed_conditions = self.check_declarative_intent_params(result)
        if failed_conditions:
            msg = 'One or more conditional statements have not been satisfied'
            self._module.fail_json(msg=msg,
                                   failed_conditions=failed_conditions)
        self._module.exit_json(**result)
    @classmethod
    def main(cls):
        app = cls()
        app.run()
| [
"theyashkins@gmail.com"
] | theyashkins@gmail.com |
419490d9be79e02a31e8b3e89e0ac816c5f69f66 | 58141d7fc37854efad4ad64c74891a12908192ed | /setup/delete_queue.py | 8061e4d967956926ae2cb113935d91110889b166 | [] | no_license | stanleylio/fishie | b028a93b2093f59a8ceee4f78b55a91bb1f69506 | 0685045c07e4105934d713a0fd58c4bc28821ed6 | refs/heads/master | 2022-08-14T13:08:55.548830 | 2022-07-29T01:32:28 | 2022-07-29T01:32:28 | 30,433,819 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | import sys,pika,argparse
from os.path import expanduser
sys.path.append(expanduser('~'))
from cred import cred
# Parse the single positional argument: the RabbitMQ queue to remove.
parser = argparse.ArgumentParser(description='')
parser.add_argument('queue_name', metavar='daq', type=str,
                    help='name of queue to be deleted')
args = parser.parse_args()
print(args.queue_name)
# Authenticate against the local broker, delete the queue, then disconnect.
credentials = pika.PlainCredentials('nuc', cred['rabbitmq'])
parameters = pika.ConnectionParameters('localhost', 5672, '/', credentials)
connection = pika.BlockingConnection(parameters)
channel = connection.channel()
channel.queue_delete(queue=args.queue_name)
connection.close()
| [
"stanleylio@gmail.com"
] | stanleylio@gmail.com |
23f114695e9b28063697275cac18aad4d1d253a4 | c0681769775e760d9ecf10e5803a26046bc7f45c | /Doctor/migrations/0011_remove_session_doctor.py | d7fa30395af0b3f7e22b3f7c8e70efc8a8d6ad5d | [] | no_license | enasmohmed/DoctorAPI | 34545ea4c36308363d358a493356271ac9a316ba | 2179f691243f418a48bc1d12d1a1dba7779dbcc2 | refs/heads/main | 2023-02-14T17:40:28.787673 | 2021-01-14T01:16:18 | 2021-01-14T01:16:18 | 329,144,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | # Generated by Django 3.1.5 on 2021-01-13 23:22
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: drops the 'doctor' field (a relation
    # to the doctor model) from the Session model.
    dependencies = [
        ('Doctor', '0010_auto_20210113_2318'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='session',
            name='doctor',
        ),
    ]
| [
"enasm2477@gmail.com"
] | enasm2477@gmail.com |
22fac5fbea4918dcebbfee98f0d3cea8e13e2d5b | a54007706a09b387690f79fd7ffd889decad42f1 | /day04/code/20_默认参数易错点.py | 9a0ab1a389c3755a35c8877d74e819a5746b3dac | [] | no_license | lvah/201903python | d425534544a1f91e5b80b5ff0de5ca34037fe6e9 | 1415fcb7697dfa2884d94dcd8963477e12fe0624 | refs/heads/master | 2020-07-06T16:45:37.882819 | 2019-09-08T10:13:07 | 2019-09-08T10:13:07 | 203,082,401 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py |
# NOTE: a default argument's value must always be an immutable object.
def listOperator(li=None):
    """Append the string 'End' to *li* in place and return it.

    ``None`` (not ``[]``) is used as the default so that every call without
    an argument operates on a fresh list instead of one shared mutable
    default object.
    """
    target = [] if li is None else li
    target.append('End')
    return target
# print(listOperator([1, 2, 3]))
# print(listOperator([]))
# print(listOperator([]))
# Each call below gets a fresh default list, so both lines print ['End'].
print(listOperator())
print(listOperator())
print(listOperator()) | [
"root@foundation0.ilt.example.com"
] | root@foundation0.ilt.example.com |
d10c2dba1cb876d1b31f3b6cb6ede0f7fa374b75 | e2c5dd7020b0613852bf16606d6de48159f56a7e | /ensemble/test/test_gbdt.py | ee73d36ba89bb65b78540a47b1a1c3e847d0603c | [] | no_license | bannima/MachineLearninginAction | 7830cb6bcda43e62e937b86198e4e5bbeef28275 | 872a24e25f8ef7768b2f8b37c9d1ab39504f0c33 | refs/heads/master | 2021-08-08T08:15:43.403854 | 2021-07-25T09:37:37 | 2021-07-25T09:37:37 | 238,710,052 | 23 | 5 | null | null | null | null | UTF-8 | Python | false | false | 907 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
FileName: test_gbdt.py
Description:
Author: Barry Chow
Date: 2020/3/3 4:15 PM
Version: 0.1
"""
from numpy import *
from sklearn.datasets import load_iris
from ensemble import GradientBoostingClassifier
from ensemble import GradientBoostingRegressor
from utils import accuracy_score, mean_square_error
iris = load_iris()
class Test_GBDT(object):
def test_gbregressor(self):
gbregressor = GradientBoostingRegressor()
gbregressor.fit(mat(iris.data), mat(iris.target))
preds = gbregressor.predict(mat(iris.data))
assert mean_square_error(preds, iris.target) < 5e-2
def test_gbclassifier(self):
gbclassifier = GradientBoostingClassifier()
gbclassifier.fit(mat(iris.data), mat(iris.target))
preds = gbclassifier.predict(mat(iris.data))
assert accuracy_score(preds, iris.target) > 0.95
| [
"zhouenguo@163.com"
] | zhouenguo@163.com |
829f59b5185126d6c05e5124829bd71de947201b | b661499ebc0d9102c6516cdb1fc9902858fc1a50 | /src/core/parametrisable.py | 184af0a2c7c2262cbc75b7f1c6cf6ee6a05b594c | [] | no_license | wesselb/cgpcm | 5469239fd912f8d3a72ab1202a913ebaa3098960 | d96263f6ad338759aadb178cf1b24bcbf0a738c5 | refs/heads/master | 2021-01-20T14:34:16.489945 | 2017-04-26T03:44:03 | 2017-04-26T03:44:03 | 82,757,924 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | class Parametrisable(object):
"""
Class where keywords given to the constructor become attributes.
Required parameters can be specified through overriding `_required_pars`.
"""
_required_pars = []
def __init__(self, **pars):
# Verify that all required parameters are specified
if not all(k in pars for k in self._required_pars):
unspecified_pars = [k for k in self._required_pars
if k not in pars]
formatted = ', '.join('"{}"'.format(k) for k in unspecified_pars)
raise RuntimeError('must specify {}'.format(formatted))
# Set parameters
for k, v in pars.items():
setattr(self, k, v)
self._pars = pars
| [
"wessel.p.bruinsma@gmail.com"
] | wessel.p.bruinsma@gmail.com |
54abfd415e752c13460f79a21713413b9ab14fcc | abc89af3d78537266421803072727561111a9b2f | /rename.py | 9d4fab667a2c8f11f4e56f3abebe0b0414e633d6 | [] | no_license | Iverance/leetcode | 41b34ff847d77dfa84a0f8f656889b9d4bf125d7 | 0127190b27862ec7e7f4f2fcce5ce958d480cdac | refs/heads/master | 2021-01-22T02:28:39.498315 | 2017-12-23T01:36:51 | 2017-12-23T01:36:51 | 102,247,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | import os
# Zero-pad numeric filename prefixes to three digits: "1.x" -> "001.x",
# "12.x" -> "012.x"; names already three digits long are left untouched.
for filename in os.listdir("."):
    if filename[0] not in '0123456789':
        continue
    if '.' in filename[:2]:
        os.rename(filename, '00' + filename)
    elif '.' in filename[:3]:
        os.rename(filename, '0' + filename)
| [
"jeremhh@gmail.com"
] | jeremhh@gmail.com |
6bedc9f9b3637f1ae471d084de0b925ba9c41d68 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /dockerized-gists/35c3d539cd5a30aaf580/snippet.py | 0cd09dc0b9c464eea7925d4248aad941e0a53623 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 2,720 | py | # this is how to highlight code for powerpoint: `pbpaste | highlight --syntax=py -O rtf | pbcopy`
# api_jsonify()
from decimal import Decimal
from uuid import UUID
from datetime import datetime
from flask import Response
from json import JSONEncoder, dumps
class JsonApiEncoder(JSONEncoder):
    """JSON encoder for API responses.

    Extends the stock encoder with:
      * ``Decimal`` / ``UUID`` -> their string form,
      * ``datetime`` -> ISO-8601 string,
      * any object exposing ``api_dump()`` -> whatever that method returns.
    Anything else falls back to ``JSONEncoder.default`` (raises TypeError).
    """
    def default(self, o):
        # One isinstance call with a tuple instead of two or-ed calls.
        if isinstance(o, (Decimal, UUID)):
            return str(o)
        if isinstance(o, datetime):
            return o.isoformat()
        if hasattr(o, 'api_dump'):
            return o.api_dump()
        return JSONEncoder.default(self, o)
def api_dumps(obj):
    """Serialize *obj* to pretty-printed, key-sorted JSON via JsonApiEncoder."""
    return dumps(obj, cls=JsonApiEncoder, sort_keys=True, indent=4)
# role_aware_api_jsonify()
class RoleAwareEncoder(JsonApiEncoder):
    """JsonApiEncoder that augments User payloads with viewer permissions."""
    def default(self, o):
        if not isinstance(o, User):
            return super().default(o)
        payload = o.api_dump()
        # Annotate with what the current viewer is allowed to do.
        payload['_can_update'] = can_update_user(o)
        payload['_can_send_funds'] = can_send_funds_to_user(o)
        return payload
def role_aware_dumps(obj):
encoder_config = dict(cls=RoleAwareEncoder, sort_keys=True,
indent=4)
return json.dumps(obj, **encoder_config)
def role_aware_jsonify(obj):
return Response(role_aware_dumps(obj), mimetype='application/json')
def api_jsonify(obj):
return Response(api_dumps(obj), mimetype='application/json')
# validation
card_funding_event_schema = {
"properties": {
"user_uuid": St.uuid,
"amount": St.dollar_value,
},
"required": ["user_uuid", "amount"],
}
@bp.route("/card_funding_events", methods=["POST"])
@validate(card_funding_event_schema)
def new_funding_event_for_user():
user = get_user(key=validated['user_uuid'])
fe = CardFundingEvent()
fe.card = user.card
fe.load_amount = Decimal(validated['amount'])
db.session.add(fe)
db.session.commit()
return role_aware_jsonify(fe)
# how it works
validated = LocalProxy(lambda: g.validated)
def check_schema(schema):
jsonschema.Draft4Validator.check_schema(schema)
def validate(schema):
check_schema(schema)
def get_errors(data):
v = jsonschema.Draft4Validator(schema)
return sorted(v.iter_errors(data), key=lambda e: e.path)
def validate_payload(data):
errors = get_errors(data)
if len(errors) > 0:
raise ValidationException(errors)
return data
def validate_decorator(fn):
@wraps(fn)
def wrapped(*args, **kwargs):
g.validated = validate_payload(request.json)
return fn(*args, **kwargs)
return wrapped
return validate_decorator
| [
"gistshub@gmail.com"
] | gistshub@gmail.com |
52512e9d370a51bcf092d65fc589ab94b7004799 | 51036d0ef641fc455eb643e3a6b942136b20e697 | /rdmo/conditions/tests/test_commands.py | a41483e4986a3d63a27ed948f34598a93c75d051 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause"
] | permissive | ItsNotYou/rdmo | 43ab02a340ae6c6f50e19cd728f5c1311117abfc | eba2056b376107e817a4080fc12245095a907429 | refs/heads/master | 2022-12-02T04:18:48.117562 | 2020-08-16T10:13:11 | 2020-08-16T10:13:11 | 287,918,765 | 0 | 0 | Apache-2.0 | 2020-08-16T10:10:19 | 2020-08-16T10:10:18 | null | UTF-8 | Python | false | false | 374 | py | import io
import os
from django.core.management import call_command
def test_import(db, settings):
xml_file = os.path.join(settings.BASE_DIR, 'xml', 'conditions.xml')
stdout, stderr = io.StringIO(), io.StringIO()
call_command('import', xml_file, '--user=user', stdout=stdout, stderr=stderr)
assert not stdout.getvalue()
assert not stderr.getvalue()
| [
"mail@jochenklar.de"
] | mail@jochenklar.de |
67c201f4428c123d2ed9ddac81f5d2a0ab16168b | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2791/60688/288720.py | fc2b23ce74d5ae4260c05243b1fa88e6f9d45963 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | times=int(input())
numslist=input().split(" ")
numslist=list(int(a) for a in numslist)
first=0
second=""
for i in range(0,times):
if(numslist[i]==1):
first=first+1
if(i!=0):
second=second+str(numslist[i-1])+" "
second=second+str(numslist[times-1])
print(first)
print(second) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
667a48100b02d4fc5b11046794c639f95c081a88 | 78537ca73fd61c5d8fd6cbb326d4ba95eadc7219 | /CircuitPython_on_Linux_and_Raspberry_Pi/PWM_motor_servo_control.py | 06f9a6ad4a2b9e7e04c7ee055bc8db5a116658ed | [
"MIT"
] | permissive | FoamyGuy/Adafruit_Learning_System_Guides | a31f7c5ef49125ad25e5bdc4d0e50aa43513ce82 | 6cb04635ce47d2292a2ea09d196fbffdad534168 | refs/heads/master | 2023-08-16T21:34:30.856046 | 2020-11-10T00:51:16 | 2020-11-10T00:51:16 | 254,224,051 | 1 | 0 | MIT | 2020-11-10T00:51:18 | 2020-04-08T23:32:18 | C | UTF-8 | Python | false | false | 498 | py | import time
import board
import pulseio
from adafruit_motor import servo
# create a PWMOut object on Pin D5.
pwm = pulseio.PWMOut(board.D5, duty_cycle=2 ** 15, frequency=50)
# Create a servo object.
servo = servo.Servo(pwm)
while True:
for angle in range(0, 180, 5): # 0 - 180 degrees, 5 degrees at a time.
servo.angle = angle
time.sleep(0.05)
for angle in range(180, 0, -5): # 180 - 0 degrees, 5 degrees at a time.
servo.angle = angle
time.sleep(0.05)
| [
"kattni@adafruit.com"
] | kattni@adafruit.com |
51c0bc39cdee200e82b120630c333d4307f1c756 | 61efd764ae4586b6b2ee5e6e2c255079e2b01cfc | /azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/virtual_network_peering.py | 610944b9edf6986e9ce86ed411f1f8f342ddc79b | [
"MIT"
] | permissive | AutorestCI/azure-sdk-for-python | a3642f53b5bf79d1dbb77851ec56f4cc0c5b3b61 | 60b0726619ce9d7baca41f6cd38f741d74c4e54a | refs/heads/master | 2021-01-21T02:23:59.207091 | 2018-01-31T21:31:27 | 2018-01-31T21:31:27 | 55,251,306 | 4 | 3 | null | 2017-11-13T17:57:46 | 2016-04-01T17:48:48 | Python | UTF-8 | Python | false | false | 4,677 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class VirtualNetworkPeering(SubResource):
"""Peerings in a virtual network resource.
:param id: Resource ID.
:type id: str
:param allow_virtual_network_access: Whether the VMs in the linked virtual
network space would be able to access all the VMs in local Virtual network
space.
:type allow_virtual_network_access: bool
:param allow_forwarded_traffic: Whether the forwarded traffic from the VMs
in the remote virtual network will be allowed/disallowed.
:type allow_forwarded_traffic: bool
:param allow_gateway_transit: If gateway links can be used in remote
virtual networking to link to this virtual network.
:type allow_gateway_transit: bool
:param use_remote_gateways: If remote gateways can be used on this virtual
network. If the flag is set to true, and allowGatewayTransit on remote
peering is also true, virtual network will use gateways of remote virtual
network for transit. Only one peering can have this flag set to true. This
flag cannot be set if virtual network already has a gateway.
:type use_remote_gateways: bool
:param remote_virtual_network: The reference of the remote virtual
network. The remote virtual network can be in the same or different region
(preview). See here to register for the preview and learn more
(https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
:type remote_virtual_network:
~azure.mgmt.network.v2017_10_01.models.SubResource
:param remote_address_space: The reference of the remote virtual network
address space.
:type remote_address_space:
~azure.mgmt.network.v2017_10_01.models.AddressSpace
:param peering_state: The status of the virtual network peering. Possible
values are 'Initiated', 'Connected', and 'Disconnected'. Possible values
include: 'Initiated', 'Connected', 'Disconnected'
:type peering_state: str or
~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeeringState
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'},
'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, id=None, allow_virtual_network_access=None, allow_forwarded_traffic=None, allow_gateway_transit=None, use_remote_gateways=None, remote_virtual_network=None, remote_address_space=None, peering_state=None, provisioning_state=None, name=None, etag=None):
super(VirtualNetworkPeering, self).__init__(id=id)
self.allow_virtual_network_access = allow_virtual_network_access
self.allow_forwarded_traffic = allow_forwarded_traffic
self.allow_gateway_transit = allow_gateway_transit
self.use_remote_gateways = use_remote_gateways
self.remote_virtual_network = remote_virtual_network
self.remote_address_space = remote_address_space
self.peering_state = peering_state
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
| [
"laurent.mazuel@gmail.com"
] | laurent.mazuel@gmail.com |
b1b45fa60f7e0e003895684d3f195f184d53d6c8 | aa1972e6978d5f983c48578bdf3b51e311cb4396 | /nitro-python-1.0/nssrc/com/citrix/netscaler/nitro/resource/config/aaa/aaapreauthenticationpolicy_vpnvserver_binding.py | 9d1d503c57d03e7c475d01047f132fe13a288def | [
"Python-2.0",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | MayankTahil/nitro-ide | 3d7ddfd13ff6510d6709bdeaef37c187b9f22f38 | 50054929214a35a7bb19ed10c4905fffa37c3451 | refs/heads/master | 2020-12-03T02:27:03.672953 | 2017-07-05T18:09:09 | 2017-07-05T18:09:09 | 95,933,896 | 2 | 5 | null | 2017-07-05T16:51:29 | 2017-07-01T01:03:20 | HTML | UTF-8 | Python | false | false | 5,608 | py | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class aaapreauthenticationpolicy_vpnvserver_binding(base_resource) :
""" Binding class showing the vpnvserver that can be bound to aaapreauthenticationpolicy.
"""
def __init__(self) :
self._boundto = None
self._priority = None
self._activepolicy = None
self._name = None
self.___count = 0
@property
def name(self) :
r"""Name of the preauthentication policy whose properties you want to view.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
r"""Name of the preauthentication policy whose properties you want to view.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def boundto(self) :
r"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
r"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e
@property
def activepolicy(self) :
try :
return self._activepolicy
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(aaapreauthenticationpolicy_vpnvserver_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.aaapreauthenticationpolicy_vpnvserver_binding
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name="", option_="") :
r""" Use this API to fetch aaapreauthenticationpolicy_vpnvserver_binding resources.
"""
try :
if not name :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
response = obj.get_resources(service, option_)
else :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
r""" Use this API to fetch filtered set of aaapreauthenticationpolicy_vpnvserver_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
r""" Use this API to count aaapreauthenticationpolicy_vpnvserver_binding resources configued on NetScaler.
"""
try :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
r""" Use this API to count the filtered set of aaapreauthenticationpolicy_vpnvserver_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class aaapreauthenticationpolicy_vpnvserver_binding_response(base_response) :
def __init__(self, length=1) :
self.aaapreauthenticationpolicy_vpnvserver_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.aaapreauthenticationpolicy_vpnvserver_binding = [aaapreauthenticationpolicy_vpnvserver_binding() for _ in range(length)]
| [
"Mayank@Mandelbrot.local"
] | Mayank@Mandelbrot.local |
54c6a7f4607a6c69890642eb976d35540a9e23ec | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.0_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=74/sched.py | cc5e0ffd8b66aec58cf680cf03fc01953f535a07 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | -X FMLP -Q 0 -L 3 116 400
-X FMLP -Q 0 -L 3 59 300
-X FMLP -Q 1 -L 2 49 300
-X FMLP -Q 1 -L 2 44 250
-X FMLP -Q 2 -L 2 44 250
-X FMLP -Q 3 -L 2 37 150
37 175
27 175
23 175
22 175
18 175
4 175
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
8188758945628ec846274bd177af33c4087ef8a3 | d7ad696cd1b550bb41d20f87b83c984ec7f19aa7 | /atcoder/python/_old/beginner/contest/201_300/212/b_Weak_Password.py | 98882642828e1ac3790b880ab5ca4f94c7213b6a | [] | no_license | mida-hub/hobby | 2947d10da7964d945e63d57b549c1dcb90ef7305 | 6e6f381e59fc2b0429fab36474d867aa3855af77 | refs/heads/master | 2022-12-21T23:33:14.857931 | 2022-12-19T16:30:34 | 2022-12-19T16:30:34 | 147,890,434 | 0 | 0 | null | 2021-03-20T04:31:58 | 2018-09-08T01:31:59 | Jupyter Notebook | UTF-8 | Python | false | false | 236 | py | x = input()
x1, x2, x3, x4 = map(int, list(x))
# print(x1, x2, x3, x4)
if x1 == x2 == x3 == x4:
print('Weak')
elif x4 == (x3+1)%10 and \
x3 == (x2+1)%10 and \
x2 == (x1+1)%10:
print('Weak')
else:
print('Strong')
| [
"rusuden0106@gmail.com"
] | rusuden0106@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.