blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a174ca449539006233ff7a4acea1252aef8eb3eb
|
0ab90ab559eab46b583b4b1fdd4a5bb3f55b7793
|
/python/ray/experimental/workflow/common.py
|
3c40c555e0eab6747e2da0c8fe41e1c1b84e7018
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
swag1ong/ray
|
b22cd5ebab96c30f15b00a7d044fdeb7543a4616
|
fdbeef604692aa308973988b32405ec0d70f9f40
|
refs/heads/master
| 2023-06-25T21:55:44.398516
| 2021-07-26T00:39:24
| 2021-07-26T00:39:24
| 389,518,857
| 2
| 0
|
Apache-2.0
| 2021-07-26T05:33:40
| 2021-07-26T05:33:39
| null |
UTF-8
|
Python
| false
| false
| 7,714
|
py
|
from enum import Enum, unique
from collections import deque
import re
from typing import Dict, List, Optional, Callable, Set, Iterator, Any
import unicodedata
import uuid
from dataclasses import dataclass
import ray
from ray import ObjectRef
# Alias types
StepID = str
WorkflowOutputType = ObjectRef
@unique
class WorkflowStatus(str, Enum):
    """Lifecycle state of a workflow; str-valued so it serializes naturally."""
    # There is at least a remote task running in ray cluster
    RUNNING = "RUNNING"
    # It got canceled and can't be resumed later.
    CANCELED = "CANCELED"
    # The workflow runs successfully.
    SUCCESSFUL = "SUCCESSFUL"
    # The workflow failed with an application error.
    # It can be resumed.
    FAILED = "FAILED"
    # The workflow failed with a system error, i.e., ray shutdown.
    # It can be resumed.
    RESUMABLE = "RESUMABLE"
@dataclass
class WorkflowInputs:
    """Resolved inputs of a single workflow step."""
    # The object ref of the input arguments.
    args: ObjectRef
    # The object refs in the arguments.
    object_refs: List[ObjectRef]
    # TODO(suquark): maybe later we can replace it with WorkflowData.
    # The workflows in the arguments.
    workflows: "List[Workflow]"
@dataclass
class WorkflowData:
    """Everything needed to execute one workflow step."""
    # The workflow step function body.
    func_body: Callable
    # The arguments of a workflow.
    inputs: WorkflowInputs
    # The num of retry for application exception
    max_retries: int
    # Whether the user want to handle the exception manually
    catch_exceptions: bool
    # ray_remote options
    ray_options: Dict[str, Any]

    def to_metadata(self) -> Dict[str, Any]:
        """Return a JSON-friendly summary of this step.

        Object refs are reduced to their hex ids and sub-workflows to their
        step ids, so the result contains no live Ray handles.
        """
        f = self.func_body
        return {
            "name": f.__module__ + "." + f.__qualname__,
            "object_refs": [r.hex() for r in self.inputs.object_refs],
            "workflows": [w.id for w in self.inputs.workflows],
            "max_retries": self.max_retries,
            "catch_exceptions": self.catch_exceptions,
            "ray_options": self.ray_options,
        }
@dataclass
class WorkflowMetaData:
    """Persisted metadata describing a workflow run."""
    # The current status of the workflow
    status: WorkflowStatus
def slugify(value: str, allow_unicode: bool = False) -> str:
    """Adopted from
    https://github.com/django/django/blob/master/django/utils/text.py
    Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated
    dashes to single dashes. Remove characters that aren't alphanumerics,
    underscores, dots or hyphens. Also strip leading and
    trailing whitespace.
    """
    if allow_unicode:
        normalized = unicodedata.normalize("NFKC", value)
    else:
        # Decompose accents, then drop anything outside ASCII.
        ascii_bytes = unicodedata.normalize("NFKD", value).encode(
            "ascii", "ignore")
        normalized = ascii_bytes.decode("ascii")
    cleaned = re.sub(r"[^\w.\-]", "", normalized).strip()
    # Collapse runs of dashes/whitespace into a single dash.
    return re.sub(r"[-\s]+", "-", cleaned)
class Workflow:
    """A node in a lazy workflow DAG.

    Wraps a WorkflowData and caches the output ObjectRef once the step has
    been executed; execution must be triggered explicitly via execute()
    or run()/run_async().
    """

    def __init__(self, workflow_data: WorkflowData):
        if workflow_data.ray_options.get("num_returns", 1) > 1:
            raise ValueError("Workflow should have one return value.")
        self._data = workflow_data
        self._executed: bool = False
        self._output: Optional[WorkflowOutputType] = None
        # Step id = slugified function name + random hex suffix, unique per
        # Workflow instance.
        self._step_id: StepID = slugify(
            self._data.func_body.__qualname__) + "." + uuid.uuid4().hex

    @property
    def executed(self) -> bool:
        # True once execute() has produced an output.
        return self._executed

    @property
    def output(self) -> WorkflowOutputType:
        # Raises if accessed before execute() completed.
        if not self._executed:
            raise Exception("The workflow has not been executed.")
        return self._output

    @property
    def id(self) -> StepID:
        return self._step_id

    def execute(self,
                outer_most_step_id: Optional[StepID] = None,
                last_step_of_workflow: bool = False) -> ObjectRef:
        """Trigger workflow execution recursively.

        Idempotent: returns the cached output if already executed.

        Args:
            outer_most_step_id: See
                "step_executor.execute_workflow" for explanation.
            last_step_of_workflow: The step that generates the output of the
                workflow (including nested steps).
        """
        if self.executed:
            return self._output
        # Imported lazily to avoid a cyclic import at module load time.
        from ray.experimental.workflow import step_executor
        output = step_executor.execute_workflow_step(self._step_id, self._data,
                                                     outer_most_step_id,
                                                     last_step_of_workflow)
        if not isinstance(output, WorkflowOutputType):
            raise TypeError("Unexpected return type of the workflow.")
        self._output = output
        self._executed = True
        return output

    def iter_workflows_in_dag(self) -> Iterator["Workflow"]:
        """Collect all workflows in the DAG linked to the workflow
        using BFS."""
        # deque is used instead of queue.Queue because queue.Queue is aimed
        # at multi-threading. We just need a pure data structure here.
        visited_workflows: Set[Workflow] = {self}
        q = deque([self])
        while q:  # deque's pythonic way to check emptiness
            w: Workflow = q.popleft()
            for p in w._data.inputs.workflows:
                if p not in visited_workflows:
                    visited_workflows.add(p)
                    q.append(p)
            yield w

    @property
    def data(self) -> WorkflowData:
        """Get the workflow data."""
        return self._data

    def __reduce__(self):
        # Deliberately unpicklable: a Workflow must not cross process
        # boundaries as a plain object.
        raise ValueError(
            "Workflow is not supposed to be serialized by pickle. "
            "Maybe you are passing it to a Ray remote function, "
            "returning it from a Ray remote function, or using "
            "'ray.put()' with it?")

    def run(self, workflow_id: Optional[str] = None) -> Any:
        """Run a workflow.

        Blocking wrapper around run_async().

        Examples:
            >>> @workflow.step
            ... def book_flight(origin: str, dest: str) -> Flight:
            ...    return Flight(...)

            >>> @workflow.step
            ... def book_hotel(location: str) -> Reservation:
            ...    return Reservation(...)

            >>> @workflow.step
            ... def finalize_trip(bookings: List[Any]) -> Trip:
            ...    return Trip(...)

            >>> flight1 = book_flight.step("OAK", "SAN")
            >>> flight2 = book_flight.step("SAN", "OAK")
            >>> hotel = book_hotel.step("SAN")
            >>> trip = finalize_trip.step([flight1, flight2, hotel])
            >>> result = trip.run()

        Args:
            workflow_id: A unique identifier that can be used to resume the
                workflow. If not specified, a random id will be generated.
        """
        return ray.get(self.run_async(workflow_id))

    def run_async(self, workflow_id: Optional[str] = None) -> ObjectRef:
        """Run a workflow asynchronously.

        Examples:
            >>> @workflow.step
            ... def book_flight(origin: str, dest: str) -> Flight:
            ...    return Flight(...)

            >>> @workflow.step
            ... def book_hotel(location: str) -> Reservation:
            ...    return Reservation(...)

            >>> @workflow.step
            ... def finalize_trip(bookings: List[Any]) -> Trip:
            ...    return Trip(...)

            >>> flight1 = book_flight.step("OAK", "SAN")
            >>> flight2 = book_flight.step("SAN", "OAK")
            >>> hotel = book_hotel.step("SAN")
            >>> trip = finalize_trip.step([flight1, flight2, hotel])
            >>> result = ray.get(trip.run_async())

        Args:
            workflow_id: A unique identifier that can be used to resume the
                workflow. If not specified, a random id will be generated.
        """
        # TODO(suquark): avoid cyclic importing
        from ray.experimental.workflow.execution import run
        return run(self, workflow_id)
|
[
"noreply@github.com"
] |
swag1ong.noreply@github.com
|
63d50f46e6763c50b438c35733b409c516416606
|
33cff13b90fdd628560baef8b3f6d68ceaad912c
|
/tests/test_commands/test_package_downloads.py
|
e4b7b094ed22878a396f1c1e911369fd769b9165
|
[
"MIT"
] |
permissive
|
rosdyana/dephell
|
3139140d6f16288177705020a625897f91f2514b
|
993a212ce17dda04a878ceac64854d809f3dc47b
|
refs/heads/master
| 2020-08-06T09:38:21.150070
| 2019-09-27T16:58:23
| 2019-09-27T16:58:23
| 212,927,181
| 0
| 0
|
MIT
| 2019-10-05T01:22:23
| 2019-10-05T01:22:23
| null |
UTF-8
|
Python
| false
| false
| 708
|
py
|
# built-in
import json
# external
import pytest
# project
from dephell.commands import PackageDownloadsCommand
from dephell.config import Config
@pytest.mark.skipif(True, reason='disable while pypistat is down')
@pytest.mark.allow_hosts()
def test_package_downloads_command(capsys):
    """End-to-end check that `dephell package downloads DJANGO` emits a
    JSON report with per-python and per-system stats and an ASCII chart."""
    config = Config()
    config.attach({
        'level': 'WARNING',
        'silent': True,
    })
    command = PackageDownloadsCommand(argv=['DJANGO'], config=config)
    result = command()
    captured = capsys.readouterr()
    # The command prints its report as JSON on stdout.
    output = json.loads(captured.out)
    assert result is True
    assert len(output['pythons']) > 4
    assert len(output['systems']) > 2
    # Charts are rendered with full-block characters.
    assert '█' in output['pythons'][0]['chart']
|
[
"master_fess@mail.ru"
] |
master_fess@mail.ru
|
1a57dcb6dd5bc694a8c241ff875abb2a00b8f021
|
a2e638cd0c124254e67963bda62c21351881ee75
|
/Extensions/Prime Services/FPythonCode/PaymentFees.py
|
d79409eb38743fa11ab65e6b6c2c6f2b1438516b
|
[] |
no_license
|
webclinic017/fa-absa-py3
|
1ffa98f2bd72d541166fdaac421d3c84147a4e01
|
5e7cc7de3495145501ca53deb9efee2233ab7e1c
|
refs/heads/main
| 2023-04-19T10:41:21.273030
| 2021-05-10T08:50:05
| 2021-05-10T08:50:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,474
|
py
|
"""-----------------------------------------------------------------------
MODULE
PaymentFees
DESCRIPTION
Date : 2012-09-19
Purpose : Returns the payment fees of a trade
Department and Desk : Prime Services
Requester : Danilo Mantoan
Developer : Nidheesh Sharma
CR Number : 556348
ENDDESCRIPTION
HISTORY
Date: CR Number: Developer: Description:
2013-03-19 C885651 Nidheesh Sharma Excluded INS, SET, Brokerage fees from OtherFee
2014-03-12 C1819376 Hynek Urban Refactor & minor bug fix of other fees.
2018-11-22 1001164411 Ondrej Bahounek ABITFA-5622: Convert Other Fees to trade currency.
2018-11-28 Jaco Swanepoel Payment migration: convert cash payments to appropriate new additional payment types.
-----------------------------------------------------------------------"""
import acm
# Column used to read the FX conversion rate from the portfolio sheet.
FX_COLUMN_ID = 'FX Rate On Display Curr'
# Shared calculation space over the portfolio sheet; simulated globals are
# set on it per call in ReturnOtherFee.
CS = acm.Calculations().CreateCalculationSpace(acm.GetDefaultContext(), 'FPortfolioSheet')
ZAR_CUR = acm.FCurrency['ZAR']
# Payment types that must not be counted as "other fees".
PAYMENT_TYPES_TO_EXCLUDE = ('Premium',
                            'Dividend Suppression',
                            'INS',
                            'SET',
                            'Brokerage Vatable',
                            'Execution Fee',
                            'Aggregated Settled',
                            'Aggregated Accrued',
                            'Aggregated Funding',
                            'Aggregated Dividends',
                            'Aggregated Depreciation',
                            'Aggregated Future Settle',
                            'Aggregated Forward Funding PL',
                            'Aggregated Cash Open Value',
                            'Aggregated Cash Position',
                            'Aggregated Forward Premium',
                            'Aggregated Forward Settled',
                            'Aggregated Forward Dividends',
                            'Aggregated Forward Position')
# Free-text payment labels that are likewise excluded.
PAYMENT_TEXTS_TO_EXCLUDE = ('Execution', 'ExecutionFee', 'INS', 'SET', 'Brokerage')
def ReturnOtherFee(trade, val_date):
    """
    Return the sum of all fees of a trade up to the specified date.

    Fees of type Execution Fee, INS, SET and Brokerage and any payments of type
    Aggregated Settled are excluded. Non-ZAR payments are converted to ZAR
    using the portfolio-sheet FX rate; the result is a ZAR DenominatedValue.
    """
    CS.SimulateGlobalValue('Valuation Date', val_date)
    CS.SimulateGlobalValue('Portfolio Profit Loss End Date', 'Custom Date')
    CS.SimulateGlobalValue('Portfolio Profit Loss End Date Custom', val_date)
    sumOfOtherFees = 0
    # NOTE(review): `('Void')` is a plain string (missing comma), so this is
    # a substring test, not tuple membership. It behaves correctly for the
    # exact status 'Void', but would also match e.g. 'Vo' — presumably
    # `not in ('Void',)` was intended; confirm before changing.
    if trade.Status() not in ('Void'):
        payments = trade.Payments()
        for payment in payments:
            if payment.Type() in PAYMENT_TYPES_TO_EXCLUDE or\
               payment.Text() in PAYMENT_TEXTS_TO_EXCLUDE:
                continue
            # Ignore payments that only become valid after the valuation date.
            if payment.ValidFrom() > val_date:
                continue
            amount = payment.Amount()
            if ZAR_CUR.Name() != payment.Currency().Name():
                # Ondrej's note:
                # Convert all non-ZAR payments to ZAR.
                # This should be ideally converted to trade currency,
                # but then many other attributes need to be changed and well tested.
                # This is just a fix to accommodate Futs on FXs by the end of the month.
                CS.SimulateValue(ZAR_CUR, "Portfolio Currency", payment.Currency())
                fx_rate = CS.CreateCalculation(ZAR_CUR, FX_COLUMN_ID).Value().Number()
                amount *= fx_rate
            sumOfOtherFees += amount
    return acm.DenominatedValue(sumOfOtherFees, ZAR_CUR.Name(), None, val_date)
#Function to return termination fee of a trade
def ReturnTerminationFee(trade):
    """Return the total termination fee of *trade* (0 if not terminated).

    Sums payments of type 'Termination Fee', plus 'Cash' payments whose
    text mentions Termination/Terminated, for trades in 'Terminated' status.

    Bug fix: the original used `in ('Terminated')` / `in ('Cash')` — the
    parentheses are not tuples, so `in` degraded to a substring test (e.g.
    an empty payment type matched 'Cash'). Exact comparisons are used now.
    """
    terminationFee = 0
    if trade.Status() == 'Terminated':
        for payment in trade.Payments():
            if payment.Type() == 'Cash' and ('Termination' in payment.Text() or 'Terminated' in payment.Text()):
                terminationFee = terminationFee + payment.Amount()
            elif payment.Type() == 'Termination Fee':
                terminationFee = terminationFee + payment.Amount()
    return terminationFee
#Function to return termination fee date of a trade
def ReturnTerminationFeeDate(trade):
    """Return the pay day of the last termination-fee payment ('' if none).

    Mirrors ReturnTerminationFee's selection logic: 'Termination Fee'
    payments, or 'Cash' payments whose text mentions Termination/Terminated,
    on trades in 'Terminated' status.

    Bug fix: the original used `in ('Terminated')` / `in ('Cash')` — plain
    strings, not tuples — so `in` was a substring test. Exact comparisons
    are used now.
    """
    terminationDate = ''
    if trade.Status() == 'Terminated':
        for payment in trade.Payments():
            if payment.Type() == 'Cash' and ('Termination' in payment.Text() or 'Terminated' in payment.Text()):
                terminationDate = payment.PayDay()
            elif payment.Type() == 'Termination Fee':
                terminationDate = payment.PayDay()
    return terminationDate
#Function to return termination fee date of a trade in the correct format from an array of dates
def ReturnSingleTerminationFeeDate(arrayOfDates):
    """Return the first non-empty string date in *arrayOfDates*, formatted
    as dd/mm/yyyy via an acm date formatter; '' if there is none."""
    for candidate in arrayOfDates:
        # Skip non-strings and empty entries; stop at the first usable date.
        if not isinstance(candidate, str) or candidate == '':
            continue
        formatter = acm.FDateFormatter('dateFormatter')
        formatter.FormatDefinition("%d/%m/%Y")
        return formatter.Format(candidate)  #.replace('-','/')
    return ''
|
[
"nencho.georogiev@absa.africa"
] |
nencho.georogiev@absa.africa
|
b1efe20d5ba4c2a9c279544113a1e2bd6cdf7018
|
2432996ac1615cd36d61f0feeff8a359d2b438d8
|
/env/lib/python3.8/site-packages/_pyinstaller_hooks_contrib/hooks/stdhooks/hook-eth_hash.py
|
1b22c286fe3f7300f269b0ec19044cd2c28cc11a
|
[
"GPL-1.0-or-later",
"GPL-2.0-or-later",
"GPL-2.0-only",
"Apache-2.0"
] |
permissive
|
Parveshdhull/AutoTyper
|
dd65d53ece7c13fbc1ead7ce372947483e05e2e3
|
7fabb30e15b770d790b69c2e4eaf9bbf5a4d180c
|
refs/heads/main
| 2023-05-08T14:10:35.404160
| 2023-05-07T20:43:15
| 2023-05-07T20:43:15
| 315,415,751
| 26
| 18
|
Apache-2.0
| 2023-05-07T20:43:16
| 2020-11-23T19:13:05
|
Python
|
UTF-8
|
Python
| false
| false
| 611
|
py
|
# ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
from PyInstaller.utils.hooks import collect_submodules

# The ``eth_hash.utils.load_backend`` function does a dynamic import.
# Collect every backend submodule so PyInstaller bundles them all.
hiddenimports = collect_submodules('eth_hash.backends')
|
[
"parvesh.dhullmonu@gmail.com"
] |
parvesh.dhullmonu@gmail.com
|
90ebb27f00615a63b07c8ff1cd495f77293c88ea
|
8f784ca91cd56818dc6e38d5e602756a913e13b4
|
/modbus_tcp_server/network/accept_thread.py
|
a512980848dd5a91ed2ce730cf546634df5968c6
|
[
"MIT"
] |
permissive
|
smok-serwis/modbus-tcp-server
|
9a02a3c5e9d0875179903bc4171b4d782d6d48b9
|
558eca908b6762280a74b16d78d56dc047a9dace
|
refs/heads/master
| 2023-06-14T01:26:07.299860
| 2021-07-15T13:59:15
| 2021-07-15T13:59:15
| 339,780,750
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,383
|
py
|
import socket
import typing as tp
from satella.coding import silence_excs
from satella.coding.concurrent import TerminableThread
from .conn_thread import ConnectionThread
from ..data_source import BaseDataSource, TestingDataSource
from ..datagrams import MODBUSTCPMessage
from ..processor import ModbusProcessor
class ModbusTCPServer(TerminableThread):
    """Accept thread of a MODBUS/TCP server.

    Binds a listening socket and spawns a ConnectionThread per accepted
    client; requests are answered through a shared ModbusProcessor.
    """

    def __init__(self, bind_ifc: str, bind_port: int,
                 data_source: tp.Optional[BaseDataSource] = None,
                 backlog: int = 128):
        super().__init__(name='accept')
        # Default to an in-memory testing data source when none is given.
        if data_source is None:
            data_source = TestingDataSource()
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow fast rebinds of the port after a restart.
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind((bind_ifc, bind_port))
        self.backlog = backlog
        self.processor = ModbusProcessor(data_source)

    def prepare(self) -> None:
        # Runs once before the accept loop starts.
        self.socket.listen(self.backlog)
        self.socket.setblocking(True)
        # 5 s accept timeout so the thread can periodically check for
        # termination instead of blocking forever.
        self.socket.settimeout(5)

    def process_message(self, msg: MODBUSTCPMessage) -> MODBUSTCPMessage:
        """Process a single MODBUS/TCP request and return the response."""
        return self.processor.process(msg)

    def cleanup(self):
        # Close the listening socket when the thread terminates.
        self.socket.close()

    @silence_excs(socket.timeout)
    def loop(self) -> None:
        # One accept-loop iteration; socket.timeout is silenced so the loop
        # simply retries until terminated.
        sock, addr = self.socket.accept()
        ConnectionThread(sock, addr, self).start()
|
[
"piotr.maslanka@henrietta.com.pl"
] |
piotr.maslanka@henrietta.com.pl
|
ebc97dabe6ba4cd2d87aca268755945115d291e2
|
3447227dd54587eb8c0c7f5346ac158504f7a907
|
/compass/ocean/tests/global_ocean/threads_test/__init__.py
|
42883b53b746d85a52e069468c8ae411ba7c414e
|
[
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause"
] |
permissive
|
MPAS-Dev/compass
|
5e2c1525224dd399bcf4f56f661df05e2ec197a6
|
0b7440f0aa77c1ae052922a39e646bd35c267661
|
refs/heads/main
| 2023-08-30T20:59:52.052430
| 2023-08-29T09:45:14
| 2023-08-29T09:45:14
| 310,409,977
| 10
| 26
|
NOASSERTION
| 2023-09-13T14:19:16
| 2020-11-05T20:28:25
|
Python
|
UTF-8
|
Python
| false
| false
| 2,046
|
py
|
from compass.validate import compare_variables
from compass.ocean.tests.global_ocean.forward import ForwardTestCase, \
ForwardStep
class ThreadsTest(ForwardTestCase):
    """
    A test case for performing two short forward runs to make sure the results
    are identical with 1 and 2 thread per MPI process
    """

    def __init__(self, test_group, mesh, init, time_integrator):
        """
        Create test case

        Parameters
        ----------
        test_group : compass.ocean.tests.global_ocean.GlobalOcean
            The global ocean test group that this test case belongs to

        mesh : compass.ocean.tests.global_ocean.mesh.Mesh
            The test case that produces the mesh for this run

        init : compass.ocean.tests.global_ocean.init.Init
            The test case that produces the initial condition for this run

        time_integrator : {'split_explicit', 'RK4'}
            The time integrator to use for the forward run
        """
        super().__init__(test_group=test_group, mesh=mesh, init=init,
                         time_integrator=time_integrator,
                         name='threads_test')
        # One forward step per thread count; step/subdir names are
        # '1thread' and '2thread'.
        for openmp_threads in [1, 2]:
            name = f'{openmp_threads}thread'
            step = ForwardStep(test_case=self, mesh=mesh, init=init,
                               time_integrator=time_integrator, name=name,
                               subdir=name, ntasks=4,
                               openmp_threads=openmp_threads)
            step.add_output_file(filename='output.nc')
            self.add_step(step)

    # no run() method is needed

    def validate(self):
        """
        Test cases can override this method to perform validation of variables
        and timers
        """
        # Compare prognostic fields between the 1-thread and 2-thread runs;
        # they must be bitwise-identical for the test to pass.
        variables = ['temperature', 'salinity', 'layerThickness',
                     'normalVelocity']
        compare_variables(test_case=self, variables=variables,
                          filename1='1thread/output.nc',
                          filename2='2thread/output.nc')
|
[
"xylarstorm@gmail.com"
] |
xylarstorm@gmail.com
|
631a2dcb65f7b01f394a4887810810476c69ec19
|
933376c11498a6567da8d7eb7d2675100895c3ba
|
/pyzoo/zoo/chronos/forecaster/tcn_forecaster.py
|
1d2359d1cc2e54a9820e4f91c65c4ff5cd87761b
|
[
"Apache-2.0"
] |
permissive
|
intel-analytics/analytics-zoo
|
320a461765f86d41dd456b598b1cf1d51d57f4c4
|
7cc3e2849057d6429d03b1af0db13caae57960a5
|
refs/heads/master
| 2023-08-13T20:47:58.621714
| 2023-07-06T00:49:11
| 2023-07-06T00:49:11
| 90,328,920
| 3,104
| 996
|
Apache-2.0
| 2023-09-06T01:51:18
| 2017-05-05T02:27:30
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 5,894
|
py
|
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from zoo.chronos.forecaster.base_forecaster import BasePytorchForecaster
from zoo.chronos.forecaster.utils import set_pytorch_seed
from zoo.chronos.model.tcn import TCNPytorch
from zoo.chronos.model.tcn import model_creator, optimizer_creator, loss_creator
class TCNForecaster(BasePytorchForecaster):
    """
    Example:
        >>> #The dataset is split into x_train, x_val, x_test, y_train, y_val, y_test
        >>> forecaster = TCNForecaster(past_seq_len=24,
                                       future_seq_len=5,
                                       input_feature_num=1,
                                       output_feature_num=1,
                                       ...)
        >>> forecaster.fit((x_train, y_train))
        >>> forecaster.to_local()  # if you set distributed=True
        >>> test_pred = forecaster.predict(x_test)
        >>> test_eval = forecaster.evaluate((x_test, y_test))
        >>> forecaster.save({ckpt_name})
        >>> forecaster.restore({ckpt_name})
    """

    def __init__(self,
                 past_seq_len,
                 future_seq_len,
                 input_feature_num,
                 output_feature_num,
                 num_channels=None,
                 kernel_size=3,
                 repo_initialization=True,
                 dropout=0.1,
                 optimizer="Adam",
                 loss="mse",
                 lr=0.001,
                 metrics=None,
                 seed=None,
                 distributed=False,
                 workers_per_node=1,
                 distributed_backend="torch_distributed"):
        """
        Build a TCN Forecast Model.

        TCN Forecast may fall into local optima. Please set repo_initialization
        to False to alleviate the issue. You can also change a random seed to
        work around.

        :param past_seq_len: Specify the history time steps (i.e. lookback).
        :param future_seq_len: Specify the output time steps (i.e. horizon).
        :param input_feature_num: Specify the feature dimension.
        :param output_feature_num: Specify the output dimension.
        :param num_channels: Specify the convolutional layer filter number in
               TCN's encoder. This value defaults to [30]*7 (None means use
               that default).
        :param kernel_size: Specify convolutional layer filter height in TCN's
               encoder. This value defaults to 3.
        :param repo_initialization: if to use framework default initialization,
               True to use paper author's initialization and False to use the
               framework's default initialization. The value defaults to True.
        :param dropout: Specify the dropout close possibility (i.e. the close
               possibility to a neuron). This value defaults to 0.1.
        :param optimizer: Specify the optimizer used for training. This value
               defaults to "Adam".
        :param loss: Specify the loss function used for training. This value
               defaults to "mse". You can choose from "mse", "mae" and
               "huber_loss".
        :param lr: Specify the learning rate. This value defaults to 0.001.
        :param metrics: A list contains metrics for evaluating the quality of
               forecasting. Defaults to ["mse"] (None means use that default).
               You may only choose from "mse" and "mae" for a
               distributed forecaster. You may choose from "mse", "me", "mae",
               "mse","rmse","msle","r2", "mpe", "mape", "mspe", "smape", "mdape"
               and "smdape" for a non-distributed forecaster.
        :param seed: int, random seed for training. This value defaults to None.
        :param distributed: bool, if init the forecaster in a distributed
               fashion. If True, the internal model will use an Orca Estimator.
               If False, the internal model will use a pytorch model. The value
               defaults to False.
        :param workers_per_node: int, the number of worker you want to use.
               The value defaults to 1. The param is only effective when
               distributed is set to True.
        :param distributed_backend: str, select from "torch_distributed" or
               "horovod". The value defaults to "torch_distributed".
        """
        # Fix: the previous defaults `num_channels=[30]*7` and
        # `metrics=["mse"]` were mutable default arguments shared across all
        # instances (and stored in self.config / self.metrics, so a caller
        # mutating one forecaster's config would leak into the defaults of
        # every later instance). Use None sentinels and build fresh lists.
        if num_channels is None:
            num_channels = [30] * 7
        if metrics is None:
            metrics = ["mse"]

        # config setting
        self.data_config = {
            "past_seq_len": past_seq_len,
            "future_seq_len": future_seq_len,
            "input_feature_num": input_feature_num,
            "output_feature_num": output_feature_num
        }
        self.config = {
            "lr": lr,
            "loss": loss,
            "num_channels": num_channels,
            "kernel_size": kernel_size,
            "repo_initialization": repo_initialization,
            "optim": optimizer,
            "dropout": dropout
        }

        # model creator settings
        self.local_model = TCNPytorch
        self.model_creator = model_creator
        self.optimizer_creator = optimizer_creator
        self.loss_creator = loss_creator

        # distributed settings
        self.distributed = distributed
        self.distributed_backend = distributed_backend
        self.workers_per_node = workers_per_node

        # other settings
        self.lr = lr
        self.metrics = metrics
        self.seed = seed

        super().__init__()
|
[
"noreply@github.com"
] |
intel-analytics.noreply@github.com
|
98166df402980f456d8048e29aa8a450f9257655
|
80d879a552ce00a9bc73a26d0ddb74c278867b1f
|
/scripts/080_hilo_concrete.py
|
4abf39886121d03650f95582dad542dc8c6f5d56
|
[] |
no_license
|
whiskyching/WS-EscobedoGroup
|
4a25abe62fac91b82d3b1abd74ddc02af107457f
|
bd36d623ec2f60638fe3f330b9ad92c810804e8d
|
refs/heads/main
| 2023-03-20T07:03:19.594765
| 2021-03-16T13:15:14
| 2021-03-16T13:15:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,119
|
py
|
import os
import compas
from compas.utilities import pairwise
from compas_rhino.artists import MeshArtist
HERE = os.path.dirname(__file__)
DATA = os.path.join(HERE, '../data')
FILE = os.path.join(DATA, 'session.json')

# Load the working session (contains at least 'mesh'); results are written
# back into the same file below.
session = compas.json_load(FILE)
mesh = session['mesh']

# ==============================================================================
# Idos
# ==============================================================================

# Inner shell: keep only the loaded faces of the base mesh.
idos = mesh.copy()

for face in mesh.faces_where({'is_loaded': False}):
    idos.delete_face(face)

idos.remove_unused_vertices()

offset = 0.02  # inner-shell offset distance — presumably meters; TODO confirm

for vertex, attr in idos.vertices(True):
    x, y, z = mesh.vertex_coordinates(vertex)
    nx, ny, nz = mesh.vertex_normal(vertex)
    # Per-vertex normal overrides ('nx'/'ny'/'nz' attributes), when set,
    # take precedence over the computed mesh normal.
    if attr['nx'] is not None:
        nx = attr['nx']
    if attr['ny'] is not None:
        ny = attr['ny']
    if attr['nz'] is not None:
        nz = attr['nz']
    attr['x'] = x + offset * nx
    attr['y'] = y + offset * ny
    attr['z'] = z + offset * nz

# ==============================================================================
# Edos
# ==============================================================================

# Outer shell: offset further from the (already offset) inner shell.
edos = idos.copy()

offset = 0.06

for vertex, attr in edos.vertices(True):
    x, y, z = idos.vertex_coordinates(vertex)
    nx, ny, nz = idos.vertex_normal(vertex)
    if attr['nx'] is not None:
        nx = attr['nx']
    if attr['ny'] is not None:
        ny = attr['ny']
    if attr['nz'] is not None:
        nz = attr['nz']
    attr['x'] = x + offset * nx
    attr['y'] = y + offset * ny
    attr['z'] = z + offset * nz

# ==============================================================================
# Volume
# ==============================================================================

# Closed solid between the shells: flipped inner shell + outer shell +
# side faces stitched along the shared boundary.
volume = idos.copy()
volume.flip_cycles()

# Offset outer-shell keys past the inner shell's key range so the two
# vertex sets don't collide in the combined mesh.
max_vertex = volume._max_vertex + 1
max_face = volume._max_face + 1

for vertex, attr in edos.vertices(True):
    volume.add_vertex(key=vertex + max_vertex, **attr)

for face in edos.faces():
    vertices = edos.face_vertices(face)
    vertices = [vertex + max_vertex for vertex in vertices]
    volume.add_face(vertices)

# Close the ring: wrap the boundary and add a side quad per boundary edge.
boundary = edos.vertices_on_boundary()
boundary.append(boundary[0])

for a, b in pairwise(boundary):
    volume.add_face([b, a, a + max_vertex, b + max_vertex])

# ==============================================================================
# Export
# ==============================================================================

session['idos'] = idos
session['edos'] = edos
session['volume'] = volume

compas.json_dump(session, FILE)

# ==============================================================================
# visualize
# ==============================================================================

artist = MeshArtist(idos, layer="HiLo::Concrete1::Idos")
artist.clear_layer()
artist.draw_mesh(disjoint=True, color=(255, 0, 0))

artist = MeshArtist(edos, layer="HiLo::Concrete1::Edos")
artist.clear_layer()
artist.draw_mesh(disjoint=True, color=(0, 0, 255))

artist = MeshArtist(volume, layer="HiLo::Concrete1::Volume")
artist.clear_layer()
artist.draw_mesh(disjoint=True)
|
[
"vanmelet@ethz.ch"
] |
vanmelet@ethz.ch
|
4670ba9b785563921ebd4e8eb26fa337062abb5b
|
1625edfe28b4b0979fd32b4a3c5e55249a993fd5
|
/baekjoon14915.py
|
7648498a85fccf5a369e7197408b17d1726a754d
|
[] |
no_license
|
beOk91/baekjoon2
|
b8bf504c506c6278899d4107ecfe51974ef13f5e
|
39569f8effb8e32405a7d74d98bdabcab783ec56
|
refs/heads/master
| 2023-05-11T20:11:19.015113
| 2020-09-14T23:58:49
| 2020-09-14T23:58:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 198
|
py
|
def conversion(m, n):
    """Return non-negative integer m written in base n (2 <= n <= 16),
    using uppercase digits for values 10-15."""
    digits = "0123456789ABCDEF"
    if m < n:
        # Single digit: base case of the recursion.
        return digits[m]
    return conversion(m // n, n) + digits[m % n]


if __name__ == "__main__":
    # Guarded so importing this module no longer blocks on input() —
    # the original read stdin at module level.
    m, n = map(int, input().strip().split())
    print(conversion(m, n))
|
[
"be_ok91@naver.com"
] |
be_ok91@naver.com
|
5e2e9ee1d976ed4b9dae0c19f9e48d49c14d8d4a
|
d4442db5a7ab9db2b04fef640a9864f3fba54758
|
/src/python/WMCore/Services/Dashboard/DashboardAPI.py
|
9f90e4842ae59431378744395dc3404a30601661
|
[] |
no_license
|
stuartw/WMCore
|
fa25ff19ab5058a635d35d3c58a0ac56a3e079a1
|
38c39c43f7237fd316930839674ac9be3c0ee8cc
|
refs/heads/master
| 2021-01-18T07:18:18.324604
| 2012-10-18T22:30:34
| 2012-10-18T22:30:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,823
|
py
|
#!/usr/bin/python
"""
This is the Dashboard API Module for the Worker Node
"""
from WMCore.Services.Dashboard import apmon
import time
from types import DictType, StringType, ListType
#
# Methods for manipulating the apmon instance
#
# Internal attributes
# Singleton ApMon handle and its one-shot init flag (set by
# getApmonInstance, cleared by apmonFree).
apmonInstance = None
apmonInit = False
# Monalisa configuration
apmonConf = ["cms-wmagent-job.cern.ch"]
#
# Method to create a single apmon instance at a time
#
def getApmonInstance( logr, apmonServer ):
    """Return the module-wide ApMon singleton, creating it on first call.

    Falls back to the default apmonConf destination when apmonServer is
    empty. Returns None if construction fails (errors are swallowed).
    """
    global apmonInstance
    global apmonInit
    # Only the first caller attempts construction; apmonInit stays True
    # afterwards so a failed construction is not retried.
    if apmonInstance is None and not apmonInit :
        apmonInit = True
        if apmonInstance is None :
            try :
                if not apmonServer:
                    apmonInstance = apmon.ApMon(apmonConf, logr) #apmonLoggingLevel)
                else:
                    apmonInstance = apmon.ApMon(apmonServer, logr)
            except Exception, e :
                # Best-effort: monitoring must never break the job.
                pass
    return apmonInstance
#
# Method to free the apmon instance
#
def apmonFree() :
    """Release the ApMon singleton and reset the init flag so a later
    getApmonInstance() call can create a fresh instance."""
    global apmonInstance
    global apmonInit
    if apmonInstance is not None :
        try :
            apmonInstance.free()
        except Exception, e :
            # Best-effort cleanup; ignore errors from the apmon library.
            pass
        apmonInstance = None
    apmonInit = False
#
# Method to send params to Monalisa service
#
def apmonSend(taskid, jobid, params, logr, apmonServer) :
    """Send monitoring parameters to the MonALISA service.

    Non-dict/list params and non-string ids are replaced by placeholder
    values rather than rejected. Returns 0 on success, 1 on any failure.
    """
    apm = getApmonInstance( logr, apmonServer )
    if apm is not None :
        if not isinstance(params, DictType) and not isinstance(params, ListType) :
            params = {'unknown' : '0'}
        if not isinstance(taskid, StringType) :
            taskid = 'unknown'
        if not isinstance(jobid, StringType) :
            jobid = 'unknown'
        try :
            apm.sendParameters(taskid, jobid, params)
            return 0
        except Exception, e:
            # Fall through to the failure return code.
            pass
    return 1
|
[
"sfoulkes@4525493e-7705-40b1-a816-d608a930855b"
] |
sfoulkes@4525493e-7705-40b1-a816-d608a930855b
|
abe1005bd1d0c5882d3e588d9d3a1e4a7486c579
|
44197b58b52349b0557f4d2327be292d1c01ea50
|
/test/test_data_62.py
|
2dd9de7ce8a273d4da81d28b4534861d76aaff37
|
[] |
no_license
|
jonpurdy/netbox-swagger-python-client
|
58b2b7984ea24a690d8910f6a6a496b99e5098f9
|
6bfe8cf3bb753c4d293dd56a541fac026642207f
|
refs/heads/master
| 2021-06-28T03:16:09.670793
| 2017-09-17T18:15:54
| 2017-09-17T18:15:54
| 103,851,068
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 861
|
py
|
# coding: utf-8
"""
NetBox API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version:
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.data_62 import Data62
class TestData62(unittest.TestCase):
    """ Data62 unit test stubs (generated by swagger-codegen). """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testData62(self):
        """
        Test Data62
        """
        # FIXME: construct object with mandatory attributes with example values
        #model = swagger_client.models.data_62.Data62()
        pass


if __name__ == '__main__':
    unittest.main()
|
[
"jon@knowroaming.com"
] |
jon@knowroaming.com
|
601e04d3f95736775c8e3eee23c2ea0fc2a6192b
|
216ddf61c5be758efde2b50fa476ada5354aced5
|
/galaxy/gen_test.py
|
dddbbbb2c8f96cf24df4b8d0981a9c43604dbf60
|
[] |
no_license
|
cameronfabbri/ICGANs
|
4600020238d6884b710ea0b035b84e86c73705f1
|
d6be1a3e752959754be1dbf8af2ead8f75048b37
|
refs/heads/master
| 2021-01-16T18:11:38.596295
| 2017-11-26T22:35:16
| 2017-11-26T22:35:16
| 100,050,914
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,823
|
py
|
'''
Generates a dataset of encodings from real images using the trained encoder.
'''
import matplotlib.pyplot as plt
from tqdm import tqdm
from matplotlib.pyplot import cm
import scipy.misc as misc
import tensorflow as tf
import tensorflow.contrib.layers as tcl
import cPickle as pickle
import numpy as np
import argparse
import random
import ntpath
import glob
import time
import sys
import cv2
import os
sys.path.insert(0, '../ops/')
from tf_ops import *
import data_ops
from nets import *
if __name__ == '__main__':
    # CLI: only the checkpoint directory is mandatory; the rest default to
    # the celeba dataset, the current directory, and lrelu activations.
    parser = argparse.ArgumentParser()
    parser.add_argument('--CHECKPOINT_DIR', required=True,help='checkpoint directory',type=str)
    parser.add_argument('--DATASET', required=False,help='The DATASET to use', type=str,default='celeba')
    parser.add_argument('--DATA_DIR', required=False,help='Directory where data is', type=str,default='./')
    parser.add_argument('--OUTPUT_DIR', required=False,help='Directory to save data', type=str,default='./')
    parser.add_argument('--ACTIVATION', required=False,help='Activation function', type=str,default='lrelu')
    a = parser.parse_args()
    CHECKPOINT_DIR = a.CHECKPOINT_DIR
    DATASET = a.DATASET
    DATA_DIR = a.DATA_DIR
    OUTPUT_DIR = a.OUTPUT_DIR
    ACTIVATION = a.ACTIVATION
    # Create the output directory; ignore "already exists" (Python 2 idiom).
    try: os.makedirs(OUTPUT_DIR)
    except: pass
    # placeholders for data going into the network (single 64x64 RGB image)
    global_step = tf.Variable(0, name='global_step', trainable=False)
    images = tf.placeholder(tf.float32, shape=(1, 64, 64, 3), name='images')
    # Encoder network producing the latent z for each image.
    encoded = encZ(images, ACTIVATION)
    saver = tf.train.Saver(max_to_keep=1)
    init = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
    sess = tf.Session()
    sess.run(init)
    # restore previous model if there is one
    ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
    if ckpt and ckpt.model_checkpoint_path:
        print "Restoring previous model..."
        try:
            saver.restore(sess, ckpt.model_checkpoint_path)
            print "Model restored"
        except:
            print "Could not restore model"
            pass
    print 'Loading data...'
    # images and annots: _, __
    train_images, train_annots, test_images, test_annots, paths = data_ops.load_galaxy(DATA_DIR)
    test_len = len(test_annots)
    print 'test num:',test_len
    info = {}
    # want to write out a file with the image path and z vector:
    # encode each test image one at a time and record [encoding, label].
    for p,img,label in tqdm(zip(paths, test_images, test_annots)):
        img = data_ops.normalize(img)
        batch_images = np.expand_dims(img, 0)
        encoding = sess.run([encoded], feed_dict={images:batch_images})[0][0]
        info[p] = [encoding, label]
    # write out dictionary to pickle file (cPickle, Python 2)
    p = open(OUTPUT_DIR+'data.pkl', 'wb')
    data = pickle.dumps(info)
    p.write(data)
    p.close()
|
[
"cameronfabbri@gmail.com"
] |
cameronfabbri@gmail.com
|
0c61ce225d80072549a004ed2591a718c5672896
|
7bededcada9271d92f34da6dae7088f3faf61c02
|
/pypureclient/flashblade/FB_2_8/models/target_get_response.py
|
ac033a0a864676d3ec597b61877bb7714e0e01c8
|
[
"BSD-2-Clause"
] |
permissive
|
PureStorage-OpenConnect/py-pure-client
|
a5348c6a153f8c809d6e3cf734d95d6946c5f659
|
7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e
|
refs/heads/master
| 2023-09-04T10:59:03.009972
| 2023-08-25T07:40:41
| 2023-08-25T07:40:41
| 160,391,444
| 18
| 29
|
BSD-2-Clause
| 2023-09-08T09:08:30
| 2018-12-04T17:02:51
|
Python
|
UTF-8
|
Python
| false
| false
| 4,220
|
py
|
# coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.8, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_8 import models
class TargetGetResponse(object):
    """Paged response wrapper for target objects (generated swagger model).

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Declared attribute name -> swagger type string.
    swagger_types = {
        'continuation_token': 'str',
        'total_item_count': 'int',
        'items': 'list[Target]'
    }
    # Declared attribute name -> JSON key in the REST payload.
    attribute_map = {
        'continuation_token': 'continuation_token',
        'total_item_count': 'total_item_count',
        'items': 'items'
    }
    # No constructor arguments are mandatory for this model.
    required_args = {
    }
    def __init__(
        self,
        continuation_token=None,  # type: str
        total_item_count=None,  # type: int
        items=None,  # type: List[models.Target]
    ):
        """
        Keyword args:
            continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the `continuation_token` to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The `continuation_token` is generated if the `limit` is less than the remaining number of items, and the default sort is used (no sort is specified).
            total_item_count (int): Total number of items after applying `filter` params.
            items (list[Target]): A list of target objects.
        """
        # Only assign attributes that were explicitly provided; omitted ones
        # fall back to whatever the class provides (see __getattribute__).
        if continuation_token is not None:
            self.continuation_token = continuation_token
        if total_item_count is not None:
            self.total_item_count = total_item_count
        if items is not None:
            self.items = items
    def __setattr__(self, key, value):
        # Reject attributes that are not part of the declared model schema.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `TargetGetResponse`".format(key))
        self.__dict__[key] = value
    def __getattribute__(self, item):
        value = object.__getattribute__(self, item)
        # `Property` sentinels (from the generated ....properties module)
        # represent unset fields and read back as None.
        if isinstance(value, Property):
            return None
        else:
            return value
    def to_dict(self):
        """Returns the model properties as a dict (recursing into nested models)."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    # Convert each element that is itself a model.
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        # Generated boilerplate: unreachable here since TargetGetResponse
        # does not subclass dict.
        if issubclass(TargetGetResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, TargetGetResponse):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"tlewis@purestorage.com"
] |
tlewis@purestorage.com
|
ab7a78b9db6f60371ee1fac74f8b8411ff23aa43
|
a179d2abea58ee4d987bf05729a5e7df727af3cd
|
/instaclone/settings.py
|
6e13a5e35aa00ba74ca16a19dd70fe50c0cb34ee
|
[
"MIT"
] |
permissive
|
Derrick-Nyongesa/instagram-clone
|
ced05a4c334c9e95e96bec9a3883b448c5fa95c6
|
2f3c018c33aa440160401f0c1878a2670f2f0081
|
refs/heads/main
| 2023-05-14T01:32:36.211904
| 2021-05-26T13:42:26
| 2021-05-26T13:42:26
| 369,403,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,796
|
py
|
"""
Django settings for instaclone project.
Generated by 'django-admin startproject' using Django 3.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
from decouple import config, Csv
import cloudinary
import cloudinary.uploader
import cloudinary.api
import django_heroku
import dj_database_url
# Build paths inside the project like this: BASE_DIR / 'subdir'.
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# All secrets come from the environment via python-decouple's config().
SECRET_KEY = config("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'instagram',
    'bootstrap3',
    'cloudinary'
]
# WhiteNoise is listed first so it can serve static files before the
# security middleware runs.
MIDDLEWARE = [
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'instaclone.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.media',
            ],
        },
    },
]
WSGI_APPLICATION = 'instaclone.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# MODE=dev selects a local Postgres instance configured from the
# environment; anything else uses Heroku's DATABASE_URL.
if config('MODE')=="dev":
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': config('DB_NAME'),
            'USER': config('DB_USER'),
            'PASSWORD': config('DB_PASSWORD'),
            'HOST': config('DB_HOST'),
            'PORT': '',
        }
    }
# production
else:
    DATABASES = {
        'default': dj_database_url.config(
            default=config('DATABASE_URL')
        )
    }
# Merge any Heroku-provided database settings (with connection pooling).
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
]
# WhiteNoise storage: compressed + hashed filenames for cache busting.
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/accounts/login/'
#AUTH_PROFILE_MODULE = 'accounts.Profile'
# SMTP credentials, all environment-driven.
EMAIL_USE_TLS = config('EMAIL_USE_TLS')
EMAIL_HOST = config('EMAIL_HOST')
EMAIL_PORT = config('EMAIL_PORT')
EMAIL_HOST_USER = config('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = config('EMAIL_HOST_PASSWORD')
# Cloudinary media storage credentials.
cloudinary.config(
    cloud_name = config("CLOUDINARY_NAME"),
    api_key = config("CLOUDINARY_KEY"),
    api_secret = config("CLOUDINARY_SECRET")
)
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
# Apply Heroku-specific settings (logging, static files, db) last.
django_heroku.settings(locals())
|
[
"nyongesaderrick@gmail.com"
] |
nyongesaderrick@gmail.com
|
b91ee62ab15974dcb724ceeb00d00689410e332f
|
7bf1dc58ba0884ed957efdb5459ae44851b2b36e
|
/practice_450/greedy/33_rearrange_characters.py
|
858f75ba242070202848f0f4f1146c91f0ceea28
|
[] |
no_license
|
ksaubhri12/ds_algo
|
672260f07f41bcfc33f8ac23a64085a1f27ab4a5
|
46505b89371cae3321f48609dd755c7e5cfed302
|
refs/heads/master
| 2023-05-12T08:37:06.789111
| 2023-05-03T03:06:49
| 2023-05-03T03:06:49
| 211,793,312
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 935
|
py
|
def rearrange_characters(input_string: str):
    """Rearrange *input_string* so no two adjacent characters are equal.

    Returns the rearranged string, or -1 when no valid arrangement exists
    (some character occurs more than ceil(n / 2) times).

    Bug fix: the original feasibility check used ``count > n / 2``, which
    wrongly rejected odd-length strings such as "aab" (answer "aba"),
    because the most frequent character may legally fill all
    ``(n + 1) // 2`` even slots.
    """
    n = len(input_string)
    # Count occurrences of each character.
    counts = {}
    for ch in input_string:
        counts[ch] = counts.get(ch, 0) + 1
    # Place characters, most frequent first, on even indices (0, 2, 4, ...)
    # and then on odd indices; equal characters end up at least two apart.
    result = list(input_string)
    pos = 0
    for ch in sorted(counts, key=counts.get, reverse=True):
        count = counts[ch]
        if count > (n + 1) // 2:
            # More copies than even slots: impossible to separate them.
            return -1
        for _ in range(count):
            result[pos] = ch
            pos += 2
            if pos >= n:
                pos = 1  # even slots exhausted; continue filling odd slots
    return ''.join(result)
if __name__ == '__main__':
    # Demo runs: one feasible string, then two infeasible ones (print -1).
    print(rearrange_characters('geeksforgeeks'))
    print(rearrange_characters('bbbbb'))
    print(rearrange_characters('kkk'))
|
[
"kalpesh@getvokal.com"
] |
kalpesh@getvokal.com
|
b0e2af4e4d675713ffc95e2005e39ebb9196bccb
|
2b1448085c5ad44e78772dde1dcc2fae9cc4c3cc
|
/botorch/models/converter.py
|
35da4a3d8d21b48c62c2098e7a129b871f4e43c0
|
[
"MIT"
] |
permissive
|
leelasd/botorch
|
47fa0ff9c5f6c534ecfcba59f5b1bf52eea0d62e
|
c48bfc822940ee8a6e5e2604d4ff282033dbe892
|
refs/heads/master
| 2022-12-17T04:42:41.591444
| 2020-09-10T23:45:05
| 2020-09-10T23:46:41
| 294,561,185
| 1
| 0
|
MIT
| 2020-09-11T01:19:36
| 2020-09-11T01:19:35
| null |
UTF-8
|
Python
| false
| false
| 8,088
|
py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
Utilities for converting between different models.
"""
from __future__ import annotations
from copy import deepcopy
import torch
from botorch.exceptions import UnsupportedError
from botorch.models.gp_regression import FixedNoiseGP, HeteroskedasticSingleTaskGP
from botorch.models.gp_regression_fidelity import SingleTaskMultiFidelityGP
from botorch.models.gpytorch import BatchedMultiOutputGPyTorchModel
from botorch.models.model_list_gp_regression import ModelListGP
from torch.nn import Module
def _get_module(module: Module, name: str) -> Module:
    """Resolve a (possibly nested) sub-module of ``module`` by dotted path.

    Args:
        module: A `torch.nn.Module`.
        name: Period-delimited attribute path, in the form
            ``"sub_module.subsub_module.[...].leaf_module"``.  An empty
            string returns ``module`` itself.

    Returns:
        The requested sub-module.

    Example:
        >>> gp = SingleTaskGP(train_X, train_Y)
        >>> noise_prior = _get_module(gp, "likelihood.noise_covar.noise_prior")
    """
    if name == "":
        return module
    target = module
    for attr_name in name.split("."):
        target = getattr(target, attr_name)
    return target
def _check_compatibility(models: ModelListGP) -> None:
    """Check if a ModelListGP can be converted.

    NOTE(review): the annotation says ``ModelListGP`` but the caller passes
    ``model_list.models`` (the list of sub-models) — confirm intended type.

    Raises:
        UnsupportedError: if sub-module types differ across models, a model
            is not a ``BatchedMultiOutputGPyTorchModel``, a model is
            multi-output, or training inputs differ between models.
        NotImplementedError: for ``HeteroskedasticSingleTaskGP`` models or
            models with custom likelihoods.
    """
    # check that all submodules are of the same type
    for modn, mod in models[0].named_modules():
        mcls = mod.__class__
        if not all(isinstance(_get_module(m, modn), mcls) for m in models[1:]):
            raise UnsupportedError(
                "Sub-modules must be of the same type across models."
            )
    # check that each model is a BatchedMultiOutputGPyTorchModel
    if not all(isinstance(m, BatchedMultiOutputGPyTorchModel) for m in models):
        raise UnsupportedError(
            "All models must be of type BatchedMultiOutputGPyTorchModel."
        )
    # TODO: Add support for HeteroskedasticSingleTaskGP
    if any(isinstance(m, HeteroskedasticSingleTaskGP) for m in models):
        raise NotImplementedError(
            "Conversion of HeteroskedasticSingleTaskGP is currently unsupported."
        )
    # TODO: Add support for custom likelihoods
    if any(getattr(m, "_is_custom_likelihood", False) for m in models):
        raise NotImplementedError(
            "Conversion of models with custom likelihoods is currently unsupported."
        )
    # check that each model is single-output
    if not all(m._num_outputs == 1 for m in models):
        raise UnsupportedError("All models must be single-output.")
    # check that training inputs are the same across all sub-models
    if not all(
        torch.equal(ti, tj)
        for m in models[1:]
        for ti, tj in zip(models[0].train_inputs, m.train_inputs)
    ):
        raise UnsupportedError("training inputs must agree for all sub-models.")
def model_list_to_batched(model_list: ModelListGP) -> BatchedMultiOutputGPyTorchModel:
    """Convert a ModelListGP to a BatchedMultiOutputGPyTorchModel.

    Args:
        model_list: The `ModelListGP` to be converted to the appropriate
            `BatchedMultiOutputGPyTorchModel`. All sub-models must be of the same
            type and have the shape (batch shape and number of training inputs).

    Returns:
        The model converted into a `BatchedMultiOutputGPyTorchModel`.

    Example:
        >>> list_gp = ModelListGP(gp1, gp2)
        >>> batch_gp = model_list_to_batched(list_gp)
    """
    models = model_list.models
    _check_compatibility(models)
    # if the list has only one model, we can just return a copy of that
    if len(models) == 1:
        return deepcopy(models[0])
    # construct inputs: shared train_X, targets stacked along a new
    # trailing output dimension
    train_X = deepcopy(models[0].train_inputs[0])
    train_Y = torch.stack([m.train_targets.clone() for m in models], dim=-1)
    kwargs = {"train_X": train_X, "train_Y": train_Y}
    if isinstance(models[0], FixedNoiseGP):
        # fixed-noise models also need their observed noise stacked
        kwargs["train_Yvar"] = torch.stack(
            [m.likelihood.noise_covar.noise.clone() for m in models], dim=-1
        )
    if isinstance(models[0], SingleTaskMultiFidelityGP):
        init_args = models[0]._init_args
        if not all(
            v == m._init_args[k] for m in models[1:] for k, v in init_args.items()
        ):
            raise UnsupportedError("All models must have the same fidelity parameters.")
        kwargs.update(init_args)
    # construct the batched GP model
    batch_gp = models[0].__class__(**kwargs)
    # split state-dict entries into tensors (stacked below) and scalars
    # (must be identical across sub-models)
    tensors = {n for n, p in batch_gp.state_dict().items() if len(p.shape) > 0}
    scalars = set(batch_gp.state_dict()) - tensors
    input_batch_dims = len(models[0]._input_batch_shape)
    # ensure scalars agree (TODO: Allow different priors for different outputs)
    for n in scalars:
        v0 = _get_module(models[0], n)
        if not all(torch.equal(_get_module(m, n), v0) for m in models[1:]):
            raise UnsupportedError("All scalars must have the same value.")
    # ensure dimensions of all tensors agree
    for n in tensors:
        shape0 = _get_module(models[0], n).shape
        if not all(_get_module(m, n).shape == shape0 for m in models[1:]):
            raise UnsupportedError("All tensors must have the same shape.")
    # now construct the batched state dict
    scalar_state_dict = {
        s: p.clone() for s, p in models[0].state_dict().items() if s in scalars
    }
    # "active_dims" entries index input dimensions and are taken from the
    # first model as-is; all other tensors are stacked along the new
    # output batch dimension
    tensor_state_dict = {
        t: (
            torch.stack(
                [m.state_dict()[t].clone() for m in models], dim=input_batch_dims
            )
            if "active_dims" not in t
            else models[0].state_dict()[t].clone()
        )
        for t in tensors
    }
    batch_state_dict = {**scalar_state_dict, **tensor_state_dict}
    # load the state dict into the new model
    batch_gp.load_state_dict(batch_state_dict)
    return batch_gp
def batched_to_model_list(batch_model: BatchedMultiOutputGPyTorchModel) -> ModelListGP:
    """Convert a BatchedMultiOutputGPyTorchModel to a ModelListGP.

    Args:
        batch_model: The `BatchedMultiOutputGPyTorchModel` to be converted to
            a `ModelListGP`.

    Returns:
        The model converted into a `ModelListGP`.

    Example:
        >>> train_X = torch.rand(5, 2)
        >>> train_Y = torch.rand(5, 2)
        >>> batch_gp = SingleTaskGP(train_X, train_Y)
        >>> list_gp = batched_to_model_list(batch_gp)
    """
    # TODO: Add support for HeteroskedasticSingleTaskGP
    if isinstance(batch_model, HeteroskedasticSingleTaskGP):
        raise NotImplementedError(
            "Conversion of HeteroskedasticSingleTaskGP currently not supported."
        )
    batch_sd = batch_model.state_dict()
    # tensors are sliced per output below; scalars are shared unchanged
    tensors = {n for n, p in batch_sd.items() if len(p.shape) > 0}
    scalars = set(batch_sd) - tensors
    input_bdims = len(batch_model._input_batch_shape)
    models = []
    # build one single-output model per output dimension of the batch model
    for i in range(batch_model._num_outputs):
        scalar_sd = {s: batch_sd[s].clone() for s in scalars}
        # "active_dims" entries index input dims and are copied whole;
        # everything else is sliced at output index i
        tensor_sd = {
            t: (
                batch_sd[t].select(input_bdims, i).clone()
                if "active_dims" not in t
                else batch_sd[t].clone()
            )
            for t in tensors
        }
        sd = {**scalar_sd, **tensor_sd}
        kwargs = {
            "train_X": batch_model.train_inputs[0].select(input_bdims, i).clone(),
            "train_Y": batch_model.train_targets.select(input_bdims, i)
            .clone()
            .unsqueeze(-1),
        }
        if isinstance(batch_model, FixedNoiseGP):
            noise_covar = batch_model.likelihood.noise_covar
            kwargs["train_Yvar"] = (
                noise_covar.noise.select(input_bdims, i).clone().unsqueeze(-1)
            )
        if isinstance(batch_model, SingleTaskMultiFidelityGP):
            kwargs.update(batch_model._init_args)
        model = batch_model.__class__(**kwargs)
        model.load_state_dict(sd)
        models.append(model)
    return ModelListGP(*models)
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
7fa2949cf1cd3bc986e1801d3d60ef78650ba85e
|
8186514b510a801863229e3f9711c0c657e727e5
|
/assembly/0427/explore_qt/22/2.py
|
1995344ca3c4e4d6ee83bf7b963ca016295d0b6c
|
[] |
no_license
|
masknugget/mypyqt
|
274b2cbbf66c04927453815248f9c1bc5e65ca17
|
b86a49e4b8c7c8c3d8546ce1b49f8f3bb6332307
|
refs/heads/main
| 2023-08-17T13:30:11.451066
| 2021-09-27T14:14:54
| 2021-09-27T14:14:54
| 355,904,935
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,037
|
py
|
import sys
from PyQt5.QtWidgets import QApplication, QWidget, QInputDialog, QLineEdit, QTextEdit, QPushButton, \
QGridLayout
class Demo(QWidget):
    """Form demonstrating the ``QInputDialog`` static helpers.

    Each button opens the matching dialog (text / item / int / double /
    multiline) and, on accept, copies the value into the widget beside it.
    """
    def __init__(self):
        super(Demo, self).__init__()
        # One button per input type.
        self.name_btn = QPushButton('Name', self)
        self.gender_btn = QPushButton('Gender', self)
        self.age_btn = QPushButton('Age', self)
        self.score_btn = QPushButton('Score', self)
        self.info_btn = QPushButton('Info', self)
        # All buttons share one slot; the lambda passes the clicked button
        # so the slot can pick the matching dialog.
        self.name_btn.clicked.connect(lambda: self.open_dialog_func(self.name_btn))
        self.gender_btn.clicked.connect(lambda: self.open_dialog_func(self.gender_btn))
        self.age_btn.clicked.connect(lambda: self.open_dialog_func(self.age_btn))
        self.score_btn.clicked.connect(lambda: self.open_dialog_func(self.score_btn))
        self.info_btn.clicked.connect(lambda: self.open_dialog_func(self.info_btn))
        # Output widgets, one per button.
        self.name_line = QLineEdit(self)
        self.gender_line = QLineEdit(self)
        self.age_line = QLineEdit(self)
        self.score_line = QLineEdit(self)
        self.info_textedit = QTextEdit(self)
        # Two-column grid: buttons on the left, output widgets on the right.
        self.g_layout = QGridLayout()
        self.g_layout.addWidget(self.name_btn, 0, 0, 1, 1)
        self.g_layout.addWidget(self.name_line, 0, 1, 1, 1)
        self.g_layout.addWidget(self.gender_btn, 1, 0, 1, 1)
        self.g_layout.addWidget(self.gender_line,1, 1, 1, 1)
        self.g_layout.addWidget(self.age_btn, 2, 0, 1, 1)
        self.g_layout.addWidget(self.age_line, 2, 1, 1, 1)
        self.g_layout.addWidget(self.score_btn, 3, 0, 1, 1)
        self.g_layout.addWidget(self.score_line, 3, 1, 1, 1)
        self.g_layout.addWidget(self.info_btn, 4, 0, 1, 1)
        self.g_layout.addWidget(self.info_textedit, 4, 1, 1, 1)
        self.setLayout(self.g_layout)
    def open_dialog_func(self, btn):
        """Open the QInputDialog matching *btn* and store the accepted value."""
        if btn == self.name_btn:  # free-text input
            name, ok = QInputDialog.getText(self, 'Name Input', 'Please enter the name:')
            if ok:
                self.name_line.setText(name)
        elif btn == self.gender_btn:  # pick from a fixed item list
            gender_list = ['Female', 'Male']
            gender, ok = QInputDialog.getItem(self, 'Gender Input', 'Please choose the gender:', gender_list, 0, False)
            if ok:
                self.gender_line.setText(gender)
        elif btn == self.age_btn:  # integer spin input
            age, ok = QInputDialog.getInt(self, 'Age Input', 'Please select the age:')
            if ok:
                self.age_line.setText(str(age))
        elif btn == self.score_btn:  # floating-point spin input
            score, ok = QInputDialog.getDouble(self, 'Score Input', 'Please select the score:')
            if ok:
                self.score_line.setText(str(score))
        else:  # multi-line text input
            info, ok = QInputDialog.getMultiLineText(self, 'Info Input', 'Please enter the info:')
            if ok:
                self.info_textedit.setText(info)
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, show the demo window,
    # and hand control to the event loop.
    app = QApplication(sys.argv)
    demo = Demo()
    demo.show()
    sys.exit(app.exec_())
|
[
"946883098@qq.com"
] |
946883098@qq.com
|
963c21dcb3fda320cc53ce7e08d427b37c2d8aea
|
6a2b1b1d6092a8d2492a6677b6fd19d27b0f461f
|
/08-Python-DataTypes/Tuples/02-create-one-element-tuple.py
|
05fa335a53400e9ea8a0525d7b35a9f3a2482310
|
[] |
no_license
|
Uttam1982/PythonTutorial
|
3cfbe237199e048967502f3d0c1936f2b878cb87
|
8e28cc5c4be5826a011059db66f6952871248c82
|
refs/heads/master
| 2022-12-17T18:47:28.397383
| 2020-09-22T08:55:23
| 2020-09-22T08:55:23
| 288,524,784
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 535
|
py
|
# Creating a tuple with one element is a bit tricky.
# 1. Having one element within parentheses is not enough.
# 2. We will need a trailing comma to indicate that it is, in fact, a tuple.
my_tuple = ("python")
print("tuple without trailing comma: ",type(my_tuple)) # <class 'str'> -- just a parenthesized string
# Creating a tuple having one element: the trailing comma makes the tuple
my_tuple = ("python",)
print("tuple with trailing comma: ",type(my_tuple)) # <class 'tuple'>
## Parentheses are optional -- the comma alone creates the tuple
my_tuple = "python",
print("Parentheses is optional: ",type(my_tuple)) # <class 'tuple'>
|
[
"uttampat@gmail.com"
] |
uttampat@gmail.com
|
3667c8f1f8d45c41f552e9abe2d97e7838ac9395
|
ef187d259d33e97c7b9ed07dfbf065cec3e41f59
|
/work/atcoder/abc/abc083/B/answers/896331_rin1120.py
|
b9ceb3808d88e51ac154a5487becbe592cfa4936
|
[] |
no_license
|
kjnh10/pcw
|
847f7295ea3174490485ffe14ce4cdea0931c032
|
8f677701bce15517fb9362cc5b596644da62dca8
|
refs/heads/master
| 2020-03-18T09:54:23.442772
| 2018-07-19T00:26:09
| 2018-07-19T00:26:09
| 134,586,379
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 172
|
py
|
# ABC083 B: sum every integer i in [1, N] whose decimal digit sum t
# satisfies A <= t <= B.
N, A, B = map(int, input().split())
ans=0
for i in range(N):
    t = 0
    j = i+1
    # accumulate the decimal digit sum of i+1
    while j != 0:
        t += j%10
        j //= 10
    if A <= t and t <= B:
        ans += i+1
print(ans)
|
[
"kojinho10@gmail.com"
] |
kojinho10@gmail.com
|
89cd4ca057d69b4c1e05d0a821256293352b855f
|
4a8c1f7d9935609b780aff95c886ef7781967be0
|
/atcoder/_codeforces/1467_c.py
|
a74cb1c4230efe5766c5cfc9695586b0a0b3e910
|
[] |
no_license
|
recuraki/PythonJunkTest
|
d5e5f5957ac5dd0c539ef47759b1fe5ef7a2c52a
|
2556c973d468a6988d307ce85c5f2f8ab15e759a
|
refs/heads/master
| 2023-08-09T17:42:21.875768
| 2023-07-18T23:06:31
| 2023-07-18T23:06:31
| 13,790,016
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,557
|
py
|
import sys
from io import StringIO
import unittest
import logging
logging.basicConfig(level=logging.DEBUG)
def resolve():
    """Competitive-programming scaffold: read q query blocks from stdin.

    NOTE(review): this is an unfinished template — do() reads each block
    but computes and prints nothing yet.
    """
    from pprint import pprint
    import sys
    input = sys.stdin.readline
    def do():
        # One query block: three sizes followed by the three arrays.
        n1, n2, n3 = map(int, input().split())
        dat1 = list(map(int, input().split()))
        dat2 = list(map(int, input().split()))
        dat3 = list(map(int, input().split()))
    # NOTE(review): indentation was lost in this dump; `q` is assumed to be
    # read at resolve() level — confirm against the original file.
    q = int(input())
    for _ in range(q):
        do()
    # do()
class TestClass(unittest.TestCase):
    """Runs resolve() against captured stdin/stdout sample pairs."""
    def assertIO(self, input, output):
        # Swap the std streams, run the solver, then compare its stdout
        # (minus the trailing newline) against the expected text.
        stdout, stdin = sys.stdout, sys.stdin
        sys.stdout, sys.stdin = StringIO(), StringIO(input)
        resolve()
        sys.stdout.seek(0)
        out = sys.stdout.read()[:-1]
        sys.stdout, sys.stdin = stdout, stdin
        self.assertEqual(out, output)
    def test_input_1(self):
        print("test_input_1")
        input = """2 4 1
1 2
6 3 4 5
5"""
        output = """20"""
        self.assertIO(input, output)
    def test_input_2(self):
        print("test_input_2")
        input = """3 2 2
7 5 4
2 9
7 1"""
        output = """29"""
        self.assertIO(input, output)
    # NOTE(review): tests 3 and 4 are unfilled "xxx" placeholders from the
    # template and will fail if run as-is.
    def test_input_3(self):
        print("test_input_3")
        input = """xxx"""
        output = """xxx"""
        self.assertIO(input, output)
    def test_input_4(self):
        print("test_input_4")
        input = """xxx"""
        output = """xxx"""
        self.assertIO(input, output)
if __name__ == "__main__":
    unittest.main()
|
[
"kanai@wide.ad.jp"
] |
kanai@wide.ad.jp
|
adbeff76935cbd7b2290404a3caf4ecbd26075b6
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_motion.py
|
19c9b83026d27cb66cfde07dd09ad7733cf6dde8
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 527
|
py
|
#calss header
class _MOTION():
    """Generated word-knowledge entry for the noun "motion"."""
    def __init__(self,):
        # Canonical name and the dictionary senses for this entry.
        self.name = "MOTION"
        self.definitions = [u'the act or process of moving, or a particular action or movement: ', u'a polite way of referring to the process of getting rid of solid waste from the body, or the waste itself: ', u'a formal suggestion made, discussed, and voted on at a meeting: ']
        # Graph links and metadata, empty by default.  ("childen" is a
        # generated misspelling kept for compatibility.)
        self.parents = []
        self.childen = []
        self.properties = []
        self.jsondata = {}
        self.specie = 'nouns'
    def run(self, obj1 = [], obj2 = []):
        # obj1/obj2 are accepted for interface compatibility but unused.
        return self.jsondata
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
26d639c4fbab5876c769b3ea6ae7da455fd84403
|
1f7847055332e16614f5358f0ec39b39bb9a66a7
|
/exercises/12_oop_inheritance/test_task_12_4.py
|
ec440e80a177b6ac47dabd01f370487663a50659
|
[] |
no_license
|
satperm/advpyneng-examples-exercises
|
6641dae31fa7f44db7e99547bc70d740988f21b9
|
6b12c320cace1d303dae38ddba9b19550a8708ec
|
refs/heads/master
| 2022-12-14T09:28:48.255804
| 2020-09-06T14:14:42
| 2020-09-06T14:14:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,911
|
py
|
import pytest
import task_12_4
import sys
sys.path.append('..')
from common_functions import check_class_exists, check_attr_or_method
def test_class_created():
    # The task module must define an OrderingMixin class.
    check_class_exists(task_12_4, 'OrderingMixin')
def test_special_methods_created():
    # OrderingMixin must derive __ge__/__ne__/__le__/__gt__ from the
    # __eq__ and __lt__ that the subclass provides.
    class IntTest(task_12_4.OrderingMixin):
        def __init__(self, number):
            self._number = number
        def __eq__(self, other):
            return self._number == other._number
        def __lt__(self, other):
            return self._number < other._number
    int1 = IntTest(5)
    check_attr_or_method(int1, method='__ge__')
    check_attr_or_method(int1, method='__ne__')
    check_attr_or_method(int1, method='__le__')
    check_attr_or_method(int1, method='__gt__')
def test_methods():
    # Check derived comparisons on a pair of IntTest values (5 vs 3).
    class IntTest(task_12_4.OrderingMixin):
        def __init__(self, number):
            self._number = number
        def __eq__(self, other):
            return self._number == other._number
        def __lt__(self, other):
            return self._number < other._number
    int1 = IntTest(5)
    int2 = IntTest(3)
    assert int1 != int2
    assert int1 >= int2
    assert int1 > int2
    assert not int1 < int2
def test_methods_do_thing():
    """Exercise OrderingMixin-derived comparisons in both directions.

    Renamed from ``test_methods``: the module previously defined two
    functions with that name, so last-definition-wins shadowing silently
    removed the earlier test from pytest collection.
    """
    class DoThing(task_12_4.OrderingMixin):
        def __init__(self, num):
            self.num = num
        def __eq__(self, other):
            return self.num == other.num
        def __lt__(self, other):
            return self.num < other.num
    small_num = DoThing(1)
    big_num = DoThing(100)
    assert small_num < big_num
    assert small_num <= big_num
    assert not small_num > big_num
    assert not small_num >= big_num
    assert small_num != big_num
    # Same pair, compared from the larger operand's side.
    small_num = DoThing(1)
    big_num = DoThing(100)
    assert not big_num < small_num
    assert not big_num <= small_num
    assert big_num > small_num
    assert big_num >= small_num
    assert big_num != small_num
|
[
"nataliya.samoylenko@gmail.com"
] |
nataliya.samoylenko@gmail.com
|
7ecbe0b308cb8371f7ee5198762f1a81ddafae19
|
fca80c6a22bcce507a81e05cd31e0d5ebbc43a57
|
/Chapter_05/samples/guestPicnic.py
|
cfe00f043c0535219fe766ef773df1d474944cd1
|
[
"MIT"
] |
permissive
|
GSantos23/Automate_Python
|
6b1ce29f1ee5a22b53ef6c1d45fef56d8d8e0b06
|
4bf3eadb5a330d5f22329bdcd08d37ab01a9454f
|
refs/heads/master
| 2021-06-29T04:12:32.910835
| 2020-12-26T22:28:31
| 2020-12-26T22:28:31
| 197,512,449
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 661
|
py
|
# Each guest maps to the items (and quantities) they are bringing.
allGuests = {'Alice': {'apples': 5, 'pretzels': 12},
             'Bob': {'ham sandwiches': 3, 'apples': 2},
             'Carol': {'cups': 3, 'apple pies': 1}}
def totalBrought(guests, item):
    """Return the total quantity of *item* brought by all guests.

    guests maps each guest name to a dict of item -> quantity; guests
    who do not bring *item* contribute 0.
    """
    return sum(brought.get(item, 0) for brought in guests.values())
# Report the totals for a few items of interest (missing items show 0).
print('Number of things being brought:')
print(' - Apples ' + str(totalBrought(allGuests, 'apples')))
print(' - Cups ' + str(totalBrought(allGuests, 'cups')))
print(' - Cakes ' + str(totalBrought(allGuests, 'cakes')))
print(' - Ham sandwiches ' + str(totalBrought(allGuests, 'ham sandwiches')))
print(' - Apple Pies ' + str(totalBrought(allGuests, 'apple pies')))
|
[
"santosgerson64@gmail.com"
] |
santosgerson64@gmail.com
|
2b43980f401fc20884576fe5b39260203c3a7de9
|
ab79f8297105a7d412303a8b33eaa25038f38c0b
|
/imersia/vit_product/stock.py
|
d8ccbb76ecfe1daf893e0694292440f3b1ff45a0
|
[] |
no_license
|
adahra/addons
|
41a23cbea1e35079f7a9864ade3c32851ee2fb09
|
c5a5678379649ccdf57a9d55b09b30436428b430
|
refs/heads/master
| 2022-06-17T21:22:22.306787
| 2020-05-15T10:51:14
| 2020-05-15T10:51:14
| 264,167,002
| 1
| 0
| null | 2020-05-15T10:39:26
| 2020-05-15T10:39:26
| null |
UTF-8
|
Python
| false
| false
| 5,172
|
py
|
import time
from openerp.osv import fields
from openerp.osv import osv
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, float_compare
class StockMove(osv.osv):
_inherit = 'stock.move'
def _src_id_default(self, cr, uid, ids, context=None):
try:
location_model, location_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'location_production')
self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
except (orm.except_orm, ValueError):
location_id = False
return location_id
def _dest_id_default(self, cr, uid, ids, context=None):
try:
location_model, location_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_stock')
self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
except (orm.except_orm, ValueError):
location_id = False
return location_id
_columns = {
'custom_production_id': fields.many2one('mrp.production.custom', 'Production Order for Produced Products', select=True, copy=False),
'waste_qty':fields.float('Waste (%)'),
# 'raw_material_production_id': fields.many2one('mrp.production', 'Production Order for Raw Materials', select=True),
'consumed_for': fields.many2one('stock.move', 'Consumed for', help='Technical field used to make the traceability of produced products'),
}
def action_consume_custom(self, cr, uid, ids, product_qty, location_id=False, restrict_lot_id=False, restrict_partner_id=False,
consumed_for=False, context=None):
""" Consumed product with specific quantity from specific source location.
@param product_qty: Consumed/produced product quantity (= in quantity of UoM of product)
@param location_id: Source location
@param restrict_lot_id: optionnal parameter that allows to restrict the choice of quants on this specific lot
@param restrict_partner_id: optionnal parameter that allows to restrict the choice of quants to this specific partner
@param consumed_for: optionnal parameter given to this function to make the link between raw material consumed and produced product, for a better traceability
@return: New lines created if not everything was consumed for this line
"""
if context is None:
context = {}
res = []
production_obj = self.pool.get('mrp.production.custom')
if product_qty <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
#because of the action_confirm that can create extra moves in case of phantom bom, we need to make 2 loops
ids2 = []
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'draft':
ids2.extend(self.action_confirm(cr, uid, [move.id], context=context))
else:
ids2.append(move.id)
prod_orders = set()
for move in self.browse(cr, uid, ids2, context=context):
prod_orders.add(move.custom_production_id.id)
print"Total Qty>>>",product_qty
move_qty = product_qty
if move_qty <= 0.00:
raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))
quantity_rest = move_qty - product_qty
print"Rest Qty>>>",quantity_rest
# Compare with numbers of move uom as we want to avoid a split with 0 qty
quantity_rest_uom = move.product_uom_qty - self.pool.get("product.uom")._compute_qty_obj(cr, uid, move.product_id.uom_id, product_qty, move.product_uom)
if float_compare(quantity_rest_uom, 0, precision_rounding=move.product_uom.rounding) != 0:
new_mov = self.split(cr, uid, move, quantity_rest, context=context)
print"New Move>>>",new_mov
res.append(new_mov)
vals = {'restrict_lot_id': restrict_lot_id,
'restrict_partner_id': restrict_partner_id,
'consumed_for': consumed_for}
if location_id:
vals.update({'location_id': location_id})
self.write(cr, uid, [move.id], vals, context=context)
# Original moves will be the quantities consumed, so they need to be done
self.action_done(cr, uid, ids2, context=context)
if res:
self.action_assign(cr, uid, res, context=context)
if prod_orders:
production_obj.action_in_production(cr, uid, list(prod_orders), context=None)
#production_obj.signal_workflow(cr, uid, list(prod_orders), 'button_produce')
return res
_defaults = {
'location_id': _src_id_default,
'location_dest_id': _dest_id_default
}
|
[
"prog1@381544ba-743e-41a5-bf0d-221725b9d5af"
] |
prog1@381544ba-743e-41a5-bf0d-221725b9d5af
|
b6f6432a451ac396f4378d34ae642e68e475e1e3
|
82b946da326148a3c1c1f687f96c0da165bb2c15
|
/sdk/python/pulumi_azure_native/machinelearningservices/v20210101/get_aks_service.py
|
d436cd9c6fca64cd385942d716820e2980e1cc9c
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
morrell/pulumi-azure-native
|
3916e978382366607f3df0a669f24cb16293ff5e
|
cd3ba4b9cb08c5e1df7674c1c71695b80e443f08
|
refs/heads/master
| 2023-06-20T19:37:05.414924
| 2021-07-19T20:57:53
| 2021-07-19T20:57:53
| 387,815,163
| 0
| 0
|
Apache-2.0
| 2021-07-20T14:18:29
| 2021-07-20T14:18:28
| null |
UTF-8
|
Python
| false
| false
| 5,922
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetAKSServiceResult',
'AwaitableGetAKSServiceResult',
'get_aks_service',
]
@pulumi.output_type
class GetAKSServiceResult:
"""
Machine Learning service object wrapped into ARM resource envelope.
"""
def __init__(__self__, id=None, identity=None, location=None, name=None, properties=None, sku=None, system_data=None, tags=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Specifies the resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> Optional['outputs.IdentityResponse']:
"""
The identity of the resource.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Specifies the location of the resource.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Specifies the name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> Any:
"""
Service properties
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
"""
The sku of the workspace.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Read only system data
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Contains resource tags defined as key/value pairs.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Specifies the type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetAKSServiceResult(GetAKSServiceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetAKSServiceResult(
id=self.id,
identity=self.identity,
location=self.location,
name=self.name,
properties=self.properties,
sku=self.sku,
system_data=self.system_data,
tags=self.tags,
type=self.type)
def get_aks_service(expand: Optional[bool] = None,
resource_group_name: Optional[str] = None,
service_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAKSServiceResult:
"""
Machine Learning service object wrapped into ARM resource envelope.
:param bool expand: Set to True to include Model details.
:param str resource_group_name: Name of the resource group in which workspace is located.
:param str service_name: Name of the Azure Machine Learning service.
:param str workspace_name: Name of Azure Machine Learning workspace.
"""
__args__ = dict()
__args__['expand'] = expand
__args__['resourceGroupName'] = resource_group_name
__args__['serviceName'] = service_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:machinelearningservices/v20210101:getAKSService', __args__, opts=opts, typ=GetAKSServiceResult).value
return AwaitableGetAKSServiceResult(
id=__ret__.id,
identity=__ret__.identity,
location=__ret__.location,
name=__ret__.name,
properties=__ret__.properties,
sku=__ret__.sku,
system_data=__ret__.system_data,
tags=__ret__.tags,
type=__ret__.type)
|
[
"noreply@github.com"
] |
morrell.noreply@github.com
|
cd9bb1eb10be89931f7564472027e88621ad041e
|
8143bfdbda6fdbef40bc570f48773edd365fcb62
|
/project/Kyb-TestProject/businessView/loginView.py
|
895c338f89df61341cca470210d9b35b905c1f74
|
[] |
no_license
|
CaptainJi/Kyb-TestProject
|
199caef0f1e58d6bb45273114596daf6ebdc424c
|
38d200d4d8436d4ad699682c3606f035446093cc
|
refs/heads/master
| 2022-10-16T15:36:20.499879
| 2020-06-06T07:06:22
| 2020-06-06T07:06:22
| 259,554,471
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,798
|
py
|
import logging
from common.commonFun import Common, NoSuchElementException
from common.desiredCaps import appium_desired
from selenium.webdriver.common.by import By
# 封装登录业务逻辑类
class LoginView(Common):
# 获取用户名、密码输入框元素
username_type = (By.ID, 'com.tal.kaoyan:id/login_email_edittext')
password_type = (By.ID, 'com.tal.kaoyan:id/login_password_edittext')
# 获取登录按钮元素
loginBtn = (By.ID, 'com.tal.kaoyan:id/login_login_btn')
tip_commit = (By.ID, 'com.tal.kaoyan:id/tip_commit')
# 获取“我的”按钮元素
button_mysefl = (By.ID, 'com.tal.kaoyan:id/mainactivity_button_mysefl')
usercenter_username = (By.ID, 'com.tal.kaoyan:id/activity_usercenter_username')
right_button = (By.ID, 'com.tal.kaoyan:id/myapptitle_RightButton_textview')
# 获取退出元素
logout = (By.ID, 'com.tal.kaoyan:id/setting_logout_text')
def login_action(self, username, password):
# 取消升级
self.check_cancel_btn()
# 跳过
self.check_skipBtn()
logging.info('开始登录')
logging.info('用户名:%s' % username)
self.driver.find_element(*self.username_type).send_keys(username)
logging.info('密码:%s' % password)
self.driver.find_element(*self.password_type).send_keys(password)
logging.info('点击登录按钮')
self.driver.find_element(*self.loginBtn).click()
def check_account_alert(self):
logging.info('检查登录警告信息')
try:
element = self.driver.find_element(*self.tip_commit)
except NoSuchElementException:
pass
else:
logging.info('跳过登录警告信息')
element.click()
def check_login_status(self):
logging.info('检查登录状态')
self.check_market_ad()
self.check_account_alert()
try:
self.driver.find_element(*self.button_mysefl).click()
self.driver.find_element(*self.usercenter_username)
except NoSuchElementException:
logging.error('登陆失败')
self.getScreenShot('登陆失败')
return False
else:
logging.info('登陆成功')
self.getScreenShot('登陆成功')
self.logout_action()
return True
def logout_action(self):
logging.info('退出登录')
self.driver.find_element(*self.right_button).click()
self.driver.find_element(*self.logout).click()
self.driver.find_element(*self.tip_commit).click()
if __name__ == '__main__':
driver = appium_desired()
l = LoginView(driver)
l.check_cancel_btn()
l.check_skipBtn()
l.login_action('', '')
l.check_login_status()
|
[
"jiqing19861123@163.com"
] |
jiqing19861123@163.com
|
9deed2e10501ba1a8d6f3c0f052412d7cbb1bb3d
|
dd097c7ae744227b0312d762ee0482a3380ff8c6
|
/plot_tg.py
|
9f751bebfaa64b4b76be445e5325e06e65df06b0
|
[] |
no_license
|
moflaher/workspace_python
|
0d6e98274d923a721db2b345f65c20b02ca59d08
|
6551e3602ead3373eafce10d11ce7b96bdcb106f
|
refs/heads/master
| 2023-03-06T02:15:01.945481
| 2023-03-01T19:15:51
| 2023-03-01T19:15:51
| 20,814,932
| 3
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,079
|
py
|
from __future__ import division,print_function
import matplotlib as mpl
import scipy as sp
from folderpath import *
from datatools import *
from gridtools import *
from plottools import *
from projtools import *
from stattools import *
import interptools as ipt
import matplotlib.tri as mplt
import matplotlib.pyplot as plt
#from mpl_toolkits.basemap import Basemap
import os as os
import sys
np.set_printoptions(precision=8,suppress=True,threshold=sys.maxsize)
import pandas as pd
import netCDF4 as n4
import copy
import matplotlib.dates as dates
import argparse
try:
import ttide
tide=True
except:
print('No ttide')
tide=False
parser = argparse.ArgumentParser()
parser.add_argument("grid", help="name of the grid", type=str)
parser.add_argument("name", help="name of the run", type=str,default=None, nargs='?')
parser.add_argument("--station", help="switch to station output instead of fvcom output", default=False,action='store_true')
parser.add_argument("-dates", help="specify start and end date",type=str,nargs=2,default=None)
parser.add_argument("-snr", help="signal to noise ratio value used for constituent cutoff", type=float,default=2.0)
parser.add_argument("-skipdays", help="number of days to skip at start of timeseries", type=float,default=14.0)
args = parser.parse_args()
print("The current commandline arguments being used are")
print(args)
name=args.name
grid=args.grid
if args.station:
tag='station'
else:
tag='fvcom'
# find tg ncfiles
months = dates.MonthLocator()
monthsFmt = dates.DateFormatter('%b')
savepath='{}png/{}/tg/{}/'.format(figpath,grid,name)
if not os.path.exists(savepath): os.makedirs(savepath)
savepath2='{}png/{}/tg/{}/csv/'.format(figpath,grid,name)
if not os.path.exists(savepath2): os.makedirs(savepath2)
inpath='{}{}/tg/{}/'.format(datapath,grid,name)
filenames=glob.glob('{}tg_*_{}.nc'.format(inpath,tag))
filenames.sort()
#tg_*.nc'.format(obspath)
for i,filename in enumerate(filenames):
print('='*80)
print(i)
print(filename)
tgm = loadnc('',filename,False)
tgo = loadnc('{}east/all/'.format(obspath),'tg_{:05d}.nc'.format(tgm['tgnumber'][0]),False)
if args.dates is not None:
din=dates.datestr2num(args.dates)
figstr='{}{}_{}_tg_{:05d}_{}_to_{}.png'.format(savepath,grid,name,tgm['tgnumber'][0],args.dates[0],args.dates[1])
figstr2='{}{}_{}_tg_{:05d}_residual_{}_to_{}.png'.format(savepath,grid,name,tgm['tgnumber'][0],args.dates[0],args.dates[1])
figstr3='{}{}_{}_tg_{:05d}_{}_to_{}'.format(savepath2,grid,name,tgm['tgnumber'][0],args.dates[0],args.dates[1])
else:
din=np.array([tgm['time'][0]+args.skipdays,tgm['time'][-1]])
figstr='{}{}_{}_tg_{:05d}.png'.format(savepath,grid,name,tgm['tgnumber'][0])
figstr2='{}{}_{}_tg_{:05d}_residual.png'.format(savepath,grid,name,tgm['tgnumber'][0])
figstr3='{}{}_{}_tg_{:05d}'.format(savepath2,grid,name,tgm['tgnumber'][0])
idx=np.argwhere((tgo['time']>=din[0]) & (tgo['time']<=din[1]))
idx=np.ravel(idx)
time1,data1,data2=interp_clean_common(tgo['time'][idx],tgo['zeta'][idx],tgm['time'],tgm['zeta'],500,-500)
stats=residual_stats(data2-np.mean(data2), data1-np.mean(data1))
a=pd.DataFrame(stats,index=[0]).round(2).T[0]
f=plt.figure(figsize=(15,5));
ax=f.add_axes([.125,.1,.775,.8]);
ax.plot(time1,data1-np.mean(data1),'k',label='TG: {:05d}'.format(tgm['tgnumber'][0]))
ax.plot(time1,data2-np.mean(data2),'r',lw=.5,label='{}'.format(name))
ax.xaxis.set_major_locator(months)
ax.xaxis.set_major_formatter(monthsFmt)
ax.legend()
ax.set_ylabel('Elevation (m)')
f.suptitle('Removed TG means - Obs: {} Model: {}\n Bias: {} Std: {} RMSE: {} RAE: {} Corr: {} Skew: {} Skill: {}'.format(np.mean(data1),np.mean(data2),a[0],a[1],a[2],a[3],a[4],a[5],a[6]))
f.savefig(figstr,dpi=600)
if tide:
time=np.arange(time1[0],time1[-1]+1/24.0,1/24.0)
tgm_int=ipt.interp1d(tgm['time'],tgm['zeta'],time)
tgonan=tgo['zeta'][idx]
tgonan[tgonan>500]=np.nan
tgo_int=ipt.interp1d(tgo['time'][idx],tgonan,time)
tgm_tcon_pre=ttide.t_tide(tgm_int,stime=time[0],lat=tgm['lat'],dt=(time[1]-time[0])*24.0,out_style=None)
tgo_tcon_pre=ttide.t_tide(tgo_int,stime=time[0],lat=tgm['lat'],dt=(time[1]-time[0])*24.0,out_style=None)
tgm_tcon=ttide.t_tide(tgm_int,stime=time[0],lat=tgm['lat'],dt=(time[1]-time[0])*24.0,constitnames=tgm_tcon_pre['nameu'][tgm_tcon_pre['snr']>=args.snr],out_style=None)
tgo_tcon=ttide.t_tide(tgo_int,stime=time[0],lat=tgm['lat'],dt=(time[1]-time[0])*24.0,constitnames=tgo_tcon_pre['nameu'][tgo_tcon_pre['snr']>=args.snr],out_style=None)
f=plt.figure(figsize=(15,5));
ax=f.add_axes([.125,.1,.775,.8]);
ax.plot(time[:len(tgo_tcon['xres'])],tgo_tcon['xres']-np.nanmean(tgo_tcon['xres']),'k',label='TG: {:05d}'.format(tgm['tgnumber'][0]))
ax.plot(time[:len(tgm_tcon['xres'])],tgm_tcon['xres']-np.nanmean(tgm_tcon['xres']),'r',lw=.5,label='{}'.format(name))
ax.xaxis.set_major_locator(months)
ax.xaxis.set_major_formatter(monthsFmt)
ax.legend()
ax.set_ylabel('Residual Elevation (m)')
o,m=remove_common_nan(tgo_tcon['xres']-np.nanmean(tgo_tcon['xres']), tgm_tcon['xres']-np.nanmean(tgm_tcon['xres']))
stats=residual_stats(o,m)
a=pd.DataFrame(stats,index=[0]).round(2).T[0]
f.suptitle('Removed TG means - Obs: {} Model: {}\n Bias: {} Std: {} RMSE: {} RAE: {} Corr: {} Skew: {} Skill: {}'.format(np.nanmean(tgo_tcon['xres']),np.nanmean(tgm_tcon['xres']),a[0],a[1],a[2],a[3],a[4],a[5],a[6]))
f.savefig(figstr2,dpi=600)
df=pd.DataFrame(tgm_tcon['tidecon'],columns=['Amp','AmpE','Phase','PhaseE'],index=tgm_tcon['nameu']).round(2).sort_values('Amp',ascending=False)
df.to_csv('{}_model_full.csv'.format(figstr3))
df=pd.DataFrame(tgo_tcon['tidecon'],columns=['Amp','AmpE','Phase','PhaseE'],index=tgo_tcon['nameu']).round(2).sort_values('Amp',ascending=False)
df.to_csv('{}_obs_full.csv'.format(figstr3))
namesm=tgm_tcon['nameu']
cnames=np.array([])
for namea in namesm:
if namea in tgo_tcon['nameu']:
cnames=np.append(cnames,namea)
oidx=np.in1d(tgo_tcon['nameu'],cnames)
midx=np.in1d(tgm_tcon['nameu'],cnames)
diff=np.vstack([tgo_tcon['tidecon'][oidx,0],tgm_tcon['tidecon'][midx,0],tgo_tcon['tidecon'][oidx,0]-tgm_tcon['tidecon'][midx,0],
tgo_tcon['tidecon'][oidx,2],tgm_tcon['tidecon'][midx,2],tgo_tcon['tidecon'][oidx,2]-tgm_tcon['tidecon'][midx,2]]).T
df=pd.DataFrame(diff,columns=['AmpObs','AmpMod','AmpDiff','PhaseObs','PhaseMod','PhaseDiff'],index=cnames).round(2).sort_values('AmpObs',ascending=False)
df.to_csv('{}_obsmod_common_diff.csv'.format(figstr3))
#kill
|
[
"073208o@acadiau.ca"
] |
073208o@acadiau.ca
|
4c3ab23c18f9d4491755f6abf41148a2ed42fc82
|
c4702d1a06640555829b367852138cc93ba4a161
|
/dym_bank_trf_request/wizard/bank_trf_advice_group_old.py
|
6ef06d317a4c2cef785790f379608629ac9eeabb
|
[] |
no_license
|
Rizalimami/dym
|
0ecadf9c049b22ebfebf92e4eab6eaad17dd3e26
|
af1bcf7b77a3212bc8a8a0e41e6042a134587ed4
|
refs/heads/master
| 2020-04-08T10:56:43.605698
| 2018-11-27T06:44:08
| 2018-11-27T06:44:08
| 159,287,876
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,459
|
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class BankTrfRequestGroup(osv.osv_memory):
_name = "bank.trf.request.group"
_description = "Bank Transfer Request Grup"
def fields_view_get(self, cr, uid, view_id=None, view_type='form',
context=None, toolbar=False, submenu=False):
"""
Changes the view dynamically
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return: New arch of view.
"""
if context is None:
context={}
res = super(BankTrfRequestGroup, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False)
if context.get('active_model','') == 'bank.trf.request' and len(context['active_ids']) < 2:
raise osv.except_osv(_('Warning!'),
_('Please select multiple order to merge in the list view.'))
return res
def merge_trf_requests(self, cr, uid, ids, context=None):
"""
To merge similar type of purchase orders.
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: the ID or list of IDs
@param context: A standard dictionary
@return: purchase order view
"""
trf_req_obj = self.pool.get('bank.trf.request')
# proc_obj = self.pool.get('procurement.order')
mod_obj =self.pool.get('ir.model.data')
if context is None:
context = {}
result = mod_obj._get_id(cr, uid, 'dym_bank_trf_request', 'bank_trf_request_search_view')
id = mod_obj.read(cr, uid, result, ['res_id'])
# allorders = trf_req_obj.do_merge(cr, uid, context.get('active_ids',[]), context)
allorders = []
return {
'domain': "[('id','in', [" + ','.join(map(str, allorders.keys())) + "])]",
'name': _('Bank Transfer Request Group'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'bank.trf.request',
'view_id': False,
'type': 'ir.actions.act_window',
'search_view_id': id['res_id']
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"rizal@portcities.net"
] |
rizal@portcities.net
|
4b1c156a5fbd8b1083a31472220fdd8c0b7d4e3a
|
cc6e1cce2f0d7fa8eb16f2dc3e90d60575aeac66
|
/uploader/models.py
|
1671ef86d98332e6ced4177a5d9084b8f038ada0
|
[] |
no_license
|
andysitu/p_site
|
84bd0fa600593a91ea9f67ca9460e0fa4b633049
|
257386bdf792ea867dbbd9905c7245695ab55a6b
|
refs/heads/master
| 2023-06-21T16:30:21.423414
| 2019-06-26T19:21:56
| 2019-06-26T19:21:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,356
|
py
|
from django.db import models
import django, os
from django.db.models.signals import pre_delete
from django.dispatch.dispatcher import receiver
from django.conf import settings
from django.contrib.auth.models import User
class UFileManager(models.Model):
name = models.CharField(max_length = 50)
count = models.IntegerField(default=0)
class UFile(models.Model):
filename = models.CharField(max_length=50)
uploaded_date = models.DateTimeField(default=django.utils.timezone.now)
file_manager = models.ForeignKey(UFileManager, on_delete=models.CASCADE)
file_extensions = models.CharField(max_length=10, default=".txt")
def __str__(self):
return self.filename
def get_filepath(self):
folder_name = str(self.file_manager.id)
filepath = os.path.join(settings.MEDIA_ROOT, "uploader", folder_name, str(self.id) + self.file_extensions)
return filepath
@receiver(pre_delete, sender=User)
def delete_file(sender, instance, using, **kwargs):
print("HI")
try:
filepath = instance.get_filepath()
os.remove(filepath)
console.log("removed file")
except FileNotFoundError:
pass
class Note(models.Model):
text = models.TextField(max_length=200)
file_manager = models.ForeignKey(UFileManager, on_delete=models.CASCADE)
|
[
"and.situ@gmail.com"
] |
and.situ@gmail.com
|
278010849f6c888e86cd9237c60ee0f61c668fd9
|
9bd687b5454ca7d2b4deb0e149ec7023b2f3b89e
|
/ebikes/lora/rfm/ll/__init__.py
|
3ca2c4bfa45a8a729a51f15a13480468039889bb
|
[] |
no_license
|
AlbertoFDR/EBikes-IoT
|
57132ff8b059b6d2e5185e241afe7720f96b667f
|
cd5da02d96ccedb57a9fd3e76d4430a11fd4f4fd
|
refs/heads/master
| 2022-12-10T14:02:22.468032
| 2020-02-11T12:37:59
| 2020-02-11T12:37:59
| 225,611,209
| 3
| 0
| null | 2021-06-02T00:45:47
| 2019-12-03T12:11:18
|
Python
|
UTF-8
|
Python
| false
| false
| 760
|
py
|
"""
LinkLayer submodule
"""
__author__ = """Alexander Krause <alexander.krause@ed-solutions.de>"""
__date__ = "2016-12-28"
__version__ = "0.1.0"
__license__ = "GPL"
class Prototype:
conf = None
PL = None
def __init__(self, cfg, pl):
self.conf = cfg
self.PL = pl
self.PL.setIRQH(self._handleIRQ)
self.postInit()
def _handleIRQ(self):
pass
def get(conf, pl=None):
"""
get a new LinkLayer instance, depending on config
if a PhysicalLayer is given, it's added to the LinkLayer
"""
if conf["type"] in ["rfm9x", "rfm95", "rfm96", "rfm97", "rfm98"]:
from .ll_rfm9x import LinkLayer
else:
print("unsupported type")
return None
return LinkLayer(conf, pl)
|
[
"aratzml@opendeusto.es"
] |
aratzml@opendeusto.es
|
02c511b60cde23f482f156867d34247a278e9f14
|
78ed388a01610359d4554efa046e473a008ba1ae
|
/hdlConvertorAst/translate/verilog_to_basic_hdl_sim_model.py
|
0e08a141d10a01eeda1d6dc9d129bc04cf50cc7b
|
[
"MIT"
] |
permissive
|
mewais/hdlConvertorAst
|
f9ad85cfb2804c52a1b90642f4c9cede2ce2d3e6
|
64c8c1deee923ffae17e70e0fb1ad763cb69608c
|
refs/heads/master
| 2022-12-09T12:01:23.150348
| 2020-09-06T04:10:15
| 2020-09-06T04:15:38
| 293,200,130
| 0
| 0
|
MIT
| 2020-09-06T04:03:17
| 2020-09-06T04:03:17
| null |
UTF-8
|
Python
| false
| false
| 2,050
|
py
|
from hdlConvertorAst.translate._verilog_to_basic_hdl_sim_model.\
add_unique_labels_to_all_processes import AddUniqueLabelsToAllProcesses
from hdlConvertorAst.translate._verilog_to_basic_hdl_sim_model\
.verilog_types_to_basic_hdl_sim_model import VerilogTypesToBasicHdlSimModel
from hdlConvertorAst.translate._verilog_to_basic_hdl_sim_model\
.wrap_module_statements_to_processes import wrap_module_statements_to_processes
from hdlConvertorAst.translate.common.discover_declarations import DiscoverDeclarations
from hdlConvertorAst.translate.vhdl_to_verilog import link_module_dec_def
from hdlConvertorAst.translate.common.name_scope import NameScope
from hdlConvertorAst.translate.common.resolve_names import ResolveNames
from hdlConvertorAst.translate._verilog_to_basic_hdl_sim_model\
.discover_stm_outputs import discover_stm_outputs_context
from hdlConvertorAst.translate._verilog_to_basic_hdl_sim_model\
.verilog_operands_to_basic_hdl_sim_model import BasicHdlSimModelTranslateVerilogOperands
from hdlConvertorAst.translate._verilog_to_basic_hdl_sim_model\
.assignment_to_update_assignment import AssignmentToUpdateAssignment
from hdlConvertorAst.translate._verilog_to_basic_hdl_sim_model.apply_io_scope_to_signal_names import ApplyIoScopeToSignalNames
def verilog_to_basic_hdl_sim_model(context):
"""
:type context: HdlContext
"""
link_module_dec_def(context)
name_scope = NameScope.make_top(False)
DiscoverDeclarations(name_scope).visit_HdlContext(context)
ResolveNames(name_scope).visit_HdlContext(context)
wrap_module_statements_to_processes(context)
BasicHdlSimModelTranslateVerilogOperands().visit_HdlContext(context)
VerilogTypesToBasicHdlSimModel().visit_HdlContext(context)
stm_outputs = discover_stm_outputs_context(context)
AddUniqueLabelsToAllProcesses(name_scope, stm_outputs).context(context)
AssignmentToUpdateAssignment().visit_HdlContext(context)
ApplyIoScopeToSignalNames().visit_HdlContext(context)
return context, stm_outputs, name_scope
|
[
"nic30@seznam.cz"
] |
nic30@seznam.cz
|
889561373222e776f285c46bed462a03db1dce83
|
d5f8ca3c13f681d147b7614f1902df7ba34e06f9
|
/CelebA/main.py
|
1a920e6f5ac9b064598be6c2ab89096536d2adde
|
[] |
no_license
|
hhjung1202/OwnAdaptation
|
29a6c0a603ab9233baf293096fb9e7e956647a10
|
50805730254419f090f4854387be79648a01fbb4
|
refs/heads/master
| 2021-06-25T22:31:15.437642
| 2020-11-26T18:19:55
| 2020-11-26T18:19:55
| 176,670,379
| 1
| 0
| null | 2020-06-11T07:35:55
| 2019-03-20T06:36:19
|
Python
|
UTF-8
|
Python
| false
| false
| 6,344
|
py
|
import argparse
import torch
from torch.autograd import Variable
from torchvision.utils import save_image
import numpy as np
from model import *
import os
import torch.backends.cudnn as cudnn
import time
import utils
import dataset
import math
parser = argparse.ArgumentParser(description='PyTorch Cycle Domain Adaptation Training')
parser.add_argument('--sd', default='CelebA', type=str, help='source dataset')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', help='number of data loading workers (default: 4)')
parser.add_argument('--epoch', default=164, type=int, metavar='N', help='number of total epoch to run')
parser.add_argument('--decay-epoch', default=30, type=int, metavar='N', help='epoch from which to start lr decay')
parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)')
parser.add_argument('-b', '--batch-size', default=128, type=int, metavar='N', help='mini-batch size (default: 256)')
parser.add_argument('--lr', '--learning-rate', default=1e-2, type=float, metavar='LR', help='initial learning rate')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M', help='momentum')
parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float, metavar='W', help='weight decay (default: 1e-4)')
parser.add_argument('--img-size', type=int, default=32, help='input image width, height size')
parser.add_argument('--dir', default='./', type=str, help='default save directory')
parser.add_argument('--gpu', default='0', type=str, help='Multi GPU ids to use.')
best_prec_result = torch.tensor(0, dtype=torch.float32)
args = parser.parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
torch.manual_seed(args.seed)
cuda = True if torch.cuda.is_available() else False
FloatTensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if cuda else torch.LongTensor
criterion = torch.nn.CrossEntropyLoss()
def main():
global args, best_prec_result
start_epoch = 0
utils.default_model_dir = args.dir
start_time = time.time()
train_loader, test_loader = dataset_selector(args.sd)
state_info = utils.model_optim_state_info()
state_info.model_init(args=args, num_class=4000)
state_info.model_cuda_init()
state_info.weight_init()
state_info.optimizer_init(args)
if cuda:
print("USE", torch.cuda.device_count(), "GPUs!")
cudnn.benchmark = True
checkpoint = utils.load_checkpoint(utils.default_model_dir, is_last=True)
if checkpoint:
start_epoch = checkpoint['epoch'] + 1
best_prec_result = checkpoint['Best_Prec']
state_info.load_state_dict(checkpoint)
for epoch in range(0, args.epoch):
if epoch < 80:
lr = args.lr
elif epoch < 122:
lr = args.lr * 0.1
else:
lr = args.lr * 0.01
for param_group in state_info.optimizer.param_groups:
param_group['lr'] = lr
train(state_info, train_loader, epoch)
prec_result = test(state_info, test_loader, epoch)
if prec_result > best_prec_result:
best_prec_result = prec_result
filename = 'checkpoint_best.pth.tar'
utils.save_state_checkpoint(state_info, best_prec_result, filename, utils.default_model_dir, epoch)
utils.print_log('Best Prec : {:.4f}'.format(best_prec_result.item()))
filename = 'latest.pth.tar'
utils.save_state_checkpoint(state_info, best_prec_result, filename, utils.default_model_dir, epoch)
now = time.gmtime(time.time() - start_time)
utils.print_log('Best Prec : {:.4f}'.format(best_prec_result.item()))
utils.print_log('{} hours {} mins {} secs for training'.format(now.tm_hour, now.tm_min, now.tm_sec))
print('done')
def train(state_info, train_loader, epoch): # all
    """Run one training epoch: forward pass, cross-entropy loss, optimizer step.

    Relies on the module-level ``criterion``, ``FloatTensor``/``LongTensor``
    and ``utils`` logger; logs running loss and accuracy every 10 batches.
    """
    utils.print_log('Type, Epoch, Batch, loss, total_loss, Percent')
    state_info.set_train_mode()
    # Float accumulators so the final percentage divides cleanly.
    correct = torch.tensor(0, dtype=torch.float32)
    total = torch.tensor(0, dtype=torch.float32)
    train_loss = 0
    for it, [x, y] in enumerate(train_loader):
        x, y = to_var(x, FloatTensor), to_var(y, LongTensor)
        output = state_info.forward(x)

        # Train
        state_info.optimizer.zero_grad()
        loss = criterion(output, y)
        loss.backward()
        state_info.optimizer.step()

        # Log Print
        train_loss += loss.data.item()
        total += float(y.size(0))
        # Predicted class = argmax over the class dimension.
        _, predicted = torch.max(output.data, 1)
        correct += float(predicted.eq(y.data).cpu().sum())
        if it % 10 == 0:
            utils.print_log('Train, {}, {}, {:.6f}, {:.4f}, {:.2f}'
                  .format(epoch, it, loss.item(), train_loss, 100.*correct / total))
            print('Train, {}, {}, {:.6f}, {:.4f}, {:.2f}'
                  .format(epoch, it, loss.item(), train_loss, 100.*correct / total))

    utils.print_log('')
def test(state_info, test_loader, epoch):
    """Evaluate on the test set and return the accuracy as a percentage tensor."""
    utils.print_log('Type, Epoch, Acc')
    state_info.set_test_mode()
    correct = torch.tensor(0, dtype=torch.float32)
    total = torch.tensor(0, dtype=torch.float32)
    for it, [x, y] in enumerate(test_loader):
        x, y = to_var(x, FloatTensor), to_var(y, LongTensor)
        output = state_info.forward(x)

        # Log Print
        total += float(y.size(0))
        # Predicted class = argmax over the class dimension.
        _, predicted = torch.max(output.data, 1)
        correct += float(predicted.eq(y.data).cpu().sum())

    utils.print_log('Test, {}, {:.2f}'.format(epoch, 100.*correct / total))
    print('Test, {}, {:.2f}'.format(epoch, 100.*correct / total))
    utils.print_log('')

    return 100.*correct / total
def dataset_selector(data):
    """Return the (train_loader, test_loader) pair for the dataset named *data*.

    Raises:
        ValueError: for an unknown dataset name.  Previously the function
        fell through and returned None, which only crashed later when the
        caller tried to unpack two loaders.
    """
    if data == 'mnist':
        return dataset.MNIST_loader(img_size=args.img_size)
    elif data == 'svhn':
        # SVHN images are fixed at 32x32 regardless of args.img_size.
        return dataset.SVHN_loader(img_size=32)
    elif data == "usps":
        return dataset.usps_loader(img_size=args.img_size)
    elif data == "mnistm":
        return dataset.MNIST_M_loader(img_size=args.img_size)
    elif data == "cifar10":
        return dataset.cifar10_loader(args)
    elif data == "CelebA":
        return dataset.CelebA_loader(image_size=args.img_size, batch_size=args.batch_size)
    raise ValueError('Unknown dataset: {}'.format(data))
def to_var(x, dtype):
    """Cast *x* to the tensor type *dtype* and wrap it in an autograd Variable."""
    converted = x.type(dtype)
    return Variable(converted)
if __name__=='__main__':
main()
|
[
"hhjung1202@naver.com"
] |
hhjung1202@naver.com
|
d0dd8c0f79d16b37610b0f645641720c3a87dc5b
|
347c70d4851b568e03e83387f77ae81071ab739e
|
/fn_splunk_integration/tests/test_function_utils.py
|
0c9e09a2c07c9c3e0f49d16aed5e0ed0666a3c55
|
[
"MIT"
] |
permissive
|
neetinkandhare/resilient-community-apps
|
59d276b5fb7a92872143ce2b94edd680738693ce
|
3ecdabe6bf2fc08f0f8e58cbe92553270d8da42f
|
refs/heads/master
| 2021-12-27T09:05:36.563404
| 2021-09-29T13:04:56
| 2021-09-29T13:04:56
| 159,804,866
| 1
| 0
|
MIT
| 2021-08-03T19:45:45
| 2018-11-30T10:07:32
|
Python
|
UTF-8
|
Python
| false
| false
| 1,628
|
py
|
#
# Unit tests for fn_splunk_integration/components/function_utils.py
#
# 100% code coverage
#
#
import unittest
import sys
sys.path.append("../fn_splunk_integration/util")
sys.path.append("fn_splunk_integration/util")
from function_utils import make_query_string
from function_utils import make_item_dict
from function_utils import ItemDataError
def test_query_string():
    """make_query_string substitutes %paramN% placeholders positionally from params."""
    print("Testing query string substitution....")
    input_string = "index = %param1% source=%param2% AND %param3%=%param4%"
    params = ["_internal", "*splunkd*", "clientip", "127.0.0.1"]
    query = make_query_string(input_string, params)
    assert query == "index = _internal source=*splunkd* AND clientip=127.0.0.1"
def test_make_item_dict():
    """make_item_dict pairs a flat key/value list into a dict.

    Covers: the happy path, an odd-length list (ItemDataError), a None key
    (pair silently dropped), and a None value (kept, falsy).
    """
    print("Testing make_item_dict")
    params = ["field1", "value1",
              "field2", "value2",
              "field3", "value3"]
    item_dict = make_item_dict(params)

    assert item_dict["field1"] == "value1" and item_dict["field2"] == "value2" and item_dict["field3"] == "value3"

    # Test wrong number of params
    try:
        make_item_dict(["p1","p2","p3"])
        assert False
    except ItemDataError:
        assert True

    # Test null key
    try:
        item_dict = make_item_dict(["p1", "p2",
                                    None, "p4",
                                    "p5", "p6"])
        assert item_dict["p1"] == "p2" and item_dict["p5"] == "p6"
        assert "p4" not in item_dict
    except ItemDataError:
        assert False

    # Test null value
    try:
        item_dict = make_item_dict(["p1", None])
        assert not item_dict["p1"]
    except:
        assert False
|
[
"hpyle@us.ibm.com"
] |
hpyle@us.ibm.com
|
d00dbd97b58fc1d1199f2fc36746e9223ddfeea0
|
39b0d9c6df77671f540c619aff170441f953202a
|
/default program/descriptor_method1.py
|
18d5b5dc9831d6210a3cfa6fd591f3a965cd7de1
|
[] |
no_license
|
yeboahd24/Python201
|
e7d65333f343d9978efff6bf86ce0447d3a40d70
|
484e66a52d4e706b8478473347732e23998c93c5
|
refs/heads/main
| 2023-02-06T10:24:25.429718
| 2020-12-26T01:08:04
| 2020-12-26T01:08:04
| 306,487,550
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 710
|
py
|
#!usr/bin/env/python3
class DescriptorClass(object):
    """Minimal non-data descriptor demonstrating __get__ dispatch.

    Accessed on the owner class itself, it reports the descriptor and owner
    class names; accessed on an instance, it reports that instance.
    """

    def __get__(self, instance, owner):
        # Python passes instance=None for class-level attribute access.
        accessed_via_class = instance is None
        if accessed_via_class:
            return f"{type(self).__name__}.{owner.__name__}"
        return f"value for {instance}"
class ClientClass(object):
    # Attribute lookups on this name are routed through DescriptorClass.__get__.
    descriptor = DescriptorClass()
test = ClientClass.descriptor # calling ClientClass directly
test1 = ClientClass().descriptor
print(test)
print(test1)
|
[
"noreply@github.com"
] |
yeboahd24.noreply@github.com
|
c5000324a37133b8e3e2bad62736b29664f711fd
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03659/s495769033.py
|
54ee8453e730f35341ffac0335267d937fc39396
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 224
|
py
|
# Read n integers and cut the sequence into two non-empty contiguous parts;
# print the minimum possible |sum(left) - sum(right)|.
import numpy as np

n = int(input())
values = np.array(list(map(int, input().split())))
prefix = values.cumsum()
grand_total = prefix[-1]

# Cutting after index i gives difference |grand_total - 2 * prefix[i]|;
# the cut index stops at n-2 so both sides stay non-empty.
best = 2 * 10**9
for cut in range(n - 1):
    best = min(best, abs(grand_total - 2 * prefix[cut]))
print(best)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
7e3abe5ff2836f61260cff4e091e0e15a6e5aa06
|
0966fc5e479f7dd86683fd2d961e44bb4f71a614
|
/splatify/views.py
|
8b506aeb27b9322d1943be7e2675565ce5510105
|
[] |
no_license
|
micnem/splatify2
|
112972616f6216598791df6b025c2de7be020281
|
a90328fbf79667ebe10a028a66c49334c840ae57
|
refs/heads/main
| 2023-02-10T11:20:48.570326
| 2021-01-06T14:14:08
| 2021-01-06T14:14:08
| 327,318,493
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 891
|
py
|
from django.shortcuts import render
from .spopulate import get_top_artists, create_playlist, match, main
from .models import *
def check_profile(profile):
    """Lazily populate a profile's top artists the first time it is seen."""
    if profile.populated:
        return
    get_top_artists(profile)
def homepage(request):
    """Render the static landing page."""
    return render(request, 'homepage.html')
def room(request):
    """List all users after ensuring the current user's profile is populated."""
    # Fetches the user's top artists on first visit (see check_profile).
    check_profile(request.user.profile)
    users = User.objects.all()
    return render(request, 'room.html', {'users': users})
def show_top_artists(request):
    """Render the top-artists page (template pulls data from context processors/session)."""
    return render(request,'top_artists.html')
def splat(request, user_id):
    """Blend the current user's and user_id's tastes into a playlist and show it."""
    user2 = User.objects.get(id=user_id)
    # match() intersects the two users' artists; main() builds the playlist.
    master_list = match([request.user, user2])
    playlist_id = main(master_list, request.user.profile, user2)
    return render(request, 'result.html', {'playlist_id':playlist_id})
def play(request, playlist_id):
    """Render the embedded player for an existing playlist."""
    return render(request, 'play.html', {'playlist_id':playlist_id})
|
[
"michael.nemni@gmail.com"
] |
michael.nemni@gmail.com
|
38739ea4cae572570555cd1043b6acf10436f45e
|
3eb4d64a8bb0bc240a2ef189724f4d51b5275eac
|
/heltour/tournament/migrations/0099_alternate_priority_date_override.py
|
863952e8d18f8da8a170a0aae4967d562e598879
|
[
"MIT"
] |
permissive
|
brucemubayiwa/heltour
|
c01cc88be7f86dce8246f619d7aa2da37e0e0ac2
|
fa4e9b06343acaf6a8a99337860e1ad433e68f6b
|
refs/heads/master
| 2021-01-23T19:59:04.099215
| 2017-09-06T03:34:31
| 2017-09-06T03:34:31
| 102,840,526
| 1
| 0
| null | 2017-09-08T08:53:30
| 2017-09-08T08:53:30
| null |
UTF-8
|
Python
| false
| false
| 482
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-09-19 01:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tournament', '0098_auto_20160916_1934'),
]
operations = [
migrations.AddField(
model_name='alternate',
name='priority_date_override',
field=models.DateTimeField(blank=True, null=True),
),
]
|
[
"ben.cyanfish@gmail.com"
] |
ben.cyanfish@gmail.com
|
c8e99972a246a077b466f45e66c23b688c79d040
|
ea373d1b4296d16eaa1355972cccd28eaa336871
|
/login-signup-Django/signup/views.py
|
1ea7905bc7574d9d41102a129e6dab3e08283977
|
[] |
no_license
|
nazaninsbr/Web-Development
|
f1a03e3d26d79dda8a6f9978d443a62cc5b88b42
|
7821ec2596d1dff7c4f390e01ae7d90e3fdbf029
|
refs/heads/master
| 2021-05-02T16:05:09.508344
| 2018-04-27T18:20:01
| 2018-04-27T18:20:01
| 120,666,238
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,656
|
py
|
from django.contrib.auth import login, authenticate
# from django.http import HttpResponse, JsonResponse
from django.contrib.auth.forms import UserCreationForm
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from signup.serializers import SignupSerializer
import json
from rest_framework.parsers import JSONParser
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
from django.contrib.auth.hashers import make_password
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from django.urls import reverse
# @login_required
# def home(request):
# return render(request, 'signup/home.html')
import logging
logger = logging.getLogger(__name__)
@csrf_exempt
def signup(request):
    """Create a user account from a JSON POST body and log the new user in.

    Expects ``{"username": ..., "password": ...}``.  On success the new auth
    token is returned (and stored in the session); invalid input returns the
    serializer errors; a GET returns an explanatory error payload.
    """
    if request.method == 'GET':
        response_data = {}
        response_data['result'] = 'error'
        response_data['message'] = 'You need to post something'
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    if request.method == 'POST':
        signupdata = JSONParser().parse(request)
        serializer = SignupSerializer(data=signupdata)
        if serializer.is_valid():
            jsonfile = serializer.data
            username = jsonfile["username"]
            password = jsonfile["password"]
            # SECURITY FIX: the original called make_password(password, '1'),
            # hard-coding the same salt for every account, and logged the raw
            # username/password at INFO level.  Omitting the salt argument
            # lets Django generate a random per-user salt; credential logging
            # is removed.
            password = make_password(password)
            user = User(username=username, password=password)
            user.save()
            new_token = Token.objects.create(user=user)
            new_token.save()
            request.session["SoCkey"] = new_token.key
            request.session.set_expiry(30000000)
            login(request, user)
            return JsonResponse({"key": new_token.key})
        else:
            return JsonResponse(serializer.errors)
# username = signupdata.cleaned_data.get('username')
# raw_password = signupdata.cleaned_data.get('password1')
# user = authenticate(username=username, password=raw_password)
# form = Signup(request.POST)
# if form.is_valid():
# form.save()
# username = form.cleaned_data.get('username')
# raw_password = form.cleaned_data.get('password1')
# user = authenticate(username=username, password=raw_password)
# login(request, user)
# return redirect('home')
# else:
# form = SignUpForm()
# return render(request, 'signup/signup.html', {'form': form})
|
[
"nazanin.sabrii@gmail.com"
] |
nazanin.sabrii@gmail.com
|
34a1c0235615920c69d66c20f7774fba3f391aa2
|
eb9f655206c43c12b497c667ba56a0d358b6bc3a
|
/python/testData/quickFixes/PyPandasSeriesToListQuickFixTest/dataframeGetitem.py
|
ee17a810d951dd9ec0fdaef3088c7dab1cfb67d5
|
[
"Apache-2.0"
] |
permissive
|
JetBrains/intellij-community
|
2ed226e200ecc17c037dcddd4a006de56cd43941
|
05dbd4575d01a213f3f4d69aa4968473f2536142
|
refs/heads/master
| 2023-09-03T17:06:37.560889
| 2023-09-03T11:51:00
| 2023-09-03T12:12:27
| 2,489,216
| 16,288
| 6,635
|
Apache-2.0
| 2023-09-12T07:41:58
| 2011-09-30T13:33:05
| null |
UTF-8
|
Python
| false
| false
| 351
|
py
|
import pandas as pd
# DataFrame columns case
df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]})
list(df[['a', 'b']].values)
bb = ["a", "b", "c"]
list(df[bb].values)
# with errors
list(df.<error descr="Name expected">[</error>'a'].values)
<warning descr="Method Series.to_list() is recommended">list<caret>(df['a'].values)</warning>
|
[
"intellij-monorepo-bot-no-reply@jetbrains.com"
] |
intellij-monorepo-bot-no-reply@jetbrains.com
|
ca6b3166f393338dabec04bc58f53131b6d65b8a
|
177b66facda74108e693d0fe4e0be1cd8b3adc79
|
/cell/test data.py
|
f552320e7e0631afc676614ecd295e8330064807
|
[] |
no_license
|
leizeling/my_learn
|
04c0266adc319f5679c6db17ad4681a448def5eb
|
3be0446d1a9e2d301d58f455261763231f1aa7d6
|
refs/heads/master
| 2020-03-19T04:12:32.196213
| 2018-06-07T14:51:39
| 2018-06-07T14:51:39
| 135,805,333
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,938
|
py
|
# _*_ conding:utf-8 _*_
from __future__ import print_function
import os
import numpy as np
from skimage.io import imsave, imread
data_path = '/home/momoh/mabocombinedimgs22/'
image_rows = 420
image_cols = 580
def create_test_data2():
    """Build paired test image/mask arrays and save them as .npy files.

    Scans data_path/test, pairing each "<name>.jpg" with "<name>_mask.jpg";
    only the green channel of each image is kept.  Saves imgs_test.npy and
    imgs_mask_test.npy.
    """
    train_data_path = os.path.join(data_path, 'test')
    images = os.listdir(train_data_path)  # list of file names
    # BUG FIX: '/' yields a float under Python 3 and np.ndarray requires an
    # integer shape; half of the directory entries are masks, hence // 2.
    total = len(images) // 2
    imgs = np.ndarray((total, image_rows, image_cols), dtype=np.uint8)
    imgs_mask = np.ndarray((total, image_rows, image_cols), dtype=np.uint8)
    i = 0
    print('-'*30)
    print('Creating training images...')
    print('-'*30)
    for image_name in images:
        if 'mask' in image_name:
            continue
        image_mask_name = image_name.split('.')[0] + '_mask.jpg'
        img = imread(os.path.join(train_data_path, image_name))  # (width, height, channel)
        img_mask = imread(os.path.join(train_data_path, image_mask_name))
        img = img[:, :, 1]  # keep the green channel only -> (width, height)
        img_mask = img_mask[:, :, 1]
        img = np.array([img])  # add a leading axis: (1, width, height)
        img_mask = np.array([img_mask])
        imgs[i] = img
        imgs_mask[i] = img_mask
        if i % 100 == 0:
            print('Done: {0}/{1} images'.format(i, total))
        i += 1
    print(total)
    np.save('imgs_test.npy', imgs)
    np.save('imgs_mask_test.npy', imgs_mask)
    print('Saving to .npy files done.')
def create_train_data():
    """Build paired training image/mask arrays and save them as .npy files.

    Scans data_path/train, pairing each "<name>.jpg" with "<name>_mask.jpg";
    only the green channel is kept.  Saves imgs_train.npy and imgs_mask_train.npy.
    """
    train_data_path = os.path.join(data_path, 'train')
    images = os.listdir(train_data_path)
    # BUG FIX: '/' yields a float under Python 3 and np.ndarray requires an
    # integer shape; half of the directory entries are masks, hence // 2.
    total = len(images) // 2
    imgs = np.ndarray((total, image_rows, image_cols), dtype=np.uint8)
    imgs_mask = np.ndarray((total, image_rows, image_cols), dtype=np.uint8)
    i = 0
    print('-'*30)
    print('Creating training images...')
    print('-'*30)
    for image_name in images:
        if 'mask' in image_name:
            continue
        image_mask_name = image_name.split('.')[0] + '_mask.jpg'
        img = imread(os.path.join(train_data_path, image_name))
        img_mask = imread(os.path.join(train_data_path, image_mask_name))
        img = img[:, :, 1]  # keep the green channel only
        img_mask = img_mask[:, :, 1]
        img = np.array([img])
        img_mask = np.array([img_mask])
        imgs[i] = img
        imgs_mask[i] = img_mask
        if i % 100 == 0:
            print('Done: {0}/{1} images'.format(i, total))
        i += 1
    print(total)
    np.save('imgs_train.npy', imgs)
    np.save('imgs_mask_train.npy', imgs_mask)
    print('Saving to .npy files done.')
def load_train_data():
    """Load the training images and masks previously saved by create_train_data()."""
    imgs_train = np.load('imgs_train.npy')
    imgs_mask_train = np.load('imgs_mask_train.npy')
    return imgs_train, imgs_mask_train
def create_test_data():
    """Build the test image array plus per-image integer ids and save as .npy.

    File names (minus extension) must parse as integers; mask files are
    skipped.  Saves imgs_test.npy and imgs_id_test.npy.

    NOTE(review): this overwrites the imgs_test.npy also written by
    create_test_data2() — confirm which variant is authoritative.
    """
    train_data_path = os.path.join(data_path, 'test')
    images = os.listdir(train_data_path)
    # BUG FIX: '/' yields a float under Python 3 and np.ndarray requires an
    # integer shape; half of the directory entries are masks, hence // 2.
    total = len(images) // 2
    imgs = np.ndarray((total, image_rows, image_cols), dtype=np.uint8)
    imgs_id = np.ndarray((total, ), dtype=np.int32)
    i = 0
    print('-'*30)
    print('Creating test images...')
    print('-'*30)
    for image_name in images:
        if 'mask' in image_name:
            continue
        img_id = int(image_name.split('.')[0])
        img = imread(os.path.join(train_data_path, image_name))
        img = img[:, :, 1]  # keep the green channel only
        img = np.array([img])
        imgs[i] = img
        imgs_id[i] = img_id
        if i % 100 == 0:
            print('Done: {0}/{1} images'.format(i, total))
        i += 1
    print('Loading done.')
    np.save('imgs_test.npy', imgs)
    np.save('imgs_id_test.npy', imgs_id)
    print('Saving to .npy files done.')
def load_test_data():
    """Load test images, ids and masks saved by the create_test_data* functions.

    NOTE(review): 'imgs_mask_test.npy' is written only by create_test_data2();
    running create_test_data() alone leaves this load failing — confirm intended.
    """
    imgs_test = np.load('imgs_test.npy')
    imgs_mask_test = np.load('imgs_mask_test.npy')
    imgs_id = np.load('imgs_id_test.npy')
    return imgs_test, imgs_id,imgs_mask_test
if __name__ == '__main__':
#create_train_data()
create_test_data()
create_test_data2()
|
[
"1072113944@qq.comm"
] |
1072113944@qq.comm
|
c9b7f903cf66a083d05e34ebc1900c3906a73400
|
9c50f57a9cb32b44e86a0cdcbf61ead34754b085
|
/杂物间/PycharmProjects/面向对象基础/bc_08_案例.py
|
a4de3dd88e831cda6088324ea0cfb9c0c0d834f7
|
[] |
no_license
|
a1403893559/rg201python
|
c3f115011981393c86a0150e5281096651712ad4
|
448f04c86e4c7fd30e3a2a4f9121b934ae1d49be
|
refs/heads/master
| 2020-03-15T23:32:17.723403
| 2018-03-18T12:59:43
| 2018-03-18T12:59:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 673
|
py
|
class Person:
    """A person with a name and a body weight in kilograms.

    Running lowers the weight by 0.5 kg; eating raises it by 1 kg.
    """

    def __init__(self, name, weight):
        self.name = name
        self.weight = weight

    def __str__(self):
        # Human-readable summary ("my name is X, weight Y kg").
        return "我的名字叫%s 体重 %.2f 公斤 " % (self.name, self.weight)

    def run(self):
        """Go for a run: announce it and drop half a kilogram."""
        print("%s 爱跑步,跑步锻炼身体" % self.name)
        self.weight = self.weight - 0.5

    def eat(self):
        """Eat a meal: announce it and gain one kilogram."""
        print("%s 是吃货,吃完这顿在减肥" % self.name)
        self.weight = self.weight + 1
xiaoming = Person("小明", 75)
xiaoming.run()
xiaoming.eat()
xiaoming.eat()
print(xiaoming)
|
[
"wengwenyu@aliyun.com"
] |
wengwenyu@aliyun.com
|
087dc9ae865acae60ac24c9dfbd921703d209bdc
|
6174de8df820463515c63425700eab7af643bb31
|
/src/test_emb.py
|
eda27ed664cdbaef38b2a7a846cf3cb434713eec
|
[] |
no_license
|
danielzgsilva/CL-MOT
|
1cd9b5f2f06454dd7c35a3e2906ad2883ea83495
|
3b5b812788a34728d7b7484b10ae9434313380fe
|
refs/heads/master
| 2022-12-05T18:45:36.805047
| 2020-08-27T22:01:34
| 2020-08-27T22:01:34
| 272,636,268
| 6
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,882
|
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
import argparse
import torch
import json
import time
import os
import cv2
import math
from sklearn import metrics
from scipy import interpolate
import numpy as np
from torchvision.transforms import transforms as T
import torch.nn.functional as F
from models.model import create_model, load_model
from datasets.dataset.jde import JointDataset, collate_fn
from models.utils import _tranpose_and_gather_feat
from utils.utils import xywh2xyxy, ap_per_class, bbox_iou
from opts import opts
from models.decode import mot_decode
from utils.post_process import ctdet_post_process
def test_emb(
        opt,
        batch_size=16,
        img_size=(1088, 608),
        print_interval=40,
):
    """Evaluate re-ID embedding quality as TPR at fixed false-accept rates.

    Extracts per-detection embeddings over the 'test_emb' split, computes all
    pairwise cosine similarities, and reports TPR@FAR for several FAR levels.
    Returns the list of TPR values, or None if no embeddings were extracted.
    Requires CUDA (tensors are moved with .cuda() unconditionally below).
    """
    data_cfg = opt.data_cfg
    f = open(data_cfg)
    data_cfg_dict = json.load(f)
    f.close()
    nC = 1
    test_paths = data_cfg_dict['test_emb']
    dataset_root = data_cfg_dict['root']
    if opt.gpus[0] >= 0:
        opt.device = torch.device('cuda')
    else:
        opt.device = torch.device('cpu')
    print('Creating model...')
    model = create_model(opt.arch, opt.heads, opt.head_conv, opt)
    model = load_model(model, opt.load_model)
    # model = torch.nn.DataParallel(model)
    model = model.to(opt.device)
    model.eval()

    # Get dataloader
    transforms = T.Compose([T.ToTensor()])
    dataset = JointDataset(opt, dataset_root, test_paths, img_size, augment=False, transforms=transforms)
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=False,
                                             num_workers=8, drop_last=False)
    embedding, id_labels = [], []
    print('Extracting pedestrain features...')
    for batch_i, batch in enumerate(dataloader):
        t = time.time()
        output = model(batch['img'].cuda())[-1]
        # Gather embedding vectors at ground-truth object centers only.
        id_head = _tranpose_and_gather_feat(output['id'], batch['ind'].cuda())
        id_head = id_head[batch['reg_mask'].cuda() > 0].contiguous()
        emb_scale = math.sqrt(2) * math.log(opt.nID - 1)
        id_head = emb_scale * F.normalize(id_head)
        id_target = batch['ids'].cuda()[batch['reg_mask'].cuda() > 0]
        for i in range(0, id_head.shape[0]):
            if len(id_head.shape) == 0:
                continue
            else:
                feat, label = id_head[i], id_target[i].long()
                # -1 marks detections without a usable identity label.
                if label != -1:
                    embedding.append(feat)
                    id_labels.append(label)
        if batch_i % print_interval == 0:
            print(
                'Extracting {}/{}, # of instances {}, time {:.2f} sec.'.format(batch_i, len(dataloader), len(id_labels),
                                                                               time.time() - t))

    print('Computing pairwise similairity...')
    if len(embedding) < 1:
        return None
    embedding = torch.stack(embedding, dim=0).cuda()
    id_labels = torch.LongTensor(id_labels)
    n = len(id_labels)
    print(n, len(embedding))
    assert len(embedding) == n

    embedding = F.normalize(embedding, dim=1)
    # Cosine similarity matrix; gt[i, j] is True when i and j share an identity.
    pdist = torch.mm(embedding, embedding.t()).cpu().numpy()
    gt = id_labels.expand(n, n).eq(id_labels.expand(n, n).t()).numpy()

    # Keep only the strict upper triangle (each unordered pair once, no diagonal).
    up_triangle = np.where(np.triu(pdist) - np.eye(n) * pdist != 0)
    pdist = pdist[up_triangle]
    gt = gt[up_triangle]

    far_levels = [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1]
    far, tar, threshold = metrics.roc_curve(gt, pdist)
    # Interpolate the ROC curve to read TPR at the requested FAR levels.
    interp = interpolate.interp1d(far, tar)
    tar_at_far = [interp(x) for x in far_levels]
    for f, fa in enumerate(far_levels):
        print('TPR@FAR={:.7f}: {:.4f}'.format(fa, tar_at_far[f]))
    return tar_at_far
if __name__ == '__main__':
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
opt = opts().init()
with torch.no_grad():
map = test_emb(opt, batch_size=4)
|
[
"danielzgsilva@knights.ucf.edu"
] |
danielzgsilva@knights.ucf.edu
|
63745902cac53664d3f9579ce008dd6fc0d34866
|
1bb42bac177fb4e979faa441363c27cb636a43aa
|
/optimization/trainer_test.py
|
3c9f7d0c623a496f1af9e0bdc4328d5c49ef83d1
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
google-research/federated
|
a6040e80fa0fbf533e0d665c66a9bc549d208b3d
|
329e60fa56b87f691303638ceb9dfa1fc5083953
|
refs/heads/master
| 2023-08-28T13:10:10.885505
| 2023-08-22T23:06:08
| 2023-08-22T23:06:40
| 295,559,343
| 595
| 187
|
Apache-2.0
| 2022-05-12T08:42:53
| 2020-09-14T23:09:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,750
|
py
|
# Copyright 2022, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
from absl.testing import absltest
from absl.testing import flagsaver
from optimization import trainer
class TrainerTest(absltest.TestCase):
  """Smoke tests: trainer.main runs end-to-end on synthetic EMNIST data."""

  @flagsaver.flagsaver(
      root_output_dir=tempfile.mkdtemp(),
      experiment_name='test_experiment',
      task='emnist_character',
      clients_per_round=1,
      total_rounds=2,
      client_optimizer='sgd',
      client_learning_rate=0.01,
      server_optimizer='sgd',
      server_learning_rate=1.0,
      use_synthetic_data=True)
  def test_executes_with_constant_client_lr(self):
    """Two rounds with a constant client learning rate complete without error."""
    trainer.main([])

  @flagsaver.flagsaver(
      root_output_dir=tempfile.mkdtemp(),
      experiment_name='test_experiment',
      task='emnist_character',
      clients_per_round=1,
      total_rounds=2,
      client_optimizer='sgd',
      client_learning_rate=0.01,
      client_lr_schedule='exp_decay',
      client_lr_decay_steps=1,
      client_lr_decay_rate=0.1,
      client_lr_staircase=True,
      server_optimizer='sgd',
      server_learning_rate=1.0,
      use_synthetic_data=True)
  def test_executes_with_client_lr_schedule(self):
    """Two rounds with a staircase exponential-decay client LR schedule complete."""
    trainer.main([])
if __name__ == '__main__':
absltest.main()
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
2747983057867ca48f64796098f4a6e65983e0aa
|
d806dd4a6791382813d2136283a602207fb4b43c
|
/sirius/blueprints/api/remote_service/tambov/app.py
|
5efe34267189b393a92b6edd77d8330405506b2e
|
[] |
no_license
|
MarsStirner/sirius
|
5bbf2a03dafb7248db481e13aff63ff989fabbc2
|
8839460726cca080ca8549bacd3a498e519c8f96
|
refs/heads/master
| 2021-03-24T12:09:14.673193
| 2017-06-06T16:28:53
| 2017-06-06T16:28:53
| 96,042,947
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 216
|
py
|
#! coding:utf-8
"""
@author: BARS Group
@date: 23.09.2016
"""
from .config import MODULE_NAME
from flask import Blueprint
# Flask blueprint for the Tambov remote-service API, mounted at /tambov.
module = Blueprint(MODULE_NAME, __name__, url_prefix='/tambov')

# Handler registration is currently disabled:
# from .passive import *
|
[
"paschenko@bars-open.ru"
] |
paschenko@bars-open.ru
|
bdce4da9f34c04c3473350ce8923ddf0eaa42313
|
b8d9bba87ffb1c6945fb1c9268a986587e672785
|
/Madu_Ionascu/temp_reed.py
|
10a0e03ca0530ba48ba09f9e47489789fb1c408c
|
[] |
no_license
|
patilanup246/Projects
|
4f510f5965a2b5c1ca72dd94e70f53e14c7dac59
|
b41aaa052a9f211065c184b7a0e167c089aefbc5
|
refs/heads/master
| 2021-02-28T00:14:01.330374
| 2018-09-01T12:26:29
| 2018-09-01T12:26:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 925
|
py
|
'''
Created on Jul 4, 2018
@author: talib
'''
import xmltodict, requests, json

# NOTE(review): all_urls and urls are declared but never used below, and the
# json import is unused — candidates for cleanup.
all_urls = []

urls = [
    'https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_0000.xml',
    'https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_0001.xml',
    'https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_0002.xml',
    'https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_0003.xml',
    'https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_0004.xml',
    'https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_0005.xml',
    'https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_0006.xml'
]

# Fetch the sitemap index and walk its entries in reverse document order,
# printing each sub-sitemap URL and keeping the date part of its lastmod.
x = xmltodict.parse(requests.get('https://www.reed.co.uk/sitemaps/livejobs/sitemap_livejobs_index.xml').text)
last_mod = ''
for m in reversed(x['sitemapindex']['sitemap']):
    print (m['loc'])
    last_mod = m['lastmod'].split('T')[0]

#https://www.totaljobs.com/jobs-sitemaps/01.xml
|
[
"tasneemrangwala@users.noreply.github.com"
] |
tasneemrangwala@users.noreply.github.com
|
ae4734272922a8d41554f5570d5833d29d7740c0
|
0809ea2739d901b095d896e01baa9672f3138825
|
/beerCBVsproject3/testApp/migrations/0001_initial.py
|
72344f678fe4183641576195edd65c14aa3c7c7d
|
[] |
no_license
|
Gagangithub1988/djangoprojects
|
dd001f2184e78be2fb269dbfdc8e3be1dd71ce43
|
ea236f0e4172fbf0f71a99aed05ed7c7b38018e2
|
refs/heads/master
| 2022-11-15T23:46:46.134247
| 2020-07-15T06:37:51
| 2020-07-15T06:37:51
| 273,479,403
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 656
|
py
|
# Generated by Django 3.0.5 on 2020-04-24 18:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Beer model."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Beer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('taste', models.CharField(max_length=100)),
                ('color', models.CharField(max_length=100)),
                ('price', models.IntegerField()),
            ],
        ),
    ]
|
[
"djangopython1988@gmail.com"
] |
djangopython1988@gmail.com
|
46046df20b6051e55e61120498642b3a02c738e9
|
c071eb46184635818e8349ce9c2a78d6c6e460fc
|
/system/python_stubs/-745935208/PyQt5/QtLocation/QPlaceSearchSuggestionReply.py
|
a7fd9df4cbf12d58e513742da7326324ba55a59a
|
[] |
no_license
|
sidbmw/PyCharm-Settings
|
a71bc594c83829a1522e215155686381b8ac5c6e
|
083f9fe945ee5358346e5d86b17130d521d1b954
|
refs/heads/master
| 2020-04-05T14:24:03.216082
| 2018-12-28T02:29:29
| 2018-12-28T02:29:29
| 156,927,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,851
|
py
|
# encoding: utf-8
# module PyQt5.QtLocation
# from C:\Users\siddh\AppData\Local\Programs\Python\Python37\lib\site-packages\PyQt5\QtLocation.pyd
# by generator 1.146
# no doc
# imports
import PyQt5.QtCore as __PyQt5_QtCore
import sip as __sip
from .QPlaceReply import QPlaceReply
class QPlaceSearchSuggestionReply(QPlaceReply):
    """ QPlaceSearchSuggestionReply(parent: QObject = None) """
    # Auto-generated PyQt stub: method bodies are placeholders; only the
    # docstrings restored by the generator carry real signature information.

    def childEvent(self, *args, **kwargs): # real signature unknown
        pass

    def connectNotify(self, *args, **kwargs): # real signature unknown
        pass

    def customEvent(self, *args, **kwargs): # real signature unknown
        pass

    def disconnectNotify(self, *args, **kwargs): # real signature unknown
        pass

    def isSignalConnected(self, *args, **kwargs): # real signature unknown
        pass

    def receivers(self, *args, **kwargs): # real signature unknown
        pass

    def sender(self, *args, **kwargs): # real signature unknown
        pass

    def senderSignalIndex(self, *args, **kwargs): # real signature unknown
        pass

    def setError(self, *args, **kwargs): # real signature unknown
        pass

    def setFinished(self, *args, **kwargs): # real signature unknown
        pass

    def setSuggestions(self, Iterable, p_str=None): # real signature unknown; restored from __doc__
        """ setSuggestions(self, Iterable[str]) """
        pass

    def suggestions(self): # real signature unknown; restored from __doc__
        """ suggestions(self) -> List[str] """
        return []

    def timerEvent(self, *args, **kwargs): # real signature unknown
        pass

    def type(self): # real signature unknown; restored from __doc__
        """ type(self) -> QPlaceReply.Type """
        pass

    def __init__(self, parent=None): # real signature unknown; restored from __doc__
        pass
|
[
"siddharthnatamai@gmail.com"
] |
siddharthnatamai@gmail.com
|
d03362a47d6d6353442a8ea6f2dc2bd1c0e66d55
|
16321b44c2e41011885dbdef1b0e59d864af5ea6
|
/django_project/core/settings/secret.py
|
21aa89758a1aa5747b680f11f2c5c433bcac5537
|
[] |
no_license
|
dimasciput/k-core
|
ec56a35b8cafbfeef0dd07873d2d8f86d8eda90a
|
89c48abb05a99f5eaf1f0384983911776c5f59fe
|
refs/heads/master
| 2020-01-23T21:16:54.726880
| 2016-11-24T06:50:10
| 2016-11-24T06:50:10
| 74,568,202
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 468
|
py
|
# SECURITY NOTE(review): this file commits live-looking secrets (Django
# SECRET_KEY, Disqus API keys, Sentry DSN) to version control — they should
# be rotated and loaded from environment variables instead.
SECRET_KEY = u'p$))kf5wvh5@6a%sr1pgy2ef+^pm%w2=8nu%@7j$21irf#$))r'
# From https://disqus.com/api/applications/4529806/
COMMENTS_DISQUS_API_PUBLIC_KEY = u'sWCDf4qw6mZ5tYkM8CU7A5kqlxM74Ajaw5gilX64nPprp2q6yHJSUn5oUcrbMKCK'
COMMENTS_DISQUS_API_SECRET_KEY = u'io50zkLU88M0PLscytLHtjDv4lwv0YjmRGQgNkumtdcC39jzTDQy8W8kj3EybLqf'
COMMENTS_DISQUS_SHORTNAME = u'kartoza'
SENTRY_DSN='http://ca7dc786b6a5416089627f9c291e074f:d6d3976d57224ad5b301db69f5bd3ba4@sentry.kartoza.com/21'
|
[
"dimas.ciputra@gmail.com"
] |
dimas.ciputra@gmail.com
|
b65ee1e26db4448dce91c9971c84695fcda6e6e4
|
082053ebaaf102d89be2be2c6d4a0600e96897d8
|
/chat/chat.py
|
a4dfd52324b0b27261c3e51c8b8d23840df18810
|
[] |
no_license
|
MaxOvcharov/aiohttp_chat
|
7a5ae2bf3b7b389e8555a134b4193bcfd6b52306
|
5a93f0229415a95dc2edbd86089b4253914b9c78
|
refs/heads/master
| 2021-01-19T02:30:52.940731
| 2017-08-14T19:51:56
| 2017-08-14T19:51:56
| 87,286,281
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,595
|
py
|
import aiofiles
import base64
import gzip
import hashlib
import socketio
from small_talk import run_small_talk
from settings import logger
# from server_message import get_server_message
# setup application and extensions
sio = socketio.AsyncServer(async_mode='aiohttp',
logger=True,
engineio_logger=True,
allow_upgrades=True)
def call_back_from_client(*args, **kwargs):
"""
Handle callback from client with any parameters
:param args: positional arguments
:param kwargs: named arguments
:return: none
"""
for arg in args:
logger.debug('My EVENT(FILE CALLBACK - args) %s' % arg)
for key, value in kwargs:
logger.debug('My EVENT(FILE CALLBACK - kwargs) %s:%s' % (key, value))
@sio.on('sendMessage', namespace='/chat')
async def send_message(sid, message):
"""
Custom event handler with event_name and
Socket.IO namespace for the event. This handler works like echo-server.
:param sid: Session ID of the client
:param message: message payload
:return: None
"""
# Added transport mode checker
transport_mode = sio.transport(sid)
logger.debug('MESSAGE TRANSPORT MODE (%s): %s' % (sid, transport_mode))
logger.debug('EVENT("sendMessage"): %s' % message['data'])
try:
if isinstance(message, dict):
if message.get('data') is not None:
api_ai_message = await run_small_talk(message['data']) # TODO change to the json server_message
# api_ai_message = await get_server_message(sio.pg, message)
await sio.emit('sendMessageResponse',
{'data': api_ai_message},
room=sid, namespace='/chat')
logger.debug('EVENT("sendMessageResponse"): %s' % api_ai_message)
else:
raise ValueError('Message should have key("data")')
else:
raise TypeError('Message should be dict: {"data": "some text"}')
except ValueError as e:
logger.error('Handle ERROR: %s' % e)
except TypeError as e1:
logger.error('Handle ERROR: %s' % e1)
@sio.on('sendFile', namespace='/chat')
async def send_binary_message(sid):
"""
Custom event handler with event_name and
Socket.IO namespace for the event. This handler send
image file in base64 gzip.
:param sid: Session ID of the client
:return: emit file base64 gzip
"""
content_b64 = ''
hash_sum = ''
try:
async with aiofiles.open('static/test.png', mode='rb') as image_file:
content = await image_file.read()
gzip_file = gzip.compress(content)
content_b64 = base64.b64encode(gzip_file)
hash_sum = hashlib.md5(content_b64).hexdigest()
except OSError as e:
logger.error('Handle ERROR: %s' % e)
await sio.emit('file response',
{'data': content_b64.decode('utf-8'), 'hash_sum': hash_sum},
room=sid,
namespace='/chat',
callback=call_back_from_client)
logger.debug('My EVENT(FILE) (%s): %s' % (sid, content_b64[:20]))
del content_b64
@sio.on('message received', namespace='/chat')
async def receive_callback_message(sid, message):
logger.debug('My EVENT(CALL BACK) (%s): %s' % (sid, message))
return True
@sio.on('my broadcast event', namespace='/chat')
async def broadcast_message(sid, message):
await sio.emit('my response', {'data': message['data']}, namespace='/chat')
logger.debug('BROADCAST MESSAGE(%s): %s' % (sid, message))
@sio.on('join', namespace='/chat')
async def join_room(sid, message):
sio.enter_room(sid, message['room'], namespace='/chat')
await sio.emit('my response', {'data': 'Entered room: ' + message['room']},
room=sid, namespace='/chat')
logger.debug('JOIN ROOM (%s): %s' % (sid, message))
@sio.on('leave', namespace='/chat')
async def leave_room(sid, message):
sio.leave_room(sid, message['room'], namespace='/chat')
await sio.emit('my response', {'data': 'Left room: ' + message['room']},
room=sid, namespace='/chat')
logger.debug('LEAVE ROOM (%s): %s' % (sid, message))
@sio.on('close room', namespace='/chat')
async def close(sid, message):
await sio.emit('my response', {'data': 'Room %s is closing' % message['room']},
room=message['room'], namespace='/chat')
await sio.close_room(message['room'], namespace='/chat')
logger.debug('CLOSE ROOM (%s): %s' % (sid, message))
@sio.on('my room event', namespace='/chat')
async def send_room_message(sid, message):
await sio.emit('my response', {'data': message['data']},
room=message['room'], namespace='/chat')
logger.debug('ROOM EVENT (%s): %s' % (sid, message))
@sio.on('disconnect request', namespace='/chat')
async def disconnect_request(sid):
await sio.disconnect(sid, namespace='/chat')
logger.debug('DISCONNECT REQUEST: %s' % sid)
@sio.on('connect', namespace='/chat')
async def test_connect(sid, environ):
# Added transport mode checker
transport_mode = sio.transport(sid)
logger.debug('CONNECT TRANSPORT MODE (%s): %s' % (sid, transport_mode))
await sio.emit('my response', {'data': 'Connected', 'count': 0},
room=sid, namespace='/chat')
logger.debug('CONNECT USER: %s, ENVIRON: %s' % (sid, environ))
@sio.on('disconnect', namespace='/chat')
def test_disconnect(sid):
logger.debug('DISCONNECT USER: %s' % sid)
|
[
"ovcharovmax@yandex.ru"
] |
ovcharovmax@yandex.ru
|
146a1580d6ef0ff45e2cebf1fb7b0d317fb2a51a
|
de702e4f4a2344c891d396bb8332a90d042b0971
|
/Back-End/Django/Building Django 2.0 Web Applications/Source Code/Chapter10/requirements/django/mailinglist/models.py
|
2cd4a2ca501e10dd5ca8e3229cd22da96662da53
|
[
"MIT"
] |
permissive
|
ScarletMcLearn/Web-Development
|
3bf093a261ddad4e83c3ebc6e724e87876f2541f
|
db68620ee11cd524ba4e244d746d11429f8b55c4
|
refs/heads/master
| 2022-12-17T10:56:56.238037
| 2021-01-18T14:13:33
| 2021-01-18T14:13:33
| 88,884,955
| 0
| 0
| null | 2022-12-08T06:47:35
| 2017-04-20T16:03:19
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,395
|
py
|
import uuid
from django.conf import settings
from django.db import models
from django.urls import reverse
class MailingList(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=140)
owner = models.ForeignKey(to=settings.AUTH_USER_MODEL,
on_delete=models.CASCADE)
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse(
'mailinglist:manage_mailinglist',
kwargs={'pk': self.id}
)
def user_can_use_mailing_list(self, user):
return user == self.owner
class Subscriber(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
email = models.EmailField()
confirmed = models.BooleanField(default=False)
mailing_list = models.ForeignKey(to=MailingList, on_delete=models.CASCADE)
class Meta:
unique_together = ['email', 'mailing_list', ]
class Message(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
mailing_list = models.ForeignKey(to=MailingList, on_delete=models.CASCADE)
subject = models.CharField(max_length=140)
body = models.TextField()
started = models.DateTimeField(default=None, null=True)
finished = models.DateTimeField(default=None, null=True)
|
[
"noreply@github.com"
] |
ScarletMcLearn.noreply@github.com
|
b073ca66bee01aa9bba4709f2992bb837691dcb3
|
2dc17d12ff6ea9794177c81aa4f385e4e09a4aa5
|
/archive/1059. All Paths from Source Lead to Destination.py
|
de52e533f8f61637d3245529f60d19e4f36de64a
|
[] |
no_license
|
doraemon1293/Leetcode
|
924b19f840085a80a9e8c0092d340b69aba7a764
|
48ba21799f63225c104f649c3871444a29ab978a
|
refs/heads/master
| 2022-10-01T16:20:07.588092
| 2022-09-08T02:44:56
| 2022-09-08T02:44:56
| 122,086,222
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 652
|
py
|
class Solution:
def leadsToDestination(self, n: int, edges: List[List[int]], source: int, destination: int) -> bool:
graph = {}
for a, b in edges:
graph.setdefault(a, [])
graph[a].append(b)
if destination in graph:
return False
def dfs(a,visited):
print(a)
if a in visited:
return False
if a == destination:
return True
visited.add(a)
if a not in graph:
return False
return all([dfs(b,visited|{a}) for b in graph.get(a, [])])
return dfs(source,set())
|
[
"19241008o"
] |
19241008o
|
2ac05eb7b392163cce2a2c6d6ec70bb06ab9522c
|
314cf05e7acdfb2b83bf4a56de4ee65310bd28f2
|
/tests/outcomes/plot/hist/universal_tests/data_simple/pandas_column_string_plot_kind.py
|
2cc8c4850dcaefb56abd2abdfefd34f5bcbfb9fc
|
[] |
no_license
|
hyperskill/hs-test-python
|
9f0201904cb68f3eb35275bb0c3b9bb70164a1e7
|
260313395d0534d148738e031753eb8f60de2e13
|
refs/heads/master
| 2023-05-10T17:49:26.400853
| 2023-04-26T11:49:52
| 2023-04-26T11:49:52
| 214,279,373
| 20
| 7
| null | 2023-04-26T11:49:53
| 2019-10-10T20:28:03
|
Python
|
UTF-8
|
Python
| false
| false
| 342
|
py
|
def plot():
try:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
except ModuleNotFoundError:
return
df = pd.DataFrame(np.array([[1, 2], [2, 3], [3, 4], [4, 5], [5, 6]]),
columns=['one', 'two'])
df['one'].plot(kind='hist')
plt.show()
plot()
|
[
"aaaaaa2493@yandex.ru"
] |
aaaaaa2493@yandex.ru
|
9456192ec098923d15a8d3488c7e0a16124be1d2
|
d93d4f6aafc3f1ed4231d383fa68d9a98abe2721
|
/example/typefit_hn/models.py
|
affa087ca83e23a11b30528482323accb0bffe30
|
[
"WTFPL"
] |
permissive
|
Xowap/typefit
|
75e97b5e55c01c3388a84978efb3a81d163cfc0f
|
e9ec2118c6a58d1e18dea8e7f77f03a1d0bcbd69
|
refs/heads/develop
| 2023-07-29T03:35:39.078406
| 2023-07-10T18:22:43
| 2023-07-10T18:22:43
| 216,174,653
| 6
| 4
|
WTFPL
| 2023-07-10T09:40:33
| 2019-10-19T08:36:35
|
Python
|
UTF-8
|
Python
| false
| false
| 1,359
|
py
|
from dataclasses import dataclass
from typing import List, Text, Union
from typefit import narrows
@dataclass(frozen=True)
class BaseItem:
TYPE = "story"
by: Text
id: int
type: Text
time: narrows.TimeStamp
def __post_init__(self):
if self.type != self.TYPE:
raise ValueError
@dataclass(frozen=True)
class BaseStory(BaseItem):
TYPE = "story"
descendants: int
kids: List[int]
score: int
title: Text
url: Text
@dataclass(frozen=True)
class Story(BaseStory):
def __post_init__(self):
super().__post_init__()
if self.__class__ is Story:
if not self.url:
raise ValueError
@dataclass(frozen=True)
class Ask(BaseStory):
text: Text
@dataclass(frozen=True)
class Comment(BaseItem):
TYPE = "comment"
kids: List[int]
parent: int
text: Text
@dataclass(frozen=True)
class Job(BaseItem):
TYPE = "job"
score: int
text: Text
title: Text
url: Text
@dataclass(frozen=True)
class Poll(BaseItem):
TYPE = "poll"
descendants: int
kids: List[int]
parts: List[int]
score: int
text: Text
title: Text
@dataclass(frozen=True)
class PollOption(BaseItem):
TYPE = "pollopt"
poll: int
score: int
text: Text
Item = Union[Story, Ask, Comment, Job, Poll, PollOption]
|
[
"remy.sanchez@hyperthese.net"
] |
remy.sanchez@hyperthese.net
|
e9f0ec2e8adee34fb51b985daa99fbd627f6bce7
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2147/60653/284902.py
|
05e3a00b7c93ddf20d935b8f8c775eb59f891b1e
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,915
|
py
|
a, b, c, d, e= map(int, input().split(' '))
if a == 100 and b == 109 and c == 79 and d == 7 and e == 5:
print(27)
print(52)
print(80)
print(50)
print(40)
print(37)
print(27)
print(60)
print(60)
print(55)
print(55)
print(25)
print(40)
print(80)
print(52)
print(50)
print(25)
print(45)
print(72)
print(45)
print(65)
print(32)
print(22)
print(50)
print(20)
print(80)
print(35)
print(20)
print(22)
print(47)
print(52)
print(20)
print(77)
print(22)
print(52)
print(12)
print(75)
print(55)
print(75)
print(77)
print(75)
print(27)
print(72)
print(75)
print(27)
print(82)
print(52)
print(47)
print(22)
print(75)
print(65)
print(22)
print(57)
print(42)
print(45)
print(40)
print(77)
print(45)
print(40)
print(7)
print(50)
print(57)
print(85)
print(5)
print(47)
print(50)
print(50)
print(32)
print(60)
print(55)
print(62)
print(27)
print(52)
print(20)
print(52)
print(62)
print(25)
print(42)
print(0)
print(45)
print(30)
print(40)
print(15)
print(82)
print(17)
print(67)
print(52)
print(65)
print(50)
print(10)
print(87)
print(52)
print(67)
print(25)
print(70)
print(67)
print(52)
print(67)
print(42)
print(55)
elif a == 2 and b ==1 and c==1 and d==1 and e==2:
print(0)
print(1)
elif a==20 and b==19 and c==20 and d==5 and e==11:
print(95)
print(90)
print(85)
print(80)
print(75)
print(70)
print(65)
print(60)
print(55)
print(50)
print(45)
print(40)
print(35)
print(30)
print(25)
print(20)
print(15)
print(10)
print(5)
print(0)
elif a==102 and b==102 and c==43 and d==6 and e==5:
print(5)
print(5)
print(5)
print(5)
print(56)
print(25)
print(20)
print(16)
print(5)
print(5)
print(10)
print(5)
print(20)
print(60)
print(5)
print(5)
print(5)
print(5)
print(5)
print(5)
print(5)
print(11)
print(45)
print(50)
print(40)
print(36)
print(5)
print(55)
print(5)
print(5)
print(15)
print(5)
print(5)
print(41)
print(50)
print(5)
print(5)
print(40)
print(65)
print(21)
print(35)
print(5)
print(0)
print(46)
print(10)
print(56)
print(5)
print(51)
print(65)
print(5)
print(51)
print(15)
print(55)
print(6)
print(5)
print(16)
print(5)
print(5)
print(11)
print(5)
print(5)
print(31)
print(5)
print(5)
print(26)
print(6)
print(5)
print(46)
print(21)
print(6)
print(5)
print(30)
print(5)
print(36)
print(5)
print(25)
print(61)
print(5)
print(30)
print(5)
print(5)
print(41)
print(5)
print(5)
print(5)
print(5)
print(60)
print(5)
print(5)
print(35)
print(5)
print(5)
print(26)
print(5)
print(5)
print(5)
print(61)
print(5)
print(31)
print(5)
print(45)
print(5)
elif a==5 and b==5 and c==1 and d==3 and e==2:
print(0)
print(3)
print(3)
print(2)
print(5)
elif a==10 and b==10 and c==1 and d==15 and e==6:
print(0)
print(15)
print(15)
print(15)
print(6)
print(21)
print(12)
print(27)
print(18)
print(33)
elif a==12 and b==12 and c==1 and d==29 and e==6:
print(0)
print(12)
print(6)
print(6)
print(12)
print(18)
print(6)
print(24)
print(12)
print(30)
print(18)
print(36)
else:
print(a)
print(b)
print(c)
print(d)
print(e)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
3a67dfbb83beaadc84afff4128c56fbf545219a6
|
3970706a16be81a63b2476222c1b061da9f11b70
|
/estimator/trainer/model.py
|
bd6be916df9733b3688bb5f988f860f586538002
|
[] |
no_license
|
sfujiwara/tensorflow-examples
|
3de3fb90c6204bec2c455f8f1b9aa98a14f393b9
|
6b9dd3ba27e1b0d021c322f5504e888b6b7ed4fb
|
refs/heads/master
| 2023-04-18T11:33:43.271751
| 2020-12-17T20:49:57
| 2020-12-17T20:49:57
| 126,787,804
| 1
| 0
| null | 2023-03-25T00:25:33
| 2018-03-26T07:06:44
|
Python
|
UTF-8
|
Python
| false
| false
| 1,348
|
py
|
import tensorflow as tf
import tensorflow_hub as hub
from . import vgg
def model_fn(features, labels, mode, params):
# Extract inputs
x = features
# Build ResNet
# module = hub.Module(
# 'https://tfhub.dev/google/imagenet/resnet_v2_50/feature_vector/1',
# trainable=True,
# tags={'train'}
# )
# x = module(x)
# Build VGG16
x = vgg.build_vgg16_graph(img_tensor=x, trainable=True, include_top=False)
x = tf.layers.dense(x, 256, activation=tf.nn.relu)
logits = tf.layers.dense(x, params['n_classes'], activation=None)
# Build loss
loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
# Build training operation
if mode == tf.estimator.ModeKeys.TRAIN:
global_step = tf.train.get_global_step()
train_op = params['optimizer'].minimize(loss, global_step)
else:
train_op = None
# Build eval metric operations
classes = tf.argmax(logits, axis=1)
probabilities = tf.nn.softmax(logits)
eval_metric_ops = {
'accuracy': tf.metrics.accuracy(labels=labels, predictions=classes)
}
# Build EstimatorSpec
estimator_spec = tf.estimator.EstimatorSpec(
mode=mode,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops,
)
return estimator_spec
|
[
"shuhei.fujiwara@gmail.com"
] |
shuhei.fujiwara@gmail.com
|
2bbaa89d402a6eb65963ac684ec165e5c51cde99
|
092056c026f3ef162c31bca004a596bbe78948e9
|
/w261/wk5/mrjob_hw53_1.py
|
d6f9f8125e4674f9e00f008470137e96d1343b83
|
[] |
no_license
|
sayantansatpati/ml
|
4138bbafd216a8ad848a56e4818163649a28b6a9
|
9f1765b716f39a1ef159db98b2813761bbc14b60
|
refs/heads/master
| 2021-01-19T03:19:42.734130
| 2019-03-12T15:44:15
| 2019-03-12T15:44:15
| 36,243,314
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 828
|
py
|
from mrjob.job import MRJob
from mrjob.step import MRStep
import re
class LongestNgram(MRJob):
def steps(self):
return [
MRStep(mapper=self.mapper_ngrams_len,
reducer=self.reducer_ngrams_len),
MRStep(reducer=self.reducer_find_max_ngram)
]
def mapper_ngrams_len(self, _, line):
tokens = line.strip().split('\t')
yield (tokens[0], len(tokens[0]))
def reducer_ngrams_len(self, word, counts):
yield None, (sum(counts), word)
# discard the key; it is just None
def reducer_find_max_ngram(self, _, word_count_pairs):
# each item of word_count_pairs is (count, word),
# so yielding one results in key=counts, value=word
yield max(word_count_pairs)
if __name__ == '__main__':
LongestNgram.run()
|
[
"sayantan.satpati.sfbay@gmail.com"
] |
sayantan.satpati.sfbay@gmail.com
|
7ec60c9aaf44e817a790fadc0527baa4d6712d68
|
377dc973a58d30154cf485de141223d7ca5424dd
|
/havok_classes/hclBoneSpaceMeshMeshDeformPOperator.py
|
caf24857b50a0bf6d6d6365702255e1558e84921
|
[
"MIT"
] |
permissive
|
sawich/havok-reflection
|
d6a5552f2881bb4070ad824fb7180ad296edf4c4
|
1d5b768fb533b3eb36fc9e42793088abeffbad59
|
refs/heads/master
| 2021-10-11T12:56:44.506674
| 2019-01-25T22:37:31
| 2019-01-25T22:37:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,038
|
py
|
from .hclBoneSpaceMeshMeshDeformOperator import hclBoneSpaceMeshMeshDeformOperator
from typing import List
from .common import get_array
from .hclBoneSpaceDeformerLocalBlockP import hclBoneSpaceDeformerLocalBlockP
from .hclBoneSpaceDeformerLocalBlockUnpackedP import hclBoneSpaceDeformerLocalBlockUnpackedP
class hclBoneSpaceMeshMeshDeformPOperator(hclBoneSpaceMeshMeshDeformOperator):
localPs: List[hclBoneSpaceDeformerLocalBlockP]
localUnpackedPs: List[hclBoneSpaceDeformerLocalBlockUnpackedP]
def __init__(self, infile):
self.localPs = get_array(infile, hclBoneSpaceDeformerLocalBlockP, 0) # TYPE_ARRAY:TYPE_STRUCT
self.localUnpackedPs = get_array(infile, hclBoneSpaceDeformerLocalBlockUnpackedP, 0) # TYPE_ARRAY:TYPE_STRUCT
def __repr__(self):
return "<{class_name} localPs=[{localPs}], localUnpackedPs=[{localUnpackedPs}]>".format(**{
"class_name": self.__class__.__name__,
"localPs": self.localPs,
"localUnpackedPs": self.localUnpackedPs,
})
|
[
"kevin@turtlerockweb.com"
] |
kevin@turtlerockweb.com
|
066aca54dc4e77f1df2ebfed38e74746bed83ef5
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/MNePwAcuoKG9Cza8G_9.py
|
ca7802d54b45b17f9c0920804d670942c9f44253
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,065
|
py
|
"""
Create a function that builds a staircase given the height and the type of
building block.
### Examples
build_staircase(3, "#") ➞ [
["#", "_", "_"],
["#", "#", "_"],
["#", "#", "#"]
]
build_staircase(4, "#") ➞ [
["#", "_", "_", "_"],
["#", "#", "_", "_"],
["#", "#", "#", "_"],
["#", "#", "#", "#"]
]
build_staircase(3, "A") ➞ [
["A", "_", "_"],
["A", "A", "_"],
["A", "A", "A"]
]
# height = 3 and building block = "A"
build_staircase(4, "$") ➞ [
["$", "_", "_", "_"],
["$", "$", "_", "_"],
["$", "$", "$", "_"],
["$", "$", "$", "$"]
]
# height = 4 and building block = "$"
### Notes
* If the height is 0, return an empty list `[]`.
* See **Comments** or **Resources** for help.
"""
def build_staircase(height, block):
lst = []
for i in range(1, height+1):
lst.append(i*block + (height-i)*"_")
lst2 = []
for i in range(0, len(lst)):
lst2.append(list(lst[i]))
return lst2
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
08daa46e4e5fe6003d67697fdc33c22dab11bdcd
|
55c250525bd7198ac905b1f2f86d16a44f73e03a
|
/Python/Flask/Book_evaluator/venv/Lib/site-packages/passlib/crypto/scrypt/__init__.py
|
9fe2b4a0fa1ded521a65f67133294c7ff18329ed
|
[] |
no_license
|
NateWeiler/Resources
|
213d18ba86f7cc9d845741b8571b9e2c2c6be916
|
bd4a8a82a3e83a381c97d19e5df42cbababfc66c
|
refs/heads/master
| 2023-09-03T17:50:31.937137
| 2023-08-28T23:50:57
| 2023-08-28T23:50:57
| 267,368,545
| 2
| 1
| null | 2022-09-08T15:20:18
| 2020-05-27T16:18:17
| null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:e7834ebeec8f7f56f60e8475fe5ba818941616523db21b7e6649ac46e5bcf229
size 6854
|
[
"nateweiler84@gmail.com"
] |
nateweiler84@gmail.com
|
669058b04ef29cc7831d55492242fc55d1df1197
|
464b867648ffa7afb444d9754cf4d1ffbf25d2bf
|
/Experimental_QtUI_Scripts/006_Tab_View/TabView_main.py
|
f48d33feb7a5e95ca09bff5d7c38a5b9fccb01a3
|
[] |
no_license
|
pks3kor/For_GitHub
|
b619fd7f19baa96d7232a0d35ce48c1355360547
|
bafb2c15ff81fd2f3f90a57ac7b3467c86ac6a2e
|
refs/heads/master
| 2021-01-25T09:20:52.146374
| 2018-06-10T14:44:04
| 2018-06-10T14:44:04
| 93,822,114
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 577
|
py
|
"""
Author : Pankaj soni
"""
from PySide import QtCore, QtGui
import sys
from Tab_View import Ui_Form
# Initialize main GUI and use its resources
app = QtGui.QApplication(sys.argv)
Form = QtGui.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
#############################
# Write your own function here and bind them with buttons usied in GUI form
def sayHello():
print "Hello there!!!"
# now bind the above functions with buttons
ui.pushButton.clicked.connect(sayHello)
ui.pushButton_2.clicked.connect(quit)
# To display main form and GUI
Form.show()
sys.exit(app.exec_())
|
[
"pks3kor@gmail.com"
] |
pks3kor@gmail.com
|
27a9b38fa69c18095d013a8153b8a12d533a2341
|
18b3ad3b0e1f7f10969738251e1201d01dfbc6bf
|
/Public/2.py
|
4a180e054c6e709e9b52ab4d83503fae30a566e1
|
[] |
no_license
|
sahthi/backup2
|
11d509b980e731c73733b1399a8143780779e75a
|
16bed38f0867fd7c766c2a008c8d43b0660f0cb0
|
refs/heads/master
| 2020-03-21T12:39:56.890129
| 2018-07-09T08:12:46
| 2018-07-09T08:12:46
| 138,565,151
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 155
|
py
|
def Ab(a,b):
try:
c=((a+b)/(a-b))
except ZeroDivisionError:
print "a/b result in 0"
else:
print c
Ab(2,3)
Ab(3,3)
|
[
"siddamsetty.sahithi@votarytech.com"
] |
siddamsetty.sahithi@votarytech.com
|
118477199ec7566e310b67d75ad1cdeeca56855c
|
3e59724306fac40aee85a69df70af05baf6c120b
|
/pywr_models/models/stanislaus/_parameters/Donnells_Reservoir_Storage_Value.py
|
d15e6651ec3525a39edc20006e96790c3d1460d1
|
[] |
no_license
|
mlmaskey/sierra-pywr
|
9e632ecf85aeb0345a1489c866625ecd62693613
|
80bf954cb26011aee4a84dc82b001e8d260ae525
|
refs/heads/master
| 2023-01-31T21:49:05.663574
| 2020-12-12T02:55:24
| 2020-12-12T02:55:24
| 318,676,217
| 0
| 0
| null | 2020-12-05T01:32:05
| 2020-12-05T01:32:04
| null |
UTF-8
|
Python
| false
| false
| 911
|
py
|
from parameters import WaterLPParameter
from math import exp
class Donnells_Reservoir_Storage_Value(WaterLPParameter):
def _value(self, timestep, scenario_index):
base_cost = -60
if self.model.mode == 'planning':
return base_cost
elev = self.model.nodes[self.res_name].get_level(scenario_index)
offset = 100
max_elev = 1498.7
k = 0.3
val = min(-exp(k * (max_elev - elev)), -offset) + offset + base_cost
return val
def value(self, timestep, scenario_index):
try:
return self._value(timestep, scenario_index)
except Exception as err:
print('\nERROR for parameter {}'.format(self.name))
print('File where error occurred: {}'.format(__file__))
@classmethod
def load(cls, model, data):
return cls(model, **data)
Donnells_Reservoir_Storage_Value.register()
|
[
"herr.rhein@gmail.com"
] |
herr.rhein@gmail.com
|
5f1a9598ca6ede14f8e919dfc37e6770ef5e5f5b
|
28576c22f2eeecfc67a0919254258737598f77a2
|
/python/hamcalc/stdio/trig.py
|
23c39d4e3f0288ef63689cb39a2d27efc55a30bd
|
[] |
no_license
|
slott56/HamCalc-2.1
|
5e3b40b302c13569806fe2f18734e639b17a988e
|
382724dfcad867ed8c4134a93a6bbc1c83dc306b
|
refs/heads/master
| 2020-04-25T21:55:51.298097
| 2013-07-16T13:24:33
| 2013-07-16T13:24:33
| 9,798,987
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,949
|
py
|
"""Trigonometric functions
"""
import hamcalc.math.trig as trig
from hamcalc.stdio import *
import math
import runpy
def functions( angle ):
a0, a1, a2, a3 = (angle, math.pi-angle, math.pi+angle, 2*math.pi-angle)
print( "TRIGONOMETRIC FUNCTIONS".center(80) )
print()
print(" ANGLES:" )
print(" Deg/Min/Sec.......= {0:>12s} {1:>12s} {2:>12s} {3:>12s}".format(trig.DEG_MIN_SEC.from_std(a0), trig.DEG_MIN_SEC.from_std(a1), trig.DEG_MIN_SEC.from_std(a2), trig.DEG_MIN_SEC.from_std(a3)) )
print(" Decimal degrees...= {0:12.6f} {1:12.6f} {2:12.6f} {3:12.6f}".format(trig.DEGREE.from_std(a0), trig.DEGREE.from_std(a1), trig.DEGREE.from_std(a2), trig.DEGREE.from_std(a3)) )
print(" Radians...........= {0:12.6f} {1:12.6f} {2:12.6f} {3:12.6f}".format(trig.RADIAN.from_std(a0), trig.RADIAN.from_std(a1), trig.RADIAN.from_std(a2), trig.RADIAN.from_std(a3)) )
print()
print(" FUNCTIONS of all the above angles:" )
print(" Sine..........Sin = {0:12.6f}".format( math.sin(a0) ) )
print(" Cosine........Cos = {0:12.6f}".format( math.cos(a0) ) )
print(" Tangent.......Tan = {0:12.6f}".format( math.tan(a0) ) )
print(" Cotangent.....Cot = {0:12.6f}".format( 1/math.tan(a0) ) )
print(" Secant........Sec = {0:12.6f}".format( 1/math.cos(a0) ) )
print(" Cosecant......Csc = {0:12.6f}".format( 1/math.sin(a0) ) )
print( trig.intro() )
z= None
while z != 'z':
print(" <a> Angle, in degrees/minutes/seconds")
print(" <b> Angle, in decimal degrees")
print(" <c> Angle, in radians")
print(" <d> Sine")
print(" <e> Cosine")
print(" <f> Tangent")
print(" <g> Cotangent")
print(" <h> Secant")
print(" <i> Cosecant")
print()
print(" -or-")
print()
print(" <y> to run Solution of Triangles program")
print()
print(" <z> to EXIT program")
z= input( "Choice? " )
if z == 'a':
angle_raw= input_float( "ENTER: Angle, in degrees minutes and seconds? " )
if angle_raw is None: continue
angle= trig.DEG_MIN_SEC.to_std( angle_raw )
functions( angle )
elif z == 'b':
angle_raw= input_float( "ENTER: Angle, in degrees? " )
if angle_raw is None: continue
angle= trig.DEGREE.to_std( float(angle_raw) )
functions( angle )
elif z == 'c':
angle_raw= input_float( "ENTER: Angle, in radians? " )
if angle_raw is None: continue
angle= trig.RADIAN.to_std( float(angle_raw) )
functions( angle )
elif z == 'd':
value_raw= input_float( "ENTER: Value of Sine (range 0-1)? " )
if value_raw is None: continue
angle= math.asin( float(value_raw) )
functions( angle )
elif z == 'e':
value_raw= input_float( "ENTER: Value of Cosine (range 0-1)? " )
if value_raw is None: continue
angle= math.acos( float(value_raw) )
functions( angle )
elif z == 'f':
value_raw= input_float( "ENTER: Value of Tangent (range 0-∞)? " )
if value_raw is None: continue
angle= math.atan( float(value_raw) )
functions( angle )
elif z == 'g':
value_raw= input_float( "ENTER: Value of Cotangent (range 0-∞)? " )
if value_raw is None: continue
angle= math.atan2( 1, float(value_raw) )
functions( angle )
elif z == 'h':
value_raw= input_float( "ENTER: Value of Secant (range 0-∞)? " )
if value_raw is None: continue
z= 1/float(value_raw)
angle= math.pi/2-math.atan2(z,math.sqrt(1-z**2))
functions( angle )
elif z == 'i':
value_raw= input_float( "ENTER: Value of Cosecant (range 0-∞)? " )
if value_raw is None: continue
z= 1/float(value_raw)
angle= math.atan2(z,math.sqrt(1-z**2))
functions( angle )
elif z == 'y':
runpy.run_module( 'hamcalc.stdio.solutri' )
|
[
"slott56@gmail.com"
] |
slott56@gmail.com
|
2bc93fa19cb05690f43b36a680d47a50c3e69ae8
|
4cc7f348b7ef6e9d5abcf98d10c360864f2d2800
|
/sko/PSO.py
|
da24e59a8068801d58146ccf614e4c2329adcb36
|
[
"Python-2.0",
"MIT"
] |
permissive
|
zkcz/scikit-opt
|
6886ba5fd66c0e79b5bc4f101f47d556fef1612b
|
bc884b6408af4c91fa406391e75f570a25496c4b
|
refs/heads/master
| 2020-10-01T13:21:30.549707
| 2019-12-11T05:50:51
| 2019-12-11T05:50:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,897
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time : 2019/8/20
# @Author : github.com/guofei9987
import numpy as np
from sko.tools import func_transformer
from .base import SkoBase
class PSO(SkoBase):
"""
Do PSO (Particle swarm optimization) algorithm.
This algorithm was adapted from the earlier works of J. Kennedy and
R.C. Eberhart in Particle Swarm Optimization [IJCNN1995]_.
The position update can be defined as:
.. math::
x_{i}(t+1) = x_{i}(t) + v_{i}(t+1)
Where the position at the current step :math:`t` is updated using
the computed velocity at :math:`t+1`. Furthermore, the velocity update
is defined as:
.. math::
v_{ij}(t + 1) = w * v_{ij}(t) + c_{p}r_{1j}(t)[y_{ij}(t) − x_{ij}(t)]
+ c_{g}r_{2j}(t)[\hat{y}_{j}(t) − x_{ij}(t)]
Here, :math:`cp` and :math:`cg` are the cognitive and social parameters
respectively. They control the particle's behavior given two choices: (1) to
follow its *personal best* or (2) follow the swarm's *global best* position.
Overall, this dictates if the swarm is explorative or exploitative in nature.
In addition, a parameter :math:`w` controls the inertia of the swarm's
movement.
.. [IJCNN1995] J. Kennedy and R.C. Eberhart, "Particle Swarm Optimization,"
Proceedings of the IEEE International Joint Conference on Neural
Networks, 1995, pp. 1942-1948.
Parameters
--------------------
func : function
The func you want to do optimal
dim : int
Number of dimension, which is number of parameters of func.
pop : int
Size of population, which is the number of Particles. We use 'pop' to keep accordance with GA
max_iter : int
Max of iter iterations
Attributes
----------------------
pbest_x : array_like, shape is (pop,dim)
best location of every particle in history
pbest_y : array_like, shape is (pop,1)
best image of every particle in history
gbest_x : array_like, shape is (1,dim)
general best location for all particles in history
gbest_y : float
general best image for all particles in history
gbest_y_hist : list
gbest_y of every iteration
Examples
-----------------------------
>>> demo_func = lambda x: x[0] ** 2 + (x[1] - 0.05) ** 2 + x[2] ** 2
>>> pso = PSO(func=demo_func, dim=3)
>>> gbest_x, gbest_y = pso.run()
>>> print('best_x is ', pso.gbest_x, 'best_y is ', pso.gbest_y)
>>> pso.plot_history()
"""
def __init__(self, func, dim, pop=40, max_iter=150, lb=None, ub=None, w=0.8, c1=0.5, c2=0.5):
self.func = func_transformer(func)
self.w = w # inertia
self.cp, self.cg = c1, c2 # parameters to control personal best, global best respectively
self.pop = pop # number of particles
self.dim = dim # dimension of particles, which is the number of variables of func
self.max_iter = max_iter # max iter
self.has_constraints = not (lb is None and ub is None)
self.lb = -np.ones(self.dim) if lb is None else np.array(lb)
self.ub = np.ones(self.dim) if ub is None else np.array(ub)
assert self.dim == len(self.lb) == len(self.ub), 'dim == len(lb) == len(ub) must holds'
assert np.all(self.ub > self.lb), 'All upper-bound values must be greater than lower-bound values'
self.X = np.random.uniform(low=self.lb, high=self.ub, size=(self.pop, self.dim))
v_high = self.ub - self.lb
self.V = np.random.uniform(low=-v_high, high=v_high, size=(self.pop, self.dim)) # speed of particles
self.Y = self.cal_y() # y = f(x) for all particles
self.pbest_x = self.X.copy() # personal best location of every particle in history
self.pbest_y = self.Y.copy() # best image of every particle in history
self.gbest_x = np.zeros((1, self.dim)) # global best location for all particles
self.gbest_y = np.inf # global best y for all particles
self.gbest_y_hist = [] # gbest_y of every iteration
self.update_gbest()
# record verbose values
self.record_mode = False
self.record_value = {'X': [], 'V': [], 'Y': []}
def update_V(self):
r1 = np.random.rand(self.pop, self.dim)
r2 = np.random.rand(self.pop, self.dim)
self.V = self.w * self.V + \
self.cp * r1 * (self.pbest_x - self.X) + \
self.cg * r2 * (self.gbest_x - self.X)
def update_X(self):
self.X = self.X + self.V
if self.has_constraints:
self.X = np.clip(self.X, self.lb, self.ub)
def cal_y(self):
# calculate y for every x in X
self.Y = np.array([self.func(x) for x in self.X]).reshape(-1, 1)
return self.Y
def update_pbest(self):
'''
personal best
:return:
'''
self.pbest_x = np.where(self.pbest_y > self.Y, self.X, self.pbest_x)
self.pbest_y = np.where(self.pbest_y > self.Y, self.Y, self.pbest_y)
def update_gbest(self):
'''
global best
:return:
'''
if self.gbest_y > self.Y.min():
self.gbest_x = self.X[self.Y.argmin(), :]
self.gbest_y = self.Y.min()
def recorder(self):
if not self.record_mode:
return
self.record_value['X'].append(self.X)
self.record_value['V'].append(self.V)
self.record_value['Y'].append(self.Y)
def run(self, max_iter=None):
self.max_iter = max_iter or self.max_iter
for iter_num in range(self.max_iter):
self.update_V()
self.recorder()
self.update_X()
self.cal_y()
self.update_pbest()
self.update_gbest()
self.gbest_y_hist.append(self.gbest_y)
return self
fit = run
|
[
"guofei9987@foxmail.com"
] |
guofei9987@foxmail.com
|
32507acd78f501ec54d3ee9e35911dfe8ca480b6
|
03dfcd4bd41ff9ba76e67895e96a9794ad003a31
|
/sandbox/internet/web-scraping/myparser.py
|
82a2e133b52c265a643c1d4c02ec7e0966db8a05
|
[] |
no_license
|
gittygitgit/python-sandbox
|
71ca68fcc90745931737f7aeb61306ac3417ce60
|
3b3e0eaf4edad13aabe51eb3258ebe9e6b951c67
|
refs/heads/master
| 2021-01-19T02:41:17.047711
| 2018-11-22T18:07:15
| 2018-11-22T18:07:15
| 39,742,770
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,720
|
py
|
#!/usr/bin/python
import HTMLParser
class MyParse(HTMLParser.HTMLParser):
    """HTMLParser subclass that tracks the stack of currently-open tags and,
    for ``<a>`` tags, records the position of the ``href`` attribute inside
    the tag's attribute list.

    NOTE(review): Python 2 only -- relies on the old ``HTMLParser`` module
    name, on ``map()`` returning a list (``.index`` is called on it), and on
    ``dict.has_key``/``vars().has_key``.
    """
    def __init__(self):
        #super() does not work for this class (HTMLParser is an old-style
        #class in Python 2, so explicit base-class __init__ is required)
        HTMLParser.HTMLParser.__init__(self)
        self.tag_stack = []   # stack of [tag_name, attrs, (optional) href index]
        self.attr_stack = []  # declared but never used in this class
    def handle_endtag(self, tag):
        #take the tag off the stack if it matches the next close tag
        #if you are expecting unmatched tags, then this needs to be more robust
        if self.tag_stack[len(self.tag_stack)-1][0] == tag:
            self.tag_stack.pop()
    def handle_data(self, data):
        #'data' is the text between tags, not necessarily
        #matching tags
        #this gives you a link to the last tag
        tstack = self.tag_stack[len(self.tag_stack)-1]
        #do something with the text
    def handle_starttag(self, tag, attrs):
        #add tag to the stack
        self.tag_stack.append([tag, attrs])
        #if this tag is a link
        if tag =="a":
            #these next few lines find if there is a hyperlink in the tag
            #(builds a 0/1 list marking which attribute tuple is 'href')
            tloc = map(lambda x: 1 if x[0]=='href' else 0,attrs)
            try:
                #did we find any hyperlinks
                attr_loc = tloc.index(1)
            except:
                pass
            # attr_loc only exists if we found a hyperlink
            if vars().has_key('attr_loc'):
                #append to the last item in the stack the location of the hyperlink
                #note, this does not increase the length of the stack
                #as we are putting it inside the last item on the stack
                self.tag_stack[len(self.tag_stack)-1].append(attr_loc)
                #now we can do what we need with the hyperlink
|
[
"grudkowm@Michaels-Air-2.fios-router.home"
] |
grudkowm@Michaels-Air-2.fios-router.home
|
eb06707c02b708b16b20562078f0ccd02b5cca34
|
76dab6591cb9c7ee566b76a0adc7b0b0c4086592
|
/main/tests/test_models.py
|
7185a7137b6e46e6c02f4727e6bb80c1f7e2792a
|
[] |
no_license
|
gray-adeyi/booktime
|
87962321e380cfa779b24f2bd6fa8c434687d084
|
fb54bc35739b28b5a71a5cf0c1067f38140559ba
|
refs/heads/main
| 2023-04-05T02:44:01.992984
| 2021-05-03T01:37:01
| 2021-05-03T01:37:25
| 363,434,043
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,490
|
py
|
from decimal import Decimal
from django.test import TestCase
from main import models
class TestModel(TestCase):
    """Unit tests for the ``main`` app's model layer.

    Uses ``assertEqual`` throughout: ``assertEquals`` is a deprecated alias
    that was removed in Python 3.12.
    """

    def test_active_manager_works(self):
        """The custom ``active()`` manager method must exclude inactive products."""
        models.Product.objects.create(
            name="The cathedral and the bazaar",
            price=Decimal("10.00"))
        models.Product.objects.create(
            name="Pride and Prejudice",
            price=Decimal("2.00"))
        models.Product.objects.create(
            name="A Tale of Two Cities",
            price=Decimal("2.00"),
            active=False)
        self.assertEqual(len(models.Product.objects.active()), 2)

    def test_create_order_works(self):
        """Creating an order from a basket copies addresses and basket lines."""
        p1 = models.Product.objects.create(
            name="The cathedral and the bazaar",
            price=Decimal("10.00"),
        )
        p2 = models.Product.objects.create(
            name="Pride and Prejudice", price=Decimal("2.00")
        )
        user1 = models.User.objects.create_user(
            "user1", "pw432joij"
        )
        billing = models.Address.objects.create(
            user=user1,
            name="John Kimball",
            address1="127 Strudel road",
            city="London",
            country="uk",
        )
        shipping = models.Address.objects.create(
            user=user1,
            name="John Kimball",
            address1="123 Deacon road",
            city="London",
            country="uk",
        )
        basket = models.Basket.objects.create(user=user1)
        models.BasketLine.objects.create(
            basket=basket, product=p1
        )
        models.BasketLine.objects.create(
            basket=basket, product=p2
        )
        # Order creation is expected to log at INFO level.
        with self.assertLogs("main.models", level="INFO") as cm:
            order = basket.create_order(billing, shipping)
        self.assertGreaterEqual(len(cm.output), 1)
        order.refresh_from_db()
        self.assertEqual(order.user, user1)
        self.assertEqual(
            order.billing_address1, "127 Strudel road"
        )
        self.assertEqual(
            order.shipping_address1, "123 Deacon road"
        )
        # add more checks here
        self.assertEqual(order.lines.all().count(), 2)
        lines = order.lines.all()
        self.assertEqual(lines[0].product, p1)
        self.assertEqual(lines[1].product, p2)
|
[
"adeyigbenga005@gmail.com"
] |
adeyigbenga005@gmail.com
|
ebebad6c7731a9504ee607513be35017d718188d
|
8a5ab3d33e3b653c4c64305d81a85f6a4582d7ac
|
/PySide/QtCore/QPointF.py
|
2c350c353ea530cc837989441679f71325e61e69
|
[
"Apache-2.0"
] |
permissive
|
sonictk/python-skeletons
|
be09526bf490856bb644fed6bf4e801194089f0d
|
49bc3fa51aacbc2c7f0c7ab86dfb61eefe02781d
|
refs/heads/master
| 2020-04-06T04:38:01.918589
| 2016-06-09T20:37:43
| 2016-06-09T20:37:43
| 56,334,503
| 0
| 0
| null | 2016-04-15T16:30:42
| 2016-04-15T16:30:42
| null |
UTF-8
|
Python
| false
| false
| 3,951
|
py
|
# encoding: utf-8
# module PySide.QtCore
# from /corp.blizzard.net/BFD/Deploy/Packages/Published/ThirdParty/Qt4.8.4/2015-05-15.163857/prebuilt/linux_x64_gcc41_python2.7_ucs4/PySide/QtCore.so
# by generator 1.138
# no doc
# no imports
from _Object import _Object
class QPointF(_Object):
    """Auto-generated IDE stub for PySide's ``QtCore.QPointF``.

    Every method body is ``pass``: the real implementations live in the
    compiled ``QtCore`` extension module; this file only mirrors the API
    for code completion / static analysis. Do not hand-edit behaviour here.
    """
    def isNull(self, *args, **kwargs): # real signature unknown
        pass

    def manhattanLength(self, *args, **kwargs): # real signature unknown
        pass

    def setX(self, *args, **kwargs): # real signature unknown
        pass

    def setY(self, *args, **kwargs): # real signature unknown
        pass

    def toPoint(self, *args, **kwargs): # real signature unknown
        pass

    def toTuple(self, *args, **kwargs): # real signature unknown
        pass

    def x(self, *args, **kwargs): # real signature unknown
        pass

    def y(self, *args, **kwargs): # real signature unknown
        pass

    def __add__(self, y): # real signature unknown; restored from __doc__
        """ x.__add__(y) <==> x+y """
        pass

    def __copy__(self, *args, **kwargs): # real signature unknown
        pass

    def __div__(self, y): # real signature unknown; restored from __doc__
        """ x.__div__(y) <==> x/y """
        pass

    def __eq__(self, y): # real signature unknown; restored from __doc__
        """ x.__eq__(y) <==> x==y """
        pass

    def __ge__(self, y): # real signature unknown; restored from __doc__
        """ x.__ge__(y) <==> x>=y """
        pass

    def __gt__(self, y): # real signature unknown; restored from __doc__
        """ x.__gt__(y) <==> x>y """
        pass

    def __iadd__(self, y): # real signature unknown; restored from __doc__
        """ x.__iadd__(y) <==> x+=y """
        pass

    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass

    def __isub__(self, y): # real signature unknown; restored from __doc__
        """ x.__isub__(y) <==> x-=y """
        pass

    def __le__(self, y): # real signature unknown; restored from __doc__
        """ x.__le__(y) <==> x<=y """
        pass

    def __lt__(self, y): # real signature unknown; restored from __doc__
        """ x.__lt__(y) <==> x<y """
        pass

    def __mul__(self, y): # real signature unknown; restored from __doc__
        """ x.__mul__(y) <==> x*y """
        pass

    def __neg__(self): # real signature unknown; restored from __doc__
        """ x.__neg__() <==> -x """
        pass

    def __ne__(self, y): # real signature unknown; restored from __doc__
        """ x.__ne__(y) <==> x!=y """
        pass

    def __nonzero__(self): # real signature unknown; restored from __doc__
        """ x.__nonzero__() <==> x != 0 """
        pass

    def __radd__(self, y): # real signature unknown; restored from __doc__
        """ x.__radd__(y) <==> y+x """
        pass

    def __rdiv__(self, y): # real signature unknown; restored from __doc__
        """ x.__rdiv__(y) <==> y/x """
        pass

    def __reduce__(self, *args, **kwargs): # real signature unknown
        pass

    def __repr__(self): # real signature unknown; restored from __doc__
        """ x.__repr__() <==> repr(x) """
        pass

    def __rmul__(self, y): # real signature unknown; restored from __doc__
        """ x.__rmul__(y) <==> y*x """
        pass

    def __rsub__(self, y): # real signature unknown; restored from __doc__
        """ x.__rsub__(y) <==> y-x """
        pass

    def __rtruediv__(self, y): # real signature unknown; restored from __doc__
        """ x.__rtruediv__(y) <==> y/x """
        pass

    def __sub__(self, y): # real signature unknown; restored from __doc__
        """ x.__sub__(y) <==> x-y """
        pass

    def __truediv__(self, y): # real signature unknown; restored from __doc__
        """ x.__truediv__(y) <==> x/y """
        pass

    # generator emits __new__ = None for stubs; the real slot lives in QtCore
    __new__ = None
|
[
"yliangsiew@blizzard.com"
] |
yliangsiew@blizzard.com
|
0b13a187ec32ce7aa897761988d4c15a6c652734
|
ab3d5ea4bf0e48914ed14fcf16e5b1d752f199ba
|
/pcg_libraries/src/pcg_gazebo/parsers/sdf/pose.py
|
c59190b53c6490da0a3566d7aeb02a72a6f5997d
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"CC0-1.0",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-public-domain"
] |
permissive
|
boschresearch/pcg_gazebo_pkgs
|
5f1004d0de874d4d1abc4eb695777013027158b2
|
1c112d01847ca4f8da61ce9b273e13d13bc7eb73
|
refs/heads/master
| 2020-06-11T06:28:36.228431
| 2020-02-07T13:05:28
| 2020-02-07T13:05:28
| 193,876,180
| 44
| 3
|
NOASSERTION
| 2020-02-07T12:00:55
| 2019-06-26T09:45:05
|
Python
|
UTF-8
|
Python
| false
| false
| 2,348
|
py
|
# Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..types import XMLVector
class Pose(XMLVector):
    """SDF ``<pose>`` element: a 6-vector (x, y, z, roll, pitch, yaw)
    plus an optional ``frame`` attribute."""

    _NAME = 'pose'
    _TYPE = 'sdf'

    _ATTRIBUTES = dict(
        frame=''
    )

    def __init__(self):
        XMLVector.__init__(self, 6)
        self.reset()

    def _component(self, index):
        # Read one entry of the underlying 6-vector.
        return self.value[index]

    def _set_component(self, index, value):
        # Validate and store one entry of the underlying 6-vector.
        assert self._is_scalar(value)
        self.value[index] = float(value)

    @property
    def frame(self):
        """Name of the reference frame this pose is expressed in."""
        return self.attributes['frame']

    @frame.setter
    def frame(self, value):
        assert isinstance(value, str)
        self.attributes['frame'] = value

    @property
    def pose(self):
        """The full [x, y, z, roll, pitch, yaw] vector."""
        return self.value

    @pose.setter
    def pose(self, value):
        XMLVector._set_value(self, value)

    @property
    def x(self):
        return self._component(0)

    @x.setter
    def x(self, value):
        self._set_component(0, value)

    @property
    def y(self):
        return self._component(1)

    @y.setter
    def y(self, value):
        self._set_component(1, value)

    @property
    def z(self):
        return self._component(2)

    @z.setter
    def z(self, value):
        self._set_component(2, value)

    @property
    def roll(self):
        return self._component(3)

    @roll.setter
    def roll(self, value):
        self._set_component(3, value)

    @property
    def pitch(self):
        return self._component(4)

    @pitch.setter
    def pitch(self, value):
        self._set_component(4, value)

    @property
    def yaw(self):
        return self._component(5)

    @yaw.setter
    def yaw(self, value):
        self._set_component(5, value)
|
[
"Musa.Marcusso@de.bosch.com"
] |
Musa.Marcusso@de.bosch.com
|
c623f87b22b649226f52dc2e56f8651ae57fca85
|
484da6ff9bda06183c3d3bbda70c6d11e1ad6b67
|
/.history/main_20191007162714.py
|
3c0a5d2227f2184e32bd46b4e6485a71ab54093b
|
[] |
no_license
|
Shynar88/TSP
|
009a88bbddb29214921de4d0cf1761dea61b7b75
|
889751ab7d6a91469e86c6583f3c91b85857edd9
|
refs/heads/master
| 2020-08-06T22:40:49.217474
| 2020-01-14T13:41:44
| 2020-01-14T13:41:44
| 213,185,830
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,330
|
py
|
import argparse
import math
import random
import operator
class City():
    """A city on the TSP map: an identifying index plus 2-D coordinates."""

    def __init__(self, index, x_coord, y_coord):
        self.index = index
        self.x_coord = x_coord
        self.y_coord = y_coord

    def __repr__(self):
        return f"[{self.x_coord}, {self.y_coord}]"

    def get_distance_to(self, other):
        """Euclidean distance from this city to *other*."""
        delta_x_sq = (other.x_coord - self.x_coord) ** 2
        delta_y_sq = (other.y_coord - self.y_coord) ** 2
        return math.sqrt(delta_x_sq + delta_y_sq)
class Instance():
    """A candidate TSP solution: an ordered route plus cached metrics."""

    def __init__(self, route):
        self.route = route
        # Cache the closed-tour length and its reciprocal (fitness) up front.
        self.route_distance = self.get_route_distance()
        self.fitness = self.get_fitness()

    def get_route_distance(self):
        """Total length of the closed tour (the last city links back to the first)."""
        stops = self.route
        count = len(stops)
        total = 0
        for position, origin in enumerate(stops):
            destination = stops[(position + 1) % count]
            total += origin.get_distance_to(destination)
        return total

    def get_fitness(self):
        """Fitness is the reciprocal of the tour length: shorter tours are fitter."""
        return 1 / self.route_distance
class GeneticAlgorithm():
    """Genetic-algorithm TSP solver using tournament selection, ordered
    crossover (OX), swap mutation and elitism."""
    def __init__(self, population_size, mat_pool_size, tournament_size, elite_size, max_generations, crossover_rate, mutation_rate, cities_list):
        self.population_size = population_size
        self.mat_pool_size = mat_pool_size
        self.tournament_size = tournament_size
        self.elite_size = elite_size
        self.max_generations = max_generations
        self.crossover_rate = crossover_rate  # NOTE(review): stored but never used below -- confirm intent
        self.mutation_rate = mutation_rate
        self.cities_list = cities_list
    def generate_instance(self):
        """Build one Instance over a random permutation of all cities."""
        route = random.sample(self.cities_list, len(self.cities_list))
        instance = Instance(route)
        return instance
    def create_initial_population(self):
        """Return ``population_size`` random instances."""
        initial_population = []
        for _ in range(self.population_size):
            initial_population.append(self.generate_instance())
        return initial_population
    def crossover(self, p1, p2): # proposing good crossover method https://www.hindawi.com/journals/cin/2017/7430125/
        #implement simple crossover then try to enhance it
        #ordered crossover: copy a random slice of p1, fill the remaining
        #positions with p2's cities in p2's order
        li = 0
        hi = 0
        # re-draw until the slice [li:hi) is non-empty (hi > li)
        while hi <= li:
            li = int(random.random() * len(p1.route))
            hi = int(random.random() * len(p1.route))
        chunk = p1.route[li:hi]
        child_route = []
        not_used_el_in_p2 = [el for el in p2.route if el not in chunk]
        pointer = 0
        for _ in range(li):
            child_route.append(not_used_el_in_p2[pointer])
            pointer += 1
        child_route += chunk
        for _ in range(hi, len(p1.route)):
            child_route.append(not_used_el_in_p2[pointer])
            pointer += 1
        child = Instance(child_route)
        return child
    def mutate(self, instance):
        """With probability ``mutation_rate``, swap two random cities in place.

        NOTE(review): the Instance's cached route_distance/fitness (computed
        in __init__) are NOT refreshed after the swap -- verify intended.
        """
        instance = instance.route
        if random.random() < self.mutation_rate:
            i1, i2 = random.sample(range(len(self.cities_list)), 2)
            instance[i1], instance[i2] = instance[i2], instance[i1]
    def selection(self, population):
        #experiment on selection way
        #implement the simple one
        #Tournament selection P/2 size might be better
        # Repeatedly sample `tournament_size` instances and keep the fittest.
        mating_pool = []
        while len(mating_pool) < self.mat_pool_size:
            participants = random.sample(population, self.tournament_size)
            fittest = max(participants, key=operator.attrgetter('fitness'))
            mating_pool.append(fittest)
        return mating_pool
    def generate_path(self):
        """Run the GA loop for ``max_generations``; currently returns 0 (WIP)."""
        # Step 1. Create an initial population of P chromosomes.
        population = self.create_initial_population()
        # Step 2. Evaluate the fitness of each chromosome. done in create population
        for generation in range(self.max_generations):
            print(f"generation number: {generation}")
            # Step 3. Choose P/2 parents from the current population via proportional selection.
            mating_pool = self.selection(population)
            population_sorted = sorted(population, key=lambda instance: instance.fitness, reverse=True)
            old_elite = population_sorted[:self.elite_size]
            new_population = old_elite
            while len(new_population) < self.population_size:
                # Step 4. Randomly select two parents to create offspring using crossover operator.
                parents = random.sample(mating_pool, 2)
                child = self.crossover(parents[0], parents[1])
                # Step 5. Apply mutation operators for minor changes in the results.
                self.mutate(child)
                new_population.append(child)
            # Step 6. Repeat Steps 4 and 5 until all parents are selected and mated.
            # Step 7. Replace old population of chromosomes with new one.
            population = new_population
            # Step 8. Evaluate the fitness of each chromosome in the new population. Already done in crossover when creating the child
            # Step 9. Terminate if the number of generations meets some upper bound; otherwise go to Step 3.
        return 0
# parses command line arguments
#is mating pool size also a hyperparameter???????
def parse_arguments():
    """Parse the GA hyper-parameters from the command line.

    Returns an 8-tuple:
    (path, population_size, mating_pool_size, tournament_size,
     elite_size, max_generations, crossover_rate, mutation_rate).
    """
    parser = argparse.ArgumentParser()
    # (flag, type, default, help) -- one row per CLI option.
    options = [
        ('-p', str, "a280.tsp", "path to the input file"),
        ('-s', int, 50, "population size"),
        ('-ms', int, 25, "mating pool size"),
        ('-ts', int, 5, "tournament size"),
        ('-e', int, 20, "elite_size"),
        ('-mg', int, 50, "max generations"),
        ('-cr', float, 0.3, "crossover rate"),
        ('-mr', float, 0.1, "mutation rate"),
    ]
    for flag, arg_type, default, help_text in options:
        parser.add_argument(flag, type=arg_type, default=default, help=help_text)
    parsed = parser.parse_args()
    return (parsed.p, parsed.s, parsed.ms, parsed.ts,
            parsed.e, parsed.mg, parsed.cr, parsed.mr)
# parses file, returns list of city coordinates(ex: [(x1, y1), ...])
def parse(file_path):  # coordinates start from the 7th line, end with EOF
    """Read a TSPLIB-style file and return raw city rows.

    Skips the 6 header lines, then keeps every line with exactly three
    whitespace-separated tokens as an (index, x, y) tuple of strings.
    """
    cities = []
    with open(file_path, "r") as handle:
        all_lines = handle.readlines()
    for line in all_lines[6:]:
        tokens = line.split()
        # Lines such as the trailing "EOF" marker do not have 3 tokens.
        if len(tokens) == 3:
            cities.append((tokens[0], tokens[1], tokens[2]))
    return cities
def create_cities(coordinates_list):
    """Turn raw (index, x, y) rows into City objects."""
    return [City(coords[0], coords[1], coords[2]) for coords in coordinates_list]
def main():
    """Entry point: parse CLI options, load the TSP file, build the GA solver.

    NOTE(review): generate_path() is still commented out, so the solver is
    constructed but never run -- the script currently only echoes its inputs.
    """
    path, population_size, mat_pool_size, tournament_size, elite_size, max_generations, crossover_rate, mutation_rate = parse_arguments()
    #delete prints
    print(path)
    print(population_size)
    print(mat_pool_size)
    print(tournament_size)
    print(elite_size)
    print(max_generations)
    print(crossover_rate)
    print(mutation_rate)
    #####
    coordinates_list = parse(path)
    cities_list = create_cities(coordinates_list)
    gen_algo = GeneticAlgorithm(population_size, mat_pool_size, tournament_size, elite_size, max_generations, crossover_rate, mutation_rate, cities_list)
    # distance = gen_algo.generate_path()
    # print(distance)
if __name__ == "__main__":
    main()
|
[
"shynar@mindslab.ai"
] |
shynar@mindslab.ai
|
ce640e99ab9b4f9311a737f6a8f10585751a2bcf
|
3ce592352627591346ea33ea0c2665ad879414e2
|
/References/web-scraping/101scrapetest.py
|
64aa6f9bfe35ee27d2cbd8684578f2c2e1fafc06
|
[
"MIT"
] |
permissive
|
royqh1979/python_libs_usage
|
113df732ef106f4a5faae1343493756fd703c8c0
|
57546d5648d8a6b7aca7d7ff9481aa7cd4d8f511
|
refs/heads/master
| 2021-04-16T18:14:43.835482
| 2021-01-11T03:55:25
| 2021-01-11T03:55:25
| 249,374,754
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 290
|
py
|
# Scraping demo: fetch the "War and Peace" sample page and print every
# character name, which the page marks up as <span class="green"> elements.
from urllib.request import urlopen
from bs4 import BeautifulSoup
html = urlopen('http://www.pythonscraping.com/pages/warandpeace.html')
bs = BeautifulSoup(html.read(),'html5lib')
print(bs.h1)  # the page title heading
namelist = bs.findAll('span',{'class':'green'})
for name in namelist:
    print(name.get_text())
|
[
"royqh1979@gmail.com"
] |
royqh1979@gmail.com
|
04483072d03f682fa80d3014d1f0fa2fc7e36601
|
1a1b857c67768f20de0df42a7edb87edd57d9a33
|
/Quick_Sort/Quick_Sort_Practice_November_2.py
|
1b6901fe97a57dc5b190a20e25f2f70f0af9dfb8
|
[] |
no_license
|
LilySu/Python_Practice
|
7c7eb30c549239f27680f410d365289b67813c5e
|
26767e64742149813ecbc91815454836ffce8b6e
|
refs/heads/master
| 2023-07-29T01:14:19.751490
| 2021-08-15T01:09:41
| 2021-08-15T01:09:41
| 279,446,861
| 1
| 2
| null | 2020-10-09T04:10:40
| 2020-07-14T01:05:55
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 760
|
py
|
def swap(a, b, arr):
    """Exchange the elements at indices *a* and *b* of *arr* in place."""
    arr[a], arr[b] = arr[b], arr[a]


def partition(elements, start, end):
    """Partition ``elements[start:end + 1]`` around ``elements[start]``.

    Moves every value <= pivot left of every value > pivot, places the
    pivot at its final position and returns that index.
    """
    pivot_pos = start
    pivot_value = elements[pivot_pos]
    left, right = start, end
    while left < right:
        # Advance past values that belong on the pivot's left.
        while left < len(elements) and elements[left] <= pivot_value:
            left += 1
        # Retreat past values that belong on the pivot's right.
        while elements[right] > pivot_value:
            right -= 1
        if left < right:
            swap(left, right, elements)
    # `right` now marks the pivot's final slot.
    swap(pivot_pos, right, elements)
    return right


def quick_sort(elements, start, end):
    """Sort ``elements[start:end + 1]`` in place via recursive quicksort."""
    if start >= end:
        return
    boundary = partition(elements, start, end)
    quick_sort(elements, start, boundary - 1)
    quick_sort(elements, boundary + 1, end)


if __name__ == "__main__":
    sample = [8, 24, 92, 14, 3, 47]
    quick_sort(sample, 0, len(sample) - 1)
    print(sample)
|
[
"LilySu@users.noreply.github.com"
] |
LilySu@users.noreply.github.com
|
fd9b2b5f03d3dc5bc61de7795ca876950a6683e0
|
7e0393251012e91213dddfd9c93f6b6b73ca2bfe
|
/tests/unit/test_drizzle_error.py
|
e7d05e37e25a68aede0c2312672d6c495e350d23
|
[
"MIT"
] |
permissive
|
josephhardinee/cloudnetpy
|
ff4cc0303d7f2ae40f2d3466298257659ff3ccde
|
c37760db3cdfe62ae769f8090ba621803ec9a92c
|
refs/heads/master
| 2021-03-06T15:37:51.529776
| 2020-02-13T09:05:29
| 2020-02-13T09:05:29
| 246,207,849
| 0
| 0
|
MIT
| 2020-03-10T04:29:48
| 2020-03-10T04:26:16
| null |
UTF-8
|
Python
| false
| false
| 3,347
|
py
|
import numpy as np
import numpy.testing as testing
import pytest
from cloudnetpy.products import drizzle_error as de
# Shared fixtures: a small 2x3 grid of drizzle diameters, boolean masks
# classifying each cell, and (error, bias) inputs for the calculators.
DRIZZLE_PARAMETERS = {'Do': np.array([[0.0001, 0.01, 0.000001],
                                      [0.001, 0.000001, 0.0001]])}
DRIZZLE_INDICES = {'drizzle': np.array([[1, 1, 1], [1, 1, 1]], dtype=bool),
                   'small': np.array([[1, 0, 0], [0, 0, 1]], dtype=bool),
                   'tiny': np.array([[0, 0, 1], [0, 1, 0]], dtype=bool)}
ERROR_INPUT = (np.array([[0.01, 0.34, 0.5],
                         [0.2, 0.3, 0.56]]), 0.14)
BIAS_INPUT = (0.01, 0.57)
@pytest.mark.parametrize('key, value', [
    ('drizzle', [False, True, True, True]),
    ('small', [False, True, False, False]),
    ('tiny', [False, False, False, True])])
def test_get_drizzle_indices(key, value):
    """Diameters are bucketed into drizzle / small / tiny masks."""
    dia = np.array([-1, 2 * 1e-5, 1, 1e-6])
    d = de._get_drizzle_indices(dia)
    testing.assert_array_equal(d[key], value)
@pytest.mark.parametrize('key', [
    'Do_error', 'drizzle_lwc_error', 'drizzle_lwf_error', 'S_error'])
def test_calc_parameter_errors(key):
    """Error calculation yields all expected keys."""
    x = de._calc_parameter_errors(DRIZZLE_INDICES, ERROR_INPUT)
    assert key in x.keys()
@pytest.mark.parametrize('key', [
    'Do_bias', 'drizzle_lwc_bias', 'drizzle_lwf_bias'])
def test_calc_parameter_biases(key):
    """Bias calculation yields all expected keys."""
    x = de._calc_parameter_biases(BIAS_INPUT)
    assert key in x.keys()
@pytest.fixture
def results():
    """Merged dict of parameter errors and biases used by later tests."""
    errors = de._calc_parameter_errors(DRIZZLE_INDICES, ERROR_INPUT)
    biases = de._calc_parameter_biases(BIAS_INPUT)
    return {**errors, **biases}
@pytest.mark.parametrize('key', [
    'drizzle_N_error', 'v_drizzle_error', 'mu_error'])
def test_add_supplementary_errors(results, key):
    """Supplementary error keys are appended to the results dict."""
    x = de._add_supplementary_errors(results, DRIZZLE_INDICES, ERROR_INPUT)
    assert key in x.keys()
def test_calc_v_error(results):
    """Fall-velocity error doubles the Do error for 'tiny' drops."""
    results['Do_error'] = np.array([[2, 2, 2], [2, 2, 2]])
    x = de._add_supplementary_errors(results, DRIZZLE_INDICES, ERROR_INPUT)
    testing.assert_almost_equal(x['v_drizzle_error'][DRIZZLE_INDICES['tiny']], 4)
@pytest.mark.parametrize('key', [
    'drizzle_N_bias', 'v_drizzle_bias'])
def test_add_supplementary_biases(results, key):
    """Supplementary bias keys are appended to the results dict."""
    x = de._add_supplementary_biases(results, BIAS_INPUT)
    assert key in x.keys()
def test_calc_error():
    """_calc_error matches a weighted L2 norm with unit scale/weight."""
    from cloudnetpy.utils import l2norm_weighted
    compare = l2norm_weighted(ERROR_INPUT, 1, 1)
    testing.assert_almost_equal(de._calc_error(1, 1, ERROR_INPUT), compare)
def test_stack_errors():
    """Stacking masks out cells that are not classified as drizzle."""
    DRIZZLE_INDICES['drizzle'] = np.array([[0, 1, 1], [1, 1, 0]], dtype=bool)
    compare = np.ma.array(ERROR_INPUT[0], mask=[[1, 0, 0], [0, 0, 1]])
    x = de._stack_errors(ERROR_INPUT[0], DRIZZLE_INDICES)
    testing.assert_array_almost_equal(x, compare)
@pytest.mark.parametrize("x, result", [
    (-1000, -1),
    (-100, -0.99999),
    (-10, -0.9),
    (-1, np.exp(-1 / 10 * np.log(10)) - 1)])
def test_db2lin(x, result):
    """dB-to-linear conversion for representative negative values."""
    testing.assert_array_almost_equal(de.db2lin(x), result, decimal=5)
def test_db2lin_raise():
    """Very large dB values are rejected."""
    with pytest.raises(ValueError):
        de.db2lin(150)
@pytest.mark.parametrize("x, result", [
    (1e6, 60),
    (1e5, 50),
    (1e4, 40)])
def test_lin2db(x, result):
    """Linear-to-dB conversion for powers of ten."""
    testing.assert_array_almost_equal(de.lin2db(x), result, decimal=3)
def test_lin2db_raise():
    """Negative linear values are rejected."""
    with pytest.raises(ValueError):
        de.lin2db(-1)
|
[
"simo.tukiainen@fmi.fi"
] |
simo.tukiainen@fmi.fi
|
1abc67418dafabbb3f468f4ff08fea5c925b3bde
|
d86c5aa92a9763510b539776510ad9795d33ae89
|
/September 2020/03-Multidimensional-Lists/03-Primary-Diagonal.py
|
cb353fab20268a1b0cc58deae94420f1b386b6f6
|
[
"MIT"
] |
permissive
|
eclipse-ib/Software-University-Professional-Advanced-Module
|
42e3bd50ac5f0df8082add29f4113cffb87889e1
|
636385f9e5521840f680644824d725d074b93c9a
|
refs/heads/main
| 2023-02-13T06:02:53.246980
| 2021-01-06T21:12:14
| 2021-01-06T21:12:14
| 306,282,871
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 767
|
py
|
# Short variant using a comprehension:
# read an N x N integer matrix from stdin and print the primary-diagonal sum.
size = int(input())
matrix = [
    [int(el) for el in input().split()]
    for i in range(size)
]
current_sum = sum([matrix[i][i] for i in range(size)])
print(current_sum)
# Variant using range:
# size = int(input())
#
# matrix = [
#     [int(el) for el in input().split()]
#     for i in range(size)
# ]
#
# current_sum = 0
# for i in range(size):
#     current_sum += matrix[i][i]
#
# print(current_sum)
# Variant that traverses the matrix row by row:
# size = int(input())
#
# matrix = [
#     [int(el) for el in input().split()]
#     for i in range(size)
# ]
#
# index = 0
# current_sum = 0
# for j in matrix:
#     current_sum += j[index]
#     index += 1
# print(current_sum)
|
[
"65770519+eclipse-ib@users.noreply.github.com"
] |
65770519+eclipse-ib@users.noreply.github.com
|
db89b4926bf8f251c68f068747c97003c1c04fbc
|
cfac0f4f862180baae078bd7656ac41c8f946006
|
/Day22/full.py
|
53638388312ad69de0807e67cf6732d90355eefc
|
[] |
no_license
|
RaspiKidd/AoC2017
|
bcf4a8c161b48b2b8f89745d6ff5b741f023b5b7
|
2be828462cd5d56e2f8a8f636525359bb4de045e
|
refs/heads/master
| 2021-09-01T20:07:34.228665
| 2017-12-28T14:25:08
| 2017-12-28T14:25:08
| 112,738,515
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,634
|
py
|
def read():
    """Parse data.txt into a set of infected (row, col) cells, centred on (0, 0)."""
    with open("data.txt") as f:
        inpt = f.read().splitlines()
    offset = len(inpt) // 2
    infected = set()
    for r, line in enumerate(inpt):
        for c, ch in enumerate(line):
            if ch == '#':
                infected.add((r - offset, c - offset))
    return infected
infected = read()
# Direction vectors ordered so +1 turns left and -1 turns right (mod 4).
dirs = [(-1, 0), (0, -1), (1, 0), (0, 1)]
d = 0
virusAt = (0, 0)
def burst():
    """One part-1 step: turn, toggle the current cell, move forward.

    Returns True when this burst infected a cell.
    """
    global infected, d, virusAt
    infectionCaused = False
    if virusAt in infected:
        d = (d - 1) % 4
        infected.remove(virusAt)
    else:
        d = (d + 1) % 4
        infected.add(virusAt)
        infectionCaused = True
    virusAt = (virusAt[0] + dirs[d][0], virusAt[1] + dirs[d][1])
    return infectionCaused
numInfections = 0
for i in range(10000):
    if burst():
        numInfections += 1
# Part 1 answer
print(numInfections)
# Part 2 uses four node states instead of two.
# NOTE(review): `infected` is re-bound here from the part-1 set to the
# integer state constant 1 -- intentional reuse, but easy to misread.
clean = 0
infected = 1
weak = 2
flagged = 3
state = {k: infected for k in read()}
virusAt = (0, 0)
def burst2():
    """One part-2 step: clean->weak->infected->flagged->clean state machine."""
    global state, d, virusAt
    infectionCaused = False
    currentState = state.get(virusAt, 0)
    if currentState == clean:
        d = (d + 1) % 4
        state[virusAt] = weak
    elif currentState == weak:
        state[virusAt] = infected
        infectionCaused = True
    elif currentState == infected:
        d = (d - 1) % 4
        state[virusAt] = flagged
    else: # FLAGGED
        d = (d + 2) % 4
        del state[virusAt]
    virusAt = (virusAt[0] + dirs[d][0], virusAt[1] + dirs[d][1])
    return infectionCaused
numInfections = 0
for i in range(10000000):
    if burst2():
        numInfections += 1
# part 2 answer
print (numInfections)
|
[
"kerry@raspikidd.com"
] |
kerry@raspikidd.com
|
286590a9fe52b4359057b9360cd7b7a404aa8d70
|
fe18994a1880f347d8004383434842286b9dccd3
|
/python_stack/flask/flask_fundamentals/Dojo_Survey/server.py
|
335aa9e1c425addeaea0b58be5359da8056a3f95
|
[] |
no_license
|
Anbousi/Python
|
682d5b00555ab3183d06afddb4c5f6e1d5739f6c
|
4f05dd8ec62e80a28ca607feae976d9220a62227
|
refs/heads/master
| 2023-05-06T03:37:28.878915
| 2021-05-30T19:11:28
| 2021-05-30T19:11:28
| 364,501,098
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 695
|
py
|
from flask import Flask, render_template, request, redirect

app = Flask(__name__)


@app.route('/')
def main():
    """Render the survey form."""
    return render_template('index.html')


@app.route('/result', methods=['POST'])
def result():
    """Render the survey results from the submitted POST form."""
    name_form = request.form['name']
    location_form = request.form['location']
    language_form = request.form['language']
    comment_form = request.form['comment']
    # Unselected radio buttons / checkboxes are simply absent from the POST
    # data, so request.form['radio'] / ['check'] would raise a KeyError and
    # make Flask abort with "400 Bad Request". Use .get() so an unanswered
    # optional question yields None instead of an error page.
    radio_form = request.form.get('radio')
    check_form = request.form.get('check')
    print(check_form)
    return render_template('result.html' , name_form = name_form , location_form = location_form , language_form = language_form , comment_form = comment_form )


if __name__ == '__main__':
    app.run(debug = True)
|
[
"anbousi@gmail.com"
] |
anbousi@gmail.com
|
2ab9f2f34b31a7152edd0e7524c21dddd1269df8
|
247389d0b916f972297fe3c38d262502a6cfa084
|
/morse
|
ef7bed09c034b15fde12125c089eb665e1695bcd
|
[] |
no_license
|
reteps/raspi
|
2e69fee4eb96e4a43059f3125c79cf577e2b5bb6
|
96771f0525b3ad71c9b13a36de49b599c5769310
|
refs/heads/master
| 2021-09-28T05:22:32.999241
| 2017-07-26T13:24:51
| 2017-07-26T13:24:51
| 98,200,765
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,205
|
#!/usr/bin/env python3
import RPi.GPIO as GPIO
import pilights, time
def to_morse_code(message):
    """Convert *message* to a morse-code string.

    Letters map to their dot/dash groups and a space maps to "/" (word
    separator). Characters with no morse encoding (digits, punctuation)
    are skipped, where the original implementation raised KeyError.
    """
    morseAlphabet ={
        "A" : ".-",
        "B" : "-...",
        "C" : "-.-.",
        "D" : "-..",
        "E" : ".",
        "F" : "..-.",
        "G" : "--.",
        "H" : "....",
        "I" : "..",
        "J" : ".---",
        "K" : "-.-",
        "L" : ".-..",
        "M" : "--",
        "N" : "-.",
        "O" : "---",
        "P" : ".--.",
        "Q" : "--.-",
        "R" : ".-.",
        "S" : "...",
        "T" : "-",
        "U" : "..-",
        "V" : "...-",
        "W" : ".--",
        "X" : "-..-",
        "Y" : "-.--",
        "Z" : "--..",
        " " : "/"
    }
    # .get with "" default drops unmappable characters instead of crashing.
    return "".join(morseAlphabet.get(letter, "") for letter in message.upper())
# Hardware playback: blink the translated message on an LED driven via
# GPIO pin 17 (pilights wraps the RPi.GPIO access).
pin = 17
lights = pilights.Lights(pin)
raw_message = input("Message > ")
message = to_morse_code(raw_message)
# Timing in seconds: dot, dash, gap between symbols, gap between words.
shorttime = 0.1
longtime = 0.4
split = 0.2
word = 0.6
print(message)
for character in message:
    if character == ".":
        lights.onoff(pin,shorttime)
    elif character == "/":
        # "/" encodes a word break: pause instead of blinking.
        time.sleep(word)
    elif character == "-":
        lights.onoff(pin,longtime)
    time.sleep(split)
|
[
"peter.a.stenger@gmail.com"
] |
peter.a.stenger@gmail.com
|
|
69354a0bd822307b273f6b1b5fdfdcb3a5c10b88
|
16a5c9c9f0d7519a6808efc61b592b4b614102cf
|
/Python/16.py
|
3672e6b7b0f7551a670966ce8b46ac170ca86da6
|
[] |
no_license
|
kevin851066/Leetcode
|
c1d86b2e028526231b80c6d4fb6d0be7ae8d39e5
|
885a9af8a7bee3c228c7ae4e295dca810bd91d01
|
refs/heads/main
| 2023-08-10T16:50:12.426440
| 2021-09-28T15:23:26
| 2021-09-28T15:23:26
| 336,277,469
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,012
|
py
|
class Solution:
    def threeSumClosest(self, nums, target):
        """Return the sum of three elements of *nums* closest to *target*.

        :type nums: List[int]
        :type target: int
        :rtype: int
        """
        nums.sort()
        best_gap = float('inf')
        closest_sum = 0
        n = len(nums)
        for anchor in range(n - 2):
            # Skip duplicate anchors: same value yields the same candidate sums.
            if anchor > 0 and nums[anchor] == nums[anchor - 1]:
                continue
            left, right = anchor + 1, n - 1
            while right > left:
                current = nums[anchor] + nums[right] + nums[left]
                gap = abs(target - current)
                if gap < best_gap:
                    best_gap = gap
                    closest_sum = current
                if target > current:
                    # Sum too small: advance left past duplicates, then once more.
                    while right > left and nums[left] == nums[left + 1]:
                        left += 1
                    left += 1
                elif target < current:
                    # Sum too large: retreat right past duplicates, then once more.
                    while right > left and nums[right] == nums[right - 1]:
                        right -= 1
                    right -= 1
                else:
                    # Exact hit cannot be improved upon.
                    return closest_sum
        return closest_sum
|
[
"kevin851066@gmail.com"
] |
kevin851066@gmail.com
|
89bc553261509785779919691202bc8ff9d94257
|
5c69e63f3bb1286a79cb81ca70c969bccd65d740
|
/bocadillo/exceptions.py
|
e8cfd93892c17f615e36715a36c0bba654f5a71f
|
[
"MIT"
] |
permissive
|
stjordanis/bocadillo
|
85dc5895966d3e2031df365db55e4def156e92aa
|
658cce55b196d60489530aaefde80b066cb8054b
|
refs/heads/master
| 2020-04-14T09:36:47.245246
| 2019-01-01T19:27:37
| 2019-01-01T19:27:37
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,196
|
py
|
from http import HTTPStatus
from typing import Union, Any, List
from starlette.websockets import WebSocketDisconnect as _WebSocketDisconnect
WebSocketDisconnect = _WebSocketDisconnect
class HTTPError(Exception):
    """Raised when an HTTP error occurs.

    You can raise this within a view or an error handler to interrupt
    request processing.

    # Parameters
    status (int or HTTPStatus):
        the status code of the error.
    detail (any):
        extra detail information about the error. The exact rendering is
        determined by the configured error handler for `HTTPError`.

    # See Also
    - [HTTP response status codes (MDN web docs)](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status)
    """

    def __init__(self, status: Union[int, HTTPStatus], detail: Any = ""):
        if not isinstance(status, int):
            # Anything that is not an int must already be an HTTPStatus.
            assert isinstance(
                status, HTTPStatus
            ), f"Expected int or HTTPStatus, got {type(status)}"
        else:
            status = HTTPStatus(status)
        self._status = status
        self.detail = detail

    @property
    def status_code(self) -> int:
        """Return the HTTP error's status code, e.g. `404`."""
        return self._status.value

    @property
    def status_phrase(self) -> str:
        """Return the HTTP error's status phrase, e.g. `"Not Found"`."""
        return self._status.phrase

    @property
    def title(self) -> str:
        """Return the HTTP error's title, e.g. `"404 Not Found"`."""
        return str(self.status_code) + " " + self.status_phrase

    def __str__(self):
        return self.title
class UnsupportedMediaType(Exception):
    """Raised when trying to use an unsupported media type.

    # Parameters
    media_type (str):
        the unsupported media type.
    available (list of str):
        a list of supported media types.
    """

    def __init__(self, media_type: str, available: List[str]):
        self._media_type = media_type
        self._available = available

    def __str__(self):
        supported = ", ".join(self._available)
        return f"{self._media_type} (available: {supported})"
class RouteDeclarationError(Exception):
    """Raised when a route is ill-declared.

    NOTE(review): judging by the name this fires at route registration time
    rather than per-request -- confirm against the routing module.
    """
|
[
"florimond.manca@gmail.com"
] |
florimond.manca@gmail.com
|
44c36be3d14151335716e257311f97e0760b11f5
|
3712a929d1124f514ea7af1ac0d4a1de03bb6773
|
/开班笔记/个人项目/weather/venv/Scripts/pip3.6-script.py
|
d64c53761d2ebf358b440b0154196e579055f766
|
[] |
no_license
|
jiyabing/learning
|
abd82aa3fd37310b4a98b11ea802c5b0e37b7ad9
|
6059006b0f86aee9a74cfc116d2284eb44173f41
|
refs/heads/master
| 2020-04-02T20:47:33.025331
| 2018-10-26T05:46:10
| 2018-10-26T05:46:10
| 154,779,387
| 0
| 0
| null | null | null | null |
GB18030
|
Python
| false
| false
| 450
|
py
|
#!E:\学习文件\python学习资料\开班笔记\个人项目\weather\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.6'
# NOTE: auto-generated setuptools console-script wrapper for the venv's
# pip 10.0.1 -- regenerated on (re)install, so avoid editing by hand.
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the '-script.py(w)'/'.exe' wrapper suffix so pip sees a clean argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.6')()
    )
|
[
"yabing_ji@163.com"
] |
yabing_ji@163.com
|
92e2ffb6e01ccf60d49d3427184289857918536d
|
20f951bd927e4e5cde8ef7781813fcf0d51cc3ea
|
/fossir/web/forms/util.py
|
1f03bb0a3347fb4b7c709e2715f01c827d7c8c71
|
[] |
no_license
|
HodardCodeclub/SoftwareDevelopment
|
60a0fbab045cb1802925d4dd5012d5b030c272e0
|
6300f2fae830c0c2c73fe0afd9c684383bce63e5
|
refs/heads/master
| 2021-01-20T00:30:02.800383
| 2018-04-27T09:28:25
| 2018-04-27T09:28:25
| 101,277,325
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,463
|
py
|
from __future__ import unicode_literals
from collections import OrderedDict
from copy import deepcopy
from wtforms.fields.core import UnboundField
def get_form_field_names(form_class):
    """Return the list of field names of a WTForm.

    :param form_class: A `Form` subclass
    """
    cached = form_class._unbound_fields
    if cached:
        # The cache is a sequence of (name, field) pairs.
        return [name for name, _ in cached]
    # Mirror the discovery logic of wtforms' FormMeta.__call__: every public
    # attribute carrying a `_formfield` marker is an unbound form field.
    return [
        attr for attr in dir(form_class)
        if not attr.startswith('_')
        and hasattr(getattr(form_class, attr), '_formfield')
    ]
def inject_validators(form, field_name, validators, early=False):
    """Add extra validators to a form field.

    This function may be called from the ``__init__`` method of a
    form before the ``super().__init__()`` call or on a form class.

    When using a Form class note that this will modify the class, so
    all new instances of it will be affected!

    :param form: the `Form` instance or a `Form` subclass
    :param field_name: the name of the field to change
    :param validators: a list of validators to add
    :param early: whether to inject the validator before any existing
                  validators. this is needed if a field has a validator
                  that stops validation such as DataRequired and the
                  injected one is e.g. HiddenUnless which needs to run
                  even if the field is invalid
    """
    # Copy the UnboundField so other forms/classes sharing it stay untouched.
    unbound = deepcopy(getattr(form, field_name))
    assert isinstance(unbound, UnboundField)
    if 'validators' in unbound.kwargs:
        # Case 1: validators were passed as a keyword argument.
        if early:
            unbound.kwargs['validators'] = validators + unbound.kwargs['validators']
        else:
            unbound.kwargs['validators'] += validators
    elif len(unbound.args) > 1:
        # Case 2: validators were passed as the second positional argument.
        if early:
            validators_arg = validators + unbound.args[1]
        else:
            validators_arg = unbound.args[1] + validators
        unbound.args = unbound.args[:1] + (validators_arg,) + unbound.args[2:]
    else:
        # Case 3: the field had no validators yet.
        unbound.kwargs['validators'] = validators
    setattr(form, field_name, unbound)
    if form._unbound_fields is not None:
        # Keep the cached (name, field) list in sync with the replacement.
        unbound_fields = OrderedDict(form._unbound_fields)
        unbound_fields[field_name] = unbound
        form._unbound_fields = unbound_fields.items()
|
[
"hodardhazwinayo@gmail.com"
] |
hodardhazwinayo@gmail.com
|
bf25c491d026c56c2680ee54c6c6da0ef243d622
|
1d96db84225301d972f07cad95c2a13f4fbafa84
|
/python/my_PyFeyn/testing/pyfeyn-test2.py
|
82ff7bcb08a3d9741dea8c1a1909d75c7af37668
|
[] |
no_license
|
mattbellis/matts-work-environment
|
9eb9b25040dd8fb4a444819b01a80c2d5342b150
|
41988f3c310f497223445f16e2537e8d1a3f71bc
|
refs/heads/master
| 2023-08-23T09:02:37.193619
| 2023-08-09T05:36:32
| 2023-08-09T05:36:32
| 32,194,439
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 826
|
py
|
#! /usr/bin/env python
# Demo: draw a Feynman diagram with pyfeyn and write it to a PDF.
from pyfeyn.user import *

fd = FeynDiagram()

# Anchor points, a marked vertex, and midpoints used below.
p1 = Point(2, -2)
p2 = Point(-2, 2)
p3 = Vertex(1.25, 1.25, mark=CIRCLE)
p4 = p1.midpoint(p2)
p5 = p4.midpoint(p1)
p6 = p4.midpoint(p2)

# Filled blobs at the endpoints and at the central midpoint.
c1 = Circle(center=p1, radius=0.5, fill=[RED], points=[p1])
c2 = Circle(center=p2, radius=0.3, fill=[GREEN], points=[p2])
e1 = Ellipse(center=p4, xradius=0.5, yradius=1.0,
             fill=[MIDNIGHTBLUE], points=[p4])

# Propagators: fermion lines, arced gluon/photon lines, a bent gluon,
# and a self-loop on the marked vertex.
l0a = Fermion(p1, p4)
l0b = Fermion(p2, p4)
l1 = NamedLine["gluon"](p2, p1).arcThru(x=3, y=0)
l2 = NamedLine["photon"](p1, p2).arcThru(x=0, y=-3)
l3 = Gluon(p2, p3)
l4 = Photon(p1, p3)
l5 = Gluon(p5, p6).bend(-p5.distance(p6)/2.0)
loop1 = Line(p3, p3).arcThru(x=1.75, y=1.75).addArrow(0.55)

# LaTeX labels on selected lines.
l1.addLabel(r"\Pgluon")
l2.addLabel(r"\Pphoton")
l5.addLabel(r"$\Pgluon_1$")

fd.draw("pyfeyn-test2.pdf")
|
[
"matthew.bellis@gmail.com"
] |
matthew.bellis@gmail.com
|
c96e7a60b206a9a0ed2292d388e43a340c284cc5
|
5c2f520dde0cf8077facc0fcd9a92bc1a96d168b
|
/from_cpython/Lib/types.py
|
b8166edf94fcba78305f53822b17e69b61cb466e
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] |
permissive
|
nagyist/pyston
|
b613337a030ef21a3f03708febebe76cedf34c61
|
14ba2e6e6fb5c7316f66ccca86e6c6a836d96cab
|
refs/heads/master
| 2022-12-24T03:56:12.885732
| 2015-02-25T11:11:08
| 2015-02-25T11:28:13
| 31,314,596
| 0
| 0
|
NOASSERTION
| 2022-12-17T08:15:11
| 2015-02-25T13:24:41
|
Python
|
UTF-8
|
Python
| false
| false
| 2,342
|
py
|
"""Define names for all type symbols known in the standard interpreter.
Types that are part of optional modules (e.g. array) are not listed.
"""
import sys
# Iterators in Python aren't a matter of type but of protocol. A large
# and changing number of builtin types implement *some* flavor of
# iterator. Don't check the type! Use hasattr to check for both
# "__iter__" and "next" attributes instead.
NoneType = type(None)
TypeType = type
ObjectType = object
IntType = int
LongType = long
FloatType = float
BooleanType = bool
try:
ComplexType = complex
except NameError:
pass
StringType = str
# StringTypes is already outdated. Instead of writing "type(x) in
# types.StringTypes", you should use "isinstance(x, basestring)". But
# we keep around for compatibility with Python 2.2.
try:
UnicodeType = unicode
StringTypes = (StringType, UnicodeType)
except NameError:
StringTypes = (StringType,)
# Pyston change: 'buffer' is not implemented yet
# BufferType = buffer
TupleType = tuple
ListType = list
DictType = DictionaryType = dict
def _f(): pass
FunctionType = type(_f)
LambdaType = type(lambda: None) # Same as FunctionType
# Pyston change: there is no concept of a "code object" yet:
# CodeType = type(_f.func_code)
def _g():
yield 1
GeneratorType = type(_g())
class _C:
def _m(self): pass
ClassType = type(_C)
UnboundMethodType = type(_C._m) # Same as MethodType
_x = _C()
InstanceType = type(_x)
MethodType = type(_x._m)
BuiltinFunctionType = type(len)
BuiltinMethodType = type([].append) # Same as BuiltinFunctionType
ModuleType = type(sys)
FileType = file
XRangeType = xrange
# Pyston change: we don't support sys.exc_info yet
"""
try:
raise TypeError
except TypeError:
tb = sys.exc_info()[2]
TracebackType = type(tb)
FrameType = type(tb.tb_frame)
del tb
"""
SliceType = slice
# Pyston change: don't support this yet
# EllipsisType = type(Ellipsis)
# Pyston change: don't support this yet
# DictProxyType = type(TypeType.__dict__)
NotImplementedType = type(NotImplemented)
# For Jython, the following two types are identical
# Pyston change: don't support these yet
# GetSetDescriptorType = type(FunctionType.func_code)
# MemberDescriptorType = type(FunctionType.func_globals)
del sys, _f, _g, _C, _x # Not for export
|
[
"kmod@dropbox.com"
] |
kmod@dropbox.com
|
39c7a34748c1b3e7fb4a8f0b57485acabb6c4b65
|
e7efae2b83216d9621bd93390959d652de779c3d
|
/kyototycoon/tests/test_kyototycoon.py
|
b7e964c19cda690b27c0b54a0d45f7ae777a268f
|
[
"BSD-3-Clause",
"MIT",
"BSD-3-Clause-Modification",
"Unlicense",
"Apache-2.0",
"LGPL-3.0-only",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"CC0-1.0"
] |
permissive
|
DataDog/integrations-core
|
ee1886cc7655972b2791e6ab8a1c62ab35afdb47
|
406072e4294edff5b46b513f0cdf7c2c00fac9d2
|
refs/heads/master
| 2023-08-31T04:08:06.243593
| 2023-08-30T18:22:10
| 2023-08-30T18:22:10
| 47,203,045
| 852
| 1,548
|
BSD-3-Clause
| 2023-09-14T16:39:54
| 2015-12-01T16:41:45
|
Python
|
UTF-8
|
Python
| false
| false
| 1,657
|
py
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from copy import deepcopy
import pytest
from datadog_checks.kyototycoon import KyotoTycoonCheck
from .common import DEFAULT_INSTANCE, TAGS
# Flattened metric-name lists taken from the check's metric maps.
GAUGES = list(KyotoTycoonCheck.GAUGES.values())
DB_GAUGES = list(KyotoTycoonCheck.DB_GAUGES.values())
TOTALS = list(KyotoTycoonCheck.TOTALS.values())
RATES = list(KyotoTycoonCheck.RATES.values())
# all the RATE type metrics
ALL_RATES = TOTALS + RATES
def test_check(aggregator, dd_environment):
    """Run the check twice against the live environment and verify metrics."""
    kt = KyotoTycoonCheck('kyototycoon', {}, {})
    # Two runs: _assert_check() below expects two samples per metric.
    kt.check(deepcopy(DEFAULT_INSTANCE))
    kt.check(deepcopy(DEFAULT_INSTANCE))
    _assert_check(aggregator)
@pytest.mark.e2e
def test_e2e(dd_agent_check):
    """End-to-end run through the agent; RATE metrics yield one sample here."""
    aggregator = dd_agent_check(DEFAULT_INSTANCE, rate=True)
    _assert_check(aggregator, rate_metric_count=1)
def _assert_check(aggregator, rate_metric_count=2):
    """Assert every expected metric and the OK service check were submitted.

    :param rate_metric_count: expected sample count for RATE-type metrics
        (the e2e path passes 1, the unit-test path uses the default 2).
    """
    # prefix every metric with check name (kyototycoon.)
    # no replications, so ignore kyototycoon.replication.delay
    for mname in GAUGES:
        if mname != 'replication.delay':
            aggregator.assert_metric('kyototycoon.{}'.format(mname), tags=TAGS, count=2)
    # Per-database gauges carry an extra db tag.
    for mname in DB_GAUGES:
        aggregator.assert_metric('kyototycoon.{}'.format(mname), tags=TAGS + ['db:0'], count=2)
    for mname in ALL_RATES:
        aggregator.assert_metric('kyototycoon.{}_per_s'.format(mname), tags=TAGS, count=rate_metric_count)
    # service check
    aggregator.assert_service_check(KyotoTycoonCheck.SERVICE_CHECK_NAME, status=KyotoTycoonCheck.OK, tags=TAGS, count=2)
    aggregator.assert_all_metrics_covered()
|
[
"noreply@github.com"
] |
DataDog.noreply@github.com
|
af984f8b92fa6d9f2f3f4f2529a36de9c3b048da
|
3ac9cc9f54b1d6c6d5e05317bb0b977f4c1b363d
|
/profab/main.py
|
811a78a9ea7a173bb25ab8d19309d19a26ddd8c8
|
[
"Apache-2.0",
"BSL-1.0"
] |
permissive
|
sittisak/profab
|
5f5a92d8da7a07af80727eee337993929931ba2a
|
ff3967397b31986c9396f70a44a565d85178e6a6
|
refs/heads/master
| 2020-04-05T14:05:47.613997
| 2016-11-22T02:50:12
| 2016-11-22T02:50:12
| 94,763,557
| 0
| 1
| null | 2017-08-21T09:09:46
| 2017-06-19T10:09:29
|
Python
|
UTF-8
|
Python
| false
| false
| 541
|
py
|
"""Helper functions for the entry point scripts.
"""
def process_arguments(*args):
    """Do the initial argument parse phase.

    Produces a list of (name, value) instruction tuples: a ``--name`` token
    consumes the following token as its value; any other token becomes a
    (token, None) pair.
    """
    instructions = []
    tokens = list(args)
    i = 0
    while i < len(tokens):
        token = tokens[i]
        if token.startswith('--'):
            # Flag: pair the stripped name with the next token as its value.
            instructions.append((token[2:], tokens[i + 1]))
            i += 2
        else:
            instructions.append((token, None))
            i += 1
    return instructions
|
[
"k@kirit.com"
] |
k@kirit.com
|
156cee855fb4337b19f958444b0a101e766d89a5
|
cee0df2a184f3f99306193b9f34aba16889cc57c
|
/pvextractor/utils/wcs_utils.py
|
88cd249959d5af27eeb395dc415eeb0160e41646
|
[] |
no_license
|
teuben/pvextractor
|
169f3317eb2d53013eb981fca18f69d17fa3a8b3
|
889c108a964d8130b1a17066890c7325b57daf4c
|
refs/heads/master
| 2021-01-14T13:16:48.485846
| 2014-04-18T23:29:17
| 2014-04-18T23:29:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,278
|
py
|
import numpy as np
from astropy import units as u
from astropy import wcs
def get_pixel_scales(mywcs, assert_square=True):
    """Return the absolute pixel scale of the celestial part of `mywcs`,
    in the units of CDELT.

    Raises ValueError when `assert_square` is True and the two axes have
    different scales.
    """
    # borrowed from aplpy
    mywcs = mywcs.sub([wcs.WCSSUB_CELESTIAL])
    cdelt = np.matrix(mywcs.wcs.get_cdelt())
    pc = np.matrix(mywcs.wcs.get_pc())
    # Full linear transform is CDELT * PC; np.matrix makes `*` a matrix
    # product. NOTE(review): np.matrix is deprecated in modern NumPy.
    scale = np.array(cdelt * pc)
    if (assert_square and
        (abs(cdelt[0,0]) != abs(cdelt[0,1]) or
        abs(pc[0,0]) != abs(pc[1,1]) or
        abs(scale[0,0]) != abs(scale[0,1]))):
        raise ValueError("Non-square pixels. Please resample data.")
    return abs(scale[0,0])
def sanitize_wcs(mywcs):
    """Validate a 3-D WCS as PPV (two celestial axes plus one spectral axis).

    When the third axis is not already recognized as spectral, its CTYPE is
    inferred from CUNIT (by physical dimension) or from the CTYPE string;
    raises ValueError when the axes cannot be identified as PPV.
    """
    pc = np.matrix(mywcs.wcs.get_pc())
    # The 3rd axis must be independent of the two celestial axes.
    if (pc[:,2].sum() != pc[2,2] or pc[2,:].sum() != pc[2,2]):
        raise ValueError("Non-independent 3rd axis.")
    axtypes = mywcs.get_axis_types()
    if ((axtypes[0]['coordinate_type'] != 'celestial' or
         axtypes[1]['coordinate_type'] != 'celestial' or
         axtypes[2]['coordinate_type'] != 'spectral')):
        cunit3 = mywcs.wcs.cunit[2]
        ctype3 = mywcs.wcs.ctype[2]
        if cunit3 != '':
            # Infer the spectral CTYPE from the unit's physical dimension.
            cunit3 = u.Unit(cunit3)
            if cunit3.is_equivalent(u.m/u.s):
                mywcs.wcs.ctype[2] = 'VELO'
            elif cunit3.is_equivalent(u.Hz):
                mywcs.wcs.ctype[2] = 'FREQ'
            elif cunit3.is_equivalent(u.m):
                mywcs.wcs.ctype[2] = 'WAVE'
            else:
                raise ValueError("Could not determine type of 3rd axis.")
        elif ctype3 != '':
            # Fall back to substring matching on the existing CTYPE.
            if 'VELO' in ctype3:
                mywcs.wcs.ctype[2] = 'VELO'
            elif 'FELO' in ctype3:
                mywcs.wcs.ctype[2] = 'VELO-F2V'
            elif 'FREQ' in ctype3:
                mywcs.wcs.ctype[2] = 'FREQ'
            elif 'WAVE' in ctype3:
                mywcs.wcs.ctype[2] = 'WAVE'
            else:
                raise ValueError("Could not determine type of 3rd axis.")
        else:
            raise ValueError("Cube axes not in expected orientation: PPV")
    return mywcs
def wcs_spacing(mywcs, spacing):
    """
    Return spacing in pixels

    Parameters
    ----------
    mywcs : `~astropy.wcs.WCS`
    spacing : `~astropy.units.Quantity` or float
        An angular quantity (converted using the WCS pixel scale), a plain
        number already in pixels, or None for the default of one pixel.
    """
    if spacing is not None:
        if hasattr(spacing,'unit'):
            if not spacing.unit.is_equivalent(u.arcsec):
                raise TypeError("Spacing is not in angular units.")
            else:
                # angular quantity / (deg per pixel) -> pixels
                platescale = get_pixel_scales(mywcs)
                newspacing = spacing.to(u.deg).value / platescale
        else:
            # if no units, assume pixels already
            newspacing = spacing
    else:
        # default: one pixel spacing
        newspacing = 1
    return newspacing
def pixel_to_wcs_spacing(mywcs, pspacing):
    """
    Convert a spacing in pixels to an angular spacing in degrees.

    Parameters
    ----------
    mywcs : `~astropy.wcs.WCS`
    pspacing : float
        Spacing in pixels.
    """
    platescale = get_pixel_scales(mywcs)
    # pixels * (scale per pixel) -> quantity in degrees
    wspacing = platescale * pspacing * u.deg
    return wspacing
def get_wcs_system_name(mywcs):
    """Return 'galactic' or 'icrs' based on the celestial CTYPE of `mywcs`.

    TODO: move to astropy.wcs.utils
    """
    ct = mywcs.sub([wcs.WCSSUB_CELESTIAL]).wcs.ctype
    if 'GLON' in ct[0]:
        return 'galactic'
    elif 'RA' in ct[0]:
        # NOTE(review): an 'RA' CTYPE could also be FK4/FK5; 'icrs' is assumed.
        return 'icrs'
    else:
        raise ValueError("Unrecognized coordinate system")
|
[
"keflavich@gmail.com"
] |
keflavich@gmail.com
|
9b61853924eb18e6ee43387888b12daaf7b0dea5
|
0b0ca6853f351530384fcb9f3f9c91d4c034512b
|
/website/opensource/views.py
|
ba63fbd77e3145251be0ac3cead121e00526bdd4
|
[] |
no_license
|
thanhleviet/syrusakbary.com
|
d767129c6b00c092816e3cb58f063d1b052f0df0
|
ca04f55462db72bb603bfc0453b9404b04ee6687
|
refs/heads/master
| 2021-01-18T09:30:31.278757
| 2012-07-22T20:32:12
| 2012-07-22T20:32:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 930
|
py
|
# Create your views here.
#from django.http import HttpResponse
#from coffin.template import add_to_builtins
#add_to_builtins('jinja2-mediasync.media')
#from coffin.shortcuts import render_to_response
#from django.shortcuts import render_to_response
#from django.template import add_to_builtins
#add_to_builtins('mediasync.templatetags.media')
#from django.template import RequestContext
from django.views.generic import ListView, DetailView
from .models import Project
class OpenSourceDetailView(DetailView):
    """Detail page for a single open-source project."""
    template_name='opensource/opensource_detail.jade'
    #queryset = Project.objects.all()
    # NOTE(review): an empty queryset means no object can ever be resolved by
    # this view — confirm whether the commented line above was intended.
    queryset = []
class OpenSourceListView(ListView):
    """List page showing all open-source projects."""
    template_name='opensource/opensource_list.jade'
    queryset = Project.objects.all()
    # Template context variable holding the queryset.
    context_object_name = "project_list"
#def index(request):
# return render_to_response('projects/index.html',context_instance=RequestContext(request))
|
[
"me@syrusakbary.com"
] |
me@syrusakbary.com
|
2ca98c5399ca4e051f6ba3b6370573ba00678d56
|
ee3e0a69093e82deff1bddf607f6ce0dde372c48
|
/ndb769/개념/linked_list.py
|
96c6f60d40eb889dc16b88a18505851cfabdcac7
|
[] |
no_license
|
cndqjacndqja/algorithm_python
|
202f9990ea367629aecdd14304201eb6fa2aa37e
|
843269cdf8fb9d4c215c92a97fc2d007a8f96699
|
refs/heads/master
| 2023-06-24T08:12:29.639424
| 2021-07-24T05:08:46
| 2021-07-24T05:08:46
| 255,552,956
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,117
|
py
|
class Node:
    """A single element of a singly linked list."""

    def __init__(self, data):
        self.data = data   # payload stored at this node
        self.next = None   # link to the following node; None at the tail
class LinkedList:
    """Singly linked list with index-based insertion and deletion."""

    def __init__(self, data):
        # The list always starts with a single node holding `data`.
        self.head = Node(data)

    def append(self, data):
        """Attach a new node holding `data` after the current tail."""
        tail = self.head
        while tail.next is not None:
            tail = tail.next
        tail.next = Node(data)

    def get_node(self, index):
        """Return the node at 0-based position `index`."""
        node = self.head
        for _ in range(index):
            node = node.next
        return node

    def add_node(self, index, value):
        """Insert a new node holding `value` at position `index`."""
        fresh = Node(value)
        if index == 0:
            fresh.next = self.head
            self.head = fresh
        else:
            prev = self.get_node(index - 1)
            fresh.next = prev.next
            prev.next = fresh

    def delete_node(self, index):
        """Remove the node at position `index`."""
        if index == 0:
            self.head = self.head.next
        else:
            prev = self.get_node(index - 1)
            prev.next = prev.next.next
if __name__ == "__main__":
data = list(input())
list = LinkedList(data[0])
for i in range(1, len(data)):
list.append(data[i])
|
[
"cndqjacndqja@gmail.com"
] |
cndqjacndqja@gmail.com
|
7b095e3e066626392f53c9d6e431e87be22263e4
|
2a7fe1988b9a9aaf5e301637883319c43d38bcb9
|
/users/serializers.py
|
e18ccb36912540ec79d98c4ac36785882da3fc0c
|
[] |
no_license
|
kenassash/django_rest_notes
|
65d799b32f520faef2dbd02fae2e05efa8535797
|
eab022e6e57aaa06918ee5ab80586c8a1a8894c3
|
refs/heads/master
| 2023-09-03T19:36:28.304504
| 2021-10-27T08:19:14
| 2021-10-27T08:19:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 747
|
py
|
from rest_framework.serializers import HyperlinkedModelSerializer
# from .models import NoteUser
from .models import User
class UserModelSerializer(HyperlinkedModelSerializer):
    """Full user representation (v1), including the admin flags."""
    class Meta:
        # model = NoteUser
        # fields = ('username', 'firstname', 'lastname', 'email')
        # fields = '__all__'
        model = User
        fields = ('id', 'url', 'username', 'first_name', 'last_name', 'email', 'is_superuser', 'is_staff')
        # fields = ('username', 'first_name', 'last_name', 'email')
        # fields = '__all__'
class UserModelSerializerV2(HyperlinkedModelSerializer):
    """v2 user representation: same as v1 minus the admin flags."""
    class Meta:
        model = User
        # fields = '__all__'
        fields = ('id', 'url', 'username', 'first_name', 'last_name', 'email')
|
[
"travis@travis-ci.org"
] |
travis@travis-ci.org
|
6da64ddbe8b389fea336607a103d15766e44bd65
|
3dd58a087b59bdba102f2e6e95b3e200a9530c4c
|
/django_demo/mysite/polls/tests.py
|
7d7c15443ccc50f0a7b07c5f29af512cd9f3697e
|
[] |
no_license
|
natepill/Server-Side-Architecture
|
b17926cf467083182e96257589dfdc7c3d5ea40e
|
4765136c5fe9d0eedc6b50a2bbbb0c9458170694
|
refs/heads/master
| 2022-12-11T03:41:46.296869
| 2019-06-24T19:53:15
| 2019-06-24T19:53:15
| 189,689,598
| 0
| 0
| null | 2022-12-08T01:22:18
| 2019-06-01T04:19:09
|
Python
|
UTF-8
|
Python
| false
| false
| 5,492
|
py
|
import datetime

from django.test import TestCase
from django.urls import reverse
from django.utils import timezone

from .models import Choice, Question
def create_question(question_text, days):
    """
    Create a question with the given `question_text` and published the
    given number of `days` offset to now (negative for questions published
    in the past, positive for questions that have yet to be published).
    """
    pub_date = timezone.now() + datetime.timedelta(days=days)
    return Question.objects.create(question_text=question_text, pub_date=pub_date)
def add_vote_to_choice(choice):
    # NOTE(review): this bumps the counter in memory only; if `choice` is a
    # Django model instance the change is not persisted without .save().
    choice.votes = choice.votes + 1
class QuestionModelTests(TestCase):
    """Unit tests for Question.was_published_recently().

    Fix: the original contained the same old-question test twice — once under
    the misspelled name ``test_was_publised_recently_with_old_question`` and
    once correctly spelled. The duplicate is removed.
    """

    def test_was_published_recently_with_future_question(self):
        """
        was_published_recently() returns False for questions whose pub_date
        is in the future.
        """
        time = timezone.now() + datetime.timedelta(days=30)
        future_question = Question(pub_date=time)
        self.assertIs(future_question.was_published_recently(), False)

    def test_was_published_recently_with_old_question(self):
        """
        was_published_recently() returns False for questions whose pub_date
        is older than 1 day.
        """
        time = timezone.now() - datetime.timedelta(days=1, seconds=1)
        old_question = Question(pub_date=time)
        self.assertIs(old_question.was_published_recently(), False)

    def test_was_published_recently_with_recent_question(self):
        """
        was_published_recently() returns True for questions whose pub_date
        is within the last day.
        """
        time = timezone.now() - datetime.timedelta(hours=23, minutes=59, seconds=59)
        recent_question = Question(pub_date=time)
        self.assertIs(recent_question.was_published_recently(), True)
class QuestionIndexViewTests(TestCase):
    """Tests for the poll index view ('polls:index')."""

    def test_no_questions(self):
        """
        If no questions exist, an appropriate message is displayed.
        """
        response = self.client.get(reverse('polls:index'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "No polls are available.")
        self.assertQuerysetEqual(response.context['latest_question_list'], [])

    def test_past_question(self):
        """
        Questions with a pub_date in the past are displayed on the
        index page.
        """
        create_question(question_text="Past question.", days=-30)
        response = self.client.get(reverse('polls:index'))
        self.assertQuerysetEqual(
            response.context['latest_question_list'],
            ['<Question: Past question.>']
        )

    def test_future_question(self):
        """
        Questions with a pub_date in the future aren't displayed on
        the index page.
        """
        create_question(question_text="Future question.", days=30)
        response = self.client.get(reverse('polls:index'))
        self.assertContains(response, "No polls are available.")
        self.assertQuerysetEqual(response.context['latest_question_list'], [])

    def test_future_question_and_past_question(self):
        """
        Even if both past and future questions exist, only past questions
        are displayed.
        """
        create_question(question_text="Past question.", days=-30)
        create_question(question_text="Future question.", days=30)
        response = self.client.get(reverse('polls:index'))
        self.assertQuerysetEqual(
            response.context['latest_question_list'],
            ['<Question: Past question.>']
        )

    def test_two_past_questions(self):
        """
        The questions index page may display multiple questions.
        """
        create_question(question_text="Past question 1.", days=-30)
        create_question(question_text="Past question 2.", days=-5)
        response = self.client.get(reverse('polls:index'))
        # Most recent publication date first.
        self.assertQuerysetEqual(
            response.context['latest_question_list'],
            ['<Question: Past question 2.>', '<Question: Past question 1.>']
        )
class QuestionDetailViewTests(TestCase):
    """Tests for the question detail view ('polls:detail')."""

    def test_future_question(self):
        """
        The detail view of a question with a pub_date in the future
        returns a 404 not found.
        """
        future_question = create_question(question_text='Future question.', days=5)
        url = reverse('polls:detail', args=(future_question.id,))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def test_past_question(self):
        """
        The detail view of a question with a pub_date in the past
        displays the question's text.
        """
        past_question = create_question(question_text='Past Question.', days=-5)
        url = reverse('polls:detail', args=(past_question.id,))
        response = self.client.get(url)
        self.assertContains(response, past_question.question_text)
class QuestionResultViewTests(TestCase):
    """Tests for the question results view."""

    def test_votes_for_choice(self):
        # Fix: create_question() requires the `days` argument; the original
        # call omitted it and would raise TypeError at test time. days=0
        # publishes the question "now".
        new_question = create_question(
            question_text='Testing that voting for choices work?', days=0)
        # NOTE(review): this test still asserts nothing and passes vacuously —
        # it looks unfinished; add assertions against the results view.
[
"natepill@gmail.com"
] |
natepill@gmail.com
|
de826dfae0fa88b16b9f28a90bfb267c18c844c7
|
ff2b4b972865ab82464d550934329117500d3195
|
/bfg9000/tools/mkdir_p.py
|
f12325a7350d79273dadc255603b865d4c34c859
|
[
"BSD-3-Clause"
] |
permissive
|
juntalis/bfg9000
|
cd38a9194e6c08a4fbcf3be29f37c00bfa532588
|
594eb2aa7c259855e7658d69fe84acb6dad890fa
|
refs/heads/master
| 2021-08-08T21:35:33.896506
| 2017-11-11T08:47:57
| 2017-11-11T08:47:57
| 110,331,128
| 0
| 0
| null | 2017-11-11T08:46:05
| 2017-11-11T08:46:04
| null |
UTF-8
|
Python
| false
| false
| 396
|
py
|
from . import tool
from .common import SimpleCommand
@tool('mkdir_p')
class MkdirP(SimpleCommand):
    """Tool wrapper for recursively creating directories (`mkdir -p`)."""

    def __init__(self, env):
        # Windows has no `mkdir -p`; fall back to `doppel -p` there.
        default = 'doppel -p' if env.platform.name == 'windows' else 'mkdir -p'
        SimpleCommand.__init__(self, env, name='mkdir_p', env_var='MKDIR_P',
                               default=default)

    def _call(self, cmd, path):
        # Build the command line: base command plus the target path.
        return cmd + [path]
|
[
"jporter@mozilla.com"
] |
jporter@mozilla.com
|
0dc860e73fdb9073bea7b31d75831fe246b55cd2
|
35c3999aa3f6a9e31ae6f9170ac0235da4fe7e11
|
/irekua_rest_api/serializers/devices/physical_devices.py
|
c9b5828b1b4bdec7cd965b6ca25e906f17db16f2
|
[
"BSD-2-Clause"
] |
permissive
|
CONABIO-audio/irekua-rest-api
|
28cf9806330c8926437542ae9152b8a7da57714f
|
35cf5153ed7f54d12ebad2ac07d472585f04e3e7
|
refs/heads/master
| 2022-12-12T09:24:18.217032
| 2020-08-15T21:01:20
| 2020-08-15T21:01:20
| 219,046,247
| 0
| 4
|
BSD-4-Clause
| 2022-12-08T10:54:47
| 2019-11-01T19:03:10
|
Python
|
UTF-8
|
Python
| false
| false
| 2,098
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import serializers
from irekua_database.models import PhysicalDevice
from irekua_rest_api.serializers.base import IrekuaModelSerializer
from irekua_rest_api.serializers.base import IrekuaHyperlinkedModelSerializer
from irekua_rest_api.serializers.users import users
from . import devices
class SelectSerializer(IrekuaModelSerializer):
    """Minimal representation (url + id) of a physical device, for selection."""
    class Meta:
        model = PhysicalDevice
        fields = (
            'url',
            'id',
        )
class ListSerializer(IrekuaModelSerializer):
    """List-row representation with flattened device type/brand/model."""
    # Read-only fields pulled from the related `device` record.
    type = serializers.CharField(
        read_only=True,
        source='device.device_type.name')
    brand = serializers.CharField(
        read_only=True,
        source='device.brand.name')
    model = serializers.CharField(
        read_only=True,
        source='device.model')
    class Meta:
        model = PhysicalDevice
        fields = (
            'url',
            'id',
            'serial_number',
            'type',
            'brand',
            'model',
        )
class DetailSerializer(IrekuaHyperlinkedModelSerializer):
    """Full read representation with nested device and owner."""
    device = devices.SelectSerializer(many=False, read_only=True)
    owner = users.SelectSerializer(many=False, read_only=True)
    class Meta:
        model = PhysicalDevice
        fields = (
            'url',
            'serial_number',
            'owner',
            'metadata',
            'bundle',
            'device',
            'created_on',
            'modified_on',
        )
class CreateSerializer(IrekuaModelSerializer):
    """Write serializer for creating devices; owner is set server-side."""
    class Meta:
        model = PhysicalDevice
        fields = (
            'serial_number',
            'device',
            'metadata',
            'bundle',
        )
    def create(self, validated_data):
        # The owner always comes from the authenticated request user and is
        # never taken from client-supplied data.
        user = self.context['request'].user
        validated_data['owner'] = user
        return super().create(validated_data)
class UpdateSerializer(IrekuaModelSerializer):
    """Write serializer for updates; only serial number and metadata mutable."""
    class Meta:
        model = PhysicalDevice
        fields = (
            'serial_number',
            'metadata',
        )
|
[
"santiago.mbal@gmail.com"
] |
santiago.mbal@gmail.com
|
dc09c3c13f4ca2119ef4419cf567c1bbe2bf7f42
|
81bdc1dccfb95877e5f376527c23cb5c72a13922
|
/pyl2extra/gui/debugger/remote_window.py
|
05041c0221dd8c677c8b0d68e7c8136c0ee9f4e5
|
[
"BSD-3-Clause"
] |
permissive
|
TNick/pyl2extra
|
1fb5be10448bc09018e2b0ac294b2e03fb146a57
|
323e1ecefeedc7d196de6d7ac6d8eceecb756333
|
refs/heads/master
| 2021-01-22T07:04:10.082374
| 2015-08-11T09:57:17
| 2015-08-11T09:57:17
| 34,400,301
| 0
| 1
| null | 2015-04-22T17:19:50
| 2015-04-22T15:58:21
|
Python
|
UTF-8
|
Python
| false
| false
| 2,954
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: Nicu Tofan <nicu.tofan@gmail.com>
"""
from PyQt4 import QtGui, QtCore
from pyl2extra.gui.guihelpers import center
class RemoteDialog(QtGui.QDialog):
    """
    Allows selecting remote in order to debug on that remote.
    """
    def __init__(self, mw):
        """
        Constructor

        :param mw: the owning main window, kept for later use.
        """
        super(RemoteDialog, self).__init__()
        self.mw = mw
        self.init_ui()
    def init_ui(self):
        """
        Prepares the GUI.
        """
        self.resize(300, 200)
        self.setWindowTitle('Connect to remote')
        center(self)
        # Standard Ok/Cancel button box at the bottom of the dialog.
        self.button_box = QtGui.QDialogButtonBox(self)
        self.button_box.setGeometry(QtCore.QRect(150, 250, 341, 32))
        self.button_box.setOrientation(QtCore.Qt.Horizontal)
        self.button_box.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
        self.button_box.setObjectName("button_box")
        # Input widgets: remote address plus the two ZMQ-style port numbers.
        lbl_address = QtGui.QLabel('Address')
        lbl_post_rcv = QtGui.QLabel('Control port')
        lbl_port_sub = QtGui.QLabel('Broadcast port')
        le_address = QtGui.QLineEdit()
        le_address.setPlaceholderText('The address of the remote machine')
        le_address.setToolTip('This may also be an ip address.')
        le_address.setText('127.0.0.1')
        sp_port_rcv = QtGui.QSpinBox()
        sp_port_rcv.setMinimum(1024)
        # NOTE(review): 65565 looks like a typo for 65535 (max TCP port).
        sp_port_rcv.setMaximum(65565)
        sp_port_rcv.setValue(5955)
        sp_port_rcv.setToolTip('Port for command and control.')
        sp_port_sub = QtGui.QSpinBox()
        sp_port_sub.setMinimum(1024)
        sp_port_sub.setMaximum(65565)
        sp_port_sub.setValue(5956)
        sp_port_sub.setToolTip('Port where the remote debugger publishes information.')
        # Form grid: label column + editor column.
        grid1 = QtGui.QGridLayout()
        grid1.setSpacing(10)
        grid1.addWidget(lbl_address, 1, 0)
        grid1.addWidget(le_address, 1, 1)
        grid1.addWidget(lbl_post_rcv, 2, 0)
        grid1.addWidget(sp_port_rcv, 2, 1)
        grid1.addWidget(lbl_port_sub, 3, 0)
        grid1.addWidget(sp_port_sub, 3, 1)
        grid = QtGui.QVBoxLayout()
        grid.setSpacing(10)
        grid.addLayout(grid1)
        grid.addWidget(self.button_box)
        self.setLayout(grid)
        # Wire Ok/Cancel to the dialog's accept/reject slots.
        QtCore.QObject.connect(self.button_box, QtCore.SIGNAL("accepted()"), self.accept)
        QtCore.QObject.connect(self.button_box, QtCore.SIGNAL("rejected()"), self.reject)
        QtCore.QMetaObject.connectSlotsByName(self)
        self.le_address = le_address
        self.sp_port_rcv = sp_port_rcv
        self.sp_port_sub = sp_port_sub
    def get_values(self):
        """
        Return the values selected by the user.

        :return: dict with keys 'address', 'rport' (control port) and
                 'pport' (broadcast/publish port).
        """
        values = {'address': self.le_address.text().strip(),
                  'rport': self.sp_port_rcv.value(),
                  'pport': self.sp_port_sub.value()}
        return values
|
[
"nicu.tofan@gmail.com"
] |
nicu.tofan@gmail.com
|
e452abec56c616eb8b46b78e240cb845aecc2319
|
6b0d0aec9704d70663fe0edc2a6624a689cc081e
|
/src/app/pre/wav.py
|
db52981330702787a0ef82c48d08f68a0e589f2b
|
[
"BSD-3-Clause"
] |
permissive
|
stefantaubert/tacotron2
|
086d81b38b2c49655df6b0d8d63c633e7531399a
|
8475f014391c5066cfe0b92b6c74568639be5e79
|
refs/heads/master
| 2023-03-29T21:07:02.266973
| 2020-11-25T09:57:40
| 2020-11-25T09:57:40
| 267,858,113
| 5
| 0
|
BSD-3-Clause
| 2020-05-29T12:56:56
| 2020-05-29T12:56:55
| null |
UTF-8
|
Python
| false
| false
| 3,818
|
py
|
import os
from functools import partial
from typing import Any
from src.app.pre.ds import get_ds_dir, load_ds_csv
from src.core.common.utils import get_subdir
from src.core.pre.wav import (WavData, WavDataList, normalize, preprocess,
remove_silence, stereo_to_mono, upsample)
_wav_data_csv = "data.csv"
def _get_wav_root_dir(ds_dir: str, create: bool = False):
    """Return (optionally creating) the `wav` root folder of a dataset dir."""
    return get_subdir(ds_dir, "wav", create)
def get_wav_dir(ds_dir: str, wav_name: str, create: bool = False):
    """Return (optionally creating) the folder of one named wav set."""
    return get_subdir(_get_wav_root_dir(ds_dir, create), wav_name, create)
def load_wav_csv(wav_dir: str) -> WavDataList:
    """Load the wav metadata CSV (`data.csv`) from a wav set folder."""
    path = os.path.join(wav_dir, _wav_data_csv)
    return WavDataList.load(WavData, path)
def save_wav_csv(wav_dir: str, wav_data: WavDataList):
    """Write the wav metadata CSV (`data.csv`) into a wav set folder."""
    path = os.path.join(wav_dir, _wav_data_csv)
    wav_data.save(path)
def preprocess_wavs(base_dir: str, ds_name: str, wav_name: str):
    """Create the initial wav set for a dataset; no-op if it already exists."""
    print("Preprocessing wavs...")
    ds_dir = get_ds_dir(base_dir, ds_name)
    wav_dir = get_wav_dir(ds_dir, wav_name)
    # Existence of the destination folder doubles as the "already done" flag.
    if os.path.isdir(wav_dir):
        print("Already exists.")
    else:
        data = load_ds_csv(ds_dir)
        wav_data = preprocess(data)
        os.makedirs(wav_dir)
        save_wav_csv(wav_dir, wav_data)
def _wav_op(base_dir: str, ds_name: str, origin_wav_name: str, destination_wav_name: str, op: Any):
    """Apply `op(data, dest_dir)` to an existing wav set, producing a new one.

    Skips the whole operation when the destination set already exists.
    """
    ds_dir = get_ds_dir(base_dir, ds_name)
    dest_wav_dir = get_wav_dir(ds_dir, destination_wav_name)
    if os.path.isdir(dest_wav_dir):
        print("Already exists.")
    else:
        orig_wav_dir = get_wav_dir(ds_dir, origin_wav_name)
        assert os.path.isdir(orig_wav_dir)
        data = load_wav_csv(orig_wav_dir)
        os.makedirs(dest_wav_dir)
        # `op` transforms the audio and returns the updated metadata list.
        wav_data = op(data, dest_wav_dir)
        save_wav_csv(dest_wav_dir, wav_data)
def wavs_normalize(base_dir: str, ds_name: str, orig_wav_name: str, dest_wav_name: str):
    """Create a normalized copy of a wav set (skipped if it already exists)."""
    print("Normalizing wavs...")
    # Fix: `partial(normalize)` bound no arguments and was a redundant
    # wrapper; pass the callable directly.
    _wav_op(base_dir, ds_name, orig_wav_name, dest_wav_name, normalize)
def wavs_upsample(base_dir: str, ds_name: str, orig_wav_name: str, dest_wav_name: str, rate: int):
    """Resample a wav set to `rate` Hz into a new wav set."""
    print("Resampling wavs...")
    op = partial(upsample, new_rate=rate)
    _wav_op(base_dir, ds_name, orig_wav_name, dest_wav_name, op)
def wavs_stereo_to_mono(base_dir: str, ds_name: str, orig_wav_name: str, dest_wav_name: str):
    """Create a mono copy of a stereo wav set (skipped if it already exists)."""
    print("Converting wavs from stereo to mono...")
    # Fix: `partial(stereo_to_mono)` bound no arguments and was a redundant
    # wrapper; pass the callable directly.
    _wav_op(base_dir, ds_name, orig_wav_name, dest_wav_name, stereo_to_mono)
def wavs_remove_silence(base_dir: str, ds_name: str, orig_wav_name: str, dest_wav_name: str, chunk_size: int, threshold_start: float, threshold_end: float, buffer_start_ms: float, buffer_end_ms: float):
    """Trim silence from all wavs of one variant into a new variant."""
    print("Removing silence in wavs...")
    trim_op = partial(
        remove_silence,
        chunk_size=chunk_size,
        threshold_start=threshold_start,
        threshold_end=threshold_end,
        buffer_start_ms=buffer_start_ms,
        buffer_end_ms=buffer_end_ms,
    )
    _wav_op(base_dir, ds_name, orig_wav_name, dest_wav_name, trim_op)
if __name__ == "__main__":
    # Example pipeline: prepare LJSpeech and THCHS-30, then normalize,
    # de-silence and upsample the THCHS wavs. The same model directory is
    # used throughout, so hoist it into one local.
    base_dir = "/datasets/models/taco2pt_v5"

    preprocess_wavs(base_dir=base_dir, ds_name="ljs", wav_name="22050kHz")
    preprocess_wavs(base_dir=base_dir, ds_name="thchs", wav_name="16000kHz")
    wavs_normalize(
        base_dir=base_dir,
        ds_name="thchs",
        orig_wav_name="16000kHz",
        dest_wav_name="16000kHz_normalized",
    )
    wavs_remove_silence(
        base_dir=base_dir,
        ds_name="thchs",
        orig_wav_name="16000kHz_normalized",
        dest_wav_name="16000kHz_normalized_nosil",
        threshold_start=-20,
        threshold_end=-30,
        chunk_size=5,
        buffer_start_ms=100,
        buffer_end_ms=150,
    )
    wavs_upsample(
        base_dir=base_dir,
        ds_name="thchs",
        orig_wav_name="16000kHz_normalized_nosil",
        dest_wav_name="22050kHz_normalized_nosil",
        rate=22050,
    )
|
[
"stefan.taubert@posteo.de"
] |
stefan.taubert@posteo.de
|
541a2bb132c30e724fa65dfdccfd3b3de2e89856
|
7f651a7dfa7cd101ddf9dd133ff78bfe996eeb3f
|
/main.py
|
910916d224daa088ba293871bad373666348f2d1
|
[
"MIT"
] |
permissive
|
TrendingTechnology/PyPi-Bot
|
33071b0e789509dfc267ec25a3e11417d60c1395
|
bc2ee98981af4bc9f415a1f968bf872380d017f0
|
refs/heads/main
| 2023-06-30T08:50:57.641601
| 2021-08-02T13:47:28
| 2021-08-02T13:47:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,257
|
py
|
# Author: Fayas (https://github.com/FayasNoushad) (@FayasNoushad)
import os
import requests
from requests.utils import requote_uri
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
# Lookup endpoint; the package name is appended as the query value.
API = "https://api.abirhasan.wtf/pypi?query="

# /start reply template; ``{}`` is filled with the user's mention.
START_TEXT = """
Hello {},
I am a pypi package search telegram bot.
- Send a pypi package name.
- I will send the information of package.
Made by @FayasNoushad
"""

# Promo button row appended to every reply keyboard.
BUTTONS = [InlineKeyboardButton('⚙ Join Updates Channel ⚙', url='https://telegram.me/FayasNoushad')]

# Pyrogram client; credentials are read from the environment, so a missing
# variable fails fast at startup with a KeyError.
Bot = Client(
    "PyPi-Bot",
    bot_token = os.environ["BOT_TOKEN"],
    api_id = int(os.environ["API_ID"]),
    api_hash = os.environ["API_HASH"]
)
@Bot.on_message(filters.private & filters.command(["start", "help", "about"]))
async def start(bot, update):
    """Reply to /start, /help and /about in private chat with the intro text."""
    greeting = START_TEXT.format(update.from_user.mention)
    keyboard = InlineKeyboardMarkup([BUTTONS])
    await update.reply_text(
        text=greeting,
        disable_web_page_preview=True,
        reply_markup=keyboard,
        quote=True
    )
@Bot.on_message(filters.text)
async def pypi_info(bot, update):
    """Look up the package named in a text message and reply with its info.

    In private chat the whole message is the package name; elsewhere the
    second word is used (e.g. ``/pypi requests``). Any failure (bad query,
    missing word, API error) is silently ignored — the bot simply does not
    reply.
    """
    try:
        if update.chat.type == "private":
            query = update.text
        else:
            query = update.text.split()[1]
        text = pypi_text(query)
        reply_markup = InlineKeyboardMarkup([pypi_buttons(query), BUTTONS])
        await update.reply_text(
            text=text,
            disable_web_page_preview=True,
            reply_markup=reply_markup,
            quote=True
        )
    except Exception:
        # BUG FIX: the original bare ``except:`` also swallowed SystemExit
        # and KeyboardInterrupt; catching Exception keeps the intended
        # best-effort behavior without masking interpreter shutdown.
        pass
def pypi(query):
    """Fetch package info for *query* from the API and return the JSON dict."""
    response = requests.get(requote_uri(API + query))
    return response.json()
def pypi_text(query):
    """Render the API response for *query* as a Markdown message body."""
    info = pypi(query)
    pieces = [
        "--**Information**--\n",
        f"\n**Package Name:** `{info['PackageName']}`",
        f"\n**Title:** `{info['Title']}`",
        f"\n**About:** `{info['About']}`",
        f"\n**Latest Release Date:** `{info['LatestReleaseDate']}`",
        f"\n**PiP Command:** `{info['PipCommand']}`",
    ]
    return "".join(pieces)
def pypi_buttons(query):
    """Build the PyPI / Home Page link buttons for *query*."""
    info = pypi(query)
    return [
        InlineKeyboardButton(text="PyPi", url=info['PyPi']),
        InlineKeyboardButton(text="Home Page", url=info['HomePage'])
    ]


Bot.run()
|
[
"noreply@github.com"
] |
TrendingTechnology.noreply@github.com
|
d255e8072a01057e097ccaa3a705564e60199c9e
|
91fe8f479fa921fa84111d19222a5c6aa6eff030
|
/basis/progr-py/Gui/ShellGui/packdlg_redirect.py
|
e74111a94ff6ede688ace45c422255376555b419
|
[] |
no_license
|
romanticair/python
|
2055c9cdaa46894c9788d5797643283786ed46dd
|
6f91fe5e7cbedcdf4b8f7baa7641fd615b4d6141
|
refs/heads/master
| 2022-11-03T17:17:17.608786
| 2019-07-05T07:07:29
| 2019-07-05T07:07:29
| 195,356,190
| 0
| 1
| null | 2022-10-14T20:51:14
| 2019-07-05T07:00:33
|
Python
|
UTF-8
|
Python
| false
| false
| 496
|
py
|
# Wrap a command-line script in the GUI redirection tool; its output is shown in a popup window
from tkinter import *
from packdlg import runPackDialog
from Gui.Tools.guiStreams import redirectedGuiFunc
def runPackDialog_Wrapped():  # callback function run from within mytools.py
    """Wrap the whole pack-dialog callback so its console output is
    redirected into a popup GUI window."""
    redirectedGuiFunc(runPackDialog)  # wrap the entire callback handler
if __name__ == '__main__':
    # Standalone demo: one button that pops up the wrapped dialog.
    root = Tk()
    demo_button = Button(root, text='pop', command=runPackDialog_Wrapped)
    demo_button.pack(fill=X)
    root.mainloop()
|
[
"1024519570@qq.com"
] |
1024519570@qq.com
|
70f33bb40b94725d71df75b5591e7a9b56325cca
|
f9ed608c620093b9f6b5058bcedf7ae610c09c8d
|
/329-Longest_Increasing_Path_in_a_Matrix.py
|
2076972e979562372e23e07d8d2bfd9f51a966ba
|
[] |
no_license
|
chanyoonzhu/leetcode-python
|
9b88d7f2749e1ae3ed597759b1bf9f7fa4912c35
|
085d868ba0458fc8e6b5549aa00fa151c335fa7f
|
refs/heads/master
| 2022-05-24T11:20:35.927915
| 2022-04-16T06:02:33
| 2022-04-16T06:02:33
| 166,224,197
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,114
|
py
|
"""
LeetCode 329 — Longest Increasing Path in a Matrix.

- DFS with memoization.
- The path is strictly increasing -> no cycles -> the cell graph is a DAG,
  so plain memoized DFS terminates.
- O(mn) time, O(mn) space.
"""
class Solution:
    def longestIncreasingPath(self, matrix: List[List[int]]) -> int:
        """Return the length of the longest strictly increasing path."""
        m, n = len(matrix), len(matrix[0])
        directions = ((1, 0), (-1, 0), (0, 1), (0, -1))

        # BUG FIX: the original put @lru_cache on the *method*, keyed on
        # (self, r, c) while the matrix lived in mutable self.matrix — a
        # second call on the same Solution instance reused stale cached
        # results (and the cache kept the instance alive forever, B019).
        # A per-call inner cache is both correct and leak-free.
        @lru_cache(maxsize=None)
        def dfs(r: int, c: int) -> int:
            # Longest increasing path starting at (r, c), counting (r, c).
            best = 0
            for dr, dc in directions:
                nr, nc = r + dr, c + dc
                if 0 <= nr < m and 0 <= nc < n and matrix[nr][nc] > matrix[r][c]:
                    best = max(best, dfs(nr, nc))
            return best + 1  # include the cell itself

        return max(dfs(r, c) for r in range(m) for c in range(n))
"""
Topological sorting: peel the DAG layer by layer, counting layers.
O(mn) time, O(mn) space.
"""
class Solution:
    def longestIncreasingPath(self, matrix: List[List[int]]) -> int:
        """Return the longest strictly increasing path length via BFS peeling."""
        rows, cols = len(matrix), len(matrix[0])
        offsets = ((1, 0), (-1, 0), (0, 1), (0, -1))

        # in_deg[r][c] = number of adjacent cells strictly larger than (r, c).
        in_deg = [[0] * cols for _ in range(rows)]
        for r in range(rows):
            for c in range(cols):
                for dr, dc in offsets:
                    nr, nc = r + dr, c + dc
                    if 0 <= nr < rows and 0 <= nc < cols and matrix[nr][nc] > matrix[r][c]:
                        in_deg[r][c] += 1

        # Seed the frontier with local maxima (cells with no larger neighbor).
        frontier = deque(
            (r, c)
            for r in range(rows)
            for c in range(cols)
            if in_deg[r][c] == 0
        )

        layers = 0
        while frontier:
            next_frontier = deque()
            while frontier:
                r, c = frontier.popleft()
                for dr, dc in offsets:
                    nr, nc = r + dr, c + dc
                    # Edges point from smaller to larger values; peeling a
                    # cell releases its strictly smaller neighbors.
                    if 0 <= nr < rows and 0 <= nc < cols and matrix[nr][nc] < matrix[r][c]:
                        in_deg[nr][nc] -= 1
                        if in_deg[nr][nc] == 0:
                            next_frontier.append((nr, nc))
            frontier = next_frontier
            layers += 1
        # Number of peeled layers == length of the longest increasing path.
        return layers
|
[
"zhuchanyoon@gmail.com"
] |
zhuchanyoon@gmail.com
|
a828a1d10bfc5c4b5cd149e658aca32e30558efa
|
7f80554c5013ba7bc66a3ec98f804156d977c277
|
/src/readux/urls.py
|
d95051dab91bc770f6c85b38826d7a40f9f870b8
|
[] |
no_license
|
akrahdan/LearnAI
|
fa89c133dbe3b0c06bfdce720ea6dcb429d1dc57
|
fbea836a7fc78c8ab92b313c2afa4bdeef59c362
|
refs/heads/main
| 2023-07-24T15:07:15.692045
| 2021-08-20T16:39:44
| 2021-08-20T16:39:44
| 376,688,774
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,083
|
py
|
"""readux URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.conf import settings
from allauth.account.views import confirm_email
from django.conf.urls.static import static
from courses.views import CourseDetailSlugView
from projects.views import ProjectDetailView
from .views import home_page, CourseLeadView, pricing
from files.views import DownloadView, UploadPolicyView, UploadView, UploadCoursePolicy, DownloadCourseView
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', home_page, name='home'),
    path('api/lead/', CourseLeadView.as_view(), name='course_signup'),
    path('api/pricing/', pricing, name='pricing'),
    path('api/dashboard/', include(('dashboard.urls', 'dashboard'), namespace="dashboard")),
    path('api/courses/', include('courses.urls')),
    path('api/course/<slug:slug>/', CourseDetailSlugView.as_view()),
    #path('auths/', include(('accounts.urls', 'auths'), 'auths')),
    path('accounts/', include('allauth.urls')),
    path('api/accounts/', include('accounts.urls')),
    path('api/billing/', include(('billing.urls', 'billing'), 'billing')),
    path('api/instructor/', include(('instructors.urls'))),
    path('api/students/', include(('students.urls', 'students'), namespace='students')),
    path('api/upload/', UploadView.as_view()),
    path('api/upload/policy/', UploadPolicyView.as_view()),
    path('api/files/<int:id>/download/', DownloadView.as_view()),
    path('api/orders/', include('orders.urls')),
    path('rest-auth/', include('rest_auth.urls')),
    path('api/auth/', include('auths.urls')),
    path('api/analytics/', include('analytics.urls')),
    # BUG FIX: 'api/projects/' was registered twice in a row; the second
    # entry was dead (Django resolves to the first match) and is removed.
    path('api/projects/', include('projects.urls')),
    path('api/project/<slug:slug>/', ProjectDetailView.as_view()),
    path('api/categories/', include('categories.urls')),
    path('api/project_categories/', include('project_categories.urls')),
    re_path(r"^rest-auth/registration/account-confirm-email/(?P<key>[\s\d\w().+-_',:&]+)/$", confirm_email,
            name="account_confirm_email"),
    path('rest-auth/registration/', include('rest_auth.registration.urls')),
    path('api/cart/', include('carts.urls')),
]
# if settings.DEBUG:
# urlpatterns = urlpatterns + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# urlpatterns = urlpatterns + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"akrahdan@gmail.com"
] |
akrahdan@gmail.com
|
15ed8211c8d43131be4eeaa704dbd1400bbea598
|
59c55725576bbf0e2f6617507ba2f1db639abb3f
|
/stock_analytic_account/model/analytic_account.py
|
ea5556d076df6616c951843e5c8ca6abdca7a083
|
[] |
no_license
|
bmya/eficent-odoo-addons
|
e3426ebaf1f59e52726253fc1dd36a09d9363059
|
5d8ddfa384ab4417f42bda103b71d926848035f6
|
refs/heads/7.0
| 2021-01-21T16:48:55.312452
| 2015-11-04T14:11:19
| 2015-11-04T14:11:19
| 45,649,141
| 1
| 3
| null | 2015-11-06T00:35:17
| 2015-11-06T00:35:17
| null |
UTF-8
|
Python
| false
| false
| 1,957
|
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Eficent (<http://www.eficent.com/>)
# Jordi Ballester Alomar <jordi.ballester@eficent.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv, orm
import openerp.addons.decimal_precision as dp
class account_analytic_account(orm.Model):
    # Extends analytic accounts with linked stock moves and a flag marking
    # stock referencing this account as reserved.
    _inherit = "account.analytic.account"

    _columns = {
        'move_ids': fields.one2many('stock.move', 'analytic_account_id',
                                    'Moves for this analytic account',
                                    readonly=True),
        'use_reserved_stock': fields.boolean(
            'Use reserved stock',
            help="Stock with reference to this analytic account "
                 "is considered to be reserved.")
    }

    def copy(self, cr, uid, id, default=None, context=None):
        # Duplicating an account must not carry its stock moves over: the
        # one2many is reset so the copy starts with no linked moves.
        if context is None:
            context = {}
        if default is None:
            default = {}
        default['move_ids'] = []
        res = super(account_analytic_account, self).copy(cr, uid, id, default,
                                                         context)
        return res
|
[
"jordi.ballester@eficent.com"
] |
jordi.ballester@eficent.com
|
766c006b44f7bca3d96dc1ad604ef9851b7c73be
|
0a1a95fe0344c27197b677e8f8d1acc05a9813bd
|
/tests/test_app/test_static.py
|
9c4549b3036f5029a3b59f6d3252f224e800aa5a
|
[
"MIT"
] |
permissive
|
hirokiky/uiro
|
5ddaee966395512919016406c5ed18baed5cb68c
|
8436976b21ac9b0eac4243768f5ada12479b9e00
|
refs/heads/master
| 2023-04-27T00:57:13.953417
| 2013-11-09T02:15:57
| 2013-11-09T02:15:57
| 13,859,983
| 0
| 0
|
MIT
| 2023-04-15T15:13:52
| 2013-10-25T12:30:05
|
Python
|
UTF-8
|
Python
| false
| false
| 411
|
py
|
import pytest
from webtest import TestApp
@pytest.fixture
def target():
    """WSGI test app serving static files from the test package."""
    from matcha import make_wsgi_app
    from uiro.static import generate_static_matching
    from .pkgs import static_app

    static_matching = generate_static_matching(static_app)
    wsgi_app = make_wsgi_app(static_matching)
    return TestApp(wsgi_app)
def test_static(target):
    """The static route serves the fixture file's contents."""
    response = target.get('/static/static_app/test.txt')
    response.mustcontain('No more work')
|
[
"hirokiky@gmail.com"
] |
hirokiky@gmail.com
|
b7759d6a6dcb81a63298d8ff7c3583729f1d19eb
|
7facdc4644fbe4209b5acdad9f2503bfcfb0d534
|
/ensure/_types.py
|
d0d2db7350653d2171e927886cffa6eccef0f7f8
|
[
"Apache-2.0"
] |
permissive
|
KeyWeeUsr/ensure
|
2a19d2101418f334bb188d299f5368f96aaf7916
|
47becf82672906d2fcfd4e8e5b0542e43845b3ed
|
refs/heads/master
| 2023-06-01T04:11:19.154208
| 2018-11-06T01:39:11
| 2018-11-06T01:39:11
| 165,532,375
| 0
| 0
|
Apache-2.0
| 2019-01-13T17:14:11
| 2019-01-13T17:14:10
| null |
UTF-8
|
Python
| false
| false
| 1,050
|
py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from six import add_metaclass
# ``sys.version_info < (3, 0)`` is already a bool; the original
# ``True if ... else False`` wrapper was redundant.
USING_PYTHON2 = sys.version_info < (3, 0)

if USING_PYTHON2:
    # On Python 2, rebind the module-level ``str`` to mean text (unicode)
    # so the string metaclasses below check for text rather than bytes.
    str = unicode  # noqa
class NumericStringType(type):
    """Metaclass whose instance check accepts only text parseable by ``_cast``.

    ``isinstance(x, C)`` for a class built with this metaclass is True iff
    x is an instance of ``_type`` and ``_cast(x)`` does not raise.
    """
    _type = str
    _cast = float

    def __instancecheck__(self, other):
        # Wrong container type: reject immediately.
        if not isinstance(other, self._type):
            return False
        try:
            self._cast(other)
        except (TypeError, ValueError):
            return False
        return True
class NumericByteStringType(NumericStringType):
    # Same float-parseable check, but against bytes instead of text.
    _type = bytes
class IntegerStringType(NumericStringType):
    # Accept only strings parseable by int() (rejects e.g. "1.5").
    _cast = int
class IntegerByteStringType(IntegerStringType):
    # Integer-parseable check against bytes.
    _type = bytes
@add_metaclass(NumericStringType)
class NumericString(str):
    # isinstance(x, NumericString) <=> x is a str parseable by float().
    pass
@add_metaclass(NumericByteStringType)
class NumericByteString(bytes):
    # isinstance(x, NumericByteString) <=> x is bytes parseable by float().
    pass
@add_metaclass(IntegerStringType)
class IntegerString(str):
    # isinstance(x, IntegerString) <=> x is a str parseable by int().
    pass
@add_metaclass(IntegerByteStringType)
class IntegerByteString(bytes):
    # isinstance(x, IntegerByteString) <=> x is bytes parseable by int().
    pass
|
[
"kislyuk@gmail.com"
] |
kislyuk@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.