blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8e830639fc2ef0cc682f1d742ee537d47985f00f
|
a643c2ed78b48e4cacf140776fbedd0191881e18
|
/samples/openapi3/client/3_0_3_unit_test/python/unit_test_api/paths/request_body_post_array_type_matches_arrays_request_body/post.py
|
0ea29561b94824f85ffa27f8c2c1bf9e99c5e0c7
|
[
"Apache-2.0"
] |
permissive
|
padamstx/openapi-generator
|
5ae41f68a4f9349d76c1db81b9ff82e18e5b4b7c
|
00604aff594864447c134ddb1982565136e27857
|
refs/heads/master
| 2023-03-08T20:11:36.318959
| 2022-09-28T16:34:17
| 2022-09-28T16:34:17
| 160,528,958
| 0
| 1
|
Apache-2.0
| 2023-02-24T16:13:11
| 2018-12-05T14:17:50
|
Java
|
UTF-8
|
Python
| false
| false
| 5,206
|
py
|
# coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from unit_test_api import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from unit_test_api import schemas # noqa: F401
from unit_test_api.model.array_type_matches_arrays import ArrayTypeMatchesArrays
from . import path
# body param
# Alias: the request-body schema is exactly the generated model class.
SchemaForRequestBodyApplicationJson = ArrayTypeMatchesArrays

# Request-body descriptor used by BaseApi to serialize the caller's payload.
# Only application/json is accepted, and the body is mandatory.
request_body_array_type_matches_arrays = api_client.RequestBody(
    content={
        'application/json': api_client.MediaType(
            schema=SchemaForRequestBodyApplicationJson),
    },
    required=True,
)
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    """Deserialized HTTP 200 response; this endpoint defines no body or headers."""
    # Raw urllib3 response object.
    response: urllib3.HTTPResponse
    # No response-body schema is declared for 200, so body stays unset.
    body: schemas.Unset = schemas.unset
    # No documented response headers either.
    headers: schemas.Unset = schemas.unset
# Descriptor that deserializes a 200 response into ApiResponseFor200.
_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
)
# Dispatch table keyed by the HTTP status code as a string; statuses not
# listed here fall back to ApiResponseWithoutDeserialization in BaseApi.
_status_code_to_response = {
    '200': _response_for_200,
}
class BaseApi(api_client.Api):
    """Shared implementation of the POST request-body test endpoint (generated code)."""

    def _post_array_type_matches_arrays_request_body_oapg(
        self: api_client.Api,
        body: typing.Union[SchemaForRequestBodyApplicationJson, ],
        content_type: str = 'application/json',
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """
        Serialize *body*, POST it to the endpoint path, and deserialize the response.

        :param body: payload matching SchemaForRequestBodyApplicationJson (required)
        :param content_type: media type used to serialize the body
        :param stream: passed through to the HTTP layer for streamed responses
        :param timeout: request timeout (seconds or a (connect, read) tuple)
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        :raises exceptions.ApiValueError: if body is unset
        :raises exceptions.ApiException: for non-2xx HTTP statuses
        """
        used_path = path.value
        _headers = HTTPHeaderDict()
        # TODO add cookie handling
        if body is schemas.unset:
            raise exceptions.ApiValueError(
                'The required body parameter has an invalid value of: unset. Set a valid value instead')
        _fields = None
        _body = None
        # Serialization yields either multipart 'fields' or a raw 'body',
        # never both; whichever key is present is forwarded to call_api.
        serialized_data = request_body_array_type_matches_arrays.serialize(body, content_type)
        _headers.add('Content-Type', content_type)
        if 'fields' in serialized_data:
            _fields = serialized_data['fields']
        elif 'body' in serialized_data:
            _body = serialized_data['body']
        response = self.api_client.call_api(
            resource_path=used_path,
            method='post'.upper(),
            headers=_headers,
            fields=_fields,
            body=_body,
            stream=stream,
            timeout=timeout,
        )
        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            # Statuses without a declared schema fall back to the raw wrapper.
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        # Raise for any non-2xx status, even when deserialization was skipped.
        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(api_response=api_response)
        return api_response
class PostArrayTypeMatchesArraysRequestBody(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId fn names
    def post_array_type_matches_arrays_request_body(
        self: BaseApi,
        body: typing.Union[SchemaForRequestBodyApplicationJson, ],
        content_type: str = 'application/json',
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """operationId-named entry point; delegates to the shared BaseApi implementation."""
        call_kwargs = dict(
            body=body,
            content_type=content_type,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization,
        )
        return self._post_array_type_matches_arrays_request_body_oapg(**call_kwargs)
class ApiForpost(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names
    def post(
        self: BaseApi,
        body: typing.Union[SchemaForRequestBodyApplicationJson, ],
        content_type: str = 'application/json',
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """HTTP-verb-named entry point; delegates to the shared BaseApi implementation."""
        call_kwargs = dict(
            body=body,
            content_type=content_type,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization,
        )
        return self._post_array_type_matches_arrays_request_body_oapg(**call_kwargs)
|
[
"noreply@github.com"
] |
padamstx.noreply@github.com
|
1b1c02e75d0c463404a738766c2fe6e24d2476c7
|
ad849c40e75d098e38db897154c63054e6f89fca
|
/models_class/model.py
|
8ff2a80539b1abd9c6c4efeda44b6b17c4e7e9f6
|
[] |
permissive
|
vietnamican/Pytorch_Retinaface
|
768a96eb7e48b002dc91cc97cc41473206903c59
|
8d69dd191e16421bb399f49c7706d6e154d4a80e
|
refs/heads/main
| 2023-06-18T08:23:25.860727
| 2021-06-24T10:48:18
| 2021-06-24T10:48:18
| 366,045,702
| 1
| 0
|
MIT
| 2021-05-10T13:06:47
| 2021-05-10T13:06:46
| null |
UTF-8
|
Python
| false
| false
| 2,929
|
py
|
import torch
from torch import nn
from torchmetrics import Accuracy
from .base import ConvBatchNormRelu
from .base import Base
class Config(object):
    """Static training configuration for the iris classifier."""
    # Root of the eye-crop dataset.
    dataroot = 'data/mrleye'
    # Image/label directories for the LaPa-derived train/val splits.
    train_image_dir = '../LaPa_negpos_fusion/train/images'
    train_label_dir = '../LaPa_negpos_fusion/train/labels'
    val_image_dir = '../LaPa_negpos_fusion/val/images'
    val_label_dir = '../LaPa_negpos_fusion/val/labels'
    # DataLoader settings.
    batch_size = 512
    pin_memory= True
    num_workers = 6
    # 'gpu' — presumably consumed by a Lightning-style trainer; verify against caller.
    device = 'gpu'
    max_epochs = 200
    # LR-decay points expressed as fractions of max_epochs (see configure_optimizers).
    steps = [0.5, 0.7, 0.9]

# Module-level default config instance, used as the default ctor argument below.
cfg = Config()
class IrisModel(Base):
    """Small CNN classifier (2 classes) for eye/iris crops.

    Lightning-style module: `Base` is assumed to provide the training-loop
    hooks (`log`, etc.) — confirm against models_class.base.
    """

    def __init__(self, cfg=cfg):
        super().__init__()
        # Feature extractor: two conv+pool stages, then two 1x1-ish head convs.
        self.conv1 = ConvBatchNormRelu(3, 10, kernel_size=3, padding=1, with_relu=False)
        self.maxpool1 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv2 = ConvBatchNormRelu(10, 20, kernel_size=3, padding=1, with_relu=False)
        self.maxpool2 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv3 = ConvBatchNormRelu(20, 50, kernel_size=3, padding=1, with_relu=False)
        # 1x1 conv maps 50 channels to the 2 class logits.
        self.conv4 = ConvBatchNormRelu(50, 2, kernel_size=1, padding=0, with_relu=False)
        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.flatten = nn.Flatten()
        self.criterion = nn.CrossEntropyLoss()
        self.cfg = cfg
        self.val_acc = Accuracy()

    def forward(self, x):
        """Return raw class logits of shape (batch, 2)."""
        x = self.relu1(self.maxpool1(self.conv1(x)))
        x = self.relu2(self.maxpool2(self.conv2(x)))
        x = self.conv3(x)
        x = self.conv4(x)
        x = self.avg_pool(x)
        x = self.flatten(x)
        return x

    def _shared_step(self, batch, batch_dix):
        """Common forward+loss for train/val. Batch layout: (eye, label, ...)."""
        eye, label, *_ = batch
        logit = self.forward(eye)
        loss = self.criterion(logit, label)
        return loss, logit

    def training_step(self, batch, batch_dix):
        _, label, *_ = batch
        loss, logit = self._shared_step(batch, batch_dix)
        pred = logit.argmax(dim=1)
        # NOTE(review): the same Accuracy metric instance is shared between
        # train and val logging, as in the original code.
        self.log('train_acc', self.val_acc(pred, label))
        self.log('train_loss', loss)
        return loss

    def validation_step(self, batch, batch_dix):
        _, label, *_ = batch
        loss, logit = self._shared_step(batch, batch_dix)
        pred = logit.argmax(dim=1)
        self.log('val_acc', self.val_acc(pred, label))
        self.log('val_loss', loss)
        return loss

    def configure_optimizers(self):
        """SGD with step LR decay at the configured fractions of max_epochs."""
        optimizer = torch.optim.SGD(self.parameters(), lr=0.001, momentum=0.9, weight_decay=5e-4)
        max_epochs = self.cfg.max_epochs
        # Fix: MultiStepLR expects a list of increasing integer epoch indices;
        # cfg.steps are fractions, so max_epochs * step is a float — convert.
        milestones = [int(max_epochs * fraction) for fraction in self.cfg.steps]
        lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones, gamma=0.1)
        return {'optimizer': optimizer, 'lr_scheduler': lr_scheduler}
|
[
"vietnamican@gmail.com"
] |
vietnamican@gmail.com
|
213f42b8e3c626c96fdba83225479382cdd7034f
|
544cfadc742536618168fc80a5bd81a35a5f2c99
|
/tools/test/connectivity/acts/framework/acts/controllers/pdu_lib/synaccess/np02b.py
|
655328feb4bede2c154cc3f44e04463ee9f339ee
|
[] |
no_license
|
ZYHGOD-1/Aosp11
|
0400619993b559bf4380db2da0addfa9cccd698d
|
78a61ca023cbf1a0cecfef8b97df2b274ac3a988
|
refs/heads/main
| 2023-04-21T20:13:54.629813
| 2021-05-22T05:28:21
| 2021-05-22T05:28:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,057
|
py
|
#!/usr/bin/env python3
#
# Copyright 2019 - The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from acts import utils
from acts.controllers import pdu
import re
import telnetlib
import time
class PduDevice(pdu.PduDevice):
    """Implementation of pure abstract PduDevice object for the Synaccess np02b
    Pdu.
    """

    def __init__(self, host, username, password):
        # username/password are accepted for the base-class interface; the
        # NP02B telnet helper below only needs the host.
        super(PduDevice, self).__init__(host, username, password)
        self.tnhelper = _TNHelperNP02B(host)

    def on_all(self):
        """ Turns on both outlets on the np02b."""
        # 'ps 1' = power set all outlets to ON.
        self.tnhelper.cmd('ps 1')
        self._verify_state({'1': True, '2': True})

    def off_all(self):
        """ Turns off both outlets on the np02b."""
        self.tnhelper.cmd('ps 0')
        self._verify_state({'1': False, '2': False})

    def on(self, outlet):
        """ Turns on specific outlet on the np02b.
        Args:
            outlet: string of the outlet to turn on ('1' or '2')
        """
        self.tnhelper.cmd('pset %s 1' % outlet)
        self._verify_state({outlet: True})

    def off(self, outlet):
        """ Turns off a specifc outlet on the np02b.
        Args:
            outlet: string of the outlet to turn off ('1' or '2')
        """
        self.tnhelper.cmd('pset %s 0' % outlet)
        self._verify_state({outlet: False})

    def reboot(self, outlet):
        """ Toggles a specific outlet on the np02b to off, then to on.
        Args:
            outlet: string of the outlet to reboot ('1' or '2')
        """
        # Each phase is verified before moving on, so a stuck outlet raises
        # instead of silently half-rebooting.
        self.off(outlet)
        self._verify_state({outlet: False})
        self.on(outlet)
        self._verify_state({outlet: True})

    def status(self):
        """ Returns the status of the np02b outlets.
        Return:
            a dict mapping outlet strings ('1' and '2') to:
                True if outlet is ON
                False if outlet is OFF
        """
        res = self.tnhelper.cmd('pshow')
        # Outlet order in 'pshow' output is assumed to be 1 then 2 —
        # enumeration index + 1 becomes the outlet key.
        status_list = re.findall('(ON|OFF)', res)
        status_dict = {}
        for i, status in enumerate(status_list):
            status_dict[str(i + 1)] = (status == 'ON')
        return status_dict

    def close(self):
        """Ensure connection to device is closed.
        In this implementation, this shouldn't be necessary, but could be in
        others that open on creation.
        """
        self.tnhelper.close()

    def _verify_state(self, expected_state, timeout=3):
        """Returns when expected_state is reached on device.
        In order to prevent command functions from exiting until the desired
        effect has occurred, this function verifys that the expected_state is a
        subset of the desired state.
        Args:
            expected_state: a dict representing the expected state of one or
                more outlets on the device. Maps outlet strings ('1' and/or '2')
                to:
                    True if outlet is expected to be ON.
                    False if outlet is expected to be OFF.
            timeout (default: 3): time in seconds until raising an exception.
        Return:
            True, if expected_state is reached.
        Raises:
            PduError if expected_state has not been reached by timeout.
        """
        end_time = time.time() + timeout
        # Poll every 100 ms until the expected entries are a subset of the
        # actual outlet state.
        while time.time() < end_time:
            actual_state = self.status()
            if expected_state.items() <= actual_state.items():
                return True
            time.sleep(.1)
        # NOTE(review): if timeout <= 0 the loop body never runs and
        # actual_state is unbound here (NameError instead of PduError).
        raise pdu.PduError('Timeout while verifying state.\n'
                           'Expected State: %s\n'
                           'Actual State: %s' % (expected_state, actual_state))
class _TNHelperNP02B(object):
    """An internal helper class for Telnet with the Synaccess NP02B Pdu. This
    helper is specific to the idiosyncrasies of the NP02B and therefore should
    not be used with other devices.
    """

    def __init__(self, host):
        self._tn = telnetlib.Telnet()
        self.host = host
        # The NP02B expects '\n\r' terminators on input but echoes '\r\n'.
        self.tx_cmd_separator = '\n\r'
        self.rx_cmd_separator = '\r\n'
        self.prompt = '>'

    def cmd(self, cmd_str):
        """Executes a command on the device via telnet.

        A fresh telnet session is opened per command and closed afterwards,
        since the NP02B handles long-lived sessions poorly.

        Args:
            cmd_str: A string of the command to be run.
        Returns:
            A string of the response from the valid command (often empty).
        Raises:
            PduError: if the session cannot be opened, the prompt never
                returns, or the device reports the command as Invalid.
        """
        # Open session
        try:
            self._tn.open(self.host, timeout=3)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed.
            raise pdu.PduError("Failed to open telnet session to host (%s)" %
                               self.host)
        time.sleep(.1)
        # Read to end of first prompt.
        # Fix: str.strip returns a new string; the original discarded the
        # result, so stray separators were never actually removed.
        cmd_str = cmd_str.strip(self.tx_cmd_separator)
        self._tn.read_eager()
        time.sleep(.1)
        # Write command and read all output text
        self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator))
        res = self._tn.read_until(utils.ascii_string(self.prompt), 2)
        # Parse out the command's output
        if res is None:
            raise pdu.PduError("Command failed: %s" % cmd_str)
        res = res.decode()
        if re.search('Invalid', res):
            raise pdu.PduError("Command Invalid: %s" % cmd_str)
        # Strip the prompt, separators, and the echoed command, leaving only
        # the command's own output.
        res = res.replace(self.prompt, '')
        res = res.replace(self.tx_cmd_separator, '')
        res = res.replace(self.rx_cmd_separator, '')
        res = res.replace(cmd_str, '')
        # Close session
        self._tn.close()
        time.sleep(0.5)
        return res

    def close(self):
        """Close the underlying telnet connection if open."""
        self._tn.close()
|
[
"rick_tan@qq.com"
] |
rick_tan@qq.com
|
61c90a5a68de5d9fddb0ef91c1c3666064a8f85e
|
7bededcada9271d92f34da6dae7088f3faf61c02
|
/pypureclient/flasharray/FA_2_22/models/pod_replica_link_lag_response.py
|
4b80aac36f529a563527baa650fb2b54f0d5839c
|
[
"BSD-2-Clause"
] |
permissive
|
PureStorage-OpenConnect/py-pure-client
|
a5348c6a153f8c809d6e3cf734d95d6946c5f659
|
7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e
|
refs/heads/master
| 2023-09-04T10:59:03.009972
| 2023-08-25T07:40:41
| 2023-08-25T07:40:41
| 160,391,444
| 18
| 29
|
BSD-2-Clause
| 2023-09-08T09:08:30
| 2018-12-04T17:02:51
|
Python
|
UTF-8
|
Python
| false
| false
| 3,922
|
py
|
# coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.22
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_22 import models
class PodReplicaLinkLagResponse(object):
    """
    Generated Swagger model wrapping a list of pod replica link lag objects.

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Declared attribute -> Swagger type string.
    swagger_types = {
        'items': 'list[PodReplicaLinkLag]'
    }
    # Declared attribute -> JSON key (identical here).
    attribute_map = {
        'items': 'items'
    }
    # No constructor arguments are required.
    required_args = {
    }

    def __init__(
        self,
        items=None,  # type: List[models.PodReplicaLinkLag]
    ):
        """
        Keyword args:
            items (list[PodReplicaLinkLag]): A list of pod replica link lag objects.
        """
        # Only assign when provided, so absent fields stay unset (hasattr-based
        # presence checks in to_dict rely on this).
        if items is not None:
            self.items = items

    def __setattr__(self, key, value):
        # Reject any attribute not declared in attribute_map.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, item):
        value = object.__getattribute__(self, item)
        # Unresolved lazy Property placeholders surface as missing attributes.
        if isinstance(value, Property):
            raise AttributeError
        else:
            return value

    def __getitem__(self, key):
        # Dict-style read access, restricted to declared attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
        return object.__getattribute__(self, key)

    def __setitem__(self, key, value):
        # Dict-style write access, restricted to declared attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
        object.__setattr__(self, key, value)

    def __delitem__(self, key):
        # Dict-style delete, restricted to declared attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
        object.__delattr__(self, key)

    def keys(self):
        # Makes the object usable with dict(**obj)-style expansion.
        return self.attribute_map.keys()

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                # Recursively convert nested models/lists/dicts that expose to_dict.
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(PodReplicaLinkLagResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PodReplicaLinkLagResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"noreply@github.com"
] |
PureStorage-OpenConnect.noreply@github.com
|
a14cb2cae1fd69db8497611253f1cb936df5a192
|
23ec2d87fb96626992df44af73a7daa202be79a6
|
/src/examples/connectfour/vs.py
|
3810ad1a332014b09a368e147f4ae73e9ef179df
|
[] |
no_license
|
ishikota/pymcts
|
5d560ec7d0dcdf881a52c607adfdd384ae23e0c2
|
2d1ba191cadbbaab0ab922a785478210cf0709f4
|
refs/heads/master
| 2021-01-01T19:31:00.932984
| 2015-07-28T14:45:23
| 2015-07-28T14:45:23
| 39,330,236
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,146
|
py
|
# add path to the src and test directory
import os
import sys
PARENT_PATH = os.getenv('PYMCTS_ROOT')
SRC_PATH = PARENT_PATH +"src/"
sys.path.append(SRC_PATH+"algorithm")
import mcts
import connectfour_model
import heuristic_model
# Interactive human-vs-MCTS ConnectFour match (Python 2: print statements,
# raw_input). Human plays first as player 1; MCTS plays as player -1.
# Clear the shell
os.system("clear")
# Setup for MCTS
model = heuristic_model.ConnectFour()
#model = connectfour_model.ConnectFour()
print '> Input the maximum number of iteration in MCTS...'
playout_num = int(raw_input())
_mcts = mcts.MCTS()
_mcts.set_playout(playout_num)
_mcts.show_progress = True
# start the game !!
print 'Let\'s ConnectFour !!'
model.display()
while True:
    # Player turn
    print '> Input the column to make a move...'
    # Columns are shown 1-based to the user; the model is 0-based.
    action = int(raw_input())-1
    # NOTE(review): terminality is checked against the pre-move state, then
    # the move is applied — presumably is_terminal simulates the move; confirm
    # against the model implementation.
    end_flg, score = model.is_terminal(1, action)
    model.update(action)
    model.display()
    if end_flg:
        print '\nYou win !!!\n'
        break
    # MCTS CPU Turn
    root, action = _mcts.start(model)
    print 'MCTS make a move on column '+str(action+1)
    end_flg, score = model.is_terminal(-1, action)
    model.update(action)
    model.display()
    if end_flg:
        print '\nYou lose ...\n'
        break
|
[
"ishikota086@gmail.com"
] |
ishikota086@gmail.com
|
288b4bd41f49b1124f0b189c46fb7fc1cba2ea02
|
066f812b051afffbe1a05630a728d15bab9f02bc
|
/django_503/models.py
|
53c13d2c325313df137b454d59365c4a456316b9
|
[
"MIT"
] |
permissive
|
soul4code/django-503
|
04714af1a72813d5f6f1691eada97773adbe9c30
|
48f30e176f334988dafb48dff7c604b7f72ab290
|
refs/heads/master
| 2021-12-14T00:08:07.899188
| 2015-07-16T18:31:09
| 2015-07-16T18:31:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 219
|
py
|
from django.db import models
from django.utils.translation import ugettext as _
class Config(models.Model):
    """Key/value store of boolean configuration flags, looked up by unique key."""
    key = models.CharField(_('Key'), max_length=100, unique=True)
    value = models.BooleanField(_('Value'))
|
[
"baryshev@gmail.com"
] |
baryshev@gmail.com
|
bc47db2fbd5e552d18ef8b17070331d9bf86f0a9
|
595c69f717fc3ceb4e0701cc433f6d7f927b6fdb
|
/Hogworts/Page_Object/Pages/main.py
|
7f61ff32429f5e1477ac8b6a1941e7c335deb355
|
[
"MIT"
] |
permissive
|
archerckk/PyTest
|
d6462ebf46c6dbd5bb3ce03666aad0c2665367cd
|
610dd89df8d70c096f4670ca11ed2f0ca3196ca5
|
refs/heads/master
| 2022-03-26T21:09:25.891745
| 2021-06-14T01:39:36
| 2021-06-14T01:39:36
| 129,497,345
| 0
| 0
| null | 2020-01-14T10:57:49
| 2018-04-14T08:23:03
|
Python
|
UTF-8
|
Python
| false
| false
| 533
|
py
|
from Page_Object.Pages.base_page import Base_Page
from Page_Object.Pages.login import Login
from Page_Object.Pages.register import Register
from selenium.webdriver.common.by import By
class Main(Base_Page):
    """Page object for the work.weixin.qq.com landing page."""

    _base_url='https://work.weixin.qq.com/'

    def goto_register(self):
        """Click the PC-download/register button and return the Register page."""
        register_locator = (By.CSS_SELECTOR, '.index_head_info_pCDownloadBtn')
        self.find(*register_locator).click()
        return Register(self._driver)

    def goto_login(self):
        """Click the login button and return the Login page."""
        login_locator = (By.CSS_SELECTOR, '.index_top_operation_loginBtn')
        self.find(*login_locator).click()
        return Login(self._driver)
|
[
"archerckk@163.com"
] |
archerckk@163.com
|
a66d33de13362abe85bb1eaea386c7fdb853db98
|
d57b51ec207002e333b8655a8f5832ed143aa28c
|
/.history/l5/work/app_20200705183534.py
|
b04a220ca1102f3467a3b5ad0e580157e43c7a65
|
[] |
no_license
|
yevheniir/python_course_2020
|
b42766c4278a08b8b79fec77e036a1b987accf51
|
a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b
|
refs/heads/master
| 2022-11-15T07:13:24.193173
| 2020-07-11T15:43:26
| 2020-07-11T15:43:26
| 278,890,802
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 357
|
py
|
from flask import Flask
from flask import render_template
app = Flask(__name__)
# In-memory demo data; not referenced by the visible routes — presumably
# rendered by index.html or a route elsewhere. TODO confirm.
scoreboard = [{"name": "Tester", "score": 10}, {"name": "Tester", "score": 11}]
@app.route("/<name>")
def hello(name):
    """Serve index.html for any /<name> URL.

    Fix: Flask passes URL variables to the view function by keyword, so the
    parameter must be named ``name`` to match the route variable; the original
    signature ``def hello(n)`` raised TypeError on every request.
    """
    return render_template("index.html")
@app.route("/game")
def game():
    """Serve index.html for the /game route."""
    return render_template("index.html")

if __name__ == "__main__":
    # Development server with default host/port; debug not enabled.
    app.run()
|
[
"yevheniira@intelink-ua.com"
] |
yevheniira@intelink-ua.com
|
defc1b7d74de6d1d58c5993550f7e8e9ad068c89
|
0f0a7adfae45e07a896c5cd5648ae081d4ef7790
|
/python数据结构/python黑马数据结构/排序于搜索/桶排序.py
|
12d443dea83de14b64c2fafd0db8a034651882fd
|
[] |
no_license
|
renlei-great/git_window-
|
e2c578544c7a8bdd97a7a9da7be0464d6955186f
|
8bff20a18d7bbeeaf714aa49bf15ab706153cc28
|
refs/heads/master
| 2021-07-19T13:09:01.075494
| 2020-06-13T06:14:37
| 2020-06-13T06:14:37
| 227,722,554
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,463
|
py
|
lista = [12, 4, 5, 6, 22, 3, 43, 654, 765, 7, 234]
# 桶排序
"""
桶排序就是找出最大值和最小值,在这个区间进行分桶,然后将数组中的数按区间装桶,然后在对每个桶进行排序
"""
def pail_sort(alist):
    """Bucket sort: partition ints into 3 buckets by value, sort each, concatenate.

    Bucket boundaries follow the original scheme: bound_i = max(alist) // i
    for i in 1..3, giving buckets (bound2, bound1], (bound3, bound2], and
    everything else (<= bound3).

    Fixes over the original:
    - elements exactly equal to a bucket boundary were dropped (strict `<`/`>`
      on both sides of every bucket); boundaries are now inclusive on one side
      so every element lands in exactly one bucket;
    - empty input raised IndexError; it now returns [];
    - buckets are plain locals instead of attributes stashed on the function
      object via setattr, which leaked state between calls;
    - per-bucket sorting uses the built-in sorted() (same ascending result).

    Args:
        alist: list of integers (assumed non-negative, as in the original).
    Returns:
        A new list with the elements of alist in ascending order.
    """
    if not alist:
        return []
    max_number = max(alist)
    bound1 = max_number        # max_number // 1
    bound2 = max_number // 2
    bound3 = max_number // 3
    high, mid, low = [], [], []
    for value in alist:
        if bound2 < value <= bound1:
            high.append(value)
        elif bound3 < value <= bound2:
            mid.append(value)
        else:  # value <= bound3 — boundary values are no longer dropped
            low.append(value)
    # Concatenate from the low-range bucket up to the high-range bucket.
    return sorted(low) + sorted(mid) + sorted(high)
def marge_sort(alist):
    """Merge sort: return a new ascending list (input of length <= 1 is returned as-is)."""
    if len(alist) <= 1:
        return alist
    # Split in half and sort each side recursively.
    middle = len(alist) // 2
    left_half = marge_sort(alist[:middle])
    right_half = marge_sort(alist[middle:])
    # Merge the two sorted halves.
    merged = []
    i = j = 0
    while i < len(left_half) and j < len(right_half):
        if left_half[i] <= right_half[j]:
            merged.append(left_half[i])
            i += 1
        else:
            merged.append(right_half[j])
            j += 1
    # One side is exhausted; append whatever remains of the other.
    merged += left_half[i:] + right_half[j:]
    return merged
if __name__ == "__main__":
    # Demo: bucket-sort the sample list (merge sort alone left for comparison).
    new_li = pail_sort(lista)
    # new_li = marge_sort(lista)
    print(new_li)
|
[
"1415977534@qq.com"
] |
1415977534@qq.com
|
a0485c4cb332ebd75e227c8399d966b35342cc60
|
623065fb8f2fec97c7a4e201bff7ff1d9578e457
|
/imgviz/data/kitti/__init__.py
|
afb8eb994cbe1b8a3520b78d531e100de2e1bc1e
|
[] |
no_license
|
bigdatasciencegroup/imgviz
|
4759c4264a43e9d37429489cc63a8a00fbb489d5
|
cec9f1e3cc02cac46d11a99c63c696b8743ba6f1
|
refs/heads/master
| 2020-08-21T23:39:44.038394
| 2019-09-09T13:55:57
| 2019-09-09T13:55:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 700
|
py
|
import os.path as osp
import numpy as np
here = osp.dirname(osp.abspath(__file__))  # directory containing this module
def read_pose_file(filename):
    """Parse a KITTI pose file into a list of 4x4 transform matrices.

    Each line holds 12 space-separated floats (a 3x4 [R|t] matrix, row-major);
    the homogeneous row [0, 0, 0, 1] is appended to form a 4x4 matrix.
    """
    transforms = []
    with open(filename, 'r') as f:
        for one_line in f:
            # Note: split on a single space, as the KITTI files are written.
            values = [float(pose) for pose in one_line.split(' ')]
            Rt = np.reshape(np.array(values + [0, 0, 0, 1]), (4, 4))
            # Sanity-check the homogeneous bottom row.
            assert abs(Rt[3].sum() - 1) < 1e-5
            transforms.append(Rt)
    return transforms
def kitti_odometry():
    """Load the bundled KITTI odometry sequence-00 ground-truth poses.

    Returns:
        dict with key 'transforms': list of 4x4 numpy pose matrices.
    """
    # http://www.cvlibs.net/datasets/kitti/eval_odometry.php
    pose_file = osp.join(here, 'odometry/00.txt')
    transforms = read_pose_file(pose_file)
    data = {'transforms': transforms}
    return data
|
[
"www.kentaro.wada@gmail.com"
] |
www.kentaro.wada@gmail.com
|
3f2a7f8ca8c8b949eb087d6b60465bf94f7e9e90
|
ac01d8bdab2140eae6332613142b784484877b78
|
/main.py
|
c4d5cd5bb342e7b9329bf9ee51a5c37957b7ec15
|
[] |
no_license
|
2020668/api_automation_course
|
eb19322485fdb7db4b9586597895c3ac97727e96
|
33da9f5f1f17de5a5892d28a9f6feea09e8c4adc
|
refs/heads/master
| 2022-12-29T22:32:02.321058
| 2020-10-15T03:24:32
| 2020-10-15T03:24:32
| 304,195,531
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,271
|
py
|
# -*- coding: utf-8 -*-
"""
=================================
Author: keen
Created on: 2019/9/2
E-mail:keen2020@outlook.com
=================================
"""
import unittest
import os
import time
from library.HTMLTestRunnerNew import HTMLTestRunner
from common.config import conf
from common.constant import CASE_DIR, REPORT_DIR
from common.send_email import SendEmail
# Read report/mail settings from the project config file.
_title = conf.get('report', 'title')
_description = conf.get('report', 'description')
_tester = conf.get('report', 'tester')
report_name = conf.get('report', 'report_name')
# Prefix the report file name with a timestamp so each run is unique.
report_name = time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_" + report_name
mail_title = conf.get('mail', 'mail_title')
mail_message = conf.get('mail', 'mail_message')
file_path = os.path.join(REPORT_DIR, report_name)

suite = unittest.TestSuite()  # create the test suite
loader = unittest.TestLoader()
# Discover all test cases under the configured case directory.
suite.addTest(loader.discover(CASE_DIR))

# Run the suite and write the HTML report to file_path.
with open(file_path, 'wb') as f:
    runner = HTMLTestRunner(
        stream=f,
        verbosity=2,
        title=_title,
        description=_description,
        tester=_tester
    )
    runner.run(suite)
# send email (disabled; enable one of the senders below to mail the report)
# SendEmail.send_qq_file_mail(mail_title, mail_message, file_path)
# SendEmail.send_outlook_file_mail(mail_title, mail_message, file_path)
|
[
"keen2020@outlook.com"
] |
keen2020@outlook.com
|
41894e7590dde3aa44f8c38b7453e8c364d924f5
|
cd8f7ecd20c58ce1ae0fe3840f7c7ee961aa5819
|
/Binary Tree Zigzag Level Order Traversal.py
|
5ffebb0274f92ac415a122c9c02b477d302ff3ff
|
[
"Apache-2.0"
] |
permissive
|
sugia/leetcode
|
9b0f2a3521b088f8f7e5633c2c6c17c76d33dcaf
|
6facec2a54d1d9f133f420c9bce1d1043f57ebc6
|
refs/heads/master
| 2021-06-05T07:20:04.099488
| 2021-02-24T07:24:50
| 2021-02-24T07:24:50
| 29,124,136
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,303
|
py
|
'''
Given a binary tree, return the zigzag level order traversal of its nodes' values. (ie, from left to right, then right to left for the next level and alternate between).
For example:
Given binary tree [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
'''
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def zigzagLevelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
res = []
if not root:
return res
zigzag = True
vec = [root]
while len(vec):
zigzag = not zigzag
next_vec = []
tmp = []
for node in vec:
if zigzag:
tmp = [node.val] + tmp
else:
tmp.append(node.val)
if node.left:
next_vec.append(node.left)
if node.right:
next_vec.append(node.right)
res.append(tmp)
vec = next_vec
return res
|
[
"noreply@github.com"
] |
sugia.noreply@github.com
|
654c3bc950e7ddde3eaff1bddd8c9718702a2352
|
bfc2ba097b164af668efa29f883101673668456e
|
/nets/centernet_training.py
|
109a2e753890bf22328bb9efcd06e247931de674
|
[] |
no_license
|
Sharpiless/Paddlepaddle-CenterNet
|
b4892e1ab85a65f655b44fc6699e61315f5a0274
|
b02bca6bff55054bdb29ba370ac52b9e8951045a
|
refs/heads/main
| 2023-06-17T17:22:35.265697
| 2021-07-17T02:46:33
| 2021-07-17T02:46:33
| 386,817,805
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,313
|
py
|
import paddle
import paddle.nn.functional as F
def focal_loss(pred, target):
    """CenterNet-style focal loss on a predicted heatmap.

    Args:
        pred: predicted heatmap, NCHW — transposed to NHWC below; values are
            clipped into (0, 1), so they are assumed to be post-sigmoid.
        target: ground-truth heatmap in NHWC; 1 marks positive centers, values
            in [0, 1) are Gaussian-splatted negatives.
    Returns:
        Scalar focal loss, normalized by the number of positives.
    """
    pred = paddle.transpose(pred, [0,2,3,1])
    # pred = pred.permute(0,2,3,1)
    #-------------------------------------------------------------------------#
    #   Find the positive and negative samples of each image:
    #   each ground-truth box corresponds to one positive sample;
    #   every feature point other than the positives is a negative sample.
    #-------------------------------------------------------------------------#
    # pos_inds = target.equal(1).float()
    pos_inds = target == 1
    pos_inds = pos_inds.astype('float32')
    # neg_inds = target.lt(1).float()
    neg_inds = target < 1
    neg_inds = neg_inds.astype('float32')
    #-------------------------------------------------------------------------#
    #   Negatives near a positive center get a smaller weight.
    #-------------------------------------------------------------------------#
    neg_weights = paddle.pow(1 - target, 4)
    # Clip to avoid log(0).
    pred = paddle.clip(pred, 1e-6, 1 - 1e-6)
    #-------------------------------------------------------------------------#
    #   Focal loss: hard samples get larger weight, easy ones smaller.
    #-------------------------------------------------------------------------#
    pos_loss = paddle.log(pred) * paddle.pow(1 - pred, 2) * pos_inds
    neg_loss = paddle.log(1 - pred) * paddle.pow(pred, 2) * neg_weights * neg_inds
    #-------------------------------------------------------------------------#
    #   Normalize the loss by the number of positive samples.
    #-------------------------------------------------------------------------#
    num_pos = pos_inds.astype('float32').sum()
    pos_loss = pos_loss.sum()
    neg_loss = neg_loss.sum()
    if num_pos == 0:
        loss = -neg_loss
    else:
        loss = -(pos_loss + neg_loss) / num_pos
    return loss
def reg_l1_loss(pred, target, mask):
    """Masked L1 regression loss (e.g. for width/height or offset heads).

    Args:
        pred: predicted regression map, NCHW with C == 2 (transposed to
            NHWC below; the mask is tiled to 2 channels accordingly).
        target: ground-truth regression map, NHWC, same shape as the
            transposed pred.
        mask: NHW float map, nonzero only at the feature points that carry
            a regression target.

    Returns:
        Sum of absolute errors over masked points, normalized by the
        number of active mask entries (+1e-4 to avoid division by zero).
    """
    # pred = pred.permute(0,2,3,1)  (torch equivalent kept for reference)
    pred = paddle.transpose(pred, [0,2,3,1])
    # expand_mask = paddle.unsqueeze(mask,-1).repeat(1,1,1,2)
    # Broadcast the mask over both regression channels.
    expand_mask = paddle.tile(paddle.unsqueeze(mask,-1), [1,1,1,2])
    loss = F.l1_loss(pred * expand_mask, target * expand_mask, reduction='sum')
    loss = loss / (mask.sum() + 1e-4)
    return loss
|
[
"1691608003@qq.com"
] |
1691608003@qq.com
|
9f2eaee40308723324858966dcd6932750b0241b
|
bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d
|
/lib/googlecloudsdk/command_lib/storage/tasks/task_buffer.py
|
dee39b0c1165d965f0fa3a433725b4686336f215
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
05fbb473d629195f25887fc5bfaa712f2cbc0a24
|
392abf004b16203030e6efd2f0af24db7c8d669e
|
refs/heads/master
| 2023-08-31T05:40:41.317697
| 2023-08-23T18:23:16
| 2023-08-23T18:23:16
| 335,182,594
| 9
| 2
|
NOASSERTION
| 2022-10-29T20:49:13
| 2021-02-02T05:47:30
|
Python
|
UTF-8
|
Python
| false
| false
| 2,762
|
py
|
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements a buffer for tasks used in task_graph_executor.
See go/parallel-processing-in-gcloud-storage for more information.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from six.moves import queue
class _PriorityWrapper:
"""Wraps a buffered task and tracks priority information.
Attributes:
task (Union[task.Task, str]): A buffered item. Expected to be a task or a
string (to handle shutdowns) when used by task_graph_executor.
priority (int): The priority of this task. A task with a lower value will be
executed before a task with a higher value, since queue.PriorityQueue uses
a min-heap.
"""
def __init__(self, task, priority):
self.task = task
self.priority = priority
def __lt__(self, other):
return self.priority < other.priority
class TaskBuffer:
"""Stores and prioritizes tasks.
The current implementation uses a queue.PriorityQueue under the hood, since
in experiments we found that the heap it maintains did not add too much
overhead. If it does end up being a bottleneck, the same API can be
implemented with a collections.deque.
"""
def __init__(self):
self._queue = queue.PriorityQueue()
def get(self):
"""Removes and returns an item from the buffer.
Calls to `get` block if there are no elements in the queue, and return
prioritized items before non-prioritized items.
Returns:
A buffered item. Expected to be a task or a string (to handle shutdowns)
when used by task_graph_executor.
"""
return self._queue.get().task
def put(self, task, prioritize=False):
"""Adds an item to the buffer.
Args:
task (Union[task.Task, str]): A buffered item. Expected to be a task or a
string (to handle shutdowns) when used by task_graph_executor.
prioritize (bool): Tasks added with prioritize=True will be returned by
`get` before tasks added with prioritize=False.
"""
priority = 0 if prioritize else 1
prioritized_item = _PriorityWrapper(task, priority)
self._queue.put(prioritized_item)
|
[
"cloudsdk.mirror@gmail.com"
] |
cloudsdk.mirror@gmail.com
|
8534041473d28f92fb8db6079f28b29f4e1c7743
|
cae9ca1dda110cd6f65d5021c5891fdee76ec6fe
|
/day2/set/2.py
|
e7bb3bc3680e0158226f7a9475d6dce754b58602
|
[] |
no_license
|
shobhit-nigam/yagami
|
fb33d6de76a698a160f9e8df9d7d9f5b836797d8
|
283e2a464f74ac07c21ae7095b9a45fa632aa38a
|
refs/heads/main
| 2023-07-04T09:46:51.057558
| 2021-08-10T05:13:27
| 2021-08-10T05:13:27
| 391,846,901
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 267
|
py
|
# Demonstrates Python sets: duplicates collapse, and sets are unordered.
basket_a = {'apple', 'banana', 'pear', 'apple', 'kiwi', 'banana', 'avocado'}
basket_b = {'orange', 'plum', 'grapes', 'apple', 'pear', 'raspberry'}
print(type(basket_a))
print("basket_a =", basket_a)
print("basket_b =", basket_b)
# BUG FIX: sets do not support indexing, so "basket_a[2]" raised
# TypeError. Convert to a sorted list first to get a stable element.
print("sorted(basket_a)[2] =", sorted(basket_a)[2])
|
[
"noreply@github.com"
] |
shobhit-nigam.noreply@github.com
|
74630a900649910f52610167dda5e5175c1009c7
|
e3d33f5d82a541d7491e079c394dcebf1568f078
|
/server/settings/migrations/0003_delete_tournamenttype.py
|
746daa2db4b9fb8720e479ddcbeaca42296afee0
|
[
"MIT"
] |
permissive
|
MahjongRepository/mahjong-portal
|
51bd1300c3e6b8a341fbddb67a750b268950627e
|
20f01433858bed4610d60b27a98bafce5a810097
|
refs/heads/master
| 2023-07-09T09:05:23.155419
| 2023-07-08T10:47:14
| 2023-07-08T10:47:14
| 114,328,632
| 12
| 10
|
MIT
| 2022-07-29T01:29:59
| 2017-12-15T04:53:02
|
Python
|
UTF-8
|
Python
| false
| false
| 435
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-01-18 13:32
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Delete the ``TournamentType`` model.

    Depends on tournament migration 0005 (which, per its name, removed the
    tournament field referencing this model), so the delete can be applied
    safely afterwards.
    """

    dependencies = [
        ('tournament', '0005_remove_tournament_tournament_type'),
        ('settings', '0002_auto_20180117_0643'),
    ]

    operations = [
        migrations.DeleteModel(
            name='TournamentType',
        ),
    ]
|
[
"lisikhin@gmail.com"
] |
lisikhin@gmail.com
|
428b845f68e1d7c602aa7f74a604609708605c11
|
b35aea9f4411f5dc7942392d78dc31bb76c7ec73
|
/ARTIN/index/forms.py
|
145690526dcce52d9b06a9000dcf43e2949b4874
|
[] |
no_license
|
ashkanusefi/rondshow
|
1079b81704fff55a1d54fa8dee2712ab61e92f4a
|
7e5a80fcc6e326b8b1737a54fb53becc4195e475
|
refs/heads/master
| 2023-09-01T18:45:33.170465
| 2021-09-18T11:24:52
| 2021-09-18T11:24:52
| 407,820,565
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 760
|
py
|
from django import forms
from index.models import Contactform
class Contact_Form(forms.ModelForm):
    """ModelForm over ``Contactform`` exposing the public contact fields.

    Each widget carries a Persian placeholder string (name, e-mail,
    subject, phone, message) rendered as the HTML input placeholder.
    """
    class Meta:
        model = Contactform
        # Persian user-facing placeholder text shown in the rendered inputs.
        widgets = {
            'name': forms.TextInput(attrs={'placeholder': 'نام و نام خوانوادگی'}),
            'email': forms.TextInput(attrs={'placeholder': 'ایمیل'}),
            'subject': forms.TextInput(attrs={'placeholder': 'موضوع'}),
            'phone': forms.TextInput(attrs={'placeholder': 'شماره تماس'}),
            'description': forms.Textarea(attrs={'placeholder': 'پیام خود را وارد کنید'}),
        }
        # Field order here controls the rendered form order.
        fields = [
            'name',
            'email',
            'subject',
            'phone',
            'description',
        ]
|
[
"yousefi.ashkan96@gmail.com"
] |
yousefi.ashkan96@gmail.com
|
165b320a0f937ccc6fd4ef9e6bae85487e84034d
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/100/usersdata/199/49629/submittedfiles/prova1.py
|
6b7bd8a5436b67d150ae6be7d451d557c92d2016
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 378
|
py
|
# -*- coding: utf-8 -*-
import math
# COMECE SEU CÓDIGO ABAIXO DESTA LINHA
# BUG FIX: each input line had an extra unmatched "(" --
# e.g. "Carta1 =(int(input('Digite C1:'))" -- which was a SyntaxError.
Carta1 = int(input('Digite C1:'))
Carta2 = int(input('Digite C2:'))
Carta3 = int(input('Digite C3:'))
Carta4 = int(input('Digite C4:'))
Carta5 = int(input('Digite C5:'))
if Carta1 > Carta2 > Carta3 > Carta4 > Carta5:
    # strictly decreasing hand
    print('D')
elif Carta1 < Carta2 < Carta3 < Carta4 < Carta5:
    # BUG FIX: the last comparison was "Carta4 > Carta5", which can never
    # match a strictly increasing hand; presumably 'C' = crescente.
    print('C')
else:
    print('N')
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
b410d142b81c1ff46a841b791aac9e8f0c825de6
|
7c1b5af77fbfde1f4f2c698a489e07024c147edc
|
/docs/sphinxext/example.py
|
a3a898c3d74b35d6e48e079745f272267a2beaef
|
[] |
no_license
|
migueldvb/pyasdf
|
e7812da935ee3e4fec6d3c61fb16425ac2e1bdc7
|
4a72952b0196ede261e07569fc4da2616fa5e4b3
|
refs/heads/master
| 2020-12-26T18:44:07.562442
| 2015-03-30T16:18:19
| 2015-03-30T16:18:19
| 29,930,850
| 0
| 0
| null | 2015-02-04T20:23:04
| 2015-01-27T19:29:17
|
Python
|
UTF-8
|
Python
| false
| false
| 3,764
|
py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals, print_function
import atexit
import io
import os
import shutil
import tempfile
import textwrap
from docutils.parsers.rst import Directive
from docutils import nodes
from sphinx.util.nodes import set_source_info
from pyasdf import AsdfFile
from pyasdf.constants import ASDF_MAGIC, BLOCK_FLAG_STREAMED
from pyasdf import versioning
from pyasdf import yamlutil
version_string = versioning.version_to_string(versioning.default_version)
TMPDIR = tempfile.mkdtemp()
def delete_tmpdir():
    """Remove the module-level scratch directory (registered with atexit at the bottom of this file)."""
    shutil.rmtree(TMPDIR)
GLOBALS = {}
LOCALS = {}
FLAGS = {
BLOCK_FLAG_STREAMED: "BLOCK_FLAG_STREAMED"
}
class RunCodeDirective(Directive):
    """``.. runcode::`` — execute the directive body, then render it.

    The body is run with ``exec`` inside the shared module-level
    GLOBALS/LOCALS namespaces, so later directives on the same page see
    state created by earlier ones. Execution happens with TMPDIR as the
    working directory so any files the snippet writes land in the scratch
    directory. The snippet itself is documentation content, i.e. trusted
    input for ``exec``.
    """
    has_content = True
    def run(self):
        code = textwrap.dedent('\n'.join(self.content))
        cwd = os.getcwd()
        os.chdir(TMPDIR)
        try:
            try:
                exec(code, GLOBALS, LOCALS)
            except:
                # Print the failing snippet for the build log, then
                # propagate the original error.
                print(code)
                raise
            # Render the executed source as a highlighted Python block.
            literal = nodes.literal_block(code, code)
            literal['language'] = 'python'
            set_source_info(self, literal)
        finally:
            # Always restore the original working directory.
            os.chdir(cwd)
        return [literal]
class AsdfDirective(Directive):
    """``.. asdf:: filename`` — render an ASDF file's YAML header and a
    summary of each binary block, wrapped in a titled admonition.

    The file is read relative to TMPDIR (typically written there earlier
    by a ``runcode`` directive).
    """
    required_arguments = 1
    def run(self):
        filename = self.arguments[0]
        cwd = os.getcwd()
        os.chdir(TMPDIR)
        parts = []
        try:
            # First part: the YAML header, prefixed with the ASDF magic line.
            code = AsdfFile.read(filename, _get_yaml_content=True)
            code = '{0}{1}\n'.format(ASDF_MAGIC, version_string) + code.strip()
            literal = nodes.literal_block(code, code)
            literal['language'] = 'yaml'
            set_source_info(self, literal)
            parts.append(literal)
            # One additional part per internal binary block.
            ff = AsdfFile.read(filename)
            for i, block in enumerate(ff.blocks.internal_blocks):
                # NOTE(review): bytes.encode('hex') is Python-2-only; on
                # Python 3 this would need binascii.hexlify — confirm the
                # build interpreter.
                data = block.data.tostring().encode('hex')
                if len(data) > 40:
                    data = data[:40] + '...'
                allocated = block._allocated
                size = block._size
                data_size = block._data_size
                flags = block._flags
                # Streamed blocks have no fixed sizes; display zeros.
                if flags & BLOCK_FLAG_STREAMED:
                    allocated = size = data_size = 0
                lines = []
                lines.append('BLOCK {0}:'.format(i))
                # Translate the flag bitmask into human-readable names.
                human_flags = []
                for key, val in FLAGS.items():
                    if flags & key:
                        human_flags.append(val)
                if len(human_flags):
                    lines.append('    flags: {0}'.format(' | '.join(human_flags)))
                if block.compression:
                    lines.append('    compression: {0}'.format(block.compression))
                lines.append('    allocated_size: {0}'.format(allocated))
                lines.append('    used_size: {0}'.format(size))
                lines.append('    data_size: {0}'.format(data_size))
                lines.append('    data: {0}'.format(data))
                code = '\n'.join(lines)
                literal = nodes.literal_block(code, code)
                literal['language'] = 'yaml'
                set_source_info(self, literal)
                parts.append(literal)
        finally:
            # Always restore the original working directory.
            os.chdir(cwd)
        # Wrap everything in an admonition titled with the filename.
        result = nodes.admonition()
        textnodes, messages = self.state.inline_text(filename, self.lineno)
        title = nodes.title(filename, '', *textnodes)
        result += title
        result.children.extend(parts)
        return [result]
def setup(app):
    """Sphinx extension entry point: register both directives."""
    app.add_directive('runcode', RunCodeDirective)
    app.add_directive('asdf', AsdfDirective)

atexit.register(delete_tmpdir)  # remove the scratch dir at interpreter exit
|
[
"mdboom@gmail.com"
] |
mdboom@gmail.com
|
e2e3817a1e7ab097aac8071c3eb9fa89df0186b9
|
863509e794b069c9688f6263454c06d2c48868b2
|
/backend/backend/ml_model/client_server/ml_client.py
|
e892990daa897d7cb55cee5604a142de6051e99b
|
[
"MIT"
] |
permissive
|
TheDuckWhisperer/tournesol
|
c6df38185963bbecc6109dac275075a3ceca857a
|
0fde7587e91a42e5a2218f2ffb70d4fc8cff7f73
|
refs/heads/master
| 2023-04-18T15:31:20.627952
| 2021-05-01T19:59:07
| 2021-05-01T19:59:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,881
|
py
|
from jsonrpcclient.clients.http_client import HTTPClient
from multiprocessing import Process
from backend.rating_fields import VIDEO_FIELDS
import numpy as np
import pickle
from time import time
def time_cache_wrapper(f, expire_sec=3600):
    """Decorator which caches results for ``expire_sec`` seconds.

    The cache maps pickle((args, sorted kwargs)) -> (compute_time, value).
    Expired entries are recomputed and overwritten in place; nothing is
    ever evicted, so the cache grows with the number of distinct argument
    combinations seen (acceptable for the small key spaces used here).

    Args:
        f: function to wrap; all of its arguments must be picklable.
        expire_sec: lifetime of a cached value, in seconds.

    Returns:
        The wrapped function.
    """
    import functools  # local import keeps this helper self-contained

    # format pickle((args, kwargs)) -> (compute_time, value)
    cache = {}

    @functools.wraps(f)  # preserve f's name/docstring for introspection
    def wrapper(*args, **kwargs):
        # BUG FIX: the original wrapper accepted positional arguments only
        # and raised TypeError on keyword calls; key on both now.
        key = pickle.dumps((args, sorted(kwargs.items())))
        entry = cache.get(key)
        if entry is not None and time() - entry[0] <= expire_sec:
            return entry[1]
        result = f(*args, **kwargs)
        cache[key] = (time(), result)
        return result
    return wrapper
class DatabaseLearnerCommunicator(object):
    """Communicate with training/inference workers over JSON-RPC HTTP."""

    def __init__(
            self,
            port_inference=5000,
            port_training=5001,
            host='localhost'):
        """Initialize (remember ports and host for both workers)."""
        self.port_inference = port_inference
        self.port_training = port_training
        self.host = host

    def build_client(self, port):
        """Return an http client pointing to the worker on ``port``."""
        return HTTPClient("http://%s:%d" % (self.host, port))

    # NOTE(review): the cache key includes ``self`` (pickled), so cached
    # entries keep communicator instances alive for the cache lifetime.
    @time_cache_wrapper
    def __call__(self, x):
        """Transform embedding into preferences.

        Best-effort: on any error the exception is printed and a zero
        vector (one entry per VIDEO_FIELDS) is returned instead.
        """
        try:
            client = self.build_client(port=self.port_inference)
            return client.call([float(t) for t in x]).data.result
        except Exception as e:
            print(e)
            return np.zeros(len(VIDEO_FIELDS))

    def fit(self):
        """Fit on data from the dataset (runs in a detached child process)."""
        def fit_helper():
            # Train on the training worker, then ask the inference worker
            # to reload the freshly fitted model.
            client = self.build_client(port=self.port_training)
            client.fit()
            client_inference = self.build_client(port=self.port_inference)
            client_inference.reload()
        Process(target=fit_helper).start()
|
[
"sergei.volodin.ch@gmail.com"
] |
sergei.volodin.ch@gmail.com
|
d10192ab95a1b46d604aa924f07a235b10ff2971
|
4fbd844113ec9d8c526d5f186274b40ad5502aa3
|
/algorithms/python3/number_of_digit_one.py
|
538d9b720619759dbae129533367a918a55ffec3
|
[] |
no_license
|
capric8416/leetcode
|
51f9bdc3fa26b010e8a1e8203a7e1bcd70ace9e1
|
503b2e303b10a455be9596c31975ee7973819a3c
|
refs/heads/master
| 2022-07-16T21:41:07.492706
| 2020-04-22T06:18:16
| 2020-04-22T06:18:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 510
|
py
|
# !/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Given an integer n, count the total number of digit 1 appearing in all non-negative integers less than or equal to n.
Example:
Input: 13
Output: 6
Explanation: Digit 1 occurred in the following numbers: 1, 10, 11, 12, 13.
"""
""" ==================== body ==================== """
class Solution:
    def countDigitOne(self, n):
        """Count occurrences of digit '1' in all integers in [0, n].

        :type n: int
        :rtype: int

        Classic digit-position counting: for each decimal position
        (factor = 1, 10, 100, ...) the digits above, at, and below that
        position determine how many numbers <= n carry a '1' there.
        Runs in O(log10 n) time, O(1) space.
        """
        count = 0
        factor = 1
        while factor <= n:
            higher = n // (factor * 10)   # digits above this position
            digit = (n // factor) % 10    # digit at this position
            lower = n % factor            # digits below this position
            # Every complete cycle of 10*factor contributes `factor` ones.
            count += higher * factor
            if digit == 1:
                # Partial cycle: '1' followed by 0..lower.
                count += lower + 1
            elif digit > 1:
                # Partial cycle passed '1' entirely: a full run of ones.
                count += factor
            factor *= 10
        return count
""" ==================== body ==================== """
|
[
"capric8416@gmail.com"
] |
capric8416@gmail.com
|
4afb6395738c94f6f3c5f69cd5aba31fac3f7ab9
|
23a56e0555d6b27aa444d8396ec32f9d2b678a39
|
/07_ur_online/shifted_frames_setup/compas/packages/compas_fabrication/fabrication/grasshopper/utilities/sets.py
|
0aa962839f412ffad8a96c1e5c3841c1df6bb963
|
[
"MIT"
] |
permissive
|
dtbinh/T1_python-exercises
|
2ce1b01bc71f8032bbe8fb4ef8f71b648dcde1c5
|
f4710c3dc2ba8ddb3e3e9069ab8d65df674463ab
|
refs/heads/master
| 2020-04-04T20:00:52.191601
| 2018-01-09T08:14:36
| 2018-01-09T08:14:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,129
|
py
|
from Grasshopper import DataTree as Tree
from Grasshopper.Kernel.Data import GH_Path as Path
from System import Array
def list_to_tree(alist, none_and_holes=False, base_path=[0]):
    """
    Transforms nestings of lists or tuples to a Grasshopper DataTree

    NOTE(review): ``base_path=[0]`` is a mutable default argument; it is
    only ever read (copied via ``base_path[:]``), so it is safe here, but
    a tuple default would be more robust against future edits.

    Usage:
    mytree = [ [1,2], 3, [],[ 4,[5]] ]
    a = list_to_tree(mytree)
    b = list_to_tree(mytree, none_and_holes=True, base_path=[7,1])
    """
    def process_one_item(alist, tree, track):
        # Branch path for the current nesting level (track = index trail).
        path = Path(Array[int](track))
        if len(alist) == 0 and none_and_holes:
            # Record an empty branch so "holes" survive the conversion.
            tree.EnsurePath(path)
            return
        for i,item in enumerate(alist):
            if hasattr(item, '__iter__'): #if list or tuple
                # Recurse one level deeper, extending the index trail.
                track.append(i)
                process_one_item(item, tree, track)
                track.pop()
            else:
                if none_and_holes:
                    # Insert at the exact index so None gaps are preserved.
                    tree.Insert(item, path, i)
                elif item is not None:
                    tree.Add(item, path)
    tree = Tree[object]()
    if alist is not None:
        process_one_item(alist, tree, base_path[:])
    return tree
|
[
"rusenova"
] |
rusenova
|
18a6e21aeda01c6a19f315813bff0d01b04146e0
|
b66450f669095b0ad013ea82cb1ae575b83d74c3
|
/Interview Preparation 2/maze.py
|
eae0d33ab121c0679d4a9e3e440d6211bec9b2ad
|
[] |
no_license
|
aulb/ToAsk
|
2649a3fad357820e3c8809816967dfb274704735
|
1e54c76ab9f7772316186db74496735ca1da65ce
|
refs/heads/master
| 2021-05-01T20:35:14.062678
| 2020-02-23T07:44:29
| 2020-02-23T07:44:29
| 33,289,737
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,032
|
py
|
# Implement a function to generate a maze.
# BUG FIX: the line above was a bare prose statement (SyntaxError); it is
# now a comment.
from random import randint
import enum


class Condition(enum.Enum):
    # BUG FIX: was "class Condition(enum)" -- ``enum`` is a module, not a
    # base class, so the original raised TypeError at import time.
    CLEAR = 0
    RIGHT = 1
    BOTTOM = 2


class Direction(enum.Enum):
    RIGHT = 0
    LEFT = 1
    UP = 2
    DOWN = 3


# Create maze from top left to bottom right
def generate_maze(row, col):
    """Build a row x col grid of zeroes (returns [] for negative sizes).

    The random-walk helpers below are defined but not yet invoked; the
    path/wall drawing logic is unfinished (draw_wall is a stub).
    """
    if row < 0 or col < 0:
        return []
    # create a fresh matrix of all zeroes
    maze = [[0 for _ in range(col)] for _ in range(row)]

    def get_direction(condition=Condition.CLEAR):
        # Weighted random direction. BUG FIX: the original tested
        # "if Condition.CLEAR:" (always truthy); it now compares against
        # the ``condition`` argument.
        roll = randint(1, 10)
        if 1 <= roll <= 4:
            if condition == Condition.CLEAR:
                return Direction.RIGHT
            elif condition == Condition.RIGHT:
                return Direction.DOWN
            return Direction.RIGHT
        elif 5 <= roll <= 8:
            if condition == Condition.CLEAR:
                return Direction.DOWN
            elif condition == Condition.RIGHT:
                return Direction.DOWN
            return Direction.RIGHT
        elif roll == 9:
            if condition == Condition.CLEAR:
                return Direction.LEFT
            elif condition == Condition.RIGHT:
                return Direction.DOWN
            return Direction.UP
        else:
            if condition == Condition.CLEAR:
                return Direction.UP
            elif condition == Condition.RIGHT:
                return Direction.DOWN
            return Direction.RIGHT

    def create_key(cell):
        # BUG FIX: smart quotes replaced with ASCII string quotes.
        return '{},{}'.format(cell[0], cell[1])

    def create_path(maze):
        # Random walk from top-left to bottom-right; returns visited cells.
        # BUG FIX: ``path`` was initialised as a dict but used via
        # ``.append``; it is now a list of [row, col] cells.
        path = [[0, 0]]
        current_cell = [0, 0]
        condition = Condition.CLEAR
        while current_cell != [len(maze) - 1, len(maze[0]) - 1]:
            new_direction = get_direction(condition)
            if new_direction == Direction.RIGHT:
                # check if we can go right
                if current_cell[1] + 1 <= len(maze[0]) - 1:
                    current_cell[1] += 1
                    path.append(current_cell[:])
            if new_direction == Direction.LEFT:
                # check if we can go left
                if current_cell[1] - 1 >= 0:
                    current_cell[1] -= 1
                    path.append(current_cell[:])
            if new_direction == Direction.UP:
                # check if we can go up
                if current_cell[0] - 1 >= 0:
                    current_cell[0] -= 1
                    path.append(current_cell[:])
            if new_direction == Direction.DOWN:
                # check if we can go down
                if current_cell[0] + 1 <= len(maze) - 1:
                    current_cell[0] += 1
                    path.append(current_cell[:])
        return path

    def draw_wall(maze, path):
        # TODO: unfinished in the original; left as a stub.
        pass

    # BUG FIX: the original built ``maze`` and then discarded it (implicit
    # None return); return the grid so callers can use it.
    return maze
|
[
"aalbertuntung@gmail.com"
] |
aalbertuntung@gmail.com
|
742f8a6dd2aee367cca6f94262b5612485524064
|
868b90e85541f1f76e1805346f18c2cb7675ffc8
|
/cnn/02_introductory_cnn.py
|
1eac1abda98b5a9c829bc11f7f50f4ba5b7d7589
|
[] |
no_license
|
WOW5678/tensorflow_study
|
a9a447c39c63a751046d4776eedc17589324634e
|
39e202b102cd2ebc1ba16f793acc8ebe9ea0e752
|
refs/heads/master
| 2020-03-19T15:49:24.729778
| 2018-10-19T06:45:26
| 2018-10-19T06:45:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,639
|
py
|
# Introductory CNN Model: MNIST Digits
#---------------------------------------
#
# In this example, we will download the MNIST handwritten
# digits and create a simple CNN network to predict the
# digit category (0-9)
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets
from tensorflow.python.framework import ops
ops.reset_default_graph()
# Start a graph session
sess = tf.Session()
# Load data
data_dir = 'temp'
mnist = read_data_sets(data_dir)
# Convert images into 28x28 (they are downloaded as 1x784)
train_xdata = np.array([np.reshape(x, (28,28)) for x in mnist.train.images])
test_xdata = np.array([np.reshape(x, (28,28)) for x in mnist.test.images])
# Convert labels into one-hot encoded vectors
train_labels = mnist.train.labels
test_labels = mnist.test.labels
# Set model parameters
batch_size = 100
learning_rate = 0.005
evaluation_size = 500
image_width = train_xdata[0].shape[0]
image_height = train_xdata[0].shape[1]
target_size = max(train_labels) + 1
num_channels = 1 # greyscale = 1 channel
generations = 500
eval_every = 5
conv1_features = 25
conv2_features = 50
max_pool_size1 = 2 # NxN window for 1st max pool layer
max_pool_size2 = 2 # NxN window for 2nd max pool layer
fully_connected_size1 = 100
# Declare model placeholders
x_input_shape = (batch_size, image_width, image_height, num_channels)
x_input = tf.placeholder(tf.float32, shape=x_input_shape)
y_target = tf.placeholder(tf.int32, shape=(batch_size))
eval_input_shape = (evaluation_size, image_width, image_height, num_channels)
eval_input = tf.placeholder(tf.float32, shape=eval_input_shape)
eval_target = tf.placeholder(tf.int32, shape=(evaluation_size))
# Declare model parameters
conv1_weight = tf.Variable(tf.truncated_normal([4, 4, num_channels, conv1_features],
stddev=0.1, dtype=tf.float32))
conv1_bias = tf.Variable(tf.zeros([conv1_features], dtype=tf.float32))
conv2_weight = tf.Variable(tf.truncated_normal([4, 4, conv1_features, conv2_features],
stddev=0.1, dtype=tf.float32))
conv2_bias = tf.Variable(tf.zeros([conv2_features], dtype=tf.float32))
# fully connected variables
resulting_width = image_width // (max_pool_size1 * max_pool_size2)
resulting_height = image_height // (max_pool_size1 * max_pool_size2)
full1_input_size = resulting_width * resulting_height * conv2_features
full1_weight = tf.Variable(tf.truncated_normal([full1_input_size, fully_connected_size1],
stddev=0.1, dtype=tf.float32))
full1_bias = tf.Variable(tf.truncated_normal([fully_connected_size1], stddev=0.1, dtype=tf.float32))
full2_weight = tf.Variable(tf.truncated_normal([fully_connected_size1, target_size],
stddev=0.1, dtype=tf.float32))
full2_bias = tf.Variable(tf.truncated_normal([target_size], stddev=0.1, dtype=tf.float32))
# Initialize Model Operations
def my_conv_net(input_data):
# First Conv-ReLU-MaxPool Layer
conv1 = tf.nn.conv2d(input_data, conv1_weight, strides=[1, 1, 1, 1], padding='SAME')
relu1 = tf.nn.relu(tf.nn.bias_add(conv1, conv1_bias))
max_pool1 = tf.nn.max_pool(relu1, ksize=[1, max_pool_size1, max_pool_size1, 1],
strides=[1, max_pool_size1, max_pool_size1, 1], padding='SAME')
# Second Conv-ReLU-MaxPool Layer
conv2 = tf.nn.conv2d(max_pool1, conv2_weight, strides=[1, 1, 1, 1], padding='SAME')
relu2 = tf.nn.relu(tf.nn.bias_add(conv2, conv2_bias))
max_pool2 = tf.nn.max_pool(relu2, ksize=[1, max_pool_size2, max_pool_size2, 1],
strides=[1, max_pool_size2, max_pool_size2, 1], padding='SAME')
# Transform Output into a 1xN layer for next fully connected layer
final_conv_shape = max_pool2.get_shape().as_list()
final_shape = final_conv_shape[1] * final_conv_shape[2] * final_conv_shape[3]
flat_output = tf.reshape(max_pool2, [final_conv_shape[0], final_shape])
# First Fully Connected Layer
fully_connected1 = tf.nn.relu(tf.add(tf.matmul(flat_output, full1_weight), full1_bias))
# Second Fully Connected Layer
final_model_output = tf.add(tf.matmul(fully_connected1, full2_weight), full2_bias)
return(final_model_output)
model_output = my_conv_net(x_input)
test_model_output = my_conv_net(eval_input)
# Declare Loss Function (softmax cross entropy)
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=model_output, labels=y_target))
# Create a prediction function
prediction = tf.nn.softmax(model_output)
test_prediction = tf.nn.softmax(test_model_output)
# Create accuracy function
def get_accuracy(logits, targets):
batch_predictions = np.argmax(logits, axis=1)
num_correct = np.sum(np.equal(batch_predictions, targets))
return(100. * num_correct/batch_predictions.shape[0])
# Create an optimizer
my_optimizer = tf.train.MomentumOptimizer(learning_rate, 0.9)
train_step = my_optimizer.minimize(loss)
# Initialize Variables
init = tf.global_variables_initializer()
sess.run(init)
# Start training loop
train_loss = []
train_acc = []
test_acc = []
for i in range(generations):
rand_index = np.random.choice(len(train_xdata), size=batch_size)
rand_x = train_xdata[rand_index]
rand_x = np.expand_dims(rand_x, 3)
rand_y = train_labels[rand_index]
train_dict = {x_input: rand_x, y_target: rand_y}
sess.run(train_step, feed_dict=train_dict)
temp_train_loss, temp_train_preds = sess.run([loss, prediction], feed_dict=train_dict)
temp_train_acc = get_accuracy(temp_train_preds, rand_y)
if (i+1) % eval_every == 0:
eval_index = np.random.choice(len(test_xdata), size=evaluation_size)
eval_x = test_xdata[eval_index]
eval_x = np.expand_dims(eval_x, 3)
eval_y = test_labels[eval_index]
test_dict = {eval_input: eval_x, eval_target: eval_y}
test_preds = sess.run(test_prediction, feed_dict=test_dict)
temp_test_acc = get_accuracy(test_preds, eval_y)
# Record and print results
train_loss.append(temp_train_loss)
train_acc.append(temp_train_acc)
test_acc.append(temp_test_acc)
acc_and_loss = [(i+1), temp_train_loss, temp_train_acc, temp_test_acc]
acc_and_loss = [np.round(x,2) for x in acc_and_loss]
print('Generation # {}. Train Loss: {:.2f}. Train Acc (Test Acc): {:.2f} ({:.2f})'.format(*acc_and_loss))
# Matlotlib code to plot the loss and accuracies
eval_indices = range(0, generations, eval_every)
# Plot loss over time
plt.plot(eval_indices, train_loss, 'k-')
plt.title('Softmax Loss per Generation')
plt.xlabel('Generation')
plt.ylabel('Softmax Loss')
plt.show()
# Plot train and test accuracy
plt.plot(eval_indices, train_acc, 'k-', label='Train Set Accuracy')
plt.plot(eval_indices, test_acc, 'r--', label='Test Set Accuracy')
plt.title('Train and Test Accuracy')
plt.xlabel('Generation')
plt.ylabel('Accuracy')
plt.legend(loc='lower right')
plt.show()
# Plot some samples
# Plot the 6 of the last batch results:
actuals = rand_y[0:6]
predictions = np.argmax(temp_train_preds,axis=1)[0:6]
images = np.squeeze(rand_x[0:6])
Nrows = 2
Ncols = 3
for i in range(6):
plt.subplot(Nrows, Ncols, i+1)
plt.imshow(np.reshape(images[i], [28,28]), cmap='Greys_r')
plt.title('Actual: ' + str(actuals[i]) + ' Pred: ' + str(predictions[i]),
fontsize=10)
frame = plt.gca()
frame.axes.get_xaxis().set_visible(False)
frame.axes.get_yaxis().set_visible(False)
|
[
"noreply@github.com"
] |
WOW5678.noreply@github.com
|
86c22ca4ca7fe3b67919b54097bc9189805b71f3
|
e4066b34668bbf7fccd2ff20deb0d53392350982
|
/project_scrapy/spiders/grammarly.py
|
19c27a8c3349910b1bd4ad1227155e4b6ced0815
|
[] |
no_license
|
sushma535/WebSites
|
24a688b86e1c6571110f20421533f0e7fdf6e1a8
|
16a3bfa44e6c7e22ae230f5b336a059817871a97
|
refs/heads/master
| 2023-08-18T09:09:16.052555
| 2021-10-11T00:41:50
| 2021-10-11T00:41:50
| 415,621,279
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,541
|
py
|
import scrapy
from scrapy.crawler import CrawlerProcess
import os
import csv
from csv import reader
import re
total_data = {}
class SimilarWeb(scrapy.Spider):
name = 'SW'
user_agent = 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36'
start_urls = ['https://www.grammarly.com/', 'https://www.similarsites.com/site/grammarly.com/']
csv_columns = ['Category', 'Description', 'Name', 'Url']
csv_file = 'websites1_data.csv'
count = 0
def parse(self, response):
data, desc, cat = '', '', ''
print('response url:', response.url)
if response.url == self.start_urls[0]:
data = response.css('title::text').get()
if data:
data = re.sub("\n\t\t", '', data)
total_data['Name'] = data
self.count += 1
elif response.url == self.start_urls[1]:
cat = response.css(
'div[class="StatisticsCategoriesDistribution__CategoryTitle-fnuckk-6 jsMDeK"]::text').getall()
desc = response.css('div[class="SiteHeader__Description-sc-1ybnx66-8 hhZNQm"]::text').get()
if cat:
cat = ": ".join(cat[:])
total_data['Category'] = cat
total_data['Description'] = desc
total_data['Url'] = self.start_urls[0]
self.count += 1
if self.count == 2:
print("total data", total_data)
new_data = [total_data['Category'], total_data['Description'], total_data['Name'],
total_data['Url']]
print("new data", new_data)
self.row_appending_to_csv_file(new_data)
def row_appending_to_csv_file(self, data):
if os.path.exists(self.csv_file):
need_to_add_headers = False
with open(self.csv_file, 'a+', newline='') as file:
file.seek(0)
csv_reader = reader(file)
if len(list(csv_reader)) == 0:
need_to_add_headers = True
csv_writer = csv.writer(file)
if need_to_add_headers:
csv_writer.writerow(self.csv_columns)
csv_writer.writerow(data)
else:
with open(self.csv_file, 'w', newline='') as file:
csv_writer = csv.writer(file)
csv_writer.writerow(self.csv_columns) # header
csv_writer.writerow(data)
process = CrawlerProcess()
process.crawl(SimilarWeb)
process.start()
|
[
"sushmakusumareddy@gmail.com"
] |
sushmakusumareddy@gmail.com
|
700a9fbcb89b1b66f52a940e26430e4a1f4c5494
|
c96d9a76fe28630fe1b4cd7efa22e12fdce0399f
|
/kaggle/Song_popularity/optimize.py
|
a81690bd13b858784678e25cf5a75a1761a95589
|
[] |
no_license
|
tarunbhavnani/ml_diaries
|
858839e8ab8817caae3d56d3dad6d4ee9176ddbe
|
8d0700211a2881279df60ab2bea7095ef95ea8dc
|
refs/heads/master
| 2023-08-18T08:28:50.881356
| 2023-08-16T09:39:34
| 2023-08-16T09:39:34
| 157,958,911
| 0
| 1
| null | 2023-03-13T05:17:52
| 2018-11-17T06:52:34
|
Python
|
UTF-8
|
Python
| false
| false
| 1,620
|
py
|
#!/usr/bin/env python3
"""
optimize.py
"""
import optuna
import optuna.integration.lightgbm as lgb
import pandas as pd
from lightgbm import early_stopping, log_evaluation
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split
def objective(trial: optuna.Trial):
    """Optuna objective: train a tuned LightGBM binary classifier on the
    song-popularity data and return the hold-out ROC-AUC.

    Args:
        trial: optuna trial supplying hyperparameter suggestions.

    Returns:
        ROC-AUC on the stratified 20% validation split (higher is better).
    """
    # NOTE(review): the CSV is re-read on every trial; loading it once at
    # module level would speed up the study considerably.
    df = pd.read_csv("train-knn3.csv")
    train_x, test_x, train_y, test_y = train_test_split(
        df.drop(columns="song_popularity"), df["song_popularity"], test_size=0.2, stratify= df["song_popularity"], random_state=1
    )
    # Search space: regularisation, ensemble/tree size, sampling fractions.
    params = {
        "metric": "auc",
        "objective": "binary",
        "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
        "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
        "n_estimators": trial.suggest_int("n_estimators", 1, 100),
        "num_leaves": trial.suggest_int("num_leaves", 2, 256),
        "feature_fraction": trial.suggest_float("feature_fraction", 0.4, 1.0),
        "bagging_fraction": trial.suggest_float("bagging_fraction", 0.4, 1.0),
        "min_child_samples": trial.suggest_int("min_child_samples", 5, 100),
    }
    dtrain = lgb.Dataset(train_x, label=train_y)
    dval = lgb.Dataset(test_x, label=test_y)
    # ``lgb`` is optuna's LightGBM integration: it additionally tunes
    # stepwise parameters during train().
    model = lgb.train(
        params,
        dtrain,
        valid_sets=[dtrain, dval],
        callbacks=[early_stopping(100), log_evaluation(100)],
    )
    prediction = model.predict(test_x, num_iteration=model.best_iteration)
    return roc_auc_score(test_y, prediction)
# BUG FIX: roc_auc_score must be maximized, but optuna.create_study()
# defaults to direction="minimize" -- best_params would have reported the
# WORST trial. Make the direction explicit.
study = optuna.create_study(direction="maximize")
study.optimize(objective, n_jobs=-1, n_trials=100)
print(study.best_params)
|
[
"tarun.bhavnani@gmail.com"
] |
tarun.bhavnani@gmail.com
|
f10cf0c87250140ac5312dc31eb6fb3097b38031
|
5983bf3f4cbd49e222a4448f6e738946c1012553
|
/aicall/apps/info/migrations/0002_auto_20210322_1329.py
|
eb91fe911bbbd8d6a9721e302847df2eb2ef5cea
|
[] |
no_license
|
RympeR/aciall
|
7fa88eaf6799444aef84dba1ce9974de858ddfd4
|
ca238234160d93a1058f725121e1f3fbe71be33c
|
refs/heads/master
| 2023-04-04T14:45:39.390609
| 2021-04-05T10:02:00
| 2021-04-05T10:02:00
| 354,790,599
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 474
|
py
|
# Generated by Django 3.1.7 on 2021-03-22 11:29
from django.db import migrations
import imagekit.models.fields
class Migration(migrations.Migration):
    """Alter ``TalkThemes.image_svg`` to an optional (blank/null)
    ProcessedImageField stored at the media root."""

    dependencies = [
        ('info', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='talkthemes',
            name='image_svg',
            field=imagekit.models.fields.ProcessedImageField(blank=True, null=True, upload_to='', verbose_name='ImageSVG'),
        ),
    ]
|
[
"georg.rashkov@gmail.com"
] |
georg.rashkov@gmail.com
|
cd70acfcb63726d43d38f161933d4473e020bcb4
|
416ea1127f3e3a1a8e64dd980e59c7bf585379a0
|
/read_favorite_number.py
|
13e6938ce6b8bf90c1ab89faddd39721b21296a8
|
[] |
no_license
|
jocogum10/learning_python_crash_course
|
6cf826e4324f91a49da579fb1fcd3ca623c20306
|
c159d0b0de0be8e95eb8777a416e5010fbb9e2ca
|
refs/heads/master
| 2020-12-10T02:55:40.757363
| 2020-01-13T01:22:44
| 2020-01-13T01:22:44
| 233,486,206
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 180
|
py
|
"""Read a favorite number previously stored as JSON and report it."""
import json

filename = 'favorite_number.json'
with open(filename) as file_object:
    message = json.load(file_object)

# json.load returns whatever type was stored — typically an int for a
# "favorite number". The original `"..." + message` raised TypeError for
# non-string values; convert explicitly before concatenating.
print("I know your favorite number! It's " + str(message) + "!")
|
[
"jocogum10@gmail.com"
] |
jocogum10@gmail.com
|
3734e13259f4b245375820776dc260e0f60a01d5
|
2455062787d67535da8be051ac5e361a097cf66f
|
/Producers/BSUB/TrigProd_amumu_a5_dR5/trigger_amumu_producer_cfg_TrigProd_amumu_a5_dR5_129.py
|
7569fce71cd2fd69603dc29210d1978acc440b9c
|
[] |
no_license
|
kmtos/BBA-RecoLevel
|
6e153c08d5ef579a42800f6c11995ee55eb54846
|
367adaa745fbdb43e875e5ce837c613d288738ab
|
refs/heads/master
| 2021-01-10T08:33:45.509687
| 2015-12-04T09:20:14
| 2015-12-04T09:20:14
| 43,355,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,360
|
py
|
# CMSSW configuration: run PAT trigger matching on slimmed muons for the
# a->mumu (a5, dR5) sample and write a slimmed MiniAOD-style output file.
import FWCore.ParameterSet.Config as cms

process = cms.Process("PAT")
#process.load("BBA/Analyzer/bbaanalyzer_cfi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.EventContent.EventContent_cff')
process.load("Configuration.Geometry.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff")
process.load("PhysicsTools.PatAlgos.selectionLayer1.selectedPatCandidates_cff")
from Configuration.AlCa.GlobalTag import GlobalTag
# Conditions global tag for this MC production.
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')
process.load("Configuration.StandardSequences.MagneticField_cff")
####################
# Message Logger
####################
# Only print a framework report every 100 events.
process.MessageLogger.cerr.FwkReport.reportEvery = cms.untracked.int32(100)
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
## switch to uncheduled mode
process.options.allowUnscheduled = cms.untracked.bool(True)
# NOTE: this second assignment overrides the -1 above — only 500 events
# are processed.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(500)
)
####################
# Input File List
####################
# Input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('root://eoscms//eos/cms/store/user/ktos/RECO_Step3_amumu_a5/RECO_Step3_amumu_a5_129.root'),
    secondaryFileNames = cms.untracked.vstring()
)
############################################################
# Defining matching in DeltaR, sorting by best DeltaR
############################################################
process.mOniaTrigMatch = cms.EDProducer("PATTriggerMatcherDRLessByR",
    src     = cms.InputTag( 'slimmedMuons' ),
    matched = cms.InputTag( 'patTrigger' ),      # selections of trigger objects
    matchedCuts = cms.string( 'type( "TriggerMuon" ) && path( "HLT_Mu16_TkMu0_dEta18_Onia*")' ),  # input does not yet have the 'saveTags' parameter in HLT
    maxDPtRel   = cms.double( 0.5 ),   # no effect here
    maxDeltaR   = cms.double( 0.3 ),   #### selection of matches
    maxDeltaEta = cms.double( 0.2 ),   # no effect here
    resolveAmbiguities    = cms.bool( True ),  # definition of matcher output
    resolveByMatchQuality = cms.bool( True )   # definition of matcher output
)
# talk to output module
process.out = cms.OutputModule("PoolOutputModule",
    fileName = cms.untracked.string("file:RECO_Step3_amumu_a5_TrigProd_129.root"),
    outputCommands = process.MINIAODSIMEventContent.outputCommands
)
# Keep only the slimmed/tau/trigger collections needed downstream.
process.out.outputCommands += [ 'drop *_*_*_*',
    'keep *_*slimmed*_*_*',
    'keep *_pfTausEI_*_*',
    'keep *_hpsPFTauProducer_*_*',
    'keep *_hltTriggerSummaryAOD_*_*',
    'keep *_TriggerResults_*_HLT',
    'keep *_patTrigger*_*_*',
    'keep *_prunedGenParticles_*_*',
    'keep *_mOniaTrigMatch_*_*'
]
################################################################################
# Running the matching and setting the the trigger on
################################################################################
from PhysicsTools.PatAlgos.tools.trigTools import *
switchOnTrigger( process ) # This is optional and can be omitted.
switchOnTriggerMatching( process, triggerMatchers = [ 'mOniaTrigMatch'
])
process.outpath = cms.EndPath(process.out)
|
[
"kmtos@ucdavis.edu"
] |
kmtos@ucdavis.edu
|
50b38f5aa112634f69a1964367b345d28107fa78
|
62153e297ca84bf9d76eef56b28408f5337113f9
|
/tasks/migrations/0005_announcements_picture.py
|
470373d027006370d1516280f097642bee71c5a1
|
[] |
no_license
|
zarif007/HRTaskManager
|
22b72c80d2cac99fa9d3f7f0cfd480cb832ff910
|
4c7e7f04b82f138a7177f659bb347c7e189c6220
|
refs/heads/main
| 2023-06-23T22:05:33.812024
| 2021-07-31T19:55:11
| 2021-07-31T19:55:11
| 373,304,992
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 408
|
py
|
# Generated by Django 3.2.4 on 2021-07-26 12:32
from django.db import migrations, models


class Migration(migrations.Migration):
    """Auto-generated migration adding an optional picture to Announcements."""

    # Depends on the migration that created the Announcements model.
    dependencies = [
        ('tasks', '0004_announcements'),
    ]

    operations = [
        migrations.AddField(
            model_name='announcements',
            name='picture',
            # upload_to='' stores images at the MEDIA_ROOT top level.
            field=models.ImageField(blank=True, null=True, upload_to=''),
        ),
    ]
|
[
"zarifhuq786@gmail.com"
] |
zarifhuq786@gmail.com
|
fd0816dae9157631a8d5823e89d9650f7806a979
|
75ce5b7fee397fe4e67ed15a58f4cd42e0f8de9f
|
/PythonMasterclass/HelloWorld/Strings.py
|
3b45aebdb6a60836687406b37ebae58981f463c5
|
[] |
no_license
|
lukbast/stuff
|
7fd03b7e035394802c307682a25621dfd667960b
|
160e1d77d1b592fac099b9c7139fb4e2f7f8dbbe
|
refs/heads/main
| 2023-08-06T21:39:55.334812
| 2021-09-23T17:37:47
| 2021-09-23T17:37:47
| 409,684,114
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,083
|
py
|
# Scratch file exploring Python string literals, escaping and formatting.
greeting = 'Hello'
name = 'Bruce'
# Earlier experiments (kept for reference):
# print(greeting + ' ' + name)
# name2 = input('Please enter your name')
# print(greeting + ' ' + name2)
# splitString = 'This string has been\nsplit\nover\nseveral\nlines'
# print(splitString)

tabbedStrings = "1\t2\t3\t4\t5"
print(tabbedStrings)

# Three quoting styles; the printed text is the same kind of nested-quote
# sentence each time.
print("The pet shop owner said \"No, no, 'e 's uh,...he's resting\".")
print('The pet shop owner said "No, no, \'e \'s uh,...he\'s resting".')
print('The pet shop owner said "Oh, no, \'e \'s uh,...he\'s resting".')

# Adjacent-literal concatenation gives the same value as a triple-quoted
# multi-line string.
anotherSplitString = (
    "This string has been\n"
    "split over\n"
    "several\n"
    "lines"
)
# print(anotherSplitString)
# (indexing/slicing experiments with `parrot` were commented out here)

num = 666
wordd = "fbfbff {} bbfgbfbfgbg ngngngng".format(num)
word = f"ssddsdsvs {1} fnfgfngfn {2 + 2} fnfggff {11 / 7:.3f}"
print(wordd)
|
[
"jaimperator99@gmail.com"
] |
jaimperator99@gmail.com
|
8380622bbde43b92e00ac4f96152d1afa7c46f30
|
bb4241ec40d0f3bc7484957a3aad2c7921f3ab5f
|
/src/tracewhack/log.py
|
c21fc7cf7481ebc5c882c0be2775dba2f2ae8ccc
|
[
"BSD-3-Clause"
] |
permissive
|
wingu/tracewhack
|
a17b7e54cbe7cc74cc99511cdf490cd2f12e4184
|
a324705c23ddd8921ed829152f07fa9ff758de0f
|
refs/heads/master
| 2020-06-04T17:25:08.534182
| 2013-01-28T15:29:20
| 2013-01-28T15:29:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 354
|
py
|
"""
Logging utilities.
"""
# LATER Right now these are just dumb placeholders.
def warn(msg):
    """Log a warning message to stdout.

    Uses the print() function form, which behaves identically on
    Python 2 (parenthesised single argument) and Python 3; the original
    ``print msg`` statement was a SyntaxError under Python 3.
    """
    print(msg)
def verbose(msg, options):
    """Log msg only when verbose mode is enabled.

    options may be None or any mapping with a .get() method; the message
    is printed only when options.get('verbose') is truthy.
    print() is used instead of the Python-2-only ``print msg`` statement
    so the module also runs on Python 3.
    """
    if options and options.get('verbose', False):
        print(msg)
def error(msg):
    """Log an error message to stdout.

    print() function form: identical behavior on Python 2, valid syntax
    on Python 3 (the original ``print msg`` statement was not).
    """
    print(msg)
|
[
"tomheon@gmail.com"
] |
tomheon@gmail.com
|
4d3e87116d6556c0d297a5c799aedc741f817923
|
1854841ff9de3391f1c858fcb9f5ccd7dc5488eb
|
/backend/aidin_27554/wsgi.py
|
989a9b46785cae5d98a29e68726486205737a9bf
|
[] |
no_license
|
crowdbotics-apps/aidin-27554
|
84f8ddd4e63ebfa233fc8d4a2b617af399371b50
|
fd1e1768ee18919395a1740c70bbcd337360174e
|
refs/heads/master
| 2023-05-05T15:50:29.989821
| 2021-05-29T14:26:12
| 2021-05-29T14:26:12
| 371,992,096
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
py
|
"""
WSGI config for aidin_27554 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module before creating the app;
# setdefault keeps any value already set in the environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'aidin_27554.settings')

# Module-level WSGI callable that servers (gunicorn, uWSGI, ...) import.
application = get_wsgi_application()
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
862e50035f6c13d3f48ac50c598fab378f1734e0
|
9edaf93c833ba90ae9a903aa3c44c407a7e55198
|
/netex/models/timing_point_in_journey_pattern.py
|
22366d74aaa3b679bdbaf7bc59fc9c56bf9ca822
|
[] |
no_license
|
tefra/xsdata-samples
|
c50aab4828b8c7c4448dbdab9c67d1ebc519e292
|
ef027fe02e6a075d8ed676c86a80e9647d944571
|
refs/heads/main
| 2023-08-14T10:31:12.152696
| 2023-07-25T18:01:22
| 2023-07-25T18:01:22
| 222,543,692
| 6
| 1
| null | 2023-06-25T07:21:04
| 2019-11-18T21:00:37
|
Python
|
UTF-8
|
Python
| false
| false
| 373
|
py
|
from dataclasses import dataclass
from .timing_point_in_journey_pattern_versioned_child_structure import TimingPointInJourneyPatternVersionedChildStructure
__NAMESPACE__ = "http://www.netex.org.uk/netex"
@dataclass
class TimingPointInJourneyPattern(TimingPointInJourneyPatternVersionedChildStructure):
    """NeTEx TimingPointInJourneyPattern element (xsdata-generated binding)."""

    class Meta:
        # XML namespace used when (de)serialising this element.
        namespace = "http://www.netex.org.uk/netex"
|
[
"chris@komposta.net"
] |
chris@komposta.net
|
2308fad2814b489f253f4b87ca63706bb82054c9
|
dfbd3e12a7a7ed28c13715b2fa0c964d0745c8cb
|
/python/day04/solve.py
|
a0ee63dd7b81c99836e716257b0c5ef859f13a1f
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
ijanos/advent2017
|
3a90c479bf4f1689264576fb2c4468883458b911
|
db7ba6c3f2abbe206e47f25480c24d2bade709bb
|
refs/heads/master
| 2021-08-31T23:20:35.637440
| 2017-12-23T12:09:55
| 2017-12-23T12:09:55
| 112,766,905
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 324
|
py
|
#!/usr/bin/env python3
"""Advent of Code 2017, day 4: count valid passphrases.

Part 1: a passphrase is valid when no word repeats.
Part 2: additionally, no two words may be anagrams of each other.
"""
import fileinput

p1 = 0
p2 = 0
for raw in fileinput.input():
    words = raw.strip().split()
    # All words unique -> valid for part 1.
    if len(set(words)) == len(words):
        p1 += 1
    # Canonicalise each word by sorting its letters; anagrams collide.
    canonical = [''.join(sorted(w)) for w in words]
    if len(set(canonical)) == len(canonical):
        p2 += 1
print(f"Part 1: {p1}")
print(f"Part 2: {p2}")
|
[
"ijanos@gmail.com"
] |
ijanos@gmail.com
|
5b891f3d735e0b91211908f1f7706d84e34478f9
|
88906fbe13de27413a51da917ebe46b473bec1b9
|
/Part-I/Chapter 6 - Dictionaries/favourite_languages.py
|
6c94cb510f855c612346614864fa12fd8c159746
|
[] |
no_license
|
lonewolfcub/Python-Crash-Course
|
0b127e40f5029d84ad036263fd9153f6c88c2420
|
322388dfb81f3335eeffabcdfb8f9c5a1db737a4
|
refs/heads/master
| 2021-01-01T16:45:50.617189
| 2017-10-27T14:23:58
| 2017-10-27T14:23:58
| 97,911,584
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 235
|
py
|
# Map each person to their favourite programming language.
favourite_languages = {
    'jen': 'python',
    'sarah': 'c',
    'edward': 'ruby',
    'phil': 'python',
}

# Look one person up and report their language, capitalised.
print(f"Sarah's favourite language is {favourite_languages['sarah'].title()}.")
|
[
"lonewolfcub020@gmail.com"
] |
lonewolfcub020@gmail.com
|
9896a8d751a0cc983347cfab70f1606a7af16e7d
|
fa2fdfcf180507344be8de71da75af2fe72101b2
|
/train_and_run_experiments_bc.py
|
771396e09f5607ebe7f7628cd776c4f2b3cc08e2
|
[] |
no_license
|
sarahwie/AttentionExplanation
|
48b49c1769324fe40015b8a96f862753f559e329
|
919fe5c710be5d1721ef1803cd46c731e1953088
|
refs/heads/master
| 2020-05-16T12:46:31.184833
| 2019-04-23T16:39:04
| 2019-04-23T16:39:04
| 183,055,153
| 1
| 0
| null | 2019-04-23T16:31:19
| 2019-04-23T16:31:19
| null |
UTF-8
|
Python
| false
| false
| 1,163
|
py
|
# CLI driver: train attention models on a binary-classification dataset
# and generate the corresponding explanation graphs.
import argparse

parser = argparse.ArgumentParser(description='Run experiments on a dataset')
parser.add_argument('--dataset', type=str, required=True)
parser.add_argument("--data_dir", type=str, required=True)
parser.add_argument("--output_dir", type=str)
parser.add_argument('--encoder', type=str, choices=['cnn', 'lstm', 'average', 'all'], required=True)
parser.add_argument('--attention', type=str, choices=['tanh', 'dot'], required=True)

# Unrecognised flags are kept and handed to the dataset constructor.
args, extras = parser.parse_known_args()
args.extras = extras

# Imported after argument parsing (these modules are heavy to load).
from Transparency.Trainers.DatasetBC import *
from Transparency.ExperimentsBC import *

dataset = datasets[args.dataset](args)

if args.output_dir is not None :
    dataset.output_dir = args.output_dir

# 'all' expands to every supported encoder architecture.
encoders = ['cnn', 'lstm', 'average'] if args.encoder == 'all' else [args.encoder]

if args.attention == 'tanh' :
    train_dataset_on_encoders(dataset, encoders)
    generate_graphs_on_encoders(dataset, encoders)
elif args.attention == 'dot' :
    # Dot-product attention uses the '_dot' variants of each encoder.
    encoders = [e + '_dot' for e in encoders]
    train_dataset_on_encoders(dataset, encoders)
    generate_graphs_on_encoders(dataset, encoders)
else :
    # Unreachable in practice: argparse restricts --attention choices.
    raise LookupError("Attention not found ...")
|
[
"successar@gmail.com"
] |
successar@gmail.com
|
2ca58914081b89507b7e4b2db63b231eb16c13dc
|
5a113e0758da14ccf3e7f4b6b0eb3abddd4adf39
|
/tests/test_models/test_user.py
|
abb3c1ef9d5527f2de5cbc1764f4e25d0ee323bd
|
[] |
no_license
|
Esteban1891/AirBnB_clone
|
22a64c45d1e0c997c842ae907ea216ab662639fd
|
5860cf7ae43afe6e2fee96be60fcfb0b67d1d2fc
|
refs/heads/master
| 2022-11-30T06:41:54.718592
| 2020-08-13T22:58:50
| 2020-08-13T22:58:50
| 275,320,501
| 5
| 7
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,734
|
py
|
#!/usr/bin/python3
"""Module for test User class"""
import unittest
import json
import pep8
import datetime
from models.user import User
from models.base_model import BaseModel
class TestUser(unittest.TestCase):
    """Test User class implementation"""

    def test_doc_module(self):
        """Module documentation"""
        # The User class must carry a non-trivial docstring.
        doc = User.__doc__
        self.assertGreater(len(doc), 1)

    def test_pep8_conformance_base_model(self):
        """Test that models/user.py conforms to PEP8."""
        pep8style = pep8.StyleGuide(quiet=True)
        result = pep8style.check_files(['models/user.py'])
        self.assertEqual(result.total_errors, 0,
                         "Found code style errors (and warnings).")

    def test_pep8_conformance_test_base_model(self):
        """Test that tests/test_models/test_user.py conforms to PEP8."""
        # This test file must itself be PEP8-clean.
        pep8style = pep8.StyleGuide(quiet=True)
        res = pep8style.check_files(['tests/test_models/test_user.py'])
        self.assertEqual(res.total_errors, 0,
                         "Found code style errors (and warnings).")

    def test_doc_constructor(self):
        """Constructor documentation"""
        doc = User.__init__.__doc__
        self.assertGreater(len(doc), 1)

    def test_class(self):
        """Validate the types of the attributes an class"""
        with self.subTest(msg='Inheritance'):
            # User must extend BaseModel.
            self.assertTrue(issubclass(User, BaseModel))
        with self.subTest(msg='Attributes'):
            # All public attributes are class-level string defaults.
            self.assertIsInstance(User.email, str)
            self.assertIsInstance(User.password, str)
            self.assertIsInstance(User.first_name, str)
            self.assertIsInstance(User.last_name, str)


if __name__ == '__main__':
    unittest.main()
|
[
"esteban.delahoz15@gmail.com"
] |
esteban.delahoz15@gmail.com
|
21922a99ad639c61627562fe098fa13350d8bffa
|
dc54a813f0e5d3b1ea44b38e10f8e5f8ef4764d4
|
/sciwing/api/api.py
|
22b7a61170cfdaa45f4dcf65700bbe3d6c5ec570
|
[
"MIT"
] |
permissive
|
dragomirradev/sciwing
|
fc0a33b25d19ea0e11170e4930442eb0f8d05da4
|
b3f4e6831b2dadf20e3336821ca8d50db1248ee7
|
refs/heads/master
| 2022-04-18T14:03:31.275169
| 2020-04-13T04:48:44
| 2020-04-13T04:48:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 366
|
py
|
import sciwing.api.conf as config
from fastapi import FastAPI
from sciwing.api.routers import parscit
from sciwing.api.routers import citation_intent_clf

# FastAPI application exposing the SciWING model endpoints.
app = FastAPI()


@app.get("/")
def root():
    """Landing endpoint; confirms the API is up."""
    return {"message": "Welcome To SciWING API"}


# add the routers to the main app
app.include_router(parscit.router)
app.include_router(citation_intent_clf.router)
|
[
"abhinav@comp.nus.edu.sg"
] |
abhinav@comp.nus.edu.sg
|
b2192129c1bf5e2df3ed5fee141a2932e0af8440
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/data/p3BR/R2/benchmark/startQiskit335.py
|
99346e8c0d944fc6842431c4a000ef35f61d00ed
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,145
|
py
|
# qubit number=3
# total number=66
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
    """XOR two bit strings position-wise; the result string is reversed."""
    n = len(s)
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(n)]
    return ''.join(bits[::-1])
def bitwise_dot(s: str, t: str) -> str:
    """Inner product of two bit strings modulo 2, returned as '0'/'1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build the oracle O_f on n control qubits plus one target qubit.

    For every n-bit input x with f(x) == "1", a multi-controlled Toffoli
    flips the target; X gates temporarily invert the '0' positions of x
    so the Toffoli fires exactly on that input pattern.
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            # Map the '0' bits of rep onto the control pattern...
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            # ...then undo the X gates to restore the controls.
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    # oracle.draw('mpl', filename=(kernel + '-oracle.png'))
    return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Assemble a Bernstein-Vazirani circuit for the n-bit function f.

    Uses n input qubits plus one ancilla; the numbered gate block below
    appears auto-generated (mutation testing numbering in the comments)
    and is kept verbatim — NOTE(review): its effect on the BV result has
    not been verified here.
    """
    # implement the Bernstein-Vazirani circuit
    zero = np.binary_repr(0, n)
    b = f(zero)
    # initial n + 1 bits
    input_qubit = QuantumRegister(n+1, "qc")
    classicals = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classicals)
    # inverse last one (can be omitted if using O_f^\pm)
    prog.x(input_qubit[n])
    # circuit begin
    prog.h(input_qubit[1]) # number=1
    prog.h(input_qubit[2]) # number=38
    prog.cz(input_qubit[0],input_qubit[2]) # number=39
    prog.h(input_qubit[2]) # number=40
    prog.h(input_qubit[2]) # number=59
    prog.cz(input_qubit[0],input_qubit[2]) # number=60
    prog.h(input_qubit[2]) # number=61
    prog.h(input_qubit[2]) # number=42
    prog.cz(input_qubit[0],input_qubit[2]) # number=43
    prog.h(input_qubit[2]) # number=44
    prog.h(input_qubit[2]) # number=48
    prog.cz(input_qubit[0],input_qubit[2]) # number=49
    prog.h(input_qubit[2]) # number=50
    prog.h(input_qubit[2]) # number=63
    prog.cz(input_qubit[0],input_qubit[2]) # number=64
    prog.h(input_qubit[2]) # number=65
    prog.x(input_qubit[2]) # number=55
    prog.cx(input_qubit[0],input_qubit[2]) # number=56
    prog.cx(input_qubit[0],input_qubit[2]) # number=47
    prog.cx(input_qubit[0],input_qubit[2]) # number=37
    prog.h(input_qubit[2]) # number=51
    prog.cz(input_qubit[0],input_qubit[2]) # number=52
    prog.h(input_qubit[2]) # number=53
    prog.h(input_qubit[2]) # number=25
    prog.cz(input_qubit[0],input_qubit[2]) # number=26
    prog.h(input_qubit[2]) # number=27
    prog.h(input_qubit[1]) # number=7
    prog.cz(input_qubit[2],input_qubit[1]) # number=8
    prog.rx(0.17592918860102857,input_qubit[2]) # number=34
    prog.rx(-0.3989822670059037,input_qubit[1]) # number=30
    prog.h(input_qubit[1]) # number=9
    prog.h(input_qubit[1]) # number=18
    prog.rx(2.3310617489636263,input_qubit[2]) # number=58
    prog.cz(input_qubit[2],input_qubit[1]) # number=19
    prog.h(input_qubit[1]) # number=20
    prog.x(input_qubit[1]) # number=62
    prog.y(input_qubit[1]) # number=14
    prog.h(input_qubit[1]) # number=22
    prog.cz(input_qubit[2],input_qubit[1]) # number=23
    prog.rx(-0.9173450548482197,input_qubit[1]) # number=57
    prog.h(input_qubit[1]) # number=24
    prog.z(input_qubit[2]) # number=3
    prog.z(input_qubit[1]) # number=41
    prog.x(input_qubit[1]) # number=17
    prog.y(input_qubit[2]) # number=5
    prog.x(input_qubit[2]) # number=21
    # apply H to get superposition
    for i in range(n):
        prog.h(input_qubit[i])
    prog.h(input_qubit[n])
    prog.barrier()
    # apply oracle O_f
    oracle = build_oracle(n, f)
    prog.append(
        oracle.to_gate(),
        [input_qubit[i] for i in range(n)] + [input_qubit[n]])
    # apply H back (QFT on Z_2^n)
    for i in range(n):
        prog.h(input_qubit[i])
    prog.barrier()
    # measure
    return prog
def get_statevector(prog: QuantumCircuit) -> Any:
    """Simulate prog and return {basis label: amplitude} for all 2^k states."""
    state_backend = Aer.get_backend('statevector_simulator')
    statevec = execute(prog, state_backend).result()
    quantum_state = statevec.get_statevector()
    # Number of qubits follows from the statevector length (2^qubits).
    qubits = round(log2(len(quantum_state)))
    quantum_state = {
        "|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
        for i in range(2 ** qubits)
    }
    return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
    """Run prog on the named simulator backend and collect results.

    Returns a dict with the measurement counts, the ideal statevector,
    the most frequent measured bitstring 'a' (bit-reversed), and the
    constant 'b' passed through by the caller.
    """
    # Q: which backend should we use?
    # get state vector
    quantum_state = get_statevector(prog)
    # get simulate results
    # provider = IBMQ.load_account()
    # backend = provider.get_backend(backend_str)
    # qobj = compile(prog, backend, shots)
    # job = backend.run(qobj)
    # job.result()
    backend = Aer.get_backend(backend_str)
    # transpile/schedule -> assemble -> backend.run
    results = execute(prog, backend, shots=shots).result()
    counts = results.get_counts()
    # Most common bitstring, reversed to match Qiskit's qubit ordering.
    a = Counter(counts).most_common(1)[0][0][::-1]
    return {
        "measurements": counts,
        # "state": statevec,
        "quantum_state": quantum_state,
        "a": a,
        "b": b
    }
def bernstein_test_1(rep: str):
    """011 . x + 1"""
    return bitwise_xor(bitwise_dot("011", rep), "1")
def bernstein_test_2(rep: str):
    """000 . x + 0"""
    return bitwise_xor(bitwise_dot("000", rep), "0")
def bernstein_test_3(rep: str):
    """111 . x + 1"""
    return bitwise_xor(bitwise_dot("111", rep), "1")
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit335.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = BasicAer.get_backend('qasm_simulator')
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
5da9e788a3db46e978e8273bd81283efdec746fe
|
b6b04c3bc6afe61e3c3128f552417091c451ba69
|
/flink-ml-python/pyflink/examples/ml/feature/elementwiseproduct_example.py
|
2dd8ffff654fb21c9023ca110ebb26cfa02623ee
|
[
"Apache-2.0"
] |
permissive
|
apache/flink-ml
|
d15365e1b89b82eb451b99af0050d66dff279f0c
|
5619c3b8591b220e78a0a792c1f940e06149c8f0
|
refs/heads/master
| 2023-08-31T04:08:10.287875
| 2023-08-24T06:40:12
| 2023-08-24T06:40:12
| 351,617,021
| 288
| 85
|
Apache-2.0
| 2023-09-07T08:03:42
| 2021-03-26T00:42:03
|
Java
|
UTF-8
|
Python
| false
| false
| 2,548
|
py
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Simple program that creates a ElementwiseProduct instance and uses it for feature
# engineering.
from pyflink.common import Types
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.ml.linalg import Vectors, DenseVectorTypeInfo
from pyflink.ml.feature.elementwiseproduct import ElementwiseProduct
from pyflink.table import StreamTableEnvironment
# create a new StreamExecutionEnvironment
env = StreamExecutionEnvironment.get_execution_environment()

# create a StreamTableEnvironment
t_env = StreamTableEnvironment.create(env)

# generate input data: two rows of (id, dense 2-d vector)
input_data_table = t_env.from_data_stream(
    env.from_collection([
        (1, Vectors.dense(2.1, 3.1)),
        (2, Vectors.dense(1.1, 3.3))
    ],
        type_info=Types.ROW_NAMED(
            ['id', 'vec'],
            [Types.INT(), DenseVectorTypeInfo()])))

# create an elementwise product object and initialize its parameters;
# each input vector is multiplied component-wise by the scaling vector.
elementwise_product = ElementwiseProduct() \
    .set_input_col('vec') \
    .set_output_col('output_vec') \
    .set_scaling_vec(Vectors.dense(1.1, 1.1))

# use the elementwise product object for feature engineering
output = elementwise_product.transform(input_data_table)[0]

# extract and display the results
field_names = output.get_schema().get_field_names()
for result in t_env.to_data_stream(output).execute_and_collect():
    input_value = result[field_names.index(elementwise_product.get_input_col())]
    output_value = result[field_names.index(elementwise_product.get_output_col())]
    print('Input Value: ' + str(input_value) + '\tOutput Value: ' + str(output_value))
|
[
"noreply@github.com"
] |
apache.noreply@github.com
|
95dcb275b4a638f2ba8f0094654be362d6d3ae3f
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/KoubeiCateringPosDishcateTransferModel.py
|
095ca8346512b61fba41d3997f22f02f7c9433ae
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 2,652
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiCateringPosDishcateTransferModel(object):
    """Request model for transferring dishes between categories at a POS shop.

    Mirrors the generated Alipay SDK model style: private backing fields,
    property accessors, and dict (de)serialisation helpers.
    """

    def __init__(self):
        # All fields start unset (None) until explicitly assigned.
        self._cate_id = None
        self._cook_id = None
        self._dish_ids = None
        self._shop_id = None

    @property
    def cate_id(self):
        return self._cate_id

    @cate_id.setter
    def cate_id(self, value):
        self._cate_id = value

    @property
    def cook_id(self):
        return self._cook_id

    @cook_id.setter
    def cook_id(self, value):
        self._cook_id = value

    @property
    def dish_ids(self):
        return self._dish_ids

    @dish_ids.setter
    def dish_ids(self, value):
        # Only list inputs are accepted; other types are silently ignored,
        # matching the generated SDK behaviour.
        if isinstance(value, list):
            self._dish_ids = list(value)

    @property
    def shop_id(self):
        return self._shop_id

    @shop_id.setter
    def shop_id(self, value):
        self._shop_id = value

    @staticmethod
    def _serialize(value):
        # Nested API models expose to_alipay_dict(); plain values pass through.
        return value.to_alipay_dict() if hasattr(value, 'to_alipay_dict') else value

    def to_alipay_dict(self):
        """Serialise to a plain dict, skipping unset (falsy) fields."""
        params = dict()
        if self.cate_id:
            params['cate_id'] = self._serialize(self.cate_id)
        if self.cook_id:
            params['cook_id'] = self._serialize(self.cook_id)
        if self.dish_ids:
            if isinstance(self.dish_ids, list):
                # Convert nested models in place, as the generated SDK does.
                for i, element in enumerate(self.dish_ids):
                    if hasattr(element, 'to_alipay_dict'):
                        self.dish_ids[i] = element.to_alipay_dict()
            params['dish_ids'] = self._serialize(self.dish_ids)
        if self.shop_id:
            params['shop_id'] = self._serialize(self.shop_id)
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a plain dict; returns None for empty input."""
        if not d:
            return None
        o = KoubeiCateringPosDishcateTransferModel()
        if 'cate_id' in d:
            o.cate_id = d['cate_id']
        if 'cook_id' in d:
            o.cook_id = d['cook_id']
        if 'dish_ids' in d:
            o.dish_ids = d['dish_ids']
        if 'shop_id' in d:
            o.shop_id = d['shop_id']
        return o
|
[
"liuqun.lq@alibaba-inc.com"
] |
liuqun.lq@alibaba-inc.com
|
da2db0a6d1935e5fd45ba13f2ae2e27b96afb0b0
|
090324db0c04d8c30ad6688547cfea47858bf3af
|
/soko/perception/policy.py
|
bb1cb833033df50bbc170434ebf153d81461f29b
|
[] |
no_license
|
fidlej/sokobot
|
b82c4c36d73e224d0d0e1635021ca04485da589e
|
d3d04753a5043e6a22dafd132fa633d8bc66b9ea
|
refs/heads/master
| 2021-01-21T13:14:29.523501
| 2011-06-12T07:34:14
| 2011-06-12T07:34:14
| 32,650,745
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,752
|
py
|
import logging
from soko.solver.solver import Solver
from soko.perception import perceiving, saving
from libctw import factored, modeling
class PerceptSolver(Solver):
    """A sokoban solver.

    It converts the seen states to a sequence of bits.
    It then predicts the next action to take.
    It is used to show the predicted paths.

    NOTE: Python 2 code (uses xrange).
    """
    def solve(self, env):
        """Returns a rollout path for testing.

        The path does not have to be a solution.
        Rolls out at most 100 predicted steps; stops early if the policy
        predicts an invalid action.
        """
        policy = PerceptPolicy()
        s = env.init()
        num_steps = 100
        path = []
        for i in xrange(num_steps):
            policy.add_history(env, s)
            actions = env.get_actions(s)
            a = policy.next_action(actions)
            if a is None:
                logging.warn("ending the path because of an invalid action")
                return path
            path.append(a)
            s = env.predict(s, a)
        return path
def _prepare_model(perceiver, num_remembered_steps):
    """Build and train an agent model sized for the perceiver's encoding."""
    action_bits = perceiver.get_num_action_bits()
    percept_bits = perceiver.get_num_percept_bits()
    return _get_trained_agent(percept_bits, action_bits, num_remembered_steps)
class PerceptPolicy:
    """Chooses sokoban actions by predicting bits with a trained model.

    NOTE: Python 2 code (xrange, old except syntax); kept byte-identical.
    """
    def __init__(self):
        # How many (state, action) steps of context the model is given.
        self.num_remembered_steps = 2
        self.perceiver = perceiving.SokobanPerceiver()
        self.agent_model = _prepare_model(self.perceiver,
                self.num_remembered_steps)

    def init_history(self, env, node):
        # Start a fresh history and replay the recent state/action context.
        self.agent_model.switch_history()
        self._show_history(env, self.agent_model, node)

    def _show_history(self, env, agent_model, node):
        from soko.env.env import Action
        # Collect up to num_remembered_steps (state, action) pairs ending
        # at the given search node, oldest first, and feed their encodings
        # into the model.
        sas = [node.s]
        for i in xrange(self.num_remembered_steps):
            if node.prev_node is None:
                break
            sas.insert(0, node.a)
            sas.insert(0, node.prev_node.s)
            node = node.prev_node
        for item in sas:
            if isinstance(item, Action):
                bits = self.perceiver.encode_action(item)
            else:
                bits = self.perceiver.encode_state(env, item)
            agent_model.see_added(bits)

    def next_action(self, actions):
        """Returns a valid action or None.
        """
        # Predict the next action's bits, then reject undecodable or
        # currently-impossible actions.
        action_bits = _advance(self.agent_model,
                self.perceiver.get_num_action_bits())
        try:
            action = self.perceiver.decode_action(action_bits)
        except ValueError, e:
            logging.warn("predicted invalid action_bits: %s", action_bits)
            return None
        if action.cmd not in [a.cmd for a in actions]:
            logging.info("predicted impossible action: %s", action_bits)
            return None
        return action

    def add_history(self, env, s):
        # Feed the encoded current state into the model's history.
        percept = self.perceiver.encode_state(env, s)
        self.agent_model.see_added(percept)
def _advance(model, num_bits):
    """Predict and consume num_bits consecutive bits from the model."""
    bits = []
    for _ in range(num_bits):
        bits.append(_advance_bit(model))
    return bits
def _advance_bit(model):
one_p = model.predict_one()
assert 0 <= one_p <= 1.0, "invalid P: %s" % one_p
if one_p >= 0.5:
bit = 1
else:
bit = 0
model.see_added([bit])
return bit
def _get_trained_agent(num_percept_bits, num_action_bits,
        num_remembered_steps):
    """Train a factored CTW model on saved percept/action sequences.

    The context depth covers num_remembered_steps full (percept, action)
    steps plus one more action prediction.
    """
    train_seqs = saving.load_training_seqs()
    #TEST: don't limit the number of used seqs
    # NOTE(review): only the first 15 sequences are used, despite the TODO
    # above — confirm whether the limit should be lifted.
    train_seqs = train_seqs[:15]
    max_depth = (num_remembered_steps * (num_percept_bits + num_action_bits) +
            num_action_bits)
    agent_model = factored.create_model(max_depth=max_depth)
    source_info = modeling.Interlaced(num_percept_bits, num_action_bits)
    modeling.train_model(agent_model, train_seqs, bytes=False,
            source_info=source_info)
    return agent_model
|
[
"ivo@danihelka.net"
] |
ivo@danihelka.net
|
75f7278194a9195bc7423d26c1cab9ce1d07c3a7
|
f305f84ea6f721c2391300f0a60e21d2ce14f2a5
|
/11_动态规划/dp优化/kitamasa法.py
|
38f9a2c16abdaad119175126f6cd40ebaaf09584
|
[] |
no_license
|
981377660LMT/algorithm-study
|
f2ada3e6959338ae1bc21934a84f7314a8ecff82
|
7e79e26bb8f641868561b186e34c1127ed63c9e0
|
refs/heads/master
| 2023-09-01T18:26:16.525579
| 2023-09-01T12:21:58
| 2023-09-01T12:21:58
| 385,861,235
| 225
| 24
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,638
|
py
|
# Linear recurrence with constant coefficients (Kitamasa's algorithm)
# https://tjkendev.github.io/procon-library/python/series/kitamasa.html
# !O(k^2 log n) to get the n-th term of a linear recurrence
# (a factor of k faster than matrix exponentiation)
# Recurrence: dp[i+k] = c0*dp[i] + c1*dp[i+1] + ... + c(k-1)*dp[i+k-1] (i>=0)
# C: coefficients c0, c1, ..., c(k-1)
# A: initial values dp[0] .. dp[k-1]
# n: index of the requested term
from typing import List
MOD = int(1e9 + 7)
def kitamasa(C: List[int], A: List[int], n: int, mod: int = 1_000_000_007) -> int:
    """Return the n-th term of the linear recurrence
    dp[i+k] = C[0]*dp[i] + C[1]*dp[i+1] + ... + C[k-1]*dp[i+k-1] (mod `mod`)
    with initial values dp[0..k-1] = A, in O(k^2 log n) time.

    C and A must have the same length k >= 1; n >= 0.
    `mod` defaults to the module-level MOD (10**9 + 7) for backward
    compatibility.
    """
    if n == 0:
        return A[0]
    assert len(C) == len(A)
    k = len(C)
    if k == 1:
        # BUG FIX: the general code below sets C0[1] = 1 and therefore
        # raised IndexError for order-1 recurrences.  dp[i+1] = C[0]*dp[i]
        # has the closed form dp[n] = A[0] * C[0]**n.
        return A[0] * pow(C[0], n, mod) % mod

    def inc(C0, C1):
        # C1 := (x * C0) reduced modulo the characteristic polynomial.
        C1[0] = C0[k - 1] * C[0] % mod
        for i in range(k - 1):
            C1[i + 1] = (C0[i] + C0[k - 1] * C[i + 1]) % mod

    def dbl(C0, C1):
        # C1 := (C0 * C0) reduced modulo the characteristic polynomial.
        D0 = C0[:]
        D1 = [0] * k
        for j in range(k):
            C1[j] = C0[0] * C0[j] % mod
        for i in range(1, k):
            inc(D0, D1)
            for j in range(k):
                C1[j] += C0[i] * D1[j] % mod
            D0, D1 = D1, D0
        for i in range(k):
            C1[i] %= mod

    # C0 starts as the polynomial x; square-and-multiply over the bits of n
    # builds x^n mod the characteristic polynomial.
    C0 = [0] * k
    C0[1] = 1
    C1 = [0] * k
    p = n.bit_length() - 1
    while p:
        p -= 1
        dbl(C0, C1)
        C0, C1 = C1, C0
        if (n >> p) & 1:
            inc(C0, C1)
            C0, C1 = C1, C0
    # The coefficients of x^n select the answer from the initial values.
    return sum(c * a for c, a in zip(C0, A)) % mod
# 斐波那契
def fib(n: int) -> int:
    """Return the n-th Fibonacci number: 0 1 1 2 3 5 8 13 21 34 55 ..."""
    coeffs, initial = [1, 1], [0, 1]
    return kitamasa(coeffs, initial, n)
# Read order K and index N; the recurrence dp[i+K] = sum of the previous K
# terms with all-ones initial values, answering for the (N-1)-th term.
K, N = map(int, input().split())
ones = [1] * K
print(kitamasa(ones, ones, N - 1))
|
[
"lmt2818088@gmail.com"
] |
lmt2818088@gmail.com
|
307d7db79493210adf18a3116db90a72fbcf7642
|
1279908d488776ef1450492f0995e1bd48c99767
|
/.history/app_20210728170028.py
|
56fb63ebb031b38348b9cdfc6656f0a9ec0a72ab
|
[] |
no_license
|
tritchlin/sqlalchemy-challenge
|
249ed221daab1e148209904aa1544a924ce6a344
|
5d9288b516a1ab68bd6af16c98ca5c1170d3b927
|
refs/heads/main
| 2023-06-25T23:27:10.175847
| 2021-07-29T06:35:04
| 2021-07-29T06:35:04
| 388,950,398
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,024
|
py
|
from flask import Flask, jsonify
from flask_sqlalchemy import SQLAlchemy
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
# import climate_flask_data.py as querydata
app = Flask(__name__)
# BUG FIX: flask_sqlalchemy reads the connection string from the
# SQLALCHEMY_DATABASE_URI config key; the original stored it under the
# key 'Hawaii', leaving the extension unconfigured.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///hawaii.sqlite'
db = SQLAlchemy(app)

# Reflect the existing hawaii.sqlite schema into mapped classes.
engine = create_engine("sqlite:///hawaii.sqlite")
Base = automap_base()
Base.prepare(engine, reflect=True)
# BUG FIX: 'base' was an undefined name (NameError); the automap base
# created above is 'Base'.
measurement = Base.classes.measurement
station = Base.classes.station
# Create an app, being sure to pass __name__
# from climate_flask_data.py import base
# Define what to do when a user hits the index route
@app.route("/")
def welcome():
    """List all available api routes."""
    routes = [
        "Available Routes:<br/>",
        "/api/v1.0/precipitation<br/>",
        "/api/v1.0/stations<br/>",
        "/api/v1.0/tobs<br/>",
        "/api/v1.0/<start><br/>",
        "/api/v1.0/<start>/<end>",
    ]
    return "".join(routes)
# Define what to do when a user hits the /about route
# @app.route("/api/v1.0/precipitation")
# def precipitation():
# return querydata.precipitation()
# # Define what to do when a user hits the /about route
# @app.route("/api/v1.0/stations")
# def about():
# print("Server received request for 'About' page...")
# return "Welcome to my 'About' page!"
# # Define what to do when a user hits the /about route
# @app.route("/api/v1.0/tobs")
# def about():
# print("Server received request for 'About' page...")
# return "Welcome to my 'About' page!"
# # Define what to do when a user hits the /about route
# @app.route("/api/v1.0/<start>")
# def about():
# print("Server received request for 'About' page...")
# return "Welcome to my 'About' page!"
# # Define what to do when a user hits the /about route
# @app.route("/api/v1.0/<start>/<end>")
# def about():
# print("Server received request for 'About' page...")
# return "Welcome to my 'About' page!"
# if __name__ == "__main__":
# app.run(debug=True)
|
[
"annylin@gmail.com"
] |
annylin@gmail.com
|
582acb3bcfdc0d636dfcd9571a7b4b463d749705
|
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
|
/Gauss_v45r10p1/Gen/DecFiles/options/23123011.py
|
c22ed6aed9a114cbc62b3916ee36b83d4b2a3b8e
|
[] |
no_license
|
Sally27/backup_cmtuser_full
|
34782102ed23c6335c48650a6eaa901137355d00
|
8924bebb935b96d438ce85b384cfc132d9af90f6
|
refs/heads/master
| 2020-05-21T09:27:04.370765
| 2018-12-12T14:41:07
| 2018-12-12T14:41:07
| 185,989,173
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 748
|
py
|
# file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/23123011.py generated: Wed, 25 Jan 2017 15:25:37
#
# Event Type: 23123011
#
# ASCII decay Descriptor: [D_s+ -> pi- e+ e+]cc
#
# Auto-generated LHCb Gauss configuration: sets up signal generation for the
# decay above using the SignalPlain tool with Pythia production.
from Configurables import Generation
Generation().EventType = 23123011
Generation().SampleGenerationTool = "SignalPlain"
from Configurables import SignalPlain
Generation().addTool( SignalPlain )
Generation().SignalPlain.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
# User decay file describing the D_s -> pi- e+ e+ decay with DecProdCut.
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Ds_pi-ee=DecProdCut.dec"
Generation().SignalPlain.CutTool = "DaughtersInLHCb"
# PDG IDs for D_s+ (431) and its charge conjugate.
Generation().SignalPlain.SignalPIDList = [ 431,-431 ]
|
[
"slavomirastefkova@b2pcx39016.desy.de"
] |
slavomirastefkova@b2pcx39016.desy.de
|
d0cfda9b9e6f2e6f19df057e89736ab28b36d573
|
c1edf63a93d0a6d914256e848904c374db050ae0
|
/Python/黑客攻防/破解/dictionary.py
|
909018a1cd432c05322b59a1d4b38474cb25f02d
|
[] |
no_license
|
clhiker/WPython
|
97b53dff7e5a2b480e1bf98d1b2bf2a1742cb1cd
|
b21cbfe9aa4356d0fe70d5a56c8b91d41f5588a1
|
refs/heads/master
| 2020-03-30T03:41:50.459769
| 2018-09-28T07:36:21
| 2018-09-28T07:36:21
| 150,703,520
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 286
|
py
|
import itertools as its
import time
def main():
    """Write every 6-letter lowercase string to dictionary.txt and print the elapsed time.

    WARNING: this generates 26**6 (~309 million) words, roughly 1.8 GB of
    output, so a full run takes a long time.
    """
    start = time.time()
    word = "abcdefghijklmnopqrstuvwxyz"
    combos = its.product(word, repeat=6)
    # BUG FIX: the file handle was only closed on the success path; 'with'
    # guarantees it is closed even if a write fails mid-run.
    # NOTE: append mode and the lack of separators between words are kept
    # from the original behavior -- repeated runs keep growing the file.
    with open("dictionary.txt", "a") as dic:
        for combo in combos:
            dic.write("".join(combo))
    print(time.time() - start)

main()
|
[
"1911618290@qq.com"
] |
1911618290@qq.com
|
9d1d0d94f750d498a91dd81d6d464c609ac9368c
|
eb19f68b76ab16375a096c06bf98cf920c8e7a0c
|
/src/tracking1.py
|
ab06f2fc4b87671ddc02529d29cd626c4a85187b
|
[] |
no_license
|
YerongLi/statistical-connectomes
|
a9869d918761b05bcd9980a0b4d36205673d582e
|
7519289c2f26314d88149e878125042021cea07d
|
refs/heads/master
| 2020-04-09T03:53:42.754095
| 2018-12-02T01:03:19
| 2018-12-02T01:03:19
| 160,001,525
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,242
|
py
|
"""
====================
Tracking Quick Start
====================
This example shows how to perform fast fiber tracking using DIPY_
[Garyfallidis12]_.
We will use Constrained Spherical Deconvolution (CSD) [Tournier07]_ for local
reconstruction and then generate deterministic streamlines using the fiber
directions (peaks) from CSD and fractional anisotropic (FA) from DTI as a
stopping criteria for the tracking.
Let's load the necessary modules.
"""
from os.path import join as pjoin
import numpy as np
from dipy.tracking.local import LocalTracking, ThresholdTissueClassifier
from dipy.tracking.utils import random_seeds_from_mask
from dipy.reconst.dti import TensorModel
from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel,
                                   auto_response)
from dipy.direction import peaks_from_model
from dipy.data import fetch_stanford_hardi, read_stanford_hardi, get_sphere
from dipy.segment.mask import median_otsu
from dipy.viz import actor, window
from dipy.io.image import save_nifti
from nibabel.streamlines import save as save_trk
from nibabel.streamlines import Tractogram
from dipy.tracking.streamline import Streamlines
from utils import read_data
"""
Enables/disables interactive visualization
"""
interactive = False
"""
Load one of the available datasets with 150 gradients on the sphere and 10 b0s
"""
#fetch_stanford_hardi()
#img, gtab = read_stanford_hardi()
# FIX: renamed 'id' -> 'subject_id' so the builtin id() is not shadowed.
# NOTE(review): looks like an HCP-style subject identifier -- confirm.
subject_id = 103818
folder = pjoin('/projects', 'ml75', 'data', str(subject_id))
img, gtab = read_data(folder)
data = img.get_data()
print(gtab)
"""
Create a brain mask. This dataset is a bit difficult to segment with the
default ``median_otsu`` parameters (see :ref:`example_brain_extraction_dwi`)
therefore we use here more advanced options.
"""
maskdata, mask = median_otsu(data, 3, 1, False,
                             vol_idx=range(10, 50), dilate=2)
"""
For the Constrained Spherical Deconvolution we need to estimate the response
function (see :ref:`example_reconst_csd`) and create a model.
"""
response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)
csd_model = ConstrainedSphericalDeconvModel(gtab, response)
"""
Next, we use ``peaks_from_model`` to fit the data and calculated the fiber
directions in all voxels.
"""
sphere = get_sphere('symmetric724')
csd_peaks = peaks_from_model(model=csd_model,
                             data=data,
                             sphere=sphere,
                             mask=mask,
                             relative_peak_threshold=.5,
                             min_separation_angle=25,
                             parallel=True)
"""
For the tracking part, we will use the fiber directions from the ``csd_model``
but stop tracking in areas where fractional anisotropy is low (< 0.1).
To derive the FA, used here as a stopping criterion, we would need to fit a
tensor model first. Here, we fit the tensor using weighted least squares (WLS).
"""
tensor_model = TensorModel(gtab, fit_method='WLS')
tensor_fit = tensor_model.fit(data, mask)
fa = tensor_fit.fa
"""
In this simple example we can use FA to stop tracking. Here we stop tracking
when FA < 0.1.
"""
tissue_classifier = ThresholdTissueClassifier(fa, 0.1)
"""
Now, we need to set starting points for propagating each track. We call those
seeds. Using ``random_seeds_from_mask`` we can select a specific number of
seeds (``seeds_count``) in each voxel where the mask ``fa > 0.3`` is true.
"""
seeds = random_seeds_from_mask(fa > 0.3, seeds_count=1)
"""
For quality assurance we can also visualize a slice from the direction field
which we will use as the basis to perform the tracking.
"""
'''
ren = window.Renderer()
ren.add(actor.peak_slicer(csd_peaks.peak_dirs,
                          csd_peaks.peak_values,
                          colors=None))
if interactive:
    window.show(ren, size=(900, 900))
else:
    window.record(ren, out_path='csd_direction_field.png', size=(900, 900))
'''
"""
.. figure:: csd_direction_field.png
 :align: center
 **Direction Field (peaks)**
``EuDX`` [Garyfallidis12]_ is a fast algorithm that we use here to generate
streamlines. This algorithm is what is used here and the default option
when providing the output of peaks directly in LocalTracking.
"""
streamline_generator = LocalTracking(csd_peaks, tissue_classifier,
                                     seeds, affine=np.eye(4),
                                     step_size=0.5)
streamlines = Streamlines(streamline_generator)
"""
The total number of streamlines is shown below.
"""
print(len(streamlines))
"""
To increase the number of streamlines you can change the parameter
``seeds_count`` in ``random_seeds_from_mask``.
We can visualize the streamlines using ``actor.line`` or ``actor.streamtube``.
"""
'''
ren.clear()
ren.add(actor.line(streamlines))
if interactive:
    window.show(ren, size=(900, 900))
else:
    print('Saving illustration as det_streamlines.png')
    window.record(ren, out_path='det_streamlines.png', size=(900, 900))
'''
"""
.. figure:: det_streamlines.png
 :align: center
 **Deterministic streamlines using EuDX (new framework)**
To learn more about this process you could start playing with the number of
seed points or, even better, specify seeds to be in specific regions of interest
in the brain.
Save the resulting streamlines in a Trackvis (.trk) format and FA as
Nifti (.nii.gz).
"""
save_trk(Tractogram(streamlines, affine_to_rasmm=img.affine),
         'det_streamlines.trk')
save_nifti('fa_map.nii.gz', fa, img.affine)
"""
In Windows if you get a runtime error about frozen executable please start
your script by adding your code above in a ``main`` function and use::
    if __name__ == '__main__':
        import multiprocessing
        multiprocessing.freeze_support()
        main()
References
----------
.. [Garyfallidis12] Garyfallidis E., "Towards an accurate brain tractography",
PhD thesis, University of Cambridge, 2012.
.. [Tournier07] J-D. Tournier, F. Calamante and A. Connelly, "Robust
determination of the fibre orientation distribution in diffusion MRI:
Non-negativity constrained super-resolved spherical deconvolution",
Neuroimage, vol. 35, no. 4, pp. 1459-1472, 2007.
.. include:: ../links_names.inc
"""
|
[
"yerong.li@outlook.com"
] |
yerong.li@outlook.com
|
549fcc281ee7b1ff3519de8b8882f35c1e72e4de
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_animations.py
|
02af88c871ee685813138b37592a71c92dd2f001
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 234
|
py
|
#calss header
class _ANIMATIONS():
def __init__(self,):
self.name = "ANIMATIONS"
self.definitions = animation
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['animation']
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
1f0f22bcce72ff8ae6781b02b4e85005590893ab
|
15592893bd1871bfeb1cdb4741523894cf32cf67
|
/python_fundamentals/bubblesort.py
|
accec854cbab91b213adf4927d6494e188eeb934
|
[] |
no_license
|
philmccormick23/Learning-Python
|
b07758d2bb310e617991a13230b257a71c3c2510
|
5a06c5155941816ce3e61d262ae5779ae2899196
|
refs/heads/master
| 2020-04-08T08:50:03.749946
| 2018-11-26T16:01:27
| 2018-11-26T16:01:27
| 159,195,744
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 237
|
py
|
arr = [8, 3, 5, 1, 2, 0]

def bubbleSort(arr):
    """Sort arr in place (ascending) with bubble sort and return it.

    After pass j the largest remaining element has bubbled to the end,
    so each inner pass scans one element fewer.
    """
    n = len(arr)
    for done in range(n - 1):
        for idx in range(n - 1 - done):
            if arr[idx] > arr[idx + 1]:
                arr[idx], arr[idx + 1] = arr[idx + 1], arr[idx]
    return arr

print(bubbleSort([8, 3, 5, 1, 2, 0]))
|
[
"phillipmccormick@Phillips-MacBook-Pro.local"
] |
phillipmccormick@Phillips-MacBook-Pro.local
|
71784b9871d44330a0d1df8c0e7409643afef6bf
|
4436277af74df812490a42f33deccfcf218e25f8
|
/backend/wallet/migrations/0001_initial.py
|
f2c922540416302dee4c799fa0921b7c911673d0
|
[] |
no_license
|
crowdbotics-apps/lunchbox-25105
|
308c49dcc77383ee8f11b25207f4b94e452f618e
|
21de4ca0cbad83a09ec5e28a99ccc3dc1fc3dbeb
|
refs/heads/master
| 2023-03-15T09:50:00.100961
| 2021-03-18T18:53:30
| 2021-03-18T18:53:30
| 349,182,205
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,258
|
py
|
# Generated by Django 2.2.19 on 2021-03-18 18:48
# NOTE: auto-generated migration -- do not hand-edit applied migrations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the wallet app.

    Creates customer/tasker wallets, their payment accounts, and the
    payment-transaction ledger linking profiles to task transactions.
    """
    initial = True
    dependencies = [
        ("task_profile", "0001_initial"),
        ("task", "0001_initial"),
    ]
    operations = [
        # One wallet per customer profile (OneToOne).
        migrations.CreateModel(
            name="CustomerWallet",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("balance", models.FloatField()),
                ("expiration_date", models.DateTimeField()),
                ("last_transaction", models.DateTimeField()),
                (
                    "customer",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="customerwallet_customer",
                        to="task_profile.CustomerProfile",
                    ),
                ),
            ],
        ),
        # Tokenized payment method attached to a customer wallet.
        migrations.CreateModel(
            name="PaymentMethod",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("account_token", models.CharField(max_length=255)),
                ("payment_account", models.CharField(max_length=10)),
                ("timestamp_created", models.DateTimeField(auto_now_add=True)),
                (
                    "wallet",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="paymentmethod_wallet",
                        to="wallet.CustomerWallet",
                    ),
                ),
            ],
        ),
        # One wallet per tasker profile (OneToOne).
        migrations.CreateModel(
            name="TaskerWallet",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("balance", models.FloatField(max_length=254)),
                ("expiration_date", models.DateTimeField()),
                ("last_transaction", models.DateTimeField()),
                (
                    "tasker",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="taskerwallet_tasker",
                        to="task_profile.TaskerProfile",
                    ),
                ),
            ],
        ),
        # Tokenized payout account attached to a tasker wallet.
        migrations.CreateModel(
            name="TaskerPaymentAccount",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("account_token", models.CharField(max_length=255)),
                ("payment_account", models.CharField(max_length=10)),
                ("timestamp_created", models.DateTimeField(auto_now_add=True)),
                (
                    "wallet",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="taskerpaymentaccount_wallet",
                        to="wallet.TaskerWallet",
                    ),
                ),
            ],
        ),
        # Ledger row: who paid whom, with what method, for which task
        # transaction.  FKs are SET_NULL so history survives deletions.
        migrations.CreateModel(
            name="PaymentTransaction",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("price", models.FloatField()),
                ("tip", models.FloatField()),
                ("tracking_id", models.CharField(max_length=50)),
                ("timestamp_created", models.DateTimeField(auto_now_add=True)),
                (
                    "customer",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="paymenttransaction_customer",
                        to="task_profile.CustomerProfile",
                    ),
                ),
                (
                    "payment_method",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="paymenttransaction_payment_method",
                        to="wallet.PaymentMethod",
                    ),
                ),
                (
                    "tasker",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="paymenttransaction_tasker",
                        to="task_profile.TaskerProfile",
                    ),
                ),
                (
                    "transaction",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="paymenttransaction_transaction",
                        to="task.TaskTransaction",
                    ),
                ),
            ],
        ),
    ]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
575352ef768eea3f97b304e28386e9a5188da6ef
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02258/s967893517.py
|
fef386565550a9bfb1823e41855b7904dda27051
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 757
|
py
|
# Maximum-profit scan: reads n rates R and reports the largest R[j] - R[i]
# with j > i.  If every such difference is negative the fallback prints the
# largest adjacent difference instead.
n = int(input())
R = []
for i in range(n):
    R.append(int(input()))
kouho = set()      # candidate maxima ("kouho" = candidates)
diff_max = None    # best gain since the current running minimum
for (i, rt) in enumerate(R):
    if i == 0:
        rt_min = rt
    else:
        if rt < rt_min and not (diff_max is None):
            # New minimum found: bank the best gain seen so far and restart.
            kouho.add(diff_max)
            diff_max = None
            rt_min = rt
        elif rt < rt_min and diff_max is None:
            rt_min = rt
        elif rt >= rt_min:
            if diff_max is None:
                diff_max = rt - rt_min
            else:
                diff_max = max(diff_max, rt - rt_min)
if not (diff_max is None):
    kouho.add(diff_max)
# print(kouho)
if kouho != set():
    print(max(kouho))
else:
    # All differences vs running minima were negative (e.g. strictly
    # decreasing input): report the largest neighbouring difference.
    diff_tonari = {R[i + 1] - R[i] for i in range(n - 1)}
    print(max(diff_tonari))
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
f79ea22c3b37f1ac9e6301b576168361eecb66b3
|
7bead245354e233f76fff4608938bf956abb84cf
|
/test/test_page_conversion_result.py
|
79bca57ea61ab579fcc7be32e07b455a76551754
|
[
"Apache-2.0"
] |
permissive
|
Cloudmersive/Cloudmersive.APIClient.Python.Convert
|
5ba499937b9664f37cb2700509a4ba93952e9d6c
|
dba2fe7257229ebdacd266531b3724552c651009
|
refs/heads/master
| 2021-10-28T23:12:42.698951
| 2021-10-18T03:44:49
| 2021-10-18T03:44:49
| 138,449,321
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,011
|
py
|
# coding: utf-8
"""
    convertapi
    Convert API lets you effortlessly convert file formats and types.  # noqa: E501
    OpenAPI spec version: v1
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import cloudmersive_convert_api_client
from cloudmersive_convert_api_client.models.page_conversion_result import PageConversionResult  # noqa: E501
from cloudmersive_convert_api_client.rest import ApiException
# NOTE: swagger-codegen test stub -- the test body is intentionally empty
# until mandatory model attributes are filled in (see FIXME below).
class TestPageConversionResult(unittest.TestCase):
    """PageConversionResult unit test stubs"""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testPageConversionResult(self):
        """Test PageConversionResult"""
        # FIXME: construct object with mandatory attributes with example values
        # model = cloudmersive_convert_api_client.models.page_conversion_result.PageConversionResult()  # noqa: E501
        pass
if __name__ == '__main__':
    unittest.main()
|
[
"35204726+Cloudmersive@users.noreply.github.com"
] |
35204726+Cloudmersive@users.noreply.github.com
|
bc7ad62bc0617a78f8aefb15b880c4de8926bd23
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/171/usersdata/269/82030/submittedfiles/decimal2bin.py
|
685e2ff066c0cea09273c3191d603c291bc3306a
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 245
|
py
|
# -*- coding: utf-8 -*-
# Convert a binary number (its digits read in as a decimal integer) to its
# decimal value: digit i from the right is weighted by 2**i.
# BUG FIX: the original extracted "digits" with floor division (b // 10**c)
# instead of the remainder (b % 10), so every input other than 0/1 printed
# a wrong value.
binario = int(input('digite the fucking binario: '))
b = binario
d = 0
i = 0
while b > 0:
    d = d + (b % 10) * (2 ** i)   # add this bit's contribution
    b = b // 10                   # drop the processed digit
    i = i + 1
print(d)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
8eea73a4817b583b59e9ae72e614c0630731fafb
|
dcddc234eea906c63553f6495e182e44f3e8431d
|
/forum/migrations/0001_initial.py
|
5ec4c7773023cc10c07b7d6d003e4cc6ea318831
|
[
"MIT"
] |
permissive
|
Kromey/akwriters
|
53e648e1cc4c0970c843c9b426d0e7de21c9eabb
|
72812b5f7dca3ad21e6e9d082298872b7fa607b9
|
refs/heads/master
| 2022-03-08T00:57:42.232126
| 2021-07-21T15:23:33
| 2021-07-21T15:23:33
| 28,463,802
| 0
| 0
|
MIT
| 2022-02-10T08:06:49
| 2014-12-24T22:41:56
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 2,238
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-20 01:11
# NOTE: auto-generated migration -- do not hand-edit applied migrations.
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the forum app: Board -> Topic -> Post tree."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Boards may nest via the self-referencing 'parent' FK.
        migrations.CreateModel(
            name='Board',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=25)),
                ('slug', models.SlugField(blank=True, max_length=25)),
                ('description', models.CharField(max_length=255)),
                ('parent', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='forum.Board')),
            ],
        ),
        # 'left'/'right' suggest nested-set ordering of posts within a topic.
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subject', models.CharField(max_length=128)),
                ('body', models.TextField()),
                ('left', models.PositiveIntegerField(default=0)),
                ('right', models.PositiveIntegerField(default=0)),
            ],
            options={
                'ordering': ('left',),
            },
        ),
        migrations.CreateModel(
            name='Topic',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('board', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='forum.Board')),
            ],
        ),
        migrations.AddField(
            model_name='post',
            name='topic',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='forum.Topic'),
        ),
        migrations.AddField(
            model_name='post',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"travisvz@gmail.com"
] |
travisvz@gmail.com
|
daa206411acf7dd63ef2ac0a7f67334f0de62493
|
6146d080087b21e36347408eea76598f4691ed67
|
/code/1112/2383.py
|
d2e70847a9ef9a11958d0d8c95a94edf7d85889f
|
[] |
no_license
|
banggeut01/algorithm
|
682c4c6e90179b8100f0272bf559dbeb1bea5a1d
|
503b727134909f46e518c65f9a9aa58479a927e9
|
refs/heads/master
| 2020-06-27T14:07:51.927565
| 2019-12-19T03:48:30
| 2019-12-19T03:48:30
| 199,800,363
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,576
|
py
|
# 2383.py [SW mock competency test] "Lunch time"
import sys
sys.stdin = open('2383input.txt', 'r')  # redirect stdin so input() reads the test file
def getTime(t, l):
    """Return the finish time for the last of a queue of people on a stair.

    t is the sorted list of arrival times at the stair entrance (mutated in
    place to finish times) and l is the stair length.  At most 3 people may
    be on the stair at once, so person i must wait for person i-3 to finish
    if that finish time is later than i's own arrival.  Returns 0 for an
    empty queue.
    """
    for i, arrival in enumerate(t):
        if i >= 3 and t[i - 3] > arrival:
            t[i] = t[i - 3] + l
        else:
            t[i] = arrival + l
    return t[-1] if t else 0
# For each test case: place every person on one of the two stairs
# (enumerated as a bitmask), simulate both stairs, and keep the minimum
# over all assignments of the later finishing stair.
T = int(input())
for tc in range(1, T + 1):
    N = int(input())
    board = [list(map(int, input().split())) for _ in range(N)]
    P, S = [], []  # P: person coordinates (x, y); S: stair coordinate & length (x, y, len)
    MIN = 0xffffff
    for i in range(N):
        for j in range(N):
            if board[i][j] == 1:
                P.append((i, j))
            elif board[i][j] > 1:
                S.append((i, j, board[i][j]))
    dct = [dict() for _ in range(2)]
    for x in range(len(S)):  # for each stair
        sr, sc, tmp = S[x]
        for y in range(len(P)):  # Manhattan distance person -> stair entrance
            pr, pc = P[y]
            d = abs(sr - pr) + abs(sc - pc)
            dct[x][(pr, pc)] = d
    for i in range(1 << len(P)):
        # bit j of i decides whether person j uses stair 0 or stair 1;
        # +1 because entering the stair takes one extra minute.
        time0, time1 = [], []
        for j in range(len(P)):
            if i & 1 << j:
                time0.append(dct[0][P[j]] + 1)
            else:
                time1.append(dct[1][P[j]] + 1)
        time0 = sorted(time0)
        time1 = sorted(time1)
        t0 = getTime(time0, S[0][2])
        t1 = getTime(time1, S[1][2])
        MIN = min(MIN, max(t0, t1))
    print('#{} {}'.format(tc, MIN))
|
[
"genie121110@gmail.com"
] |
genie121110@gmail.com
|
b186151473ccd843ebb0867eb5d9584dbb5d852d
|
6fcfb638fa725b6d21083ec54e3609fc1b287d9e
|
/python/yosinski_deep-visualization-toolbox/deep-visualization-toolbox-master/misc.py
|
ed1ee755e93c924c466045c1232ccace4c7d6ee6
|
[] |
no_license
|
LiuFang816/SALSTM_py_data
|
6db258e51858aeff14af38898fef715b46980ac1
|
d494b3041069d377d6a7a9c296a14334f2fa5acc
|
refs/heads/master
| 2022-12-25T06:39:52.222097
| 2019-12-12T08:49:07
| 2019-12-12T08:49:07
| 227,546,525
| 10
| 7
| null | 2022-12-19T02:53:01
| 2019-12-12T07:29:39
|
Python
|
UTF-8
|
Python
| false
| false
| 1,272
|
py
|
#! /usr/bin/env python
import os
import time
import errno
class WithTimer:
    """Context manager that times its body in wall-clock and CPU time.

    Python 2 code (note the print statement).  Usage:
        with WithTimer('label'): do_work()
    prints elapsed wall and process time on exit unless quiet is set.
    """
    def __init__(self, title = '', quiet = False):
        self.title = title    # label included in the printed report
        self.quiet = quiet    # suppress the report on exit when True
    def elapsed(self):
        # (wall seconds, CPU seconds) since __enter__.
        return time.time() - self.wall, time.clock() - self.proc
    def enter(self):
        '''Manually trigger enter'''
        self.__enter__()
    def __enter__(self):
        self.proc = time.clock()
        self.wall = time.time()
        return self
    def __exit__(self, *args):
        if not self.quiet:
            titlestr = (' ' + self.title) if self.title else ''
            print 'Elapsed%s: wall: %.06f, sys: %.06f' % ((titlestr,) + self.elapsed())
def mkdir_p(path):
    '''Create path and any missing parents, like `mkdir -p`.

    An already-existing directory is not an error; any other OSError
    (including path existing as a regular file) is re-raised.
    '''
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def combine_dicts(dicts_tuple):
    '''Merge several dicts into one, prefixing each key with its dict's prefix.

    dicts_tuple is an iterable of (prefix, dict) pairs; each key becomes
    prefix + key in the result.
    '''
    merged = {}
    for prefix, dictionary in dicts_tuple:
        for key in dictionary.keys():
            merged['%s%s' % (prefix, key)] = dictionary[key]
    return merged
|
[
"659338505@qq.com"
] |
659338505@qq.com
|
67122d17e933488f9e88e64701632d1088a4001e
|
31c9cd96667166ac4af15ce8b48753167da3084d
|
/sorting/bubble_sort.py
|
1c21807ac98a7e18e5e4c1b990067d0c11664874
|
[] |
no_license
|
vitorponce/algorithms
|
a8e305e32e38bbb2f473dc07c0e93bdf6a10fde0
|
87d5f3e3110394d21844b7f3a17468e01a366e83
|
refs/heads/master
| 2022-06-17T04:04:18.757909
| 2020-05-04T03:03:09
| 2020-05-04T03:03:09
| 259,844,564
| 1
| 0
| null | 2020-05-04T03:04:54
| 2020-04-29T06:34:52
|
Python
|
UTF-8
|
Python
| false
| false
| 1,717
|
py
|
def bubble_sort(input_list):
    """Sort input_list in place (ascending) and return it.

    Classic bubble sort, scanning from the back of the list towards the
    front so the smallest remaining element "bubbles" into position i on
    pass i.

    Complexity: O(n^2) comparisons and swaps in the worst case.
    Stable: yes -- equal elements are never swapped past each other.
    Memory: O(1) -- sorts in place.
    Adaptive: yes -- a pass with no swaps proves the list is sorted and
    terminates the algorithm early (advantage over selection sort).
    """
    n = len(input_list)
    for pass_no in range(n):
        any_swap = False
        # everything before index pass_no is already in final position
        for j in range(n - 1, pass_no, -1):
            left, right = input_list[j - 1], input_list[j]
            if right < left:
                input_list[j - 1], input_list[j] = right, left
                any_swap = True
        # no swaps means the list is already sorted -- stop early
        if not any_swap:
            break
    return input_list
|
[
"johneshiver@gmail.com"
] |
johneshiver@gmail.com
|
71581e2aa28a19aa508f908fff09ae9da3e41017
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_connoisseur.py
|
f2ebfdee7349c5f0eb2d9d637ed4b4d9b3670125
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 419
|
py
|
#calss header
class _CONNOISSEUR():
def __init__(self,):
self.name = "CONNOISSEUR"
self.definitions = [u'a person who knows a lot about and enjoys one of the arts, or food, drink, etc. and can judge quality and skill in that subject: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
4e9f69d87e835061a181778d25e5810c1fdb12f4
|
dccf1fea8d62764b8c51259671f9b61d36196d41
|
/quiz/tests/test_views.py
|
3206b640c985d6489348d1011b7e9a68160df405
|
[
"MIT"
] |
permissive
|
Palombredun/django_quiz
|
e4594852c2709a9c6c58a96cc210f3f3dc1a282b
|
1565d251d54dfb54fdee83096b560876833275a2
|
refs/heads/master
| 2021-07-08T23:11:23.157677
| 2021-01-13T14:26:31
| 2021-01-13T14:26:31
| 224,863,683
| 0
| 0
| null | 2019-11-29T13:53:50
| 2019-11-29T13:53:50
| null |
UTF-8
|
Python
| false
| false
| 3,013
|
py
|
import datetime
import pytest
from pytest_django.asserts import assertTemplateUsed
from django.contrib.auth.models import User
from quiz.models import Category, SubCategory, Quiz, Statistic, Question, Grade
### FIXTURE ###
# Minimal object graph for the view tests below:
# user_A -> category_m -> sub_category_n -> quiz_q -> stats_s -> grade_g
@pytest.fixture
def user_A(db):
    # plain user used as quiz creator and for force_login in the views tests
    return User.objects.create_user(
        username="A", email="mail@mail.com", password="secret"
    )
@pytest.fixture
def category_m(db):
    return Category.objects.create(category="m")
@pytest.fixture
def sub_category_n(db, category_m):
    return SubCategory.objects.create(category=category_m, sub_category="n")
@pytest.fixture
def quiz_q(db, category_m, sub_category_n, user_A):
    # one quiz owned by user_A; 'url' is the slug used in take/statistics URLs
    date = datetime.datetime.now()
    return Quiz.objects.create(
        title="title",
        description="Long description",
        creator=user_A,
        category=category_m,
        category_name="m",
        sub_category=sub_category_n,
        created=date,
        random_order=False,
        difficulty=1,
        url="title-1"
    )
@pytest.fixture
def stats_s(db, quiz_q):
    # aggregate statistics row attached to quiz_q
    return Statistic.objects.create(
        quiz=quiz_q,
        number_participants=1,
        mean=2,
        easy=1,
        medium=1,
        difficult=1
    )
@pytest.fixture
def grade_g(stats_s):
    return Grade.objects.create(
        statistics=stats_s,
        grade=2,
        number=1
    )
### Tests page tutorial ###
# Smoke tests: each view returns 200 for a valid request; the create page
# redirects (302) for anonymous users.
def test_page_tutorial(client):
    response = client.get("/quiz/tutorial/")
    assert response.status_code == 200
### Tests page create ###
def test_access_page_create_unlogged(client):
    # anonymous users are redirected (login required)
    response = client.get("/quiz/create/")
    assert response.status_code == 302
def test_access_page_create_logged(client, user_A):
    response = client.force_login(user_A)
    response = client.get("/quiz/create/")
    assert response.status_code == 200
### Test page load_sub_categories ###
def test_page_load_sub_categories(client, db):
    # NOTE(review): URL lacks a leading '/', unlike every other test --
    # confirm this is intentional.
    response = client.get("quiz/ajax/load-subcategories/")
    assert response.status_code == 200
### Test page quiz lists ###
def test_page_quiz_list(client, db):
    response = client.get("/quiz/quiz-list/")
    assert response.status_code == 200
def test_quiz_list_by_category(client, category_m):
    response = client.get("/quiz/category/m/")
    assert response.status_code == 200
def test_quiz_list_by_subcategory(client, sub_category_n):
    response = client.get("/quiz/subcategory/n/")
    assert response.status_code == 200
### Test page take ###
def test_take_quiz(client, quiz_q, user_A):
    client.force_login(user_A)
    url = "/quiz/take/" + quiz_q.url + "/"
    response = client.get(url)
    assert response.status_code == 200
### Test page statistics ###
def test_statistics(client, quiz_q, stats_s, user_A, grade_g):
    # the statistics view needs at least one question attached to the quiz
    q = Question.objects.create(
        quiz=quiz_q,
        difficulty=1
    )
    client.force_login(user_A)
    url = "/quiz/statistics/" + quiz_q.url + "/"
    response = client.get(url)
    assert response.status_code == 200
|
[
"baptiste.name"
] |
baptiste.name
|
a3c84c720bb0bc8a3ec2921c600f975aaed6f1b8
|
20b4be7df5efeb8019356659c5d054f29f450aa1
|
/tools/indicators/build_indicators.py
|
16a8d1caeb8619580fb4836cc6c8c4cbb50269bb
|
[
"Apache-2.0"
] |
permissive
|
kumars99/TradzQAI
|
75c4138e30796573d67a5f08d9674c1488feb8e4
|
1551321642b6749d9cf26caf2e822051a105b1a5
|
refs/heads/master
| 2020-03-29T20:14:45.562143
| 2018-09-25T16:07:21
| 2018-09-25T16:07:21
| 150,302,554
| 1
| 0
| null | 2018-09-25T17:17:54
| 2018-09-25T17:17:54
| null |
UTF-8
|
Python
| false
| false
| 3,553
|
py
|
import pandas as pd
from tools.indicators.exponential_moving_average import exponential_moving_average as ema
from tools.indicators.volatility import volatility as vol
from tools.indicators.stochastic import percent_k as K
from tools.indicators.stochastic import percent_d as D
from tools.indicators.relative_strength_index import relative_strength_index as RSI
from tools.indicators.moving_average_convergence_divergence import moving_average_convergence_divergence as macd
from tools.indicators.bollinger_bands import bandwidth as bb
class Indicators():
    """Configurable technical-indicator builder.

    ``add_building`` translates a settings mapping into a list of
    ``[function, column_name, period(s)]`` build instructions, and
    ``build_indicators`` evaluates them against a price series, producing
    a DataFrame with one column per indicator.
    """

    def __init__(self, settings=None):
        # Default look-back periods; overridden per indicator in add_building.
        self.bb_period = 20
        self.rsi_period = 14
        self.sd_period = 0
        self.sv_period = 0
        self.stoch_period = 14
        self.volatility_period = 20
        self.macd_long = 24
        self.macd_short = 12
        self.ema_periods = [20, 50, 100]

        self.settings = settings
        self.build_func = None  # list of [func, column_name, period(s)]
        self.names = []  # output column names, in build order

    def add_building(self, settings=None):
        """Populate ``self.build_func`` / ``self.names`` from the settings.

        Each settings entry maps an indicator name to either a falsy value
        (skip it), the string ``'default'`` (use the default period), or
        explicit period(s) -- a ``[short, long]`` pair for MACD, a list of
        periods for EMA, a single period otherwise.
        """
        if settings:
            self.settings = settings
        if self.settings:
            self.build_func = []
            for key, value in self.settings.items():
                if not value:
                    continue
                elif "RSI" == key and value:
                    self.names.append('RSI')
                    if 'default' != value:
                        self.rsi_period = value
                    self.build_func.append([RSI, 'RSI', self.rsi_period])
                elif "MACD" == key and value:
                    self.names.append('MACD')
                    if 'default' != value:
                        # Bug fix: a trailing comma previously made this a
                        # 1-tuple (self.macd_long = value[1],), which broke
                        # the macd(data, short, long) call downstream.
                        self.macd_long = value[1]
                        self.macd_short = value[0]
                    self.build_func.append([macd, 'MACD', [self.macd_short, self.macd_long]])
                elif "Volatility" == key and value:
                    self.names.append('Volatility')
                    if 'default' != value:
                        self.volatility_period = value
                    self.build_func.append([vol, 'Volatility', self.volatility_period])
                elif "EMA" == key and value:
                    # One column per requested EMA period (e.g. EMA20, EMA50).
                    if 'default' != value:
                        for values in value:
                            self.names.append('EMA' + str(values))
                            self.build_func.append([ema, 'EMA' + str(values), values])
                elif "Bollinger_bands" == key and value:
                    self.names.append('Bollinger_bands')
                    if 'default' != value:
                        self.bb_period = value
                    self.build_func.append([bb, 'Bollinger_bands', self.bb_period])
                elif "Stochastic" == key and value:
                    self.names.append('Stochastic_D')
                    self.names.append('Stochastic_K')
                    if 'default' != value:
                        self.stoch_period = value
                    self.build_func.append([D, 'Stochastic_D', self.stoch_period])
                    self.build_func.append([K, 'Stochastic_K', self.stoch_period])

    def build_indicators(self, data):
        """Evaluate every registered indicator on ``data``.

        :param data: price series passed to each indicator function.
        :returns: DataFrame with one column per registered indicator.
        :raises ValueError: if ``add_building`` has not populated anything.
        """
        if not self.build_func:
            raise ValueError("No indicators to build.")
        indicators = pd.DataFrame(columns=self.names)
        # (Removed a leftover debug print of each column name.)
        for func, name, period in self.build_func:
            # MACD takes (data, short, long); everything else (data, period).
            if "MACD" in name:
                indicators[name] = func(data, period[0], period[1])
            else:
                indicators[name] = func(data, period)
        return indicators
|
[
"awakeproduction@hotmail.fr"
] |
awakeproduction@hotmail.fr
|
6a77109c6aa14b0e717e99865a97ceffd8cda1c1
|
09e5ce9673590f7ca27c480da605199a6d054a63
|
/modules/highscore.py
|
3daff9734d8b6c7a31ec3c42d9c75b6f9f816fd8
|
[] |
no_license
|
KirillMysnik/PySnake
|
781d7767cbb404033b608d15427e9e7996cc71d6
|
3fe1edc20248f20029413a31d88f673411374faf
|
refs/heads/master
| 2021-01-13T09:46:00.622694
| 2016-09-28T14:52:14
| 2016-09-28T14:52:14
| 69,473,624
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,698
|
py
|
from modules.delays import Delay
from modules.gui import TextLabel
from internal_events import InternalEvent
LABEL_COLOR = (255, 255, 255)
HIGHSCORE_LABEL_CAPTION = "score: {score}"
HIGHSCORE_LABEL_X = 64
HIGHSCORE_LABEL_Y = 64
TIME_LABEL_CAPTION = "elapsed: {seconds}s"
TIME_LABEL_X = 64
TIME_LABEL_Y = 100
app_ = None
highscore_label = None
time_label = None
highscore = 0
time_ = 0
time_delay = None
def update_time():
    """Advance the elapsed-time counter by one second and re-render its label.

    Re-schedules itself via Delay, so a single call starts a 1-second tick
    that runs until ``time_delay.cancel()`` is called.
    """
    global time_, time_delay

    time_ += 1
    time_label.caption = TIME_LABEL_CAPTION.format(seconds=time_)
    time_label.render()
    time_delay = Delay(1, update_time)
@InternalEvent('load')
def on_load(app):
    """Create the score and elapsed-time labels and register their drawers."""
    global app_, highscore_label, time_label

    app_ = app
    highscore_label = TextLabel(
        HIGHSCORE_LABEL_X, HIGHSCORE_LABEL_Y,
        HIGHSCORE_LABEL_CAPTION.format(score=0), 48, LABEL_COLOR,
        caption_bold=True)
    highscore_label.render()

    time_label = TextLabel(
        TIME_LABEL_X, TIME_LABEL_Y, TIME_LABEL_CAPTION.format(seconds=0),
        32, LABEL_COLOR)
    time_label.render()

    # Both labels are drawn in the 'score' layer.
    app_.register_drawer('score', highscore_label.draw)
    app_.register_drawer('score', time_label.draw)
@InternalEvent('fruit_eaten')
def on_game_start(fruit):
    """Increment the score and refresh the score label.

    NOTE(review): despite its name this handles 'fruit_eaten', not game
    start -- only the decorator matters for dispatch; consider renaming.
    """
    global highscore

    highscore += 1
    highscore_label.caption = HIGHSCORE_LABEL_CAPTION.format(score=highscore)
    highscore_label.render()
@InternalEvent('game_start')
def on_game_end():
    """Reset the score and timer when a new game starts.

    NOTE(review): the name says "end" but the event is 'game_start'; the
    decorator controls dispatch, so behavior is correct as-is.
    """
    global highscore, time_, time_delay

    highscore = 0
    time_ = -1  # update_time() below immediately increments this to 0
    highscore_label.caption = HIGHSCORE_LABEL_CAPTION.format(score=highscore)
    highscore_label.render()
    update_time()
@InternalEvent('game_end')
def on_game_end():
    """Stop the elapsed-time tick when the game ends.

    NOTE(review): shadows the module-level name of the 'game_start' handler
    above; harmless because both are registered via the decorator first.
    """
    time_delay.cancel()
|
[
"kirill@mysnik.com"
] |
kirill@mysnik.com
|
fb58449531e8d4d38e17ea8628b285f48a6c86ad
|
321b4ed83b6874eeb512027eaa0b17b0daf3c289
|
/153/153.find-minimum-in-rotated-sorted-array.250607228.Accepted.leetcode.py
|
86c48a645e1a8abbc02eb311cb58e81777442548
|
[] |
no_license
|
huangyingw/submissions
|
7a610613bdb03f1223cdec5f6ccc4391149ca618
|
bfac1238ecef8b03e54842b852f6fec111abedfa
|
refs/heads/master
| 2023-07-25T09:56:46.814504
| 2023-07-16T07:38:36
| 2023-07-16T07:38:36
| 143,352,065
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 378
|
py
|
class Solution(object):
    def findMin(self, nums):
        """Return the smallest value of a rotated sorted array of distinct
        numbers, found by binary-searching for the rotation point."""
        # No rotation at all: the array is fully sorted, first element wins.
        if nums[0] <= nums[-1]:
            return nums[0]

        lo, hi = 0, len(nums) - 1
        # Invariant: the rotation point lies strictly between lo and hi.
        while lo + 1 < hi:
            mid = (lo + hi) // 2
            if nums[mid] > nums[lo]:
                # Left run is still ascending past mid: minimum is to the right.
                lo = mid
            else:
                hi = mid
        return min(nums[lo], nums[hi])
|
[
"huangyingw@gmail.com"
] |
huangyingw@gmail.com
|
f7e66124cfb611cfdde05053e6b48a4ce7dff2fd
|
efd30b0ba0fd4d8c9e4ababe8113ba5be08319f2
|
/parkings/migrations/0015_fill_normalized_reg_nums.py
|
9f8b7a05457ba264cae61e627952374292b9a03d
|
[
"MIT"
] |
permissive
|
City-of-Helsinki/parkkihubi
|
3f559ef047592c5321b69c52474fc23a5eae0603
|
24751065d6e6cd68b89cd2a4358d51bdfc77cae6
|
refs/heads/master
| 2023-07-20T12:52:43.278380
| 2023-05-10T07:46:38
| 2023-05-10T07:46:38
| 75,084,288
| 14
| 15
|
MIT
| 2023-07-20T12:52:08
| 2016-11-29T13:32:13
|
Python
|
UTF-8
|
Python
| false
| false
| 838
|
py
|
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import Q
from ..models import Parking
def fill_normalized_reg_nums(apps, schema_editor):
    """Backfill ``normalized_reg_num`` on Parkings where it is NULL or empty.

    Uses the historical model for querying, but the current model's
    ``normalize_reg_num`` for the normalization rule.
    """
    historical_parking = apps.get_model('parkings', 'Parking')
    missing = Q(normalized_reg_num=None) | Q(normalized_reg_num='')
    for parking in historical_parking.objects.filter(missing):
        normalized = Parking.normalize_reg_num(parking.registration_number)
        parking.normalized_reg_num = normalized
        parking.save(update_fields=['normalized_reg_num'])
class Migration(migrations.Migration):
    """Data migration: backfill normalized_reg_num on existing Parkings."""

    dependencies = [
        # The column itself was added in the previous migration.
        ('parkings', '0014_normalized_reg_num'),
    ]

    operations = [
        migrations.RunPython(
            code=fill_normalized_reg_nums,
            # Reversing is a no-op: the backfilled values are simply kept.
            reverse_code=migrations.RunPython.noop),
    ]
|
[
"tuomas.suutari@anders.fi"
] |
tuomas.suutari@anders.fi
|
6c351742ccd9c3c58c9a7048ff2f0434e916f76c
|
04ae1836b9bc9d73d244f91b8f7fbf1bbc58ff29
|
/019/Solution.py
|
77b23c403e0c7749e27a5927c92d8dbf83f175dd
|
[] |
no_license
|
zhangruochi/leetcode
|
6f739fde222c298bae1c68236d980bd29c33b1c6
|
cefa2f08667de4d2973274de3ff29a31a7d25eda
|
refs/heads/master
| 2022-07-16T23:40:20.458105
| 2022-06-02T18:25:35
| 2022-06-02T18:25:35
| 78,989,941
| 14
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,040
|
py
|
"""
Given a linked list, remove the n-th node from the end of list and return its head.
Example:
Given linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes 1->2->3->5.
Note:
Given n will always be valid.
Follow up:
Could you do this in one pass?
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def removeNthFromEnd(self, head, n):
        """
        Remove the n-th node from the end in one pass using two pointers.

        ``quick`` gets an (n+1)-node head start, so when it falls off the
        end, ``slow`` sits on the predecessor of the node to delete.

        :type head: ListNode
        :type n: int
        :rtype: ListNode
        """
        # Dummy node simplifies deleting the real head.
        dummy = ListNode(0)
        dummy.next = head
        slow = quick = dummy
        while quick:
            if n >= 0:
                # First n+1 steps: only quick advances, opening the gap.
                n -= 1
                quick = quick.next
            else:
                quick = quick.next
                slow = slow.next
        # slow is now the node just before the target.
        slow.next = slow.next.next
        return dummy.next
class Solution(object):
    def removeNthFromEnd(self, head, n):
        """
        Two-pass variant: count the nodes, then walk to the predecessor.

        :type head: ListNode
        :type n: int
        :rtype: ListNode
        """
        dummy = ListNode(0)
        dummy.next = head
        # First pass: count includes the dummy node.
        count, p = 0, dummy
        while p:
            count += 1
            p = p.next
        # k steps from dummy reach the node before the one to remove.
        k = count - n - 1
        p = dummy
        while k:
            p = p.next
            k -= 1
        p.next = p.next.next
        return dummy.next
# Definition for singly-linked list.
# class ListNode:
#     def __init__(self, val=0, next=None):
#         self.val = val
#         self.next = next
class Solution:
    def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
        """Two-pass removal: measure the length, then unlink node m-n
        (0-based from the front)."""
        # First pass: m = number of nodes.
        m = 0
        cur = head
        while cur:
            m += 1
            cur = cur.next
        # Re-purpose n as the 0-based index of the node to remove.
        n = m - n
        dummy = cur = ListNode()
        dummy.next = head
        # Walking n steps from the dummy lands on the predecessor.
        while n:
            cur = cur.next
            n-=1
        cur.next = cur.next.next
        return dummy.next
|
[
"zrc720@gmail.com"
] |
zrc720@gmail.com
|
fb785b48dbc3883bf3983cf9a771dd2f9a6bb328
|
4a44d785d19f23033ec89775c8219a2f8275a4dd
|
/cride/circles/admin.py
|
a7031ba8c50be95f1f6624a720fbf3187af1f7ce
|
[
"MIT"
] |
permissive
|
mdark1001/crideApiRest
|
d17989dfb650eb799c44c57d87f3e0cec8fc647b
|
228efec90d7f1ad8a6766b5a8085dd6bbf49fc8a
|
refs/heads/main
| 2023-04-09T23:27:09.931730
| 2021-04-19T13:46:44
| 2021-04-19T13:46:44
| 357,706,873
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 312
|
py
|
from django.contrib import admin
# Register your models here.
from .models import Circle
@admin.register(Circle)
class CircleAdmin(admin.ModelAdmin):
list_display = ['name', 'is_verified', 'is_public', 'rides_taken', 'rides_offered']
list_filter = ['created', 'is_verified', 'is_public','is_limited']
|
[
"miguel.cabrera.app@gmail.com"
] |
miguel.cabrera.app@gmail.com
|
2d8e282e4ff5217cf1139f3a412e41342844571a
|
278a000f8b40476b5d1473cc1b98d5872551cab2
|
/test_sphere_volume.py
|
cb30c89536569a7350adecec0fb65901984d43cd
|
[] |
no_license
|
Kaliumerbol/kaliev_erbol_hw_2.6
|
172eb765a9cd5be8f8a9dc4f28e3fc258e5d92d9
|
5ea5fa98baf10d467a287da435f40e796c2594c3
|
refs/heads/main
| 2023-06-05T15:44:51.723761
| 2021-06-29T10:51:01
| 2021-06-29T10:51:01
| 381,329,097
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 981
|
py
|
import unittest
import math
from sphere_volume import calculate_sphere_volume
pi = math.pi
class TestSphereVolume(unittest.TestCase):
    """Unit tests for calculate_sphere_volume (V = 4/3 * pi * r**3)."""

    def test_area(self):
        # Compare against the closed-form volume for a few radii.
        self.assertAlmostEqual(calculate_sphere_volume(5), 4/3*pi*5**3)
        self.assertAlmostEqual(calculate_sphere_volume(3.7), 4/3*pi*3.7**3)
        self.assertAlmostEqual(calculate_sphere_volume(1), 4/3*pi)
        # NOTE(review): translated from Russian -- "I never quite understood
        # how assertRaises works; I dug around the net for a long time but
        # found no clear explanation of what to catch and how, so I commented
        # it out.  Otherwise everything works fine."
        # self.assertRaises(ValueError, calculate_sphere, 'four')

    def test_negative(self):
        # The implementation returns a Russian error string ("The sphere's
        # radius cannot be negative") instead of raising for negative input.
        self.assertEqual(calculate_sphere_volume(-5), 'Радиус сферы не может быть отрицательным')

unittest.main()
|
[
"you@example.com"
] |
you@example.com
|
924fcb51f482e997c837a79f2363ad5b113136aa
|
03195a6f98396fd27aedc3c06d81f1553fb1d16b
|
/pandas/tests/series/methods/test_rename.py
|
90c8f775586e6d8a3b4fbc61dfc9c8334d7b3417
|
[
"BSD-3-Clause"
] |
permissive
|
huaxz1986/pandas
|
a08d80d27726fe141d449835b9a09265bca5b5e0
|
ba2473834fedcf571d3f8245b4b24796873f2736
|
refs/heads/master
| 2023-06-11T02:20:14.544220
| 2022-01-12T04:40:06
| 2022-01-12T04:40:06
| 131,370,494
| 3
| 4
|
BSD-3-Clause
| 2018-04-28T03:51:05
| 2018-04-28T03:51:05
| null |
UTF-8
|
Python
| false
| false
| 4,450
|
py
|
from datetime import datetime
import numpy as np
import pytest
from pandas import (
Index,
MultiIndex,
Series,
)
import pandas._testing as tm
class TestRename:
    """Tests for Series.rename: callables, dicts, scalar names, axis
    handling, inplace behavior, custom indexers, and MultiIndex levels."""

    def test_rename(self, datetime_series):
        ts = datetime_series
        renamer = lambda x: x.strftime("%Y%m%d")
        renamed = ts.rename(renamer)
        assert renamed.index[0] == renamer(ts.index[0])

        # dict
        rename_dict = dict(zip(ts.index, renamed.index))
        renamed2 = ts.rename(rename_dict)
        tm.assert_series_equal(renamed, renamed2)

    def test_rename_partial_dict(self):
        # partial dict: labels absent from the mapping are kept unchanged
        ser = Series(np.arange(4), index=["a", "b", "c", "d"], dtype="int64")
        renamed = ser.rename({"b": "foo", "d": "bar"})
        tm.assert_index_equal(renamed.index, Index(["a", "foo", "c", "bar"]))

    def test_rename_retain_index_name(self):
        # index with name: renaming with an empty dict must keep index.name
        renamer = Series(
            np.arange(4), index=Index(["a", "b", "c", "d"], name="name"), dtype="int64"
        )
        renamed = renamer.rename({})
        assert renamed.index.name == renamer.index.name

    def test_rename_by_series(self):
        # a Series works as a label mapping, like a partial dict
        ser = Series(range(5), name="foo")
        renamer = Series({1: 10, 2: 20})
        result = ser.rename(renamer)
        expected = Series(range(5), index=[0, 10, 20, 3, 4], name="foo")
        tm.assert_series_equal(result, expected)

    def test_rename_set_name(self):
        # a non-callable, non-mapping argument sets the Series name instead
        ser = Series(range(4), index=list("abcd"))
        for name in ["foo", 123, 123.0, datetime(2001, 11, 11), ("foo",)]:
            result = ser.rename(name)
            assert result.name == name
            tm.assert_numpy_array_equal(result.index.values, ser.index.values)
            assert ser.name is None

    def test_rename_set_name_inplace(self):
        ser = Series(range(3), index=list("abc"))
        for name in ["foo", 123, 123.0, datetime(2001, 11, 11), ("foo",)]:
            ser.rename(name, inplace=True)
            assert ser.name == name

            exp = np.array(["a", "b", "c"], dtype=np.object_)
            tm.assert_numpy_array_equal(ser.index.values, exp)

    def test_rename_axis_supported(self):
        # Supporting axis for compatibility, detailed in GH-18589
        ser = Series(range(5))
        ser.rename({}, axis=0)
        ser.rename({}, axis="index")

        with pytest.raises(ValueError, match="No axis named 5"):
            ser.rename({}, axis=5)

    def test_rename_inplace(self, datetime_series):
        renamer = lambda x: x.strftime("%Y%m%d")
        expected = renamer(datetime_series.index[0])

        datetime_series.rename(renamer, inplace=True)
        assert datetime_series.index[0] == expected

    def test_rename_with_custom_indexer(self):
        # GH 27814: an arbitrary object becomes the Series name
        class MyIndexer:
            pass

        ix = MyIndexer()
        ser = Series([1, 2, 3]).rename(ix)
        assert ser.name is ix

    def test_rename_with_custom_indexer_inplace(self):
        # GH 27814
        class MyIndexer:
            pass

        ix = MyIndexer()
        ser = Series([1, 2, 3])
        ser.rename(ix, inplace=True)
        assert ser.name is ix

    def test_rename_callable(self):
        # GH 17407: a class (here str) is treated as a callable mapper,
        # not as a scalar name
        ser = Series(range(1, 6), index=Index(range(2, 7), name="IntIndex"))
        result = ser.rename(str)
        expected = ser.rename(lambda i: str(i))
        tm.assert_series_equal(result, expected)

        assert result.name == expected.name

    def test_rename_none(self):
        # GH 40977: rename(None) clears the name
        ser = Series([1, 2], name="foo")
        result = ser.rename(None)
        expected = Series([1, 2])
        tm.assert_series_equal(result, expected)

    def test_rename_series_with_multiindex(self):
        # issue #43659: level-restricted rename on a MultiIndex
        arrays = [
            ["bar", "baz", "baz", "foo", "qux"],
            ["one", "one", "two", "two", "one"],
        ]

        index = MultiIndex.from_arrays(arrays, names=["first", "second"])
        ser = Series(np.ones(5), index=index)
        result = ser.rename(index={"one": "yes"}, level="second", errors="raise")

        arrays_expected = [
            ["bar", "baz", "baz", "foo", "qux"],
            ["yes", "yes", "two", "two", "yes"],
        ]

        index_expected = MultiIndex.from_arrays(
            arrays_expected, names=["first", "second"]
        )
        series_expected = Series(np.ones(5), index=index_expected)

        tm.assert_series_equal(result, series_expected)
|
[
"noreply@github.com"
] |
huaxz1986.noreply@github.com
|
ab3e2eb43508d9f342a4113bbe93ea6f50279af2
|
63255cf9da84b5dd6aa4454dd50385d50c43aac9
|
/tencent/sort_and_search/search.py
|
7ed8bafa2fc87c9642650340c00b88a538a669e8
|
[
"MIT"
] |
permissive
|
summer-vacation/AlgoExec
|
d37054e937b7e3cc4c0f76019cf996acb0fb5a34
|
55c6c3e7890b596b709b50cafa415b9594c03edd
|
refs/heads/master
| 2021-07-09T12:18:51.532581
| 2020-12-20T13:46:43
| 2020-12-20T13:46:43
| 223,929,183
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,429
|
py
|
# -*- coding: utf-8 -*-
"""
File Name: search
Author : jing
Date: 2020/3/19
https://leetcode-cn.com/explore/interview/card/tencent/224/sort-and-search/927/
搜索旋转排序数组
O(log n)
"""
class Solution:
    """Search for a target in a rotated sorted array of distinct values."""

    def search(self, nums, target: int) -> int:
        """Linear reference implementation: index of target, or -1.

        Uses ``in`` + ``index`` (O(n)); kept as the simple baseline.
        """
        if nums is None or len(nums) == 0:
            return -1
        if target in nums:
            index = nums.index(target)
            return index
        else:
            return -1

    # Divide-and-conquer search.
    def search2(self, nums, target: int) -> int:
        """Return the index of target in a rotated sorted array, or -1.

        Bug fix: the recursive calls previously went through ``self.search``
        (the linear scan above), so this method was O(n) despite its intent.
        They now recurse into ``search2`` itself.  Note the slicing still
        copies O(n) per level; an index-based version would be needed for a
        strictly O(log n) implementation.
        """
        if not nums:
            return -1
        if len(nums) == 1:
            return 0 if nums[0] == target else -1
        cent = len(nums) // 2
        if target < nums[cent] <= nums[-1]:
            # Right half is sorted and entirely above target: go left.
            return self.search2(nums[:cent], target)
        elif target >= nums[cent] >= nums[0]:
            # Left half is sorted and entirely below target: go right.
            res = self.search2(nums[cent:], target)
            if res == -1:
                return -1
            else:
                return cent + res
        else:
            # Rotation point location is ambiguous: search both halves.
            resl = self.search2(nums[:cent], target)
            resr = self.search2(nums[cent:], target)
            if resr != -1:
                return cent + resr
            if resl != -1:
                return resl
            return -1
if __name__ == '__main__':
    # Smoke test: 3 is absent from the rotated array, so this prints -1.
    print(Solution().search([4,5,6,7,0,1,2], 3))
|
[
"280806137@qq.com"
] |
280806137@qq.com
|
2ad5fa16421f87f656519643af8a3217cfecc11c
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_145/223.py
|
c67f0e3c81cc4903f6cdce8da4043c6407e2bad7
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 668
|
py
|
#!/usr/bin/env pypy
# -*- coding: utf-8 -*-
# google code jam - c.durr - 2014
# Part Elf
# https://code.google.com/codejam/contest/3004486/dashboard
#
#
from math import *
from sys import *
from fractions import *
# Thin wrappers around stdin for the Code Jam input format.
def readint(): return int(stdin.readline())
def readarray(f): return map(f, stdin.readline().split())
def readstring(): return stdin.readline().strip()
def solve(f):
    """Return the first doubling step (1-based) at which the fraction f,
    iterated as f -> 2f mod 1, first reached a value >= 1 -- provided f
    eventually hits exactly 0 within 40 steps.  Return -1 otherwise."""
    first_carry = -1
    for step in range(40):
        f *= 2
        if f >= 1:
            f -= 1
            if first_carry == -1:
                # Remember the first generation that produced a carry.
                first_carry = step + 1
        if f == 0:
            return first_carry
    # Never reduced to zero within 40 doublings: impossible.
    return -1
# One test case per line: a single fraction P/Q; print the generation
# count or "impossible".  (Python 2 print statement -- this script targets
# the pypy/Python 2 interpreter named in the shebang.)
for test in range(readint()):
    f = readarray(Fraction)[0]
    g = solve(f)
    print "Case #%i:"% (test+1), ("impossible" if g==-1 else g)
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
385a5bafa117ea93e64ca3733a3337a00c47b93e
|
d24cef73100a0c5d5c275fd0f92493f86d113c62
|
/SRC/tutorials/adaptive.py
|
a1b03746e2aa8e81ec7c3f1153c3136a05f080a9
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
rlinder1/oof3d
|
813e2a8acfc89e67c3cf8fdb6af6b2b983b8b8ee
|
1fb6764d9d61126bd8ad4025a2ce7487225d736e
|
refs/heads/master
| 2021-01-23T00:40:34.642449
| 2016-09-15T20:51:19
| 2016-09-15T20:51:19
| 92,832,740
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,624
|
py
|
# -*- python -*-
# $RCSfile: adaptive.py,v $
# $Revision: 1.14.2.6 $
# $Author: langer $
# $Date: 2014/09/27 22:34:44 $
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
from ooflib.tutorials import tutorial
TutoringItem = tutorial.TutoringItem
TutorialClass = tutorial.TutorialClass
## TODO 3.1: Rewrite this so that it uses the Refine SkeletonModifier
## instead of the AMR MeshModifier. Then re-record the GUI test for
## this tutorial.
TutorialClass(
subject = "Adaptive Mesh Refinement",
ordering = 6,
lessons = [
TutoringItem(
subject="Introduction",
comments=
"""OOF3D provides a rudimentary adaptive mesh refinement tool via
BOLD(a Posteriori) error estimation scheme that utilizes
BOLD(Superconvergent Patch Recovery) of BOLD(Zienkiewicz) and
BOLD(Zhu) -- more discussion of the subject can be found in the
OOF3D manual.
In this tutorial, the adaptive mesh refinement will be briefly
demonstrated.
BOLD(NOTE:) In version 3.0 of OOF3D, adaptive mesh refinement
only works on the default Subproblem of a Mesh. Fields and
Equations defined on other Subproblems will not be seen by the
adaptive mesh machinery.
"""),
TutoringItem(
subject="Loading a Skeleton",
comments=
"""Open a graphics window, if none has been opened yet, with
the BOLD(Graphics/New) command in the BOLD(Windows) menu.
Download the file BOLD(el_shape.mesh) from
http://www.ctcms.nist.gov/oof/oof3d/examples, or locate it within the
share/oof3d/examples directory in your OOF3D installation.
A data file can be loaded from the BOLD(File) menu in the main OOF3D
window (BOLD(File -> Load -> Data)).
Select the example file (BOLD(el_shape.mesh)) in the file selector,
and click BOLD(OK).
""",
signal = ("new who", "Skeleton")
),
TutoringItem(
subject="L-shaped Domain",
comments=
"""If you have finished the tutorial for BOLD(Non-rectangular Domain),
you should be familiar with this Mesh.
The Mesh looks rectangular but Material has been assigned only to
the BOLD(green) part of the Mesh, which simulates an effective
BOLD(L)-shaped domain.
Move on to the next slide.
""" ),
TutoringItem(
subject="Boundary Conditions",
comments="""The Mesh is ready to be solved.
The applied boundary conditions (all BOLD(Dirichlet)) are:
BOLD(1.) u_x = 0 on the BOLD(Xmin) side
BOLD(2.) u_y = 0 on the BOLD(Xmin) side
BOLD(3.) u_z = 0 on the BOLD(Xmin) side
BOLD(4.) u_x = 0 on the BOLD(Ymax) side
BOLD(5.) u_y = 0 on the BOLD(Ymax) side
BOLD(6.) u_z = 0 on the BOLD(Ymax) side
BOLD(7.) u_y = -2 on the BOLD(Xmax) side
BOLD(8.) u_z = -2 on the BOLD(Xmax) side"""
),
# TODO 3.0: Minor schizophrenia -- since the introduction of
# subproblems, the "Solve" menu item sends "subproblem changed"
# and not "mesh changed", but the adaptive mesh refinement routine
# itself sends "mesh changed".
TutoringItem(
subject="Solution",
comments=
"""Open the BOLD(Solver) page and just click BOLD(Solve).
A deformed Mesh will be displayed in the graphics window.
Note that dummy elements (BOLD(ivory) part) are BOLD(NOT) displayed
in the deformed Mesh.
For the clearer view, let us hide the Skeleton layer.
Navigate to the bottom of the graphics window and find a layer
labeled BOLD(Skeleton(skeleton)) and Uncheck the square box to
hide the layer.
Due to the shape of the domain, it is obvious that stresses are
highly concentrated in the region surrounding the corner.
It is also safe to assume that errors in this region would be higher
than in other regions.
Move on to the next slide to start the process for adaptive mesh
refinement.
""",
signal = "subproblem changed"
),
# TODO: *** Mesh Status for el_shape:skeleton:mesh ***
# Unsolvable: Subproblem 'default' is ill-posed!
# Equation 'Force_Balance' has no flux contributions
TutoringItem(
subject="Adaptive Mesh Refinement",
comments=
"""Go back to the BOLD(FEMesh) page.
Select BOLD(Adaptive Mesh Refinement).
As of now, we have only one error estimator, BOLD(Z-Z Estimator).
Select BOLD(L2 Error Norm) for error estimating BOLD(method).
Select BOLD(stress), which is the only entity,
for the BOLD(flux) parameter.
Set BOLD(threshold) to be BOLD(10).
For each element, an L2 error norm will be computed
with stresses computed from the finite element solution and their
recovered counterparts, which act as exact stresses.
If the relative error exceeds 10 percent, the element will be refined.
The next three parameters, BOLD(criterion), BOLD(degree) and, BOLD(alpha)
take care of actual refinement. Don't bother with these parameters
for this tutorial (See BOLD(skeleton) tutorial for details).
Sometimes, refinement could create badly-shaped elements. These elements
can be removed by turning on the BOLD(rationalize) option.
By default, field values are transferred to the refined mesh.
This, however, is just a
projection of the previous solution onto the refined mesh --
you need to re-solve the problem for improved solution.
Leave these options as they are for now and click BOLD(OK).
""",
signal = "mesh changed"
),
TutoringItem(
subject="Refined Mesh",
comments=
"""As expected, elements surrounding the corner have been refined.
Now, go to the BOLD(Solver) page.
BOLD(Solve) the problem again with the refined mesh.
""",
signal = "subproblem changed"
),
TutoringItem(
subject="Refine Again",
comments=
"""
Go back to the BOLD(FEMesh) page and refine the mesh again
(just click BOLD(OK)).
The corner has been refined more. For a better view, use
BOLD(ctrl)+BOLD(.) or BOLD(Settings)->BOLD(Zoom)->BOLD(In) from
the graphics window.
This process (BOLD(Refine) + BOLD(Solve)) can be repeated, until
you're satisfied.
Thanks for trying out the tutorial.
""",
signal = "mesh changed"
)
])
|
[
"faical.congo@nist.gov"
] |
faical.congo@nist.gov
|
0d66bfb2e2f61e6192594453d928317bed7f64d2
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/MAzBohC2PxchT3wqK_13.py
|
a37c09d7d848225a334c7b4bb512adb1efefa6cd
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 360
|
py
|
def shadow_sentence(a, b):
    """Return True when b "shadows" a: the sentences have the same number
    of words, corresponding words have equal lengths, and no letter of a
    word in b occurs in the corresponding word of a."""
    words_a, words_b = a.split(' '), b.split(' ')
    if len(words_a) != len(words_b):
        return False
    # Corresponding words must match in length.
    if any(len(wa) != len(wb) for wa, wb in zip(words_a, words_b)):
        return False
    # No letter of a b-word may appear in its a-counterpart.
    return all(
        ch not in wa
        for wa, wb in zip(words_a, words_b)
        for ch in wb
    )
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
01eb27b1efe1e94dc35839dd99988ef3512c19f4
|
6584124fee86f79ce0c9402194d961395583d6c3
|
/blog/migrations/0005_userprofile.py
|
741df13e8b0ed00bd33880271c9c54062c148c8f
|
[] |
no_license
|
janusnic/webman
|
fdcffb7ed2f36d0951fd18bbaa55d0626cd271e1
|
2e5eaadec64314fddc19f27d9313317f7a236b9e
|
refs/heads/master
| 2018-12-28T18:21:00.291717
| 2015-06-05T11:49:00
| 2015-06-05T11:49:00
| 35,676,834
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 796
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Add the UserProfile model: a one-to-one extension of the auth user
    with optional website and profile picture."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('blog', '0004_auto_20150522_1108'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('website', models.URLField(blank=True)),
                ('picture', models.ImageField(upload_to=b'profile_images', blank=True)),
                ('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"janusnic@gmail.com"
] |
janusnic@gmail.com
|
efb302e6899348b8a39d8588818d325e6a0f9ada
|
1d1a21b37e1591c5b825299de338d18917715fec
|
/Mathematics/Data science/Mathmatics/02/Exercise_2_4_5.py
|
4c1094e1a0624eace4209363bf4bc2727406716d
|
[] |
no_license
|
brunoleej/study_git
|
46279c3521f090ebf63ee0e1852aa0b6bed11b01
|
0c5c9e490140144caf1149e2e1d9fe5f68cf6294
|
refs/heads/main
| 2023-08-19T01:07:42.236110
| 2021-08-29T16:20:59
| 2021-08-29T16:20:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,206
|
py
|
# Boston house-price exercise: predict each town's median house price from
# town features (crime rate, air pollution, ...) with the linear model
# Ax = b_hat, solving for the weight vector x by least squares.
# To keep the exercise small, only four features -- crime rate (CRIM), air
# pollution (NOX), rooms per dwelling (RM) and age (AGE) -- and the first
# four samples are used, giving a square 4x4 system.
import numpy as np
from sklearn.datasets import load_boston  # NOTE(review): removed in scikit-learn >= 1.2

boston = load_boston()
X = boston.data
y = boston.target

A = X[:4, [0, 4, 5, 6]]  # 'CRIM', 'NOX', 'RM', 'AGE'
b = y[:4]
# rcond=None opts in to numpy's modern cutoff for small singular values and
# silences the FutureWarning raised when rcond is omitted.
x, resid, rank, s = np.linalg.lstsq(A, b, rcond=None)
print(A)
'''
[[6.320e-03 5.380e-01 6.575e+00 6.520e+01]
[2.731e-02 4.690e-01 6.421e+00 7.890e+01]
[2.729e-02 4.690e-01 7.185e+00 6.110e+01]
[3.237e-02 4.580e-01 6.998e+00 4.580e+01]]
'''
print(x)  # [-3.12710043e+02 -1.15193942e+02 1.44996465e+01 -1.13259317e-01]
print(b)  # [24. 21.6 34.7 33.4]
|
[
"jk04059@naver.com"
] |
jk04059@naver.com
|
93dab5e033bf2be71722860d57e80346b770aa7b
|
3b1daac7c1f72b985da899770d98e5f0e8fb835c
|
/Configurations/VBS/2017CR_v7/variables.py
|
695bd91e5fdd8c45254bfdcdab37fb9d9235b46a
|
[] |
no_license
|
freejiebao/PlotsConfigurations
|
7e10aa45aa3bf742f30d1e21dc565d59d2a025d8
|
cdfd3aff38d1ece9599a699997753bc8ba01b9b1
|
refs/heads/master
| 2020-06-18T19:22:00.561542
| 2019-09-02T12:52:28
| 2019-09-02T12:52:28
| 186,931,874
| 0
| 0
| null | 2019-05-16T01:58:07
| 2019-05-16T01:58:07
| null |
UTF-8
|
Python
| false
| false
| 5,363
|
py
|
# variables
#variables = {}
#'fold' : # 0 = not fold (default), 1 = fold underflowbin, 2 = fold overflow bin, 3 = fold underflow and overflow
# variables['events'] = { 'name': '1',
# 'range' : (1,0,2),
# 'xaxis' : 'events',
# 'fold' : 3
# }
variables['nJet'] = { 'name': 'nJet',
'range' : (6,0,6),
'xaxis' : 'njets',
'fold' : 3
}
variables['nJet_v2'] = { 'name': 'Sum$(CleanJet_pt>30)',
'range' : (4,0,4),
'xaxis' : 'njets',
'fold' : 3
}
variables['nLepton'] = {
'name': '1*(Alt$(Lepton_pt[0],0.)>20) + 1*(Alt$(Lepton_pt[1],0.)>20) + 1*(Alt$(Lepton_pt[2],0.)>20)+ 1*(Alt$(Lepton_pt[3],0.)>20) + 1*(Alt$(Lepton_pt[4],0.)>20)',
'range': (5,0,5),
'xaxis': '# leptons',
'fold': 3
}
variables['mll'] = { 'name': 'mll', # variable name
'range' : (4, 0. ,500), # variable range
'xaxis' : 'mll [GeV]', # x axis name
'fold' : 3
}
variables['mll_v3'] = { 'name': 'mll', # variable name
'range' : (12, 20. ,320), # variable range
'xaxis' : 'mll [GeV]', # x axis name
'fold' : 3
}
variables['mll_v2'] = { 'name': 'mll', # variable name
'range' : (80, 0. ,800), # variable range
'xaxis' : 'mll [GeV]', # x axis name
'fold' : 3
}
variables['mjj'] = { 'name': 'mjj',
'range': ([500,800,1200,1600,2000],), #for 500 < mjj < 1000
'xaxis': 'mjj [GeV]',
'fold': 3
}
variables['mjj_v2'] = { 'name': 'mjj',
'range': ([500,800,1200,1800,2000],), #for 500 < mjj < 1000
'xaxis': 'mjj [GeV]',
'fold': 3
}
variables['mjj_v3'] = { 'name': 'mjj',
'range': (15, 500. ,2000), #for 500 < mjj < 1000
'xaxis': 'mjj [GeV]',
'fold': 3
}
variables['mjj_v4'] = { 'name': 'mjj',
'range': (10,0 ,500), #for 500 < mjj < 1000
'xaxis': 'mjj [GeV]',
'fold': 3
}
variables['pt1'] = { 'name': 'Alt$(Lepton_pt[0],-9999.)',
                     'range' : (10,0.,100),
                     'xaxis' : 'p_{T} 1st lep',
                     'fold' : 3
                   }

# Bug fix: 'pt2' previously plotted Lepton_pt[0] again (copy-paste from
# 'pt1'), so the histogram labeled "2nd lep" actually showed the leading
# lepton.  Index [1] selects the sub-leading lepton, matching how
# 'jetpt2' uses Jet_pt[1].
variables['pt2'] = { 'name': 'Alt$(Lepton_pt[1],-9999.)',
                     'range' : (10,0.,150),
                     'xaxis' : 'p_{T} 2nd lep',
                     'fold' : 3
                   }
variables['jetpt1'] = { 'name': 'Alt$(Jet_pt[0],-9999.)',
'range' : (15,0.,200),
'xaxis' : 'p_{T} 1st jet',
'fold' : 3
}
variables['jetpt2'] = { 'name': 'Alt$(Jet_pt[1],-9999.)',
'range' : (15,0.,150),
'xaxis' : 'p_{T} 2nd jet',
'fold' : 3
}
variables['met'] = { 'name': 'MET_pt', # variable name
'range' : (10,0,200), # variable range
'xaxis' : 'pfmet [GeV]', # x axis name
'fold' : 3
}
variables['etaj1'] = { 'name': 'Alt$(Jet_eta[0],-9999.)',
'range': (10,-5,5),
'xaxis': 'etaj1',
'fold': 3
}
variables['etaj2'] = { 'name': 'Alt$(Jet_eta[1],-9999.)',
'range': (10,-5,5),
'xaxis': 'etaj2',
'fold': 3
}
variables['detajj'] = { 'name': 'detajj',
'range': (7,0.0,7.0),
'xaxis': 'detajj',
'fold': 3
}
variables['Zlep1'] = { 'name': '(Alt$(Lepton_eta[0],-9999.) - (Alt$(Jet_eta[0],-9999.)+Alt$(Jet_eta[1],-9999.))/2)/detajj',
'range': (10,-1.5,1.5),
'xaxis': 'Z^{lep}_{1}',
'fold': 3
}
variables['Zlep2'] = { 'name': '(Alt$(Lepton_eta[1],-9999.) - (Alt$(Jet_eta[0],-9999.)+Alt$(Jet_eta[1],-9999.))/2)/detajj',
'range': (10,-1.5,1.5),
'xaxis': 'Z^{lep}_{2}',
'fold': 3
}
variables['csvv2ivf_1'] = {
'name': 'Alt$(Jet_btagCSVV2[0],0.)',
'range' : (10,0,1),
'xaxis' : 'csvv2ivf 1st jet ',
'fold' : 3
}
variables['csvv2ivf_2'] = {
'name': 'Alt$(Jet_btagCSVV2[1],0.)',
'range' : (10,0,1),
'xaxis' : 'csvv2ivf 2nd jet ',
'fold' : 3
}
|
[
"jiexiao@pku.edu.cn"
] |
jiexiao@pku.edu.cn
|
4655a05a2e59738d661f9702526dbfdea1f20f57
|
ce083128fa87ca86c65059893aa8882d088461f5
|
/python/flask-webservices-labs/graphene/graphene/types/mutation.py
|
fe15f6a2daa5be2a7b09b6fd4419a7e2f2e88fd1
|
[
"MIT"
] |
permissive
|
marcosptf/fedora
|
581a446e7f81d8ae9a260eafb92814bc486ee077
|
359db63ff1fa79696b7bc803bcfa0042bff8ab44
|
refs/heads/master
| 2023-04-06T14:53:40.378260
| 2023-03-26T00:47:52
| 2023-03-26T00:47:52
| 26,059,824
| 6
| 5
| null | 2022-12-08T00:43:21
| 2014-11-01T18:48:56
| null |
UTF-8
|
Python
| false
| false
| 2,892
|
py
|
from collections import OrderedDict
from ..utils.get_unbound_function import get_unbound_function
from ..utils.props import props
from .field import Field
from .objecttype import ObjectType, ObjectTypeOptions
from .utils import yank_fields_from_attrs
from ..utils.deprecated import warn_deprecation
# For static type checking with Mypy
MYPY = False
if MYPY:
from .argument import Argument # NOQA
from typing import Dict, Type, Callable # NOQA
class MutationOptions(ObjectTypeOptions):
    """Options container stored on a Mutation subclass's ``_meta``."""
    # Map of argument name -> Argument mounted on the mutation field.
    arguments = None  # type: Dict[str, Argument]
    # ObjectType returned by the mutation (defaults to the Mutation class itself).
    output = None  # type: Type[ObjectType]
    # Callable that executes the mutation (derived from the class's ``mutate``).
    resolver = None  # type: Callable
class Mutation(ObjectType):
    '''
    Mutation Type Definition.

    Subclassing collects the output fields, the arguments (from an inner
    ``Arguments`` class, or the deprecated ``Input``) and the ``mutate``
    resolver into ``cls._meta`` so that ``Mutation.Field()`` can mount the
    mutation on a schema.
    '''
    @classmethod
    def __init_subclass_with_meta__(cls, resolver=None, output=None, arguments=None,
                                    _meta=None, **options):
        if not _meta:
            _meta = MutationOptions(cls)
        # The output type may also be declared as an inner ``Output`` attribute.
        output = output or getattr(cls, 'Output', None)
        fields = {}
        if not output:
            # If output is defined, we don't need to get the fields
            fields = OrderedDict()
            # Walk the MRO bottom-up so subclass fields override base fields.
            for base in reversed(cls.__mro__):
                fields.update(
                    yank_fields_from_attrs(base.__dict__, _as=Field)
                )
            output = cls
        if not arguments:
            input_class = getattr(cls, 'Arguments', None)
            if not input_class:
                # Fall back to the deprecated ``Input`` inner class.
                input_class = getattr(cls, 'Input', None)
                if input_class:
                    warn_deprecation((
                        "Please use {name}.Arguments instead of {name}.Input."
                        "Input is now only used in ClientMutationID.\n"
                        "Read more:"
                        " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
                    ).format(name=cls.__name__))
            if input_class:
                arguments = props(input_class)
            else:
                arguments = {}
        if not resolver:
            # Every mutation must provide a ``mutate`` method to resolve itself.
            mutate = getattr(cls, 'mutate', None)
            assert mutate, 'All mutations must define a mutate method in it'
            resolver = get_unbound_function(mutate)
        if _meta.fields:
            _meta.fields.update(fields)
        else:
            _meta.fields = fields
        _meta.output = output
        _meta.resolver = resolver
        _meta.arguments = arguments
        super(Mutation, cls).__init_subclass_with_meta__(_meta=_meta, **options)
    @classmethod
    def Field(cls, name=None, description=None, deprecation_reason=None):
        '''Return a ``Field`` that mounts this mutation on a schema.'''
        return Field(
            cls._meta.output,
            args=cls._meta.arguments,
            resolver=cls._meta.resolver,
            name=name,
            description=description,
            deprecation_reason=deprecation_reason,
        )
|
[
"marcosptf@yahoo.com.br"
] |
marcosptf@yahoo.com.br
|
d654571b75c42601d497f2010175e9d03db70f79
|
a9f38bb28ff9bd04b151d86c653cde9f46768c7c
|
/easy/guessNumberHigherLower.py
|
3d9a440d88b43dfb848527e6505af7060a690b0d
|
[] |
no_license
|
Xynoclafe/leetcode
|
02388516b10b8ee6bec6ee1b91ab5681c3254d33
|
4a80f02683e7fc14cb49c07170651ea3eeb280ac
|
refs/heads/master
| 2020-12-01T21:05:44.656581
| 2020-02-02T09:05:32
| 2020-02-02T09:05:32
| 230,770,600
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 699
|
py
|
# The guess API is already defined for you.
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
# def guess(num: int) -> int:
class Solution:
    def guessNumber(self, n: int) -> int:
        """Find the number picked by the judge in [1, n] via binary search.

        Relies on the judge-provided global ``guess(num)`` API: it returns
        0 when ``num`` is the picked number, 1 when the picked number is
        higher, and -1 when it is lower.
        """
        # BUG FIX: the original inner ``binSearch`` contained the dead
        # statement ``start`` (a no-op; a ``return`` was missing) and only
        # ever returned the midpoint — the helper is replaced by computing
        # the midpoint inline.
        low, high = 1, n
        while True:
            mid = (low + high) // 2
            result = guess(mid)
            if result == 0:
                return mid
            elif result == 1:
                # Picked number is higher than mid.
                low = mid + 1
            else:
                # Picked number is lower than mid.
                high = mid - 1
|
[
"gokulprem.94@gmail.com"
] |
gokulprem.94@gmail.com
|
a8972a430ffb07204980882f80982295def04f91
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02690/s260373583.py
|
683e4c44817819625608097b879fb92b1bb0fe95
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 181
|
py
|
def get(n):
    """Print the first pair (i, j) in [-1000, 1000]^2 with i**5 - j**5 == n.

    Scans in row-major order and stops at the first match; prints nothing
    when no such pair exists.
    """
    span = range(-1000, 1001)
    for a in span:
        for b in span:
            if a ** 5 - b ** 5 == n:
                print(a, b)
                return
n=int(input())
get(n)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
ee3ab7b304dcfd2627a23109f9e5a4af1e9cf3b9
|
02b6f852787c0f169c298090e412de84d9fffdfa
|
/src/dsrlib/ui/configuration.py
|
1f6bd8871dcf3223bf9723382947e5adbfa4f922
|
[
"MIT"
] |
permissive
|
ca4ti/dsremap
|
f5ffa0d5f15e37af23ec5bd2c326a0dfa5e6f99c
|
ad0929adfe5fa8499515b3a6a80e94dfd8c1c0bc
|
refs/heads/master
| 2023-04-02T10:02:02.147173
| 2021-04-11T11:16:59
| 2021-04-11T11:16:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,439
|
py
|
#!/usr/bin/env python3
import os
import shutil
from PyQt5 import QtCore, QtGui, QtWidgets
from dsrlib.domain import ConfigurationMixin, commands
from dsrlib.meta import Meta
from .actions import ActionWidgetBuilder
from .mixins import MainWindowMixin
from .utils import LayoutBuilder
from .uicommands import AddActionButton, DeleteActionsButton, ConvertToCustomActionButton
class ThumbnailView(MainWindowMixin, ConfigurationMixin, QtWidgets.QWidget):
    """Fixed-size widget displaying the configuration's thumbnail image.

    Accepts a single local image file via drag & drop and installs it as
    the configuration thumbnail through an undoable command.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setFixedWidth(170)
        self.setFixedHeight(300)
        self._pixmap = None  # cached scaled pixmap, rebuilt by reload()
        self.reload()
        self.setAcceptDrops(True)
    def reload(self):
        """Rebuild the cached pixmap from the configuration's thumbnail file."""
        filename = self.configuration().thumbnail()
        if filename is None:
            # No thumbnail yet: show a placeholder icon.
            self._pixmap = QtGui.QIcon(':icons/image.svg').pixmap(150, 150)
        else:
            pixmap = QtGui.QPixmap(filename)
            self._pixmap = pixmap.scaled(QtCore.QSize(150, 300), QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
        self.update()
    def paintEvent(self, event): # pylint: disable=W0613
        painter = QtGui.QPainter(self)
        painter.setRenderHint(painter.Antialiasing, True)
        # Center the pixmap inside the widget.
        rect = QtCore.QRect(QtCore.QPoint(0, 0), self._pixmap.size())
        rect.moveCenter(self.rect().center())
        painter.drawPixmap(rect.topLeft(), self._pixmap)
        if self.configuration().thumbnail() is None:
            # While no image is set, draw a hint below the placeholder.
            text = _('Drop an image here')
            bbox = painter.fontMetrics().boundingRect(text)
            bbox.moveCenter(rect.center())
            bbox.moveTop(rect.bottom())
            painter.drawText(bbox, 0, text)
    def dragEnterEvent(self, event):
        # Accept the drag only for exactly one local file that Qt can load
        # as an image; silently ignore everything else.
        data = event.mimeData()
        if data.hasFormat('text/uri-list'):
            if len(data.urls()) != 1:
                return
            url = data.urls()[0]
            if not url.isLocalFile():
                return
            filename = url.toLocalFile()
            if not os.path.isfile(filename):
                return
            pixmap = QtGui.QPixmap(filename)
            if pixmap.isNull():
                return
            event.setDropAction(QtCore.Qt.CopyAction)
            event.accept()
    def dropEvent(self, event):
        # Copy the dropped file into the application's storage, then record
        # an undoable thumbnail-change command.
        src = event.mimeData().urls()[0].toLocalFile()
        dst = Meta.newThumbnail(src)
        shutil.copyfile(src, dst)
        cmd = commands.ChangeConfigurationThumbnailCommand(configuration=self.configuration(), filename=dst)
        self.history().run(cmd)
# From https://gist.github.com/hahastudio/4345418 with minor changes
class TextEdit(QtWidgets.QTextEdit):
    """
    A TextEdit editor that sends editingFinished events
    when the text was changed and focus is lost.
    """
    editingFinished = QtCore.pyqtSignal()
    receivedFocus = QtCore.pyqtSignal()
    def __init__(self, parent):
        super().__init__(parent)
        self._changed = False  # True while user edits are pending notification
        self.setTabChangesFocus(True)
        self.textChanged.connect(self._handle_text_changed)
    def focusInEvent(self, event):
        super().focusInEvent(event)
        self.receivedFocus.emit()
    def focusOutEvent(self, event):
        # Only notify when the text actually changed since the last reset.
        if self._changed:
            self.editingFinished.emit()
        super().focusOutEvent(event)
    def _handle_text_changed(self): # pylint: disable=C0103
        self._changed = True
    def setTextChanged(self, state=True):
        """Manually set (or clear) the pending-change flag."""
        self._changed = state
    def setHtml(self, html):
        # Programmatic content updates must not count as user edits.
        super().setHtml(html)
        self._changed = False
    def setPlainText(self, text):
        # Programmatic content updates must not count as user edits.
        super().setPlainText(text)
        self._changed = False
class ConfigurationView(MainWindowMixin, ConfigurationMixin, QtWidgets.QWidget):
    """Editor for one configuration: name, thumbnail, description and the
    list of actions, all synchronised with the undo history."""
    # Emitted whenever the action-tree selection changes.
    selectionChanged = QtCore.pyqtSignal()
    def __init__(self, parent, **kwargs):
        super().__init__(parent, **kwargs)
        # Mirror model row changes into the tree widget.
        self.actions().rowsInserted.connect(self._addRows)
        self.actions().rowsRemoved.connect(self._removeRows)
        self._name = QtWidgets.QLineEdit(self.configuration().name(), self)
        self._name.editingFinished.connect(self._changeName)
        self._thumbnail = ThumbnailView(self, configuration=self.configuration(), mainWindow=self.mainWindow())
        self._description = TextEdit(self)
        self._description.setAcceptRichText(False)
        self._description.setPlainText(self.configuration().description())
        self._description.editingFinished.connect(self._changeDescription)
        # We don't actually use a QTreeView because setItemWidget is
        # convenient. Not in the mood to write a custom delegate.
        self._tree = QtWidgets.QTreeWidget(self)
        self._tree.setHeaderHidden(True)
        self._tree.itemSelectionChanged.connect(self.selectionChanged)
        self._tree.setAlternatingRowColors(True)
        btnAdd = AddActionButton(self, configuration=self.configuration(), mainWindow=self.mainWindow())
        btnDel = DeleteActionsButton(self, configuration=self.configuration(), container=self, mainWindow=self.mainWindow())
        btnConv = ConvertToCustomActionButton(self, configuration=self.configuration(), container=self, mainWindow=self.mainWindow())
        bld = LayoutBuilder(self)
        with bld.vbox():
            with bld.hbox() as header:
                header.addWidget(self._name)
                header.addWidget(btnConv)
                header.addWidget(btnDel)
                header.addWidget(btnAdd)
            with bld.hbox() as content:
                content.addWidget(self._thumbnail)
                with bld.vbox() as vbox:
                    vbox.addWidget(self._tree)
                    vbox.addWidget(self._description)
        self.configuration().changed.connect(self._updateValues)
        # In case of redo, the model may not be empty
        count = len(self.actions())
        if count:
            self._addRows(None, 0, count - 1)
    def selection(self):
        """Return the action objects attached to the selected tree items."""
        return [item.data(0, QtCore.Qt.UserRole) for item in self._tree.selectedItems()]
    def _addRows(self, parent, first, last): # pylint: disable=W0613
        # Insert one tree item per newly added action, with its editor widget.
        for index, action in enumerate(self.actions().items()[first:last+1]):
            item = QtWidgets.QTreeWidgetItem()
            item.setData(0, QtCore.Qt.UserRole, action)
            self._tree.insertTopLevelItem(first + index, item)
            widget = ActionWidgetBuilder(self, self.mainWindow()).visit(action)
            self._tree.setItemWidget(item, 0, widget)
            widget.geometryChanged.connect(item.emitDataChanged)
    def _removeRows(self, parent, first, last): # pylint: disable=W0613
        for _ in range(last - first + 1):
            self._tree.takeTopLevelItem(first)
    def _changeName(self):
        # Record an undoable command only when the name really changed.
        name = self._name.text()
        if name != self.configuration().name():
            cmd = commands.ChangeConfigurationNameCommand(configuration=self.configuration(), name=name)
            self.history().run(cmd)
    def _changeDescription(self):
        # Record an undoable command only when the description really changed.
        text = self._description.toPlainText()
        if text != self.configuration().description():
            cmd = commands.ChangeConfigurationDescriptionCommand(configuration=self.configuration(), description=text)
            self.history().run(cmd)
    def _updateValues(self):
        # Refresh all editors after an undo/redo changed the model.
        self._name.setText(self.configuration().name())
        self._description.setPlainText(self.configuration().description())
        self._thumbnail.reload()
|
[
"jerome@jeromelaheurte.net"
] |
jerome@jeromelaheurte.net
|
8e48cdc36af358ce63b9cee3a6d9027cf929722e
|
a838d4bed14d5df5314000b41f8318c4ebe0974e
|
/sdk/deviceupdate/azure-iot-deviceupdate/setup.py
|
3b4d52e9e304da8944d2b8b5c37ac85830a070fd
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
scbedd/azure-sdk-for-python
|
ee7cbd6a8725ddd4a6edfde5f40a2a589808daea
|
cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a
|
refs/heads/master
| 2023-09-01T08:38:56.188954
| 2021-06-17T22:52:28
| 2021-06-17T22:52:28
| 159,568,218
| 2
| 0
|
MIT
| 2019-08-11T21:16:01
| 2018-11-28T21:34:49
|
Python
|
UTF-8
|
Python
| false
| false
| 2,815
|
py
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# Packaging script for the azure-iot-deviceupdate client library.
import re
import os.path
from io import open
from setuptools import find_packages, setup
PACKAGE_NAME = "azure-iot-deviceupdate"
PACKAGE_PPRINT_NAME = "Device Update"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# azure v0.x is not compatible with this package
# azure v0.x used to have a __version__ attribute (newer versions don't)
try:
    import azure
    try:
        ver = azure.__version__
        raise Exception(
            'This package is incompatible with azure=={}. '.format(ver) +
            'Uninstall it with "pip uninstall azure".'
        )
    except AttributeError:
        # No __version__: modern azure namespace package, which is fine.
        pass
except ImportError:
    # azure is not installed at all -- nothing to check.
    pass
# Version extraction inspired from 'requests':
# read the VERSION = '...' assignment out of the package's _version.py.
with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)
if not version:
    raise RuntimeError('Cannot find version information')
# Long description shown on PyPI: README followed by the changelog.
with open('README.md', encoding='utf-8') as f:
    readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
    changelog = f.read()
setup(
    name=PACKAGE_NAME,
    version=version,
    description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
    long_description=readme + "\n\n" + changelog,
    long_description_content_type='text/markdown',
    url='https://github.com/Azure/azure-sdk-for-python',
    author='Microsoft Corporation',
    author_email='adupmdevteam@microsoft.com',
    license='MIT License',
    zip_safe=False,
    classifiers=[
        "Development Status :: 4 - Beta",
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'License :: OSI Approved :: MIT License',
    ],
    packages=find_packages(exclude=[
        'tests',
        # Exclude packages that will be covered by PEP420 or nspkg
        'azure',
        'azure.iot',
    ]),
    install_requires=[
        'msrest>=0.5.0',
        'azure-common~=1.1',
        'azure-core>=1.6.0,<2.0.0',
    ],
    extras_require={
        ":python_version<'3.0'": ['azure-iot-nspkg'],
    }
)
|
[
"noreply@github.com"
] |
scbedd.noreply@github.com
|
8e52e5124bb4d7475be9e1aba419b63c10e1da71
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/105/usersdata/188/50874/submittedfiles/av1_3.py
|
0328c1b58fec2bb6085db012bf78c1af432e4c52
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 311
|
py
|
# -*- coding: utf-8 -*-
# Reads three integers and prints the smallest positive integer divisible
# by all three (their least common multiple, found by brute force).
import math
N1=int(input('Digite o valor do número 1:'))
N2=int(input('Digite o valor do número 2:'))
N3=int(input('Digite o valor do número 3:'))
n=1
b=0
while n>0:
    # BUG FIX: the original tested n against the undefined names a, b and c
    # (a NameError); the divisors are the three numbers read above.
    if (n%N1)==0 and (n%N2)==0 and (n%N3)==0:
        b=b+n
        break
    else:
        n=n+1
# BUG FIX: print the computed value, not the literal string 'b'.
print(b)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
0c5b75952bd055ee2807574adfbafd0a1718e38e
|
ab670d6e59ebd4a0c23fa867fb77866d223163da
|
/Python/Problem243.py
|
4e68455830c3935144068fae503f0d467fa66e99
|
[] |
no_license
|
JeromeLefebvre/ProjectEuler
|
18799e85947e378e18839704c349ba770af4a128
|
3f16e5f231e341a471ffde8b0529407090920b56
|
refs/heads/master
| 2020-07-05T02:42:44.844607
| 2014-07-26T01:04:38
| 2014-07-26T01:04:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,847
|
py
|
'''
Problem 243
A positive fraction whose numerator is less than its denominator is called a proper fraction.
For any denominator, d, there will be d−1 proper fractions; for example, with d = 12:
1/12 , 2/12 , 3/12 , 4/12 , 5/12 , 6/12 , 7/12 , 8/12 , 9/12 , 10/12 , 11/12 .
We shall call a fraction that cannot be cancelled down a resilient fraction.
Furthermore we shall define the resilience of a denominator, R(d), to be the ratio of its proper fractions that are resilient; for example, R(12) = 4/11 .
In fact, d = 12 is the smallest denominator having a resilience R(d) < 4/10 .
Find the smallest denominator d, having a resilience R(d) < 15499/94744 .
'''
from projectEuler import primes,phiFromFactors, generateFactors,product
from itertools import combinations
import random
from math import log
def maxExponent(p,maximum):
    """Largest integer e with p**e <= maximum (0 when maximum <= 1)."""
    if maximum > 1:
        return int(log(maximum) / log(p))
    return 0
def phiFromFactors(factors):
    """Euler's totient of the number whose prime factorisation (with
    multiplicity) is ``factors``; returns 0 for the empty list."""
    if factors == []: return 0
    result = 1
    for prime in set(factors):
        k = factors.count(prime)
        result *= prime ** k - prime ** (k - 1)
    return result
def genFactors(l = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29], maximum=10**10):
    """Randomly choose exponents for the primes in ``l`` so the product
    stays below ``maximum``; return (phi, number) for the result.

    NOTE(review): the mutable default list is never mutated here; the bare
    ``except`` silently maps an empty randint range to exponent 0.
    """
    exp = {}
    n = maximum  # remaining headroom, divided down as exponents are chosen
    # 1 until
    # ``one`` keeps the minimum exponent at 1 until the randomly chosen
    # prime q has been processed, then drops it to 0.
    q = random.choice(l)
    one = 1
    for p in l:
        try:
            exp[p] = random.randint(one,maxExponent(p,n))
        except:
            exp[p] = 0
        n //= p**exp[p]
        if p == q:
            one = 0
    phi = product([p**exp[p] - p**(exp[p] - 1) for p in l if exp[p] > 0])
    return phi, product([p**exp[p] for p in l])
genFactors(maximum=1000)
#892371480
#200560490130
#70274254050
# 10000 -> 36427776000
def problem243():
    """Randomized search for a denominator d with resilience R(d) < 15499/94744.

    Repeatedly samples candidate factorisations below the current record and
    keeps the smallest n whose phi/(n-1) beats the target ratio.
    NOTE(review): Monte-Carlo search — the returned value is the best found
    in 10000 samples, not a proven minimum.
    """
    record = 2*3*5*7*11*13*17*19*23*29*31
    record = 70274254050  # previously found record, used as starting bound
    for i in range(1,10000):
        phi, n = genFactors(maximum=record)
        r = phi/(n-1)
        if r < 15499/94744 and n < record:
            record = n
    return record
if __name__ == "__main__":
print(problem243())
# NOTE(review): stray paste artifact ("68916891abcABC") commented out — it was a syntax error in executable scope.
|
[
"jerome.p.lefebvre@gmail.com"
] |
jerome.p.lefebvre@gmail.com
|
06711799efc8419d428058ea3f7582f7a48c0a3e
|
284713c5e6ad6681d2c6c379efc96e5c42833321
|
/DB_SQLITE/04_sql_injection.py
|
ba90ecf3dcc02dcf9e5f32d856ff698089f5add3
|
[] |
no_license
|
alexeysorok/Udemy_Python_2019_YouRa
|
ddfa1f620dcac44cc53958bb88169845072db38e
|
8ebd8483f7927892b346af209b4325c9ae0c7dab
|
refs/heads/master
| 2020-09-27T03:24:08.997040
| 2019-12-07T19:04:17
| 2019-12-07T19:04:17
| 226,417,119
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,097
|
py
|
# Demonstration of SQL injection and the safe, parameterised alternative.
import sqlite3
conn = sqlite3.connect('users.db')
# One-time schema setup, kept for reference:
# cursor.execute('CREATE TABLE users (user_name TEXT,'
# 'user_password TEXT)')
users = [
    ('jack123', 'asdasdasd'),
    ('kasdsa', 'asdsadasd'),
    ('Bio', 'asdasda')
]
# insert_query = "INSERT INTO users VALUES (?, ?);"
user_name = input('Input user name: ')
user_password = input('Input you password: ')
# VULNERABLE variant — string interpolation lets user input rewrite the SQL:
# select_query = f"SELECT * FROM users WHERE user_name = '{user_name}' AND " \
#                f"user_password = '{user_password}'"
# Correct way to write the query: ? placeholders, values bound in execute().
select_query = f"SELECT * FROM users WHERE user_name = ? AND user_password = ?"
cursor = conn.cursor()
cursor.execute(select_query, (user_name, user_password))
data = cursor.fetchone()
if data:
    print('You are logged in!')
else:
    print("Please try again")
conn.commit()
conn.close()
# Resulting query string of the vulnerable variant:
# SELECT * FROM users WHERE user_name = '{user_name}' AND user_password = '' OR 1=1
# Injection via the password field:
# ' or 1=1--'
# -- starts an SQL comment
|
[
"alexe@W10SSD"
] |
alexe@W10SSD
|
376d81f18957b729df1c5d3158ad6a37aa802021
|
a183a600e666b11331d9bd18bcfe1193ea328f23
|
/pdt/core/migrations/0032_case_revision.py
|
ff8909b6297a1b81d520bff33753bd05266aae58
|
[
"MIT"
] |
permissive
|
AbdulRahmanAlHamali/pdt
|
abebc9cae04f4afa1fc31b87cbf4b981affdca62
|
5c32aab78e48b5249fd458d9c837596a75698968
|
refs/heads/master
| 2020-05-15T07:51:09.877614
| 2015-12-01T18:22:56
| 2015-12-01T18:22:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 410
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the free-form ``revision`` CharField to the Case model."""
    dependencies = [
        ('core', '0031_auto_20150618_1116'),
    ]
    operations = [
        migrations.AddField(
            model_name='case',
            name='revision',
            field=models.CharField(blank=True, max_length=255),
        ),
    ]
|
[
"bubenkoff@gmail.com"
] |
bubenkoff@gmail.com
|
c7ef589bf879fb64a6c1ba7225ee9fec2cfe12bb
|
ccd30f827fb3bd4231c59d05e6d61c5963019291
|
/Practice/LeetCode/EverydayPrac/3.py
|
603f49bc17eca59b1792306e56056465b5c878af
|
[] |
no_license
|
anthony20102101/Python_practice
|
d6709e7768baebaa248612e0795dd3e3fa0ae6ba
|
56bb1335c86feafe2d3d82efe68b207c6aa32129
|
refs/heads/master
| 2023-06-10T18:49:11.619624
| 2021-06-27T15:36:10
| 2021-06-27T15:36:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 628
|
py
|
# Given a non-negative index k (k <= 33), return the k-th row of Pascal's triangle.
#
# In Pascal's triangle, each number is the sum of the two numbers directly above it.
#
# Example:
# Input: 3
# Output: [1,3,3,1]
# Follow-up:
# Can you optimize your algorithm to O(k) space complexity?
# Optimization:
# Computing row i+1 only uses the data of row i, so a rolling-array approach
# reduces the space complexity.
# Using that recurrence, all binomial coefficients of row n can be computed in linear time.
#
# Complexity analysis
#
# Time complexity: O(rowIndex).
#
# Space complexity: O(1), not counting the space used by the return value.
|
[
"492193947@qq.com"
] |
492193947@qq.com
|
04003011021ed9b70a92bbdc33e87c7af6f9ad9e
|
6fa0c051f742c3f9c99ee2800cd132db5ffb28c7
|
/src/account/migrations/0008_auto_20200806_2318.py
|
03591749cf0b1a71c7d4f20b7e81927a0e349322
|
[] |
no_license
|
MCN10/NXTLVL
|
9c37bf5782bfd8f24d0fb0431cb5885c585369b0
|
76d8818b7961e4f0362e0d5f41f48f53ce1bfdc5
|
refs/heads/main
| 2023-06-02T13:51:34.432668
| 2021-06-02T14:19:21
| 2021-06-02T14:19:21
| 328,625,042
| 1
| 0
| null | 2021-06-16T10:16:17
| 2021-01-11T10:19:44
|
Python
|
UTF-8
|
Python
| false
| false
| 611
|
py
|
# Generated by Django 3.0.8 on 2020-08-06 23:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax the Account model's ``phone`` (nullable) and relabel
    ``phone``/``username`` with human-readable verbose names."""
    dependencies = [
        ('account', '0007_account_is_axisstaff'),
    ]
    operations = [
        migrations.AlterField(
            model_name='account',
            name='phone',
            field=models.CharField(max_length=200, null=True, verbose_name='Phone Number'),
        ),
        migrations.AlterField(
            model_name='account',
            name='username',
            field=models.CharField(max_length=30, verbose_name='Full Name'),
        ),
    ]
|
[
"mcn10.foxx@gmail.com"
] |
mcn10.foxx@gmail.com
|
3d0e54171f99c5973fdb441873c4d1302c56d070
|
86d499787fb35024db798b0c1dbfa7a6936854e9
|
/py_tools/example/TV.py
|
4ec42492467c2b2392e40b3209a8fa35afb1b711
|
[] |
no_license
|
Tomtao626/python-note
|
afd1c82b74e2d3a488b65742547f75b49a11616e
|
e498e1e7398ff66a757e161a8b8c32c34c38e561
|
refs/heads/main
| 2023-04-28T08:47:54.525440
| 2023-04-21T17:27:25
| 2023-04-21T17:27:25
| 552,830,730
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,095
|
py
|
class TV:
    """Simple TV model: channels 1-120, volume levels 1-7, on/off state.

    All mutators are no-ops while the TV is off.
    """
    def __init__(self):
        self.channel = 1  # Default channel is 1
        self.volumeLevel = 1  # Default volume level is 1
        self.on = False  # By default TV is off
    def turnOn(self):
        self.on = True
    def turnOff(self):
        self.on = False
    def getChannel(self):
        return self.channel
    def setChannel(self, channel):
        # BUG FIX: validate the *requested* channel, not the current one.
        # The original tested ``self.channel``, so any value (e.g. 999)
        # was accepted once the TV was on.
        if self.on and 1 <= channel <= 120:
            self.channel = channel
    def getVolumeLevel(self):
        return self.volumeLevel
    def setVolume(self, volumeLevel):
        # BUG FIX: validate the requested level (same defect as setChannel).
        if self.on and 1 <= volumeLevel <= 7:
            self.volumeLevel = volumeLevel
    def channelUp(self):
        if self.on and self.channel < 120:
            self.channel += 1
    def channelDown(self):
        if self.on and self.channel > 1:
            self.channel -= 1
    def volumeUp(self):
        if self.on and self.volumeLevel < 7:
            self.volumeLevel += 1
    def volumeDown(self):
        if self.on and self.volumeLevel > 1:
            self.volumeLevel -= 1
|
[
"gogs@fake.local"
] |
gogs@fake.local
|
932c777e82f9b34d9552ecd5044100328cbcb95c
|
a140fe192fd643ce556fa34bf2f84ddbdb97f091
|
/.history/class스타크래프트프로젝트전반전_20200709105346.py
|
18786ac5676fbf66cb3c84a0ce24b862c00e37aa
|
[] |
no_license
|
sangha0719/py-practice
|
826f13cb422ef43992a69f822b9f04c2cb6d4815
|
6d71ce64bf91cc3bccee81378577d84ba9d9c121
|
refs/heads/master
| 2023-03-13T04:40:55.883279
| 2021-02-25T12:02:04
| 2021-02-25T12:02:04
| 342,230,484
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,135
|
py
|
# Base class for ground units.
class Unit:
    def __init__(self, name, hp, speed):
        self.name = name      # unit display name
        self.hp = hp          # hit points
        self.speed = speed    # ground movement speed
        print("{0} 유닛이 생성되었습니다.".format(name))
    def move(self, location):
        """Move the unit toward ``location`` at its ground speed."""
        print("[지상 유닛 이동]")
        print("{0} : {1} 방향으로 이동합니다. [속도 {2}]"
            .format(self.name, location, self.speed))
    def damaged(self, damage):
        """Apply ``damage`` to the unit and report destruction at hp <= 0."""
        print("{0} : {1} 데미지를 입었습니다.".format(self.name, damage))
        self.hp -= damage
        print("{0} : 현재 체력은 {1}입니다.".format(self.name, self.hp))
        if self.hp <= 0:
            print("{0} : 파괴되었습니다.".format(self.name))
# Ground attack unit.
class AttackUnit(Unit):
    def __init__(self, name, hp, speed, damage):
        Unit.__init__(self, name, hp, speed)
        self.damage = damage  # attack power
    def attack(self, location): # methods in a class always take self first
        """Attack toward ``location`` with this unit's attack power."""
        print("{0} : {1} 방향으로 적군을 공격 합니다. [공격력 {2}]"
            .format(self.name, location, self.damage))
# Marine: basic infantry unit.
class Marine(AttackUnit):
    def __init__(self):
        AttackUnit.__init__(self, "마린", 40, 1, 5)
    # Stimpack: boosts movement/attack speed for a while at the cost of 10 HP.
    def stimpack(self):
        if self.hp > 10:
            self.hp -= 10
            print("{0} : 스팀팩을 사용합니다. (HP 10 감소)".format(self.name))
        else:
            # Not enough HP left to pay the stimpack cost.
            print("{0} : 체력이 부족하여 스팀팩을 사용하지 않습니다.".format(self.name))
# Tank: can enter siege mode for double damage at the cost of mobility.
class Tank(AttackUnit):
    # Siege mode: anchors the tank to the ground for stronger attacks; no movement.
    seize_developed = False  # whether siege mode has been researched (class-wide)
    def __init__(self):
        AttackUnit.__init__(self, "탱크", 150, 1, 35)
        self.seize_mode = False
    def set_seize_mode(self):
        """Toggle siege mode, doubling/halving damage accordingly."""
        if Tank.seize_developed == False:
            return
        # Not in siege mode -> enter siege mode.
        if self.seize_mode == False:
            print("{0} : 시즈모드로 전환합니다.".format(self.name))
            self.damage *= 2
            self.seize_mode = True
        # In siege mode -> leave siege mode.
        else:
            print("{0} : 시즈모드를 해제합니다.".format(self.name))
            # BUG FIX: the original wrote ``self.damaged /= 2``, which tried to
            # divide the inherited ``damaged`` *method* (a TypeError at runtime)
            # instead of halving the attack power.
            self.damage /= 2
            self.seize_mode = False
# Mixin providing flight capability.
class Flyable:
    def __init__(self, flying_speed):
        self.flying_speed = flying_speed  # air movement speed
    def fly(self, name, location):
        """Fly the unit called ``name`` toward ``location`` at flying speed."""
        print("{0} : {1} 방향으로 날아갑니다. [속도 {2}]"
            .format(name, location, self.flying_speed))
# Airborne attack unit: attacks like AttackUnit, moves by flying.
class FlyableAttackUnit(AttackUnit, Flyable):
    def __init__(self, name, hp, damage, flying_speed):
        AttackUnit.__init__(self, name, hp, 0, damage) # ground speed is 0
        Flyable.__init__(self, flying_speed)
    def move(self, location):
        """Override ground movement with flight."""
        print("[공중 유닛 이동]")
        self.fly(self.name, location)
# Wraith: cloakable air fighter.
class Wraith(FlyableAttackUnit):
    def __init__(self):
        FlyableAttackUnit.__init__(self, "레이스", 80, 20, 5)
        self.clocked = False  # cloaking mode (initially off)
    def clocking(self):
        """Toggle cloaking mode on/off."""
        if self.clocked == True:  # cloaked -> uncloak
            print("{0} : 클로킹 모드 해제합니다.".format(self.name))
            # BUG FIX: the original used ``==`` (a no-op comparison) instead of
            # assignment here and below, so the mode never actually toggled.
            self.clocked = False
        else:  # not cloaked -> cloak
            print("{0} : 클로킹 모드 설정합니다.".format(self.name))
            self.clocked = True
def game_start():
    """Announce the start of a new game."""
    print("[알림] 새로운 게임을 시작합니다.")
def game_over():
    """Print the player's concession and exit messages."""
    print("Player : gg")
    print("[Player] 님이 게임에서 퇴장하셨습니다.")
# 실제 게임 진행
game_start()
# 마린 3기 생성
m1 = Marine()
m2 = Marine()
m3 = Marine()
# 탱크 2기 생성
t1 = Tank()
t2 = Tank()
# 레이스 1기 생성
w1 =
|
[
"sangha0719@gmail.com"
] |
sangha0719@gmail.com
|
cde9e8c41184ee204fbf4f603084f018c667ea9d
|
4cb81903c4d07cd85d9bb8d37eab8ab399a6e276
|
/Array Sequences/Practice Problems/uniquechar.py
|
ac9f2e97f4e67e22a89c814f8168cfb1ea4cb2ad
|
[] |
no_license
|
JitenKumar/Python-for-Algorithms--Data-Structures
|
0011881c8c8558a2e21430afc1aa7d9232392b2c
|
7ee8a3ef287761b00be1907c5bbad35e75c5bfd6
|
refs/heads/master
| 2020-03-18T22:02:47.673814
| 2018-08-05T06:07:17
| 2018-08-05T06:07:17
| 135,320,756
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 477
|
py
|
'''
Unique Characters in String
Problem
Given a string,determine if it is compreised of all unique characters. For example, the string 'abcde' has all unique characters and should return True. The string 'aabcde' contains duplicate characters and should return false.
'''
# Solution: track characters already seen; bail out on the first repeat.
def unique_char(string):
    """Return True when every character in ``string`` occurs exactly once."""
    seen = set()
    for ch in string:
        if ch in seen:
            return False
        seen.add(ch)
    return True
print(unique_char('ABCDEFGHI'))
|
[
"jitenderpalsra@gmail.com"
] |
jitenderpalsra@gmail.com
|
92ec87d6a1f183a10a48e5bb65076fbca52c2d3c
|
55c250525bd7198ac905b1f2f86d16a44f73e03a
|
/Python/Lazymux/routersploit/tests/creds/routers/netcore/test_ssh_default_creds.py
|
2d81b6e62845318c12ca1f5a515d18860390ca6d
|
[] |
no_license
|
NateWeiler/Resources
|
213d18ba86f7cc9d845741b8571b9e2c2c6be916
|
bd4a8a82a3e83a381c97d19e5df42cbababfc66c
|
refs/heads/master
| 2023-09-03T17:50:31.937137
| 2023-08-28T23:50:57
| 2023-08-28T23:50:57
| 267,368,545
| 2
| 1
| null | 2022-09-08T15:20:18
| 2020-05-27T16:18:17
| null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:029aa8eb5f4144f599d8be14416b93b3a2e19b768949e76237a74d16a341aaf0
size 634
|
[
"nateweiler84@gmail.com"
] |
nateweiler84@gmail.com
|
922539ebe02f2df53fc16aea241cff2fb0df5b23
|
795df757ef84073c3adaf552d5f4b79fcb111bad
|
/i4lib/i4vec_sorted_unique.py
|
eea4deabcf5c5276323c987e360cb3e95fd03a39
|
[] |
no_license
|
tnakaicode/jburkardt-python
|
02cb2f9ba817abf158fc93203eb17bf1cb3a5008
|
1a63f7664e47d6b81c07f2261b44f472adc4274d
|
refs/heads/master
| 2022-05-21T04:41:37.611658
| 2022-04-09T03:31:00
| 2022-04-09T03:31:00
| 243,854,197
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,288
|
py
|
#! /usr/bin/env python
#
def i4vec_sorted_unique ( n, a ):
#*****************************************************************************80
#
## I4VEC_SORTED_UNIQUE finds the unique elements in a sorted I4VEC.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Modified:
#
#    29 February 2016
#
#  Author:
#
#    John Burkardt
#
#  Parameters:
#
#    Input, integer N, the number of elements in A.
#
#    Input, integer A(N), the sorted integer array.
#
#    Output, integer N_UNIQUE, the number of unique elements in A.
#
#    Output, integer A_UNIQUE[N_UNIQUE], the unique elements.
#
#  NOTE(review): A is assumed to be sorted ascending; with unsorted input
#  the single-pass scan below would miss duplicates.  For N <= 0 the
#  returned array is empty (and of default float dtype, unlike the int32
#  array returned otherwise).
#
  import numpy as np
  from i4vec_sorted_unique_count import i4vec_sorted_unique_count
  if ( n <= 0 ):
    n_unique = 0
    a_unique = np.zeros ( 0 )
    return n_unique, a_unique
#
#  First pass counts the unique values so the output can be sized exactly.
#
  n_unique = i4vec_sorted_unique_count ( n, a )
  a_unique = np.zeros ( n_unique, dtype = np.int32 )
#
#  Second pass copies each value the first time it appears.
#
  k = 0
  a_unique[0] = a[0];
  for i in range ( 1, n ):
    if ( a[i] != a_unique[k] ):
      k = k + 1
      a_unique[k] = a[i]
  return n_unique, a_unique
def i4vec_sorted_unique_test ( ):
#*****************************************************************************80
#
## I4VEC_SORTED_UNIQUE_TEST tests I4VEC_SORTED_UNIQUE.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Modified:
#
#    29 February 2016
#
#  Author:
#
#    John Burkardt
#
#  Generates a random vector, sorts it, and prints both the sorted input
#  and the unique entries extracted from it.
#
  import platform
  from i4vec_print import i4vec_print
  from i4vec_sort_heap_a import i4vec_sort_heap_a
  from i4vec_uniform_ab import i4vec_uniform_ab
  n = 20
  b = 0
  c = n
  print ( '' )
  print ( 'I4VEC_SORTED_UNIQUE_TEST' )
  print ( '  Python version: %s' % ( platform.python_version ( ) ) )
  print ( '  I4VEC_SORTED_UNIQUE finds unique entries in a sorted array.' )
  seed = 123456789
  a, seed = i4vec_uniform_ab ( n, b, c, seed )
#
#  The vector must be sorted before calling I4VEC_SORTED_UNIQUE.
#
  a = i4vec_sort_heap_a ( n, a )
  i4vec_print ( n, a, '  Input vector:' )
  unique_num, a_unique = i4vec_sorted_unique ( n, a )
  i4vec_print ( unique_num, a_unique, '  Unique entries:' )
#
#  Terminate.
#
  print ( '' )
  print ( 'I4VEC_SORTED_UNIQUE_TEST' )
  print ( '  Normal end of execution.' )
  return
if ( __name__ == '__main__' ):
from timestamp import timestamp
timestamp ( )
i4vec_sorted_unique_test ( )
timestamp ( )
|
[
"tnakaicode@gmail.com"
] |
tnakaicode@gmail.com
|
827e8cc5f49718946538d832c5f3d61d6eebdca7
|
568345ee64e3e283a916af372a40b34b595d6ff3
|
/utils/lldb-dotest/lldb-dotest.in
|
cc6ea350654a205aae889195f9e92b114c284d36
|
[
"NCSA",
"Apache-2.0",
"LLVM-exception"
] |
permissive
|
enterstudio/swift-lldb
|
b16fb3f067da3933af0fb1024630f7066b38a7ef
|
af85d636d230da2460f91938b1ff734b0fb64b42
|
refs/heads/stable
| 2020-04-27T01:43:35.935989
| 2019-03-05T01:43:09
| 2019-03-05T01:43:09
| 173,973,645
| 2
| 0
|
Apache-2.0
| 2019-03-05T15:37:31
| 2019-03-05T15:37:26
| null |
UTF-8
|
Python
| false
| false
| 453
|
in
|
#!/usr/bin/env python
# Thin wrapper around LLDB's dotest.py; the @...@ placeholders are
# substituted by the build system at configure time.
import os
import subprocess
import sys
dotest_path = '@LLDB_SOURCE_DIR@/test/dotest.py'
dotest_args_str = '@LLDB_DOTEST_ARGS@'
if __name__ == '__main__':
    wrapper_args = sys.argv[1:]
    # The configured argument list is a single ;-separated string.
    dotest_args = dotest_args_str.split(';')
    # Build dotest.py command.
    cmd = [dotest_path, '-q']
    cmd.extend(dotest_args)
    cmd.extend(wrapper_args)
    # Invoke dotest.py and return exit code.
    sys.exit(subprocess.call(cmd))
|
[
"jonas@devlieghere.com"
] |
jonas@devlieghere.com
|
a93ec77494af1405eec2e4807036d13cb21449f5
|
a88e486c3be855554e8c9998766869a19a4e0635
|
/coursera/knapsack/greedy.py
|
9fcfd92a3a800c1f6cb2b685445bf70c2456db4b
|
[] |
no_license
|
DXV-HUST-SoICT/Combinatorial-Optimization
|
03559786a36f66f10742e3a0c520a3369e96a065
|
67c326635bb4245e3dd9819ea9704c37bb9635d3
|
refs/heads/master
| 2021-03-17T12:59:51.141027
| 2020-06-09T17:42:41
| 2020-06-09T17:42:41
| 246,992,799
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 412
|
py
|
def greedy_by_avarage_value(items, taken, capacity):
    """Greedy knapsack: take items in decreasing value-per-weight order.

    Sorts ``items`` in place, marks chosen items in ``taken`` via their
    ``index`` attribute, and returns (value, weight, 0), where the trailing
    0 flags the solution as not provably optimal.
    """
    def key(item):
        # Value density: value gained per unit of weight.
        return item.value / item.weight
    items.sort(key=key, reverse=True)
    value = 0
    weight = 0
    for item in items:
        if weight + item.weight <= capacity:
            # BUG FIX: accumulate the item's *value*; the original added
            # ``item.weight`` here, so the reported value was just the weight.
            value += item.value
            weight += item.weight
            taken[item.index] = 1
    return value, weight, 0
|
[
"vuong.1998@gmail.com"
] |
vuong.1998@gmail.com
|
0a26c1cda5fed23e78e41221fca74d55a0da585f
|
1524720d6480ad0a51b6fd8ff709587455bf4c5d
|
/tums/trunk/source/plugins/DHCP.py
|
7af3eb81f2a7ce65771d5d72723a41f35579e175
|
[] |
no_license
|
calston/tums
|
2bd6d3cac5232d2ccb7e9becfc649e302a310eab
|
b93e3e957ff1da5b020075574942913c8822d12a
|
refs/heads/master
| 2020-07-12T03:46:43.639800
| 2018-05-12T10:54:54
| 2018-05-12T10:54:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,247
|
py
|
import config, os
from Core import Utils
class Plugin(object):
    """TUMS configurator plugin that regenerates the ISC DHCP server config.

    ``writeConfig`` renders a dhcpd.conf covering every LAN interface that
    is flagged as a DHCP server, and ``reloadServices`` (re)starts or
    disables the DHCP service accordingly.
    """
    # CLI hook: running the configurator with --dhcp triggers this plugin.
    parameterHook = "--dhcp"
    parameterDescription = "Reconfigure DHCP"
    parameterArgs = ""
    # Run automatically during a full reconfigure.
    autoRun = True
    configFiles = [
        "/etc/dhcp/dhcpd.conf"
    ]
    def reloadServices(self):
        """Restart dhcpd and (de)register its init script per config flag.

        NOTE(review): the enable/disable flag checked is 'dhcp3-server' but
        the restarted init script is 'dhcpd', and writeConfig() writes to
        /etc/dhcp3/dhcpd.conf while configFiles lists /etc/dhcp/dhcpd.conf —
        confirm which distro packaging this is meant to target.
        """
        if config.General.get('services', {}).get('dhcp3-server', True):
            os.system('/etc/init.d/dhcpd restart')
            os.system('update-rc.d dhcp3-server defaults')
        else:
            os.system('update-rc.d -f dhcp3-server remove')
    def writeConfig(self, *a):
        """Render dhcpd.conf and /etc/default/dhcp3-server from `config`.

        Builds a global header (DDNS zones, lease times, snom phone
        vendor-class) and then one shared-network section per LAN interface
        that has both an IP and the 'dhcpserver' flag set.
        """
        lans = Utils.getLanNetworks(config)
        extramain = config.DHCP.get('main','')
        ips = Utils.getLanIPs(config)
        myIp = ips[0]
        # Reversed first three octets, used for the reverse-DNS zone name.
        rev = '.'.join([i for i in reversed(myIp.split('.')[:3])])
        ifaces = []
        dhcpconf = """# DHCPD config generated by TUMS Configurator
ddns-update-style interim;
default-lease-time 21600;
max-lease-time 21600;
allow booting;
allow bootp;
authoritative;
log-facility local7;
zone %(domain)s. {
primary 127.0.0.1;
}
zone %(rev)s.in-addr.arpa. {
primary 127.0.0.1;
}
option local-pac-server code 252 = text;
option option-66 code 66 = text;
option option-67 code 67 = text;
%(snomConfig)s
%(extramain)s
""" % {
            'extramain': extramain,
            'domain': config.Domain,
            # Vendor class matching snom phones by MAC OUI 00:04:13.
            'snomConfig':"""class "snom" {
match if substring (hardware, 1, 3) = 00:04:13 ;
}""",
            'rev': rev
        }
        n = 0
        for k,v in lans.items():
            myNet = v
            myIp = config.EthernetDevices[k].get('ip', '/').split('/')[0]
            dhcpConf = config.DHCP.get(k, {})
            if not myIp:
                # No IP set for this interface (is DHCP itself)
                continue
            if not config.EthernetDevices[k].get('dhcpserver'):
                # Not set to do DHCP
                continue
            ifaces.append(k)
            # Static host declarations for configured leases on this net.
            statics = ""
            for ip, hostmac in config.DHCP.get('leases',{}).items():
                if Utils.matchIP(myNet, ip):
                    # make sure the IP is in this network
                    host, mac = hostmac
                    statics += """ host %s {
fixed-address %s;
hardware ethernet %s;
}\n""" % (host, ip, mac)
            myNetmask = Utils.cidr2netmask(myNet.split('/')[1])
            # Per-interface overrides, all falling back to sane defaults.
            rangeStart = dhcpConf.get('rangeStart', "100")
            rangeEnd = dhcpConf.get('rangeEnd', "240")
            snomRangeStart = dhcpConf.get('snomStart', "60")
            snomRangeEnd = dhcpConf.get('snomEnd', "80")
            snomConfigAddr = dhcpConf.get('snomConfigAddr', myIp + ':9682')
            noRange = dhcpConf.get('noRange', False)
            netmask = dhcpConf.get('netmask', myNetmask)
            netbios = dhcpConf.get('netbios', myIp)
            nameserver = dhcpConf.get('nameserver', myIp)
            router = dhcpConf.get('gateway', myIp)
            myNet = dhcpConf.get('network', Utils.getNetwork(config.EthernetDevices[k]['ip']))
            domain = dhcpConf.get('domain', config.Domain)
            if not '/' in myNet:
                # AAAAAAAAAAAARGH GOD DAMN DIE IN HELL PAUL VIXIE
                cdr = Utils.netmask2cidr(netmask)
                myNet = "%s/%s" % (myNet, cdr)
                bcast = Utils.getBroadcast(myNet)
            else:
                bcast = Utils.getBroadcast(myNet)
            # allow custom configuration options
            custom = dhcpConf.get('custom', '')
            netL = '.'.join(myNet.split('.')[:3])
            # Bare octet ranges (e.g. "100") get the network prefix prepended.
            if not ("." in rangeStart):
                rangeStart = "%s.%s" % (netL, rangeStart)
                rangeEnd = "%s.%s" % (netL, rangeEnd)
            if not ("." in snomRangeStart):
                snomRangeStart = "%s.%s" % (netL, snomRangeStart)
                snomRangeEnd = "%s.%s" % (netL, snomRangeEnd)
            # Optional auto-provisioning pool reserved for snom phones.
            snomConfig = ""
            if dhcpConf.get('autoProv', True):
                snomConfig = """
pool {
allow members of "snom";
range dynamic-bootp %(rangeStart)s %(rangeEnd)s;
option option-66 "http://%(configURL)s";
option option-67 "snom/snom.htm";
filename "snom/snom.htm";
}""" % {
                    'configURL': snomConfigAddr,
                    'rangeStart': snomRangeStart,
                    'rangeEnd': snomRangeEnd,
                }
            # Substitution values for the shared-network template below.
            defn = {
                'netname': 'DHCP%s' % k.upper(),
                'myIp': myIp,
                'pacIp': myIp.replace('.', '-'),
                'domain': domain,
                'network': netL,
                'networkF': myNet.split('/')[0],
                'static': statics,
                'custom': custom,
                'netmask': netmask,
                'rangeSpec': 'range dynamic-bootp %s %s;' % (rangeStart, rangeEnd),
                'rangeStart': rangeStart,
                'rangeEnd': rangeEnd,
                'myNetbios': netbios,
                'myDns': nameserver,
                'myRouter': router,
                'extramain': extramain,
                'bcast': bcast,
                'snomConfig': snomConfig,
            }
            """If noRange is specified, don't provide a range in the dhcpd.conf, may be useful for custom configs"""
            if noRange:
                defn['generalPool'] = ""
            else:
                defn['generalPool'] = """
pool {
%s
%s
}""" % (
                    dhcpConf.get('autoProv', True) and 'deny members of "snom";' or '',
                    defn['rangeSpec']
                )
            dhcpnet = """
shared-network %(netname)s {
use-host-decl-names on;
option domain-name "%(domain)s";
option domain-name-servers %(myDns)s;
option netbios-name-servers %(myNetbios)s;
option netbios-node-type 8;
option local-pac-server "http://%(myIp)s/wpad-%(pacIp)s.pac";
option ntp-servers %(myIp)s;
option time-servers %(myIp)s;
option log-servers %(myIp)s;
option font-servers %(myIp)s;
option pop-server %(myIp)s;
option smtp-server %(myIp)s;
option x-display-manager %(myIp)s;
subnet %(networkF)s netmask %(netmask)s {
option subnet-mask %(netmask)s;
option broadcast-address %(bcast)s;
option routers %(myRouter)s;
}
%(snomConfig)s
%(generalPool)s
%(static)s
%(custom)s
}\n""" % defn
            dhcpconf += dhcpnet
        # Check for debianism (goes in /etc/dhcp3)
        f = open('/etc/dhcp3/dhcpd.conf', 'wt')
        f.write(dhcpconf)
        f.close()
        # Tell the Debian dhcp3-server init script which interfaces to serve.
        f = open('/etc/default/dhcp3-server', 'wt')
        f.write('# On what interfaces should the DHCP server (dhcpd) serve DHCP requests?\n')
        f.write('# Separate multiple interfaces with spaces, e.g. "eth0 eth1".\n')
        f.write('INTERFACES="%s"\n' % ' '.join(ifaces))
        f.close()
|
[
"junwin@gmail.com"
] |
junwin@gmail.com
|
de495f8070a7258d450001eb321fe83c21087cd2
|
df601ac0a0dd618c75241ca050468cab5f580d3a
|
/kgb/calls.py
|
77111f43f0829d696bd0c66e7999af9890c67e7f
|
[] |
no_license
|
mitchhentges/kgb
|
ff90d7e6c66417ba147ab3e32518d9e4facba256
|
4c7f4361a8050e5426cb23e4a84ee64df25a6c12
|
refs/heads/master
| 2022-12-12T14:50:18.838424
| 2020-09-04T06:57:34
| 2020-09-04T06:57:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,232
|
py
|
"""Call tracking and checks for spiess."""
from __future__ import unicode_literals
from kgb.pycompat import iteritems, text_type
from kgb.signature import FunctionSig
class SpyCall(object):
    """A record of one invocation of a spied function.

    A FunctionSpy creates one of these per call and stores it on its
    ``calls`` attribute, capturing the positional/keyword arguments along
    with the eventual return value or raised exception.
    """

    def __init__(self, spy, args, kwargs):
        """Capture the spy and the arguments used for this call.

        Args:
            spy (kgb.spies.FunctionSpy):
                The spy whose function was invoked.

            args (tuple):
                Positional arguments passed in the call.

            kwargs (dict):
                Keyword arguments passed in the call.
        """
        self.spy = spy
        self.args = args
        self.kwargs = kwargs
        self.return_value = None
        self.exception = None

    def called_with(self, *args, **kwargs):
        """Return whether this call matches the given arguments.

        The provided arguments may be a subset of those actually used
        (positional arguments must match from the first one onward), but
        must not include anything that was not part of the call.

        Args:
            *args (tuple):
                The positional arguments to match (or a leading subset).

            **kwargs (dict):
                The keyword arguments to match (or a subset).

        Returns:
            bool:
            ``True`` if the recorded arguments match, ``False`` otherwise.
        """
        recorded = self.args

        if len(args) > len(recorded):
            return False

        if recorded[:len(args)] != args:
            return False

        arg_names = self.spy._sig.arg_names

        # Drop the implicit self/cls slot for bound/unbound methods.
        if self.spy.func_type in (FunctionSig.TYPE_BOUND_METHOD,
                                  FunctionSig.TYPE_UNBOUND_METHOD):
            arg_names = arg_names[1:]

        # Fold positional arguments into a name -> value map, with the
        # recorded keyword arguments layered on top.
        combined = dict(zip(arg_names, recorded))
        combined.update(self.kwargs)

        return all(
            name in combined and combined[name] == expected
            for name, expected in iteritems(kwargs)
        )

    def returned(self, value):
        """Return whether this call produced the given return value.

        Args:
            value (object):
                The expected return value.

        Returns:
            bool:
            ``True`` if the call returned ``value``, ``False`` otherwise.
        """
        return value == self.return_value

    def raised(self, exception_cls):
        """Return whether this call raised the given exception type.

        Args:
            exception_cls (type):
                The expected exception type (or ``None`` for "no
                exception raised").

        Returns:
            bool:
            ``True`` if the call raised exactly that type, ``False``
            otherwise.
        """
        if self.exception is None and exception_cls is None:
            return True

        return type(self.exception) is exception_cls

    def raised_with_message(self, exception_cls, message):
        """Return whether this call raised the given exception and message.

        Args:
            exception_cls (type):
                The expected exception type.

            message (unicode):
                The expected exception message.

        Returns:
            bool:
            ``True`` if both the type and the message match, ``False``
            otherwise.
        """
        if self.exception is None:
            return False

        return (self.raised(exception_cls) and
                text_type(self.exception) == message)

    def __repr__(self):
        details = (self.args, self.kwargs, self.return_value, self.exception)

        return '<SpyCall(args=%r, kwargs=%r, returned=%r, raised=%r>' % details
|
[
"christian@beanbaginc.com"
] |
christian@beanbaginc.com
|
d59aaea52583e6a20a8bae86ba53ef71554cb62d
|
64f39ad662546e1f92df4dd2bf7b5ac2f748d39d
|
/octavia_f5/common/constants.py
|
0e1768c388d52b598ce6766ffe5f6d26eec24a41
|
[
"Apache-2.0"
] |
permissive
|
zongzw/python-as3
|
2b5026bec3a2e1bba24d4fae7fc90b7f1f58523a
|
de51773fb2877f4a0988cc655cf4624a3129fd65
|
refs/heads/master
| 2022-11-24T07:58:04.738669
| 2020-07-28T01:12:25
| 2020-07-28T01:12:25
| 283,049,282
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,640
|
py
|
# Copyright 2018 SAP SE
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Constants for the F5 Octavia provider; re-exports everything from
# octavia_lib's shared constants and adds F5/AS3-specific ones.
from octavia_lib.common.constants import *

PROJECT_ID = 'project_id'
BIGIP = 'bigip'
# Name prefixes used when deriving AS3/BIG-IP object names from Octavia IDs.
PREFIX_PROJECT = 'project_'
PREFIX_LISTENER = 'listener_'
PREFIX_TLS_LISTENER = 'tls_listener_'
PREFIX_TLS_POOL = 'tls_pool_'
PREFIX_CONTAINER = 'container_'
PREFIX_CERTIFICATE = 'cert_'
PREFIX_POOL = 'pool_'
PREFIX_HEALTH_MONITOR = 'hm_'
PREFIX_LOADBALANCER = 'lb_'
PREFIX_POLICY = 'l7policy_'
PREFIX_WRAPPER_POLICY = 'wrapper_policy_'
PREFIX_NETWORK = 'net_'
PREFIX_IRULE = 'irule_'
PREFIX_MEMBER = 'member_'
PREFIX_SECRET = 'secret_'
# Application template identifiers.
APPLICATION_TCP = 'tcp'
APPLICATION_UDP = 'udp'
APPLICATION_HTTP = 'http'
APPLICATION_HTTPS = 'https'
APPLICATION_L4 = 'l4'
APPLICATION_GENERIC = 'generic'
APPLICATION_SHARED = 'shared'
SUPPORTED_APPLICATION_TEMPLATES = (APPLICATION_TCP, APPLICATION_UDP,
                                   APPLICATION_HTTP, APPLICATION_HTTPS,
                                   APPLICATION_L4, APPLICATION_GENERIC,
                                   APPLICATION_SHARED)
# AS3 service class names.
SERVICE_TCP = 'Service_TCP'
SERVICE_UDP = 'Service_UDP'
SERVICE_HTTP = 'Service_HTTP'
SERVICE_HTTPS = 'Service_HTTPS'
SERVICE_L4 = 'Service_L4'
SERVICE_GENERIC = 'Service_Generic'
SUPPORTED_SERVICES = (SERVICE_TCP, SERVICE_UDP, SERVICE_HTTP,
                      SERVICE_HTTPS, SERVICE_L4, SERVICE_GENERIC)
# Groupings of service classes that are TCP-based / HTTP-based.
SERVICE_TCP_TYPES = (SERVICE_TCP, SERVICE_GENERIC, SERVICE_HTTP, SERVICE_HTTPS)
SERVICE_HTTP_TYPES = (SERVICE_HTTP, SERVICE_HTTPS)
# TLS profile option names.
SINGLE_USE_DH = 'singleUseDh'
STAPLER_OCSP = 'staplerOCSP'
TLS_1_0 = 'tls1_0'
TLS_1_1 = 'tls1_1'
TLS_1_2 = 'tls1_2'
TLS_1_3 = 'tls1_3'
TLS_OPTIONS_SERVER = (SINGLE_USE_DH, STAPLER_OCSP, TLS_1_0, TLS_1_1, TLS_1_2, TLS_1_3)
TLS_OPTIONS_CLIENT = (SINGLE_USE_DH, TLS_1_0, TLS_1_1, TLS_1_2, TLS_1_3)
# Failover roles.
ROLE_MASTER = 'MASTER'
ROLE_BACKUP = 'BACKUP'
SEGMENT = 'segment'
VIF_TYPE = 'f5'
ESD = 'esd'
RPC_NAMESPACE_CONTROLLER_AGENT = 'f5controller'
DEVICE_OWNER_LISTENER = 'network:' + 'f5listener'
PROFILE_L4 = 'basic'
# Member/monitor status strings.
OPEN = 'OPEN'
FULL = 'FULL'
UP = 'UP'
DOWN = 'DOWN'
DRAIN = 'DRAIN'
NO_CHECK = 'no check'
MAINT = 'MAINT'
F5_NETWORK_AGENT_TYPE = 'F5 Agent'
|
[
"a.zong@f5.com"
] |
a.zong@f5.com
|
034ac87ab3d44c8c5221b639dd2987db0f489445
|
4302fd10583ccff63ff5693bd2ae5903323cb769
|
/curate/migrations/0033_auto_20190224_0315.py
|
9e514c889504231d27382afd8b779b6510c0517c
|
[
"MIT"
] |
permissive
|
ScienceCommons/curate_science
|
1faf742c8de1e9c9180e4d8ec6a7457ad95bb705
|
4e4072e8c000df0d2e80637016f8f0e667f4df54
|
refs/heads/master
| 2022-02-12T19:56:51.730534
| 2022-01-25T16:44:54
| 2022-01-25T16:44:54
| 149,122,317
| 14
| 7
|
MIT
| 2021-03-23T17:27:05
| 2018-09-17T12:32:25
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,466
|
py
|
# Generated by Django 2.1.7 on 2019-02-24 03:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 2.1.7): add stored width/height columns to
    the KeyFigure model and re-declare its image/thumbnail fields.
    """

    dependencies = [
        ('curate', '0032_auto_20190224_0231'),
    ]

    operations = [
        # New integer columns holding the stored image dimensions.
        migrations.AddField(
            model_name='keyfigure',
            name='height',
            field=models.PositiveIntegerField(default=0),
        ),
        migrations.AddField(
            model_name='keyfigure',
            name='thumb_height',
            field=models.PositiveIntegerField(default=0),
        ),
        migrations.AddField(
            model_name='keyfigure',
            name='thumb_width',
            field=models.PositiveIntegerField(default=0),
        ),
        migrations.AddField(
            model_name='keyfigure',
            name='width',
            field=models.PositiveIntegerField(default=0),
        ),
        # NOTE(review): ImageField's height_field/width_field normally take
        # the *name* of a model field (a string), not a field instance, so
        # passing PositiveIntegerField(default=0) here looks suspect.
        # Generated migrations mirror the model as written, so fix the model
        # and regenerate rather than hand-editing this file — confirm against
        # the KeyFigure model definition.
        migrations.AlterField(
            model_name='keyfigure',
            name='image',
            field=models.ImageField(height_field=models.PositiveIntegerField(default=0), null=True, upload_to='key_figures/', width_field=models.PositiveIntegerField(default=0)),
        ),
        migrations.AlterField(
            model_name='keyfigure',
            name='thumbnail',
            field=models.ImageField(height_field=models.PositiveIntegerField(default=0), null=True, upload_to='key_figure_thumbnails/', width_field=models.PositiveIntegerField(default=0)),
        ),
    ]
|
[
"alex.kyllo@gmail.com"
] |
alex.kyllo@gmail.com
|
46f088c66d64354d6e4a7ddddc6951a6a16cb979
|
7e4ee5b457ac9c85b64661eeedba0ba51b211c68
|
/entities/background.py
|
043381b99de005498e7cbc63c0fda1b10cbdf342
|
[] |
no_license
|
iCodeIN/Maze-Game
|
180ae7dfb2ffc7b8f2868e450b186b41f3ab510a
|
9956bf10f12326307eccff668cbc9cc615c0fee9
|
refs/heads/master
| 2022-12-03T04:00:12.270692
| 2020-08-26T23:43:45
| 2020-08-26T23:43:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
import pygame
import os
import sys
from easy_sdl.tools import *
from easy_sdl.sprite import Sprite
from easy_sdl.sprite import keyUp, keyDown
class Background(Sprite):
    """Full-screen background sprite anchored at the top-left corner."""

    def __init__(self):
        # Resolve the asset path first, then hand everything to Sprite.
        image_file = path("background.png")
        super().__init__(0, 0, image=image_file)
|
[
"noreply@github.com"
] |
iCodeIN.noreply@github.com
|
3d1adc70cd541480ba5036a9efa4b5fee148a93d
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/YcqAY72nZNPtvofuJ_8.py
|
b8e97a2438ad8150983e02cf9849e462c2089ed7
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 415
|
py
|
def quad_sequence(lst):
    """Extend a quadratic sequence by ``len(lst)`` further terms.

    For a quadratic sequence the second difference between consecutive
    terms is constant; it is derived from the last three elements and used
    to extrapolate forward.

    Args:
        lst: list of at least 3 numbers forming a quadratic sequence.

    Returns:
        List of the next ``len(lst)`` terms (the input is not modified).
    """
    # Last two first differences; their difference is the constant step.
    prev_diff = lst[-2] - lst[-3]
    diff = lst[-1] - lst[-2]
    step = diff - prev_diff

    current = lst[-1]
    next_nums = []
    for _ in lst:
        diff += step
        current += diff
        next_nums.append(current)
    return next_nums
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
595d87247e2461ae9ffc07c514269c1026c31b6b
|
d499057c555f2c9217cdfa5052207b67ea54b5cc
|
/server.py
|
d28eb56439d3b0a8e280d4903de84f66fd06cc9b
|
[
"Apache-2.0"
] |
permissive
|
defnngj/movie-website
|
6fe1fcc2571c75dd7f423137833eb46c4ac7db1d
|
d1ffaf209b4c689cd5180b8a8bb1866ad6d0f0e8
|
refs/heads/main
| 2023-08-11T10:37:38.783093
| 2021-09-27T14:39:56
| 2021-09-27T14:39:56
| 410,204,122
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,250
|
py
|
import os
import sqlite3
from flask import g
from flask import Flask
from flask import render_template
app = Flask(__name__)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DATABASE = os.path.join(BASE_DIR, "dev.db")
def connect_db():
    """Open and return a brand-new SQLite connection to the app database."""
    connection = sqlite3.connect(DATABASE)
    return connection
def get_db():
    """Return the request-scoped SQLite connection, creating it on first use.

    The connection is cached on Flask's ``g`` object under ``_database`` so
    repeated calls within one request share a single connection.
    """
    db = getattr(g, '_database', None)
    if db is not None:
        return db
    g._database = sqlite3.connect(DATABASE)
    return g._database
@app.teardown_appcontext
def close_connection(exception):
    """Close the request-scoped SQLite connection, if one was opened."""
    db = getattr(g, '_database', None)
    if db is None:
        return
    db.close()
def query_db(query, args=(), one=False):
    """Run a SQL query against the request database and return the rows.

    Args:
        query: SQL statement, with ``?`` placeholders.
        args: parameters bound to the placeholders.
        one: when True, return only the first row (or None if no rows).

    Returns:
        A list of rows, or a single row / None when ``one`` is True.
    """
    cursor = get_db().execute(query, args)
    rows = cursor.fetchall()
    cursor.close()
    if one:
        return rows[0] if rows else None
    return rows
@app.route("/")
def hello_world():
return "<p>Hello, World!</p>"
@app.route("/top")
def move_top():
return "<p>250经典电影</p>"
@app.route("/movie")
def movie_page():
"""
电影页面
:return:
"""
movie_list = query_db('select * from movie')
# for movie in movie_list:
# print("db-data\n", type(movie), movie)
return render_template("hello.html", moves=movie_list)
|
[
"defnngj@gmail.com"
] |
defnngj@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.