blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d870899b9adaefd930cc8e8c6db22b73f7aedb2f | 73a0f661f1423d63e86489d4b2673f0103698aab | /python/oneflow/test/modules/test_global_atleast.py | 72a326ed1d97d4e680e99bd754eb71ec05aa9f58 | [
"Apache-2.0"
] | permissive | Oneflow-Inc/oneflow | 4fc3e081e45db0242a465c4330d8bcc8b21ee924 | 0aab78ea24d4b1c784c30c57d33ec69fe5605e4a | refs/heads/master | 2023-08-25T16:58:30.576596 | 2023-08-22T14:15:46 | 2023-08-22T14:15:46 | 81,634,683 | 5,495 | 786 | Apache-2.0 | 2023-09-14T09:44:31 | 2017-02-11T06:09:53 | C++ | UTF-8 | Python | false | false | 2,566 | py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@autotest(n=1, check_graph=True)
def _test_atleast1d_with_random_data(test_case, placement, sbp):
    # Exercise atleast_1d with a mixed list of 1-D and 2-D global tensors;
    # the @autotest harness presumably compares oneflow vs torch results
    # and also checks the graph mode (check_graph=True) -- TODO confirm.
    x = random_tensor(ndim=1, dim0=8).to_global(placement, sbp)
    y = random_tensor(ndim=2, dim0=8).to_global(placement, sbp)
    out = torch.atleast_1d([x, y])
    return out
@autotest(n=1, check_graph=True)
def _test_atleast2d_with_random_data(test_case, placement, sbp):
    # Exercise atleast_2d with 1-D/2-D/3-D global tensors so both the
    # promotion path (ndim < 2) and the pass-through path are covered.
    x = random_tensor(ndim=1, dim0=8).to_global(placement, sbp)
    y = random_tensor(ndim=2, dim0=8).to_global(placement, sbp)
    z = random_tensor(ndim=3, dim0=8).to_global(placement, sbp)
    out = torch.atleast_2d([x, y, z])
    return out
@autotest(n=1, check_graph=True)
def _test_atleast3d_with_random_data(test_case, placement, sbp):
    # Exercise atleast_3d with 1-D through 4-D global tensors so both the
    # promotion path (ndim < 3) and the pass-through path are covered.
    x = random_tensor(ndim=1, dim0=8).to_global(placement, sbp)
    y = random_tensor(ndim=2, dim0=8).to_global(placement, sbp)
    z = random_tensor(ndim=3, dim0=8).to_global(placement, sbp)
    p = random_tensor(ndim=4, dim0=8).to_global(placement, sbp)
    out = torch.atleast_3d([x, y, z, p])
    return out
class TestAtLeastModule(flow.unittest.TestCase):
    """Global (multi-device) tests for flow.atleast_1d/2d/3d.

    Each test sweeps every available placement and every SBP signature
    up to max_dim=1, delegating the actual comparison to the module-level
    autotest helpers above.
    """
    @globaltest
    def test_atleast1d_with_random_data(test_case):
        for placement in all_placement():
            for sbp in all_sbp(placement, max_dim=1):
                _test_atleast1d_with_random_data(test_case, placement, sbp)
    @globaltest
    def test_atleast2d_with_random_data(test_case):
        for placement in all_placement():
            for sbp in all_sbp(placement, max_dim=1):
                _test_atleast2d_with_random_data(test_case, placement, sbp)
    @globaltest
    def test_atleast3d_with_random_data(test_case):
        for placement in all_placement():
            for sbp in all_sbp(placement, max_dim=1):
                _test_atleast3d_with_random_data(test_case, placement, sbp)
if __name__ == "__main__":
    # Run the test cases above via the standard unittest runner.
    unittest.main()
| [
"noreply@github.com"
] | Oneflow-Inc.noreply@github.com |
24b9244f975fc9f704b84fc1daaf22d63692e52f | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_5/mznsha002/question4.py | 77e703fb5f51759144b184f439d7d5c00edb3207 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,537 | py | # 16 April 2014
# Shaun Muzenda
# Drawing a text based graph based on a user inputed function
import math
def main():
    """Plot f(x) as text art on a 21x21 grid (x and y from -10 to 10).

    Curve points are drawn as 'o', the axes as '-' and '|' with '+' at the
    origin; everything else is a space.  Exactly one character is printed
    per grid cell because the if-chain conditions are mutually exclusive.
    """
    # SECURITY NOTE(review): eval() executes arbitrary user input -- fine for
    # a classroom exercise, but never use this pattern on untrusted input.
    function = input("Enter a function f(x):\n") #asks the user for a given function
    x = 0 #initial value of x set to 0
    y = 0 #initial value of y set to 0
    for rows in range(10,-11,-1): #the range for the y-axis (top row first)
        for column in range(-10,11,1): #the range for the x-axis
            x = column
            # Re-evaluates the expression for every cell; 'x' is the free
            # variable the user's expression is expected to reference.
            round_fx = round(eval(function)) #rounds the value of the given value
            if round_fx == rows:
                print("o", end="") #prints the plotted values as " o's "
            if rows == 0 and column == 0 and not rows == round_fx:
                print("+", end="")
            if column == 0 and not rows == 0 and not rows == round_fx:
                print("|", end="") #prints the y-axis using " |'s "
            if rows == 0 and not column == 0 and not rows == round_fx:
                print("-", end="") #prints the x-axis using " -'s "
            else:
                # NOTE: this else belongs only to the last if above; the nested
                # checks ensure a space is printed only for truly empty cells.
                if not rows == 0:
                    if not column == 0:
                        if not rows == round_fx:
                            print(" ", end="") #leaves the unplotted parts of the screen blank
        print() #prints the graph
main() | [
"jarr2000@gmail.com"
] | jarr2000@gmail.com |
4e328373f1a4f38593046ce6c799df598ee7a871 | a447f89a13573328dc09ebc267a436220cf0b521 | /tests/fixtures/common/models/hl7_v3/ne2008/multicacheschemas/mcci_mt000100_uv01.py | c1dd6b439c8aa23003872beb6f0c9aa12e99e4cf | [
"MIT"
] | permissive | ansFourtyTwo/xsdata | 8260e6dda8cf6e963ddf782b6a22a5dfc9192514 | 525be01d12e4d8abd792969adedcfafcee3fcf9b | refs/heads/master | 2022-11-13T13:25:18.370403 | 2020-07-11T17:54:53 | 2020-07-12T17:31:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,084 | py | from dataclasses import dataclass, field
from typing import List, Optional
from tests.fixtures.common.models.hl7_v3.ne2008.coreschemas.datatypes_base import (
Any,
Cs,
EdExplicit,
EnExplicit,
Ii,
IvlTsExplicit,
Sc,
TelExplicit,
)
from tests.fixtures.common.models.hl7_v3.ne2008.multicacheschemas.coct_mt040203_uv01 import (
CoctMt040203Uv01NotificationParty,
)
from tests.fixtures.common.models.hl7_v3.ne2008.coreschemas.voc import (
CommunicationFunctionType,
EntityClassDevice,
EntityClassOrganization,
EntityClassPlace,
EntityClassRoot,
EntityDeterminer,
NullFlavor,
RoleClassAgent,
RoleClassLocatedEntity,
)
__NAMESPACE__ = "urn:hl7-org:v3"
# HL7 v3 MCCI_MT000100UV01.AttentionLine binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01AttentionLine:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar key_word_text:
    :ivar value:
    :ivar null_flavor:
    """
    class Meta:
        name = "MCCI_MT000100UV01.AttentionLine"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    key_word_text: Optional[Sc] = field(
        default=None,
        metadata=dict(
            name="keyWordText",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    value: Optional[Any] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
# HL7 v3 MCCI_MT000100UV01.EntityRsp binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01EntityRsp:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar id:
    :ivar name:
    :ivar telecom:
    :ivar null_flavor:
    :ivar class_code:
    :ivar determiner_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.EntityRsp"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            required=True
        )
    )
    name: List[EnExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    telecom: List[TelExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    class_code: Optional[EntityClassRoot] = field(
        default=None,
        metadata=dict(
            name="classCode",
            type="Attribute",
            required=True
        )
    )
    determiner_code: EntityDeterminer = field(
        init=False,
        default=EntityDeterminer.INSTANCE,
        metadata=dict(
            name="determinerCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.Organization binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01Organization:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar id:
    :ivar name:
    :ivar telecom:
    :ivar notification_party:
    :ivar null_flavor:
    :ivar class_code:
    :ivar determiner_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.Organization"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=1,
            max_occurs=9223372036854775807
        )
    )
    name: List[EnExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    telecom: List[TelExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    notification_party: Optional[CoctMt040203Uv01NotificationParty] = field(
        default=None,
        metadata=dict(
            name="notificationParty",
            type="Element",
            namespace="urn:hl7-org:v3",
            nillable=True
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    class_code: Optional[EntityClassOrganization] = field(
        default=None,
        metadata=dict(
            name="classCode",
            type="Attribute",
            required=True
        )
    )
    determiner_code: EntityDeterminer = field(
        init=False,
        default=EntityDeterminer.INSTANCE,
        metadata=dict(
            name="determinerCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.Place binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01Place:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar id:
    :ivar name:
    :ivar telecom:
    :ivar null_flavor:
    :ivar class_code:
    :ivar determiner_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.Place"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=1,
            max_occurs=9223372036854775807
        )
    )
    name: List[EnExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    telecom: List[TelExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    class_code: Optional[EntityClassPlace] = field(
        default=None,
        metadata=dict(
            name="classCode",
            type="Attribute",
            required=True
        )
    )
    determiner_code: EntityDeterminer = field(
        init=False,
        default=EntityDeterminer.INSTANCE,
        metadata=dict(
            name="determinerCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.Agent binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01Agent:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar represented_organization:
    :ivar null_flavor:
    :ivar class_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.Agent"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    represented_organization: Optional[McciMt000100Uv01Organization] = field(
        default=None,
        metadata=dict(
            name="representedOrganization",
            type="Element",
            namespace="urn:hl7-org:v3",
            nillable=True
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    class_code: Optional[RoleClassAgent] = field(
        default=None,
        metadata=dict(
            name="classCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.LocatedEntity binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01LocatedEntity:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar location:
    :ivar null_flavor:
    :ivar class_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.LocatedEntity"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    location: Optional[McciMt000100Uv01Place] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            nillable=True
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    class_code: Optional[RoleClassLocatedEntity] = field(
        default=None,
        metadata=dict(
            name="classCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.RespondTo binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01RespondTo:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar telecom:
    :ivar entity_rsp:
    :ivar null_flavor:
    :ivar type_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.RespondTo"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    telecom: Optional[TelExplicit] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    entity_rsp: List[McciMt000100Uv01EntityRsp] = field(
        default_factory=list,
        metadata=dict(
            name="entityRsp",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=1,
            max_occurs=9223372036854775807,
            nillable=True
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    type_code: Optional[CommunicationFunctionType] = field(
        default=None,
        metadata=dict(
            name="typeCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.Device binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01Device:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar id:
    :ivar name:
    :ivar desc:
    :ivar existence_time:
    :ivar telecom:
    :ivar manufacturer_model_name:
    :ivar software_name:
    :ivar as_agent:
    :ivar as_located_entity:
    :ivar null_flavor:
    :ivar class_code:
    :ivar determiner_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.Device"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=1,
            max_occurs=9223372036854775807
        )
    )
    name: List[EnExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    desc: Optional[EdExplicit] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    existence_time: Optional[IvlTsExplicit] = field(
        default=None,
        metadata=dict(
            name="existenceTime",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    telecom: List[TelExplicit] = field(
        default_factory=list,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    manufacturer_model_name: Optional[Sc] = field(
        default=None,
        metadata=dict(
            name="manufacturerModelName",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    software_name: Optional[Sc] = field(
        default=None,
        metadata=dict(
            name="softwareName",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    as_agent: Optional[McciMt000100Uv01Agent] = field(
        default=None,
        metadata=dict(
            name="asAgent",
            type="Element",
            namespace="urn:hl7-org:v3",
            nillable=True
        )
    )
    as_located_entity: List[McciMt000100Uv01LocatedEntity] = field(
        default_factory=list,
        metadata=dict(
            name="asLocatedEntity",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807,
            nillable=True
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    class_code: Optional[EntityClassDevice] = field(
        default=None,
        metadata=dict(
            name="classCode",
            type="Attribute",
            required=True
        )
    )
    determiner_code: EntityDeterminer = field(
        init=False,
        default=EntityDeterminer.INSTANCE,
        metadata=dict(
            name="determinerCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.Receiver binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01Receiver:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar telecom:
    :ivar device:
    :ivar null_flavor:
    :ivar type_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.Receiver"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    telecom: Optional[TelExplicit] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    device: Optional[McciMt000100Uv01Device] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            required=True
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    type_code: Optional[CommunicationFunctionType] = field(
        default=None,
        metadata=dict(
            name="typeCode",
            type="Attribute",
            required=True
        )
    )
# HL7 v3 MCCI_MT000100UV01.Sender binding; appears generated by xsdata
# (test fixture) -- regenerate rather than hand-edit.
@dataclass
class McciMt000100Uv01Sender:
    """
    :ivar realm_code:
    :ivar type_id:
    :ivar template_id:
    :ivar telecom:
    :ivar device:
    :ivar null_flavor:
    :ivar type_code:
    """
    class Meta:
        name = "MCCI_MT000100UV01.Sender"
    realm_code: List[Cs] = field(
        default_factory=list,
        metadata=dict(
            name="realmCode",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    type_id: Optional[Ii] = field(
        default=None,
        metadata=dict(
            name="typeId",
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    template_id: List[Ii] = field(
        default_factory=list,
        metadata=dict(
            name="templateId",
            type="Element",
            namespace="urn:hl7-org:v3",
            min_occurs=0,
            max_occurs=9223372036854775807
        )
    )
    telecom: Optional[TelExplicit] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3"
        )
    )
    device: Optional[McciMt000100Uv01Device] = field(
        default=None,
        metadata=dict(
            type="Element",
            namespace="urn:hl7-org:v3",
            required=True
        )
    )
    null_flavor: Optional[NullFlavor] = field(
        default=None,
        metadata=dict(
            name="nullFlavor",
            type="Attribute"
        )
    )
    type_code: Optional[CommunicationFunctionType] = field(
        default=None,
        metadata=dict(
            name="typeCode",
            type="Attribute",
            required=True
        )
    )
| [
"tsoulloftas@gmail.com"
] | tsoulloftas@gmail.com |
6f1aaee7126ad0091b8796e50b4f93f0d141418d | 137832600734c4a3a16966bbaba19d3540378f9a | /naft_iipf.py | b6d2632bf5820c4dcaefd1b2a93af7b08c47e4dd | [] | no_license | DidierStevens/DidierStevensSuite | e824354c80f5b7aae4dfb6e55f60178eb9ae208c | 8190354314d6f42c9ddc477a795029dc446176c5 | refs/heads/master | 2023-09-01T20:11:55.341694 | 2023-08-29T10:26:39 | 2023-08-29T10:26:39 | 35,275,445 | 1,670 | 554 | null | 2023-06-04T22:54:40 | 2015-05-08T11:21:00 | Python | UTF-8 | Python | false | false | 16,468 | py | #!/usr/bin/env python
# Module metadata for the NAFT suite (shown by tools that import this module).
__description__ = 'Network Appliance Forensic Toolkit - IOS Image Parsing Functions'
__author__ = 'Didier Stevens'
__version__ = '0.0.3'
__date__ = '2014/05/03'
"""
Source code put in public domain by Didier Stevens, no Copyright
https://DidierStevens.com
Use at your own risk
History:
2011/12/18: start
2011/12/23: continue
2011/12/28: extract CW_* strings
2012/01/26: NAFT refactoring
2013/03/24: updated Pack to handle ELF file with 7 sections; added ImageUncompressedIDAPro
2014/05/03: version 0.0.3 assign section names (nameIndexString) when string table section is present
Todo:
"""
import struct
import cStringIO
import zipfile
import hashlib
import naft_uf
import naft_impf
class cELFSection:
    """One 32-bit big-endian ELF section header plus its raw section bytes.

    data is the 40-byte section header record; dataELF is the complete
    ELF image from which the section contents are sliced.
    """
    def __init__(self, data, dataELF):
        self.data = data
        fields = struct.unpack('>IIIIIIIIII', self.data)
        self.nameIndex = fields[0]
        self.nameIndexString = ''  # filled in later by cELF.ParseSectionHeaders
        self.type = fields[1]
        self.flags = fields[2]
        self.offset = fields[4]
        self.size = fields[5]
        end = self.offset + self.size
        # Slice out the section body, but only when it lies entirely inside
        # the image; a truncated image yields an empty string instead.
        self.sectionData = dataELF[self.offset:end] if end <= len(dataELF) else ''
    def GetHeader(self, offset=None, size=None):
        """Return the 40-byte header, optionally patching sh_offset/sh_size."""
        result = self.data[0:16]
        result += self.data[16:20] if offset is None else struct.pack('>I', offset)
        result += self.data[20:24] if size is None else struct.pack('>I', size)
        result += self.data[24:40]
        return result
class cELF:
    """Minimal parser for 32-bit big-endian (MSB) ELF files.

    After construction, self.error is 0 on success or a numeric code
    (1..9) identifying which validation step failed; on success
    self.sections holds one cELFSection per section header.
    NOTE(review): written for Python 2 -- self.data is expected to be a
    byte string indexable to 1-char strings (ord() is applied per char).
    """
    def __init__(self, data):
        self.data = data
        self.countSections = 0
        self.stringTableIndex = None
        self.Parse()
    def ParseELFHeader(self):
        # Validate the fixed-size ELF header; each failed check sets a
        # distinct error code and aborts parsing.
        if len(self.data) < 52:
            self.error = 1
            return
        if self.data[0:4] != '\x7FELF': # ELF MAGIC number
            self.error = 2
            return
        if ord(self.data[4]) != 1: # 32-bit ELF header
            self.error = 3
            return
        if ord(self.data[5]) != 2: # MSB format
            self.error = 4
            return
        header = struct.unpack('>IIIIHHHHHH', self.data[24:52])
        self.addressEntry = header[0]
        self.programOffset = header[1]
        self.sectionOffset = header[2]
        if header[4] != 52: # ELF header size
            self.error = 5
            return
        if header[5] != 32: # program header size
            self.error = 6
            return
        if header[6] != 1: # number of program headers
            self.error = 7
            return
        if header[7] != 40: # section header size
            self.error = 8
            return
        self.countSections = header[8]
        self.stringTableIndex = header[9]
    def GetNullTerminatedString(self, index):
        # Read a NUL-terminated ASCII string starting at byte offset index.
        result = ''
        while ord(self.data[index]) != 0:
            result += self.data[index]
            index += 1
        return result
    def ParseSectionHeaders(self):
        # Build a cELFSection per header and resolve each section's name.
        if len(self.data) < self.sectionOffset + self.countSections * 40:
            self.error = 9
            return
        self.sections = []
        for index in range(self.countSections):
            self.sections.append(cELFSection(self.data[self.sectionOffset + index * 40:self.sectionOffset + (index + 1) * 40], self.data))
        if self.stringTableIndex == 0:
            # No string table section: fall back to name indices seen in
            # typical Cisco IOS images (hard-coded offset -> name map).
            dSectionNames = {0: '', 1: '.shstrtab', 11: '.text', 17: '.rodata', 25: '.sdata2', 33: '.data', 39: '.sdata', 46: '.sbss', 52: '.bss'}
            for oELFSection in self.sections:
                if oELFSection.nameIndex in dSectionNames:
                    oELFSection.nameIndexString = dSectionNames[oELFSection.nameIndex]
        else:
            # Resolve names from the string table section's raw file offset.
            for oELFSection in self.sections:
                oELFSection.nameIndexString = self.GetNullTerminatedString(self.sections[self.stringTableIndex].offset + oELFSection.nameIndex)
    def Parse(self):
        # Header first; only parse section headers when the header is valid.
        self.error = 0
        self.ParseELFHeader()
        if self.error == 0:
            self.ParseSectionHeaders()
    def GetHeader(self):
        # The 52-byte ELF file header, verbatim.
        return self.data[0:52]
    def GetProgramHeader(self, length):
        # Copy of the single program header with p_filesz replaced by
        # length and p_memsz by length + 0x10000.
        return self.data[self.programOffset:self.programOffset + 16] + struct.pack('>I', length) + struct.pack('>I', length + 0x10000) + self.data[self.programOffset + 24:self.programOffset + 32]
class cIOSImage:
    def __init__(self, data):
        """Parse a Cisco IOS image (ELF container) from the raw bytes in data.

        All result attributes default to None so callers can test them even
        when parsing aborts early; Parse() fills them in and sets self.error.
        """
        self.data = data
        self.embeddedMD5 = None
        self.imageUncompressedName = None
        self.sizeUncompressed = None
        self.sizeCompressed = None
        self.checksumCompressed = None
        self.checksumUncompressed = None
        self.calculatedChecksumCompressed = None
        self.calculatedChecksumUncompressed = None
        self.imageUncompressed = None
        self.oCWStrings = None
        self.Parse()
@classmethod
def CalcChecksum(cls, data):
sum = 0
index = 0
length = len(data)
while length - index >= 4:
sum += struct.unpack('>I', data[index:index + 4])[0]
if sum > 0xFFFFFFFF:
sum = (sum + 1) & 0xFFFFFFFF
index += 4
# if length - index != 0:
# for x in data[index:]:
# if x != '\x00':
# print('Warning: checksum data remainder not zero (%d)' % ord(x))
return sum
def ExtractEmbeddedMD5(self, data):
index = data.find(naft_impf.cCiscoMagic.STR_FADEFAD1)
if index < 0:
return None
if index + len(naft_impf.cCiscoMagic.STR_FADEFAD1) + 16 > len(data):
return None
return(''.join(['%02x' % ord(x) for x in data[index + len(naft_impf.cCiscoMagic.STR_FADEFAD1):index + len(naft_impf.cCiscoMagic.STR_FADEFAD1) + 16]]))
def ExtractSections(self, oELF):
oSectionHeaderCompressedImage = None
oSectionHeaderEmbeddedMD5 = None
oSectionHeaderCWStrings = None
for oSectionHeader in oELF.sections:
if oSectionHeader.sectionData[0:4] == naft_impf.cCiscoMagic.STR_FEEDFACE:
if oSectionHeaderCompressedImage != None:
print('Error: more than one FEEDFACE section')
self.error = 2
else:
oSectionHeaderCompressedImage = oSectionHeader
elif oSectionHeader.sectionData.find(naft_impf.cCiscoMagic.STR_FADEFAD1) >= 0:
if oSectionHeaderEmbeddedMD5 != None:
print('Error: more than one FADEFAD1 section')
self.error = 3
else:
oSectionHeaderEmbeddedMD5 = oSectionHeader
elif oSectionHeader.sectionData.find(naft_impf.cCiscoMagic.STR_CW_BEGIN) >= 0:
if oSectionHeaderCWStrings != None:
print('Error: more than one CW_ strings section')
self.error = 10
else:
oSectionHeaderCWStrings = oSectionHeader
return (oSectionHeaderCompressedImage, oSectionHeaderEmbeddedMD5, oSectionHeaderCWStrings)
    def Parse(self):
        """Drive the full image parse; sets self.error (0 = success).

        Steps: parse the ELF container, locate the FEEDFACE/FADEFAD1/CW_
        sections, extract the embedded MD5 and CW strings, hash the
        sections, then unpack and checksum the zipped IOS image.
        """
        self.error = 0
        self.oELF = cELF(self.data)
        if self.oELF.error != 0:
            self.error = 1
            print('ELF parsing error %d.' % self.oELF.error)
            if self.oELF.error <= 2:
                print('This is not an ELF file.')
            elif self.oELF.error < 5:
                print('This is probably not an ELF file/Cisco IOS image.')
            return
        self.oSectionHeaderCompressedImage, self.oSectionHeaderEmbeddedMD5, self.oSectionHeaderCWStrings = self.ExtractSections(self.oELF)
        if self.oSectionHeaderEmbeddedMD5 != None:
            self.embeddedMD5 = self.ExtractEmbeddedMD5(self.oSectionHeaderEmbeddedMD5.sectionData)
        if self.oSectionHeaderCWStrings != None:
            self.oCWStrings = naft_impf.cCiscoCWStrings(self.oSectionHeaderCWStrings.sectionData)
        # MD5 over all sections except indices 3 and 4 (presumably the
        # sections excluded from Cisco's embedded digest -- TODO confirm).
        md5 = hashlib.md5()
        index = 0
        for oSectionHeader in self.oELF.sections:
            if index != 3 and index != 4:
                md5.update(oSectionHeader.sectionData)
            index += 1
        self.calculatedMD5 = md5.hexdigest()
        if self.oSectionHeaderCompressedImage == None:
            print('MAGIC number FEEDFACE not found')
            self.error = 4
            return
        # The FEEDFACE section: magic, then 4 big-endian uint32 fields
        # (uncompressed size, compressed size, both checksums), then the ZIP.
        self.sizeUncompressed, self.sizeCompressed, self.checksumCompressed, self.checksumUncompressed = struct.unpack('>IIII', self.oSectionHeaderCompressedImage.sectionData[len(naft_impf.cCiscoMagic.STR_FEEDFACE):len(naft_impf.cCiscoMagic.STR_FEEDFACE) + 4*4])
        zipData = self.oSectionHeaderCompressedImage.sectionData[len(naft_impf.cCiscoMagic.STR_FEEDFACE) + 4*4:len(naft_impf.cCiscoMagic.STR_FEEDFACE) + 4*4 + self.sizeCompressed]
        self.calculatedChecksumCompressed = cIOSImage.CalcChecksum(zipData)
        # Best-effort ZIP handling: bare excepts are deliberate so a corrupt
        # archive degrades to an error code instead of an exception.
        try:
            oZipFile = zipfile.ZipFile(cStringIO.StringIO(zipData))
            try:
                names = oZipFile.namelist()
            except:
                self.error = 6
                print('Error retrieving ZIP namelist')
                oZipFile = None
        except:
            self.error = 5
            print('Error parsing ZIP section')
            oZipFile = None
        if oZipFile != None:
            if len(names) == 0:
                self.error = 7
                print('Error: no file found in ZIP')
            elif len(names) == 1:
                self.imageUncompressedName = names[0]
            else:
                self.error = 8
                print('More than one file found in ZIP')
                print(','.join(names))
        if self.imageUncompressedName != None:
            try:
                self.imageUncompressed = oZipFile.open(self.imageUncompressedName).read()
            except:
                self.error = 9
                print('Error decompressing ZIP section')
        if self.imageUncompressed != None:
            self.calculatedChecksumUncompressed = cIOSImage.CalcChecksum(self.imageUncompressed)
    def Print(self):
        """Print a human-readable report of the parsed image to stdout.

        Shows the CW_ identification strings (when present and parsed
        cleanly), ELF summary, embedded MD5, and the stored vs calculated
        checksums/sizes; naft_uf.cn renders None values safely.
        """
        if self.oCWStrings != None and self.oCWStrings.error == '':
            for key in ['CW_VERSION', 'CW_FAMILY', 'CW_FEATURE', 'CW_IMAGE', 'CW_SYSDESCR']:
                if key in self.oCWStrings.dCWStrings:
                    print('%s:%s%s' % (key, ' ' * (22 - len(key)), self.oCWStrings.dCWStrings[key]))
        if self.oELF.error == 0:
            print('Entry point: 0x%08X' % self.oELF.addressEntry)
            print('Number of sections: %d' % self.oELF.countSections)
        print('Embedded MD5: %s' % naft_uf.cn(self.embeddedMD5))
        # print('Calculated MD5: %s' % naft_uf.cn(self.calculatedMD5))
        print('Compressed size: %s' % naft_uf.cn(self.sizeCompressed, '%d'))
        print('Checksum compressed: %s' % naft_uf.cn(self.checksumCompressed, '0x%08X'))
        print('Calculated checksum: %s (%s)' % (naft_uf.cn(self.calculatedChecksumCompressed, '0x%08X'), naft_uf.iif(self.checksumCompressed == self.calculatedChecksumCompressed, 'identical', 'DIFFERENT')))
        print('Uncompressed size: %s' % naft_uf.cn(self.sizeUncompressed, '%d'))
        print('Image name: %s' % naft_uf.cn(self.imageUncompressedName))
        print('Checksum uncompressed: %s' % naft_uf.cn(self.checksumUncompressed, '0x%08X'))
        print('Calculated checksum: %s (%s)' % (naft_uf.cn(self.calculatedChecksumUncompressed, '0x%08X'), naft_uf.iif(self.checksumUncompressed == self.calculatedChecksumUncompressed, 'identical', 'DIFFERENT')))
def Compress(self, filenameUncompressedImage, imageUncompressed):
oStringIO = cStringIO.StringIO()
oZipFile = zipfile.ZipFile(oStringIO, 'w')
oZipInfo = zipfile.ZipInfo(filenameUncompressedImage)
oZipInfo.compress_type = zipfile.ZIP_DEFLATED
oZipFile.writestr(oZipInfo, imageUncompressed)
oZipFile.close()
result = oStringIO.getvalue()
oStringIO.close()
result = naft_impf.cCiscoMagic.STR_FEEDFACE + struct.pack('>IIII', len(imageUncompressed), len(result), cIOSImage.CalcChecksum(result), cIOSImage.CalcChecksum(imageUncompressed)) + result
return result
def Pack(self, filenameUncompressedImage, imageUncompressed):
    """Rebuild a complete ELF image around a freshly compressed payload.

    The leading sections form the SFX loader, followed by the compressed
    payload and a trailing data section.  Only 6- and 7-section layouts are
    supported; the two original branches were identical except for the
    section indices, so they are unified here.

    Args:
      filenameUncompressedImage: archive member name stored inside the ZIP.
      imageUncompressed: raw bytes of the uncompressed IOS image.

    Returns:
      The new ELF image as a byte string, or None for unsupported layouts.
    """
    if self.oELF.countSections not in (6, 7):
        return None
    # Layout: sections[0:dataIndex] are the SFX loader, sections[dataIndex]
    # holds trailing data, sections[compIndex] describes the compressed payload.
    dataIndex = self.oELF.countSections - 2
    compIndex = self.oELF.countSections - 1
    dataSection = self.oELF.sections[dataIndex]
    SFX = ''.join(section.sectionData for section in self.oELF.sections[:dataIndex])
    imageCompressed = self.Compress(filenameUncompressedImage, imageUncompressed)
    imageNew = self.oELF.GetHeader()
    imageNew += self.oELF.GetProgramHeader(len(SFX) + len(imageCompressed) + len(dataSection.sectionData))
    # Unmodified headers for all SFX loader sections.
    for section in self.oELF.sections[:dataIndex]:
        imageNew += section.GetHeader()
    # Two more section headers (data + compressed payload) still follow;
    # account for them when computing the file offsets stored in them.
    lengthHeaders = len(imageNew) + 2 * len(dataSection.GetHeader())
    imageNew += dataSection.GetHeader(lengthHeaders + len(SFX) + len(imageCompressed), len(dataSection.sectionData))
    imageNew += self.oELF.sections[compIndex].GetHeader(lengthHeaders + len(SFX), len(imageCompressed))
    imageNew += SFX
    imageNew += imageCompressed
    imageNew += dataSection.sectionData
    return imageNew
def ImageUncompressedIDAPro(self):
    """Return a copy of the uncompressed image patched for IDA Pro: the ELF
    e_machine field at offset 18 is forced to 0x0014 (PowerPC)."""
    patched = self.imageUncompressed[0:18]
    patched += chr(0) + chr(0x14)
    patched += self.imageUncompressed[20:]
    return patched
class cMD5Database():
    """In-memory lookup table mapping lowercase md5 digests to a
    (csv basename, filename) pair, loaded from all *.csv files found in a
    directory.  The first occurrence of a hash wins; later ones count as
    doubles."""
    def __init__(self, directoryCSVFiles):
        self.dMD5Database = {}
        countDoubles = 0
        countMD5EmptyString = 0
        for filenameCSV in glob.glob(os.path.join(directoryCSVFiles, '*.csv')):
            result = self.AddCSV(filenameCSV)
            countDoubles += result[0]
            countMD5EmptyString += result[1]
        print('%d unique entries in md5 database, %d doubles of which %d empty string' % (len(self.dMD5Database), countDoubles, countMD5EmptyString))

    def AddCSV(self, filenameCSV):
        """Load one CSV of 'md5,filename' lines into the database.

        Returns:
          (countDoubles, countMD5EmptyString): number of hashes that were
          already present and, of those, how many equal the digest of the
          empty string (a common artifact of zero-byte files).
        """
        countDoubles = 0
        countMD5EmptyString = 0
        # .encode() keeps this working on Python 3 (md5 requires bytes);
        # on Python 2 it is a no-op.
        md5EmptyString = hashlib.md5(''.encode()).hexdigest()
        basename = os.path.basename(filenameCSV)
        # Context manager closes the handle; the original open(...).readlines()
        # leaked it.
        with open(filenameCSV, 'r') as fCSV:
            lines = fCSV.readlines()
        for line in lines:
            md5hash, filename = line.strip('\n').split(',')
            md5hash = md5hash.lower()
            if md5hash in self.dMD5Database:
                if md5hash == md5EmptyString:
                    countMD5EmptyString += 1
                countDoubles += 1
            else:
                self.dMD5Database[md5hash] = (basename, filename.strip(' '))
        return (countDoubles, countMD5EmptyString)

    def Find(self, md5hash):
        """Return (csv basename, filename) for md5hash, or (None, None)."""
        if md5hash in self.dMD5Database:
            return self.dMD5Database[md5hash]
        else:
            return None, None
| [
"didier.stevens@gmail.com"
] | didier.stevens@gmail.com |
1ac1cec482db208913214198ef20d3d5f9820694 | 88cdfe0809655f9e3fcd460558e861cc01d639a3 | /Scripts/split_data_frontal_ap.py | bb85d686c40632cf01e4bab0fde090fb795f263a | [] | no_license | aguilarmg/cs231n-finalproject | 36bc3d7e03512d64e20c84e3b8bc538894a826f1 | 8d13a7ce59938bde5f46b0006690157ba88305d8 | refs/heads/master | 2020-05-23T19:38:12.810170 | 2020-03-28T12:40:11 | 2020-03-28T12:40:11 | 186,917,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | import csv
# Split the frontal/AP training manifest into num_batches equal CSV files.
# The first row of the input is the header; it is copied into every batch.
train_frontal_ap_labels = []
with open('../Data/csv_files/train_frontal_ap.csv', 'r') as f:
    train_frontal_ap_labels = list(csv.reader(f))

num_batches = 5
# Integer division: any remainder rows beyond num_batches * size_of_batches
# are intentionally dropped, matching the original elif-ladder behaviour.
size_of_batches = (len(train_frontal_ap_labels) - 1) // num_batches

header = train_frontal_ap_labels.pop(0)

for batch in range(num_batches):
    start = batch * size_of_batches
    end = start + size_of_batches
    # One open per batch file (the original reopened a file in append mode
    # for every single row).
    with open('../Data/csv_files/train_frontal_ap_' + str(batch) + '.csv', 'w') as fp:
        writer = csv.writer(fp)
        writer.writerow(header)
        writer.writerows(train_frontal_ap_labels[start:end])
| [
"google-dl-platform@googlegroups.com"
] | google-dl-platform@googlegroups.com |
39d31f29a3f55b5250f1df043bf9e09c2ea6007e | 8a96b57301ae04d40a32aa194c7680dc853ff767 | /carts/migrations/0001_initial.py | 111ebb7816b3e38d018bc59c388b83fca28fe413 | [] | no_license | katalyzator/internetShop | 14b39f2b8402d30beeeb0b26f4fa108dfa3ddb09 | 3f740b63e481bd620c23124a973e657fd35a447f | refs/heads/master | 2020-09-26T09:09:29.995231 | 2016-08-22T09:00:32 | 2016-08-22T09:00:32 | 66,252,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 933 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-07-29 14:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # First migration of the carts app: creates the Cart model.
    initial = True

    dependencies = [
        # Cart.products references products.Product, so that app's
        # migration must be applied first.
        ('products', '0007_auto_20160728_1439'),
    ]

    operations = [
        migrations.CreateModel(
            name='Cart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Running total of the cart; updated by application code.
                ('total', models.DecimalField(decimal_places=2, default=0.0, max_digits=100)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('active', models.BooleanField(default=True)),
                # NOTE(review): null=True has no effect on ManyToManyField
                # (Django ignores it); blank=True alone controls validation.
                ('products', models.ManyToManyField(blank=True, null=True, to='products.Product')),
            ],
        ),
    ]
| [
"web.coder96@gmail.com"
] | web.coder96@gmail.com |
154a721d1bf5981c9018b42e71a3432e7e4613b7 | 1ebe5a07e7f6260c2c2ceb6ca00dcf2a0341e544 | /op_impl/built-in/ai_core/tbe/impl/squared_difference.py | 3c61e8f9c8388ecf1d55dedb175f39176f47270c | [] | no_license | gekowa/ascend-opp | f5e09905336d85f9974d555d03d37a75cb8185c1 | 5c28a2faf9d2a117ea6f0923efe35fcd53904dd2 | refs/heads/master | 2023-04-09T12:14:40.337104 | 2021-04-19T23:00:59 | 2021-04-19T23:00:59 | 359,620,865 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,923 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
squared_difference
"""
import te.lang.cce
from te import tvm
from topi import generic
from topi.cce import util
from te.utils.op_utils import refine_shapes_for_broadcast
from te.utils.op_utils import *
# Upper bound on tensor element count (2**31); appears unused in this file
# but is kept for parity with sibling TBE operator implementations.
SHAPE_SIZE_LIMIT = 2147483648
# pylint: disable=locally-disabled,too-many-locals,invalid-name
@check_op_params(REQUIRED_INPUT, REQUIRED_INPUT, REQUIRED_OUTPUT, KERNEL_NAME)
def squared_difference(x1, x2, y, kernel_name="squared_difference"):
    """
    algorithm: squared_difference
    calculating data's tf_squared_difference, y = (x1 - x2) * (x1 - x2)

    Parameters
    ----------
    x1 : dict
        shape and dtype of the first input, only support float16, float32, int32
    x2 : dict
        shape and dtype of the second input, same dtype as x1
    y : dict
        shape and dtype of output, the broadcast shape of the two inputs
    kernel_name : str
        cce kernel name, default value is squared_difference

    Returns
    -------
    None
    """
    shape_x = x1.get("shape")
    shape_y = x2.get("shape")

    check_shape(shape_x, param_name="x1")
    check_shape(shape_y, param_name="x2")

    check_list = ["float16", "float32", "int32"]
    dtype = x1.get("dtype").lower()
    if dtype not in check_list:
        raise RuntimeError(
            "tf_squared_difference_cce only support float16, float32, int32")

    # Align both shapes to a common rank, then fuse adjacent axes for a
    # simpler schedule.
    shape_x, shape_y, shape_max = broadcast_shapes(shape_x, shape_y, param_name_input1="x1", param_name_input2="x2")
    shape_x, shape_y = refine_shapes_for_broadcast(shape_x, shape_y)

    data_x = tvm.placeholder(shape_x, dtype=dtype, name="data_x")
    data_y = tvm.placeholder(shape_y, dtype=dtype, name="data_y")

    with tvm.target.cce():
        # Recompute the broadcast target from the refined shapes so that
        # shape_max matches the placeholders defined above.
        shape_x, shape_y, shape_max = broadcast_shapes(shape_x, shape_y, param_name_input1="x1", param_name_input2="x2")
        data_x_tmp = te.lang.cce.broadcast(data_x, shape_max)
        data_y_tmp = te.lang.cce.broadcast(data_y, shape_max)
        # (x1 - x2)**2 computed as a subtract followed by a self-multiply.
        data_sub = te.lang.cce.vsub(data_x_tmp, data_y_tmp)
        res = te.lang.cce.vmul(data_sub, data_sub)
        sch = generic.auto_schedule(res)

    config = {"print_ir": False,
              "name": kernel_name,
              "tensor_list": [data_x, data_y, res]}

    te.lang.cce.cce_build_code(sch, config)
| [
"gekowa@gmail.com"
] | gekowa@gmail.com |
152e376b132ed866e8fedfddd64924a5e4c11302 | 259cc507d97bfeff84d21de3a0ab56640676a9eb | /venv1/Lib/site-packages/tensorflow/contrib/kfac/python/ops/utils.py | 5c7f67f6681bbb365381f2fb1d73dbd2619a0c4a | [
"MIT",
"Apache-2.0"
] | permissive | Soum-Soum/Tensorflow_Face_Finder | c3ef71b6f718f6720b80f8760d28b6ca6e11e6d2 | fec6c15d2df7012608511ad87f4b55731bf99478 | refs/heads/master | 2020-03-22T20:31:39.606644 | 2018-07-12T13:47:56 | 2018-07-12T13:47:56 | 140,607,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,287 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.tpu.python.ops import tpu_ops
from tensorflow.contrib.tpu.python.tpu import tpu_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
# Method used for inverting matrices; must be a key of posdef_inv_functions.
POSDEF_INV_METHOD = "cholesky"
# Method used for eigendecompositions; must be a key of posdef_eig_functions.
POSDEF_EIG_METHOD = "self_adjoint"
def set_global_constants(posdef_inv_method=None):
  """Sets various global constants used by the classes in this module.

  Args:
    posdef_inv_method: Optional string; when given, overrides the module-level
      POSDEF_INV_METHOD (one of the keys of posdef_inv_functions).
  """
  global POSDEF_INV_METHOD

  if posdef_inv_method is not None:
    POSDEF_INV_METHOD = posdef_inv_method
class SequenceDict(object):
  """A dict convenience wrapper that allows getting/setting with sequences."""

  def __init__(self, iterable=None):
    # dict() accepts the same (key, value) iterables as the builtin.
    self._dict = dict(iterable or [])

  def __getitem__(self, key_or_keys):
    # A tuple/list of keys is resolved recursively into a list of values.
    if isinstance(key_or_keys, (tuple, list)):
      return [self[key] for key in key_or_keys]
    return self._dict[key_or_keys]

  def __setitem__(self, key_or_keys, val_or_vals):
    # Paired key/value sequences are assigned element-wise (recursively).
    if isinstance(key_or_keys, (tuple, list)):
      for key, val in zip(key_or_keys, val_or_vals):
        self[key] = val
    else:
      self._dict[key_or_keys] = val_or_vals

  def items(self):
    return list(self._dict.items())
def tensors_to_column(tensors):
  """Converts a tensor or list of tensors to a column vector.

  Args:
    tensors: A tensor or list of tensors.

  Returns:
    The tensors reshaped into vectors and stacked on top of each other.
  """
  if isinstance(tensors, (tuple, list)):
    # Flatten each tensor to shape [size, 1], then stack them vertically.
    return array_ops.concat(
        tuple(array_ops.reshape(tensor, [-1, 1]) for tensor in tensors), axis=0)
  else:
    return array_ops.reshape(tensors, [-1, 1])
def column_to_tensors(tensors_template, colvec):
  """Converts a column vector back to the shape of the given template.

  Args:
    tensors_template: A tensor or list of tensors.
    colvec: A 2d column vector with the same shape as the value of
      tensors_to_column(tensors_template).

  Returns:
    X, where X is tensor or list of tensors with the properties:
     1) tensors_to_column(X) = colvec
     2) X (or its elements) have the same shape as tensors_template (or its
        elements)
  """
  if isinstance(tensors_template, (tuple, list)):
    # Walk the column vector, carving out one slab of prod(shape) entries
    # per template tensor; requires static (fully known) template shapes.
    offset = 0
    tensors = []
    for tensor_template in tensors_template:
      sz = np.prod(tensor_template.shape.as_list(), dtype=np.int32)
      tensor = array_ops.reshape(colvec[offset:(offset + sz)],
                                 tensor_template.shape)
      tensors.append(tensor)
      offset += sz

    tensors = tuple(tensors)
  else:
    tensors = array_ops.reshape(colvec, tensors_template.shape)

  return tensors
def kronecker_product(mat1, mat2):
  """Computes the Kronecker product two matrices."""
  m1, n1 = mat1.get_shape().as_list()
  # Insert singleton axes so broadcasting multiplies every element of mat1
  # by every element of mat2; the final reshape collapses the 4-D result
  # into the [m1*m2, n1*n2] Kronecker product.
  mat1_rsh = array_ops.reshape(mat1, [m1, 1, n1, 1])
  m2, n2 = mat2.get_shape().as_list()
  mat2_rsh = array_ops.reshape(mat2, [1, m2, 1, n2])
  return array_ops.reshape(mat1_rsh * mat2_rsh, [m1 * m2, n1 * n2])
def layer_params_to_mat2d(vector):
  """Converts a vector shaped like layer parameters to a 2D matrix.

  In particular, we reshape the weights/filter component of the vector to be
  2D, flattening all leading (input) dimensions. If there is a bias component,
  we concatenate it to the reshaped weights/filter component.

  Args:
    vector: A Tensor or pair of Tensors shaped like layer parameters.

  Returns:
    A 2D Tensor with the same coefficients and the same output dimension.
  """
  if isinstance(vector, (tuple, list)):
    w_part, b_part = vector
    # Flatten all input dimensions; the bias becomes one extra row.
    w_part_reshaped = array_ops.reshape(w_part,
                                        [-1, w_part.shape.as_list()[-1]])
    return array_ops.concat(
        (w_part_reshaped, array_ops.reshape(b_part, [1, -1])), axis=0)
  elif isinstance(vector, ops.IndexedSlices):
    # Sparse gradients pass through unchanged (handled by the sparse matmuls
    # below).
    return vector
  else:  # Tensor or Tensor-like.
    return array_ops.reshape(vector, [-1, vector.shape.as_list()[-1]])
def mat2d_to_layer_params(vector_template, mat2d):
  """Converts a canonical 2D matrix representation back to a vector.

  Inverse of layer_params_to_mat2d.

  Args:
    vector_template: A Tensor or pair of Tensors shaped like layer parameters.
    mat2d: A 2D Tensor with the same shape as the value of
      layer_params_to_mat2d(vector_template).

  Returns:
    A Tensor or pair of Tensors with the same coefficients as mat2d and the same
      shape as vector_template.

  Raises:
    TypeError: If vector_template is an IndexedSlices but mat2d is not.
  """
  if isinstance(vector_template, (tuple, list)):
    # The last row is the bias; the rest reshape back into the weights.
    w_part, b_part = mat2d[:-1], mat2d[-1]
    return array_ops.reshape(w_part, vector_template[0].shape), b_part
  elif isinstance(vector_template, ops.IndexedSlices):
    if not isinstance(mat2d, ops.IndexedSlices):
      raise TypeError(
          "If vector_template is an IndexedSlices, so should mat2d.")
    return mat2d
  else:
    return array_ops.reshape(mat2d, vector_template.shape)
def posdef_inv(tensor, damping):
  """Computes the inverse of tensor + damping * identity."""
  identity = linalg_ops.eye(tensor.shape.as_list()[0], dtype=tensor.dtype)
  damping = math_ops.cast(damping, dtype=tensor.dtype)
  # Dispatch on the module-level POSDEF_INV_METHOD setting.
  return posdef_inv_functions[POSDEF_INV_METHOD](tensor, identity, damping)
def posdef_inv_matrix_inverse(tensor, identity, damping):
  """Computes inverse(tensor + damping * identity) directly."""
  # Direct inversion: simplest of the three strategies registered below.
  return linalg_ops.matrix_inverse(tensor + damping * identity)
def posdef_inv_cholesky(tensor, identity, damping):
  """Computes inverse(tensor + damping * identity) with Cholesky."""
  # The damped matrix is assumed symmetric positive definite, so a Cholesky
  # factorization exists; solving against identity yields the inverse.
  chol = linalg_ops.cholesky(tensor + damping * identity)
  return linalg_ops.cholesky_solve(chol, identity)
def posdef_inv_eig(tensor, identity, damping):
  """Computes inverse(tensor + damping * identity) with eigendecomposition."""
  eigenvalues, eigenvectors = linalg_ops.self_adjoint_eig(
      tensor + damping * identity)
  # inverse = V * diag(1/e) * V^T, written as (V / e) @ V^T.
  return math_ops.matmul(
      eigenvectors / eigenvalues, eigenvectors, transpose_b=True)
# Registry mapping POSDEF_INV_METHOD values to their implementations.
posdef_inv_functions = {
    "matrix_inverse": posdef_inv_matrix_inverse,
    "cholesky": posdef_inv_cholesky,
    "eig": posdef_inv_eig,
}
def posdef_eig(mat):
  """Computes the eigendecomposition of a positive semidefinite matrix."""
  # Dispatch on the module-level POSDEF_EIG_METHOD setting.
  return posdef_eig_functions[POSDEF_EIG_METHOD](mat)
def posdef_eig_svd(mat):
  """Computes the singular values and left singular vectors of a matrix."""
  # For a PSD matrix the singular values coincide with the eigenvalues.
  evals, evecs, _ = linalg_ops.svd(mat)

  return evals, evecs
def posdef_eig_self_adjoint(mat):
  """Computes eigendecomposition using self_adjoint_eig."""
  evals, evecs = linalg_ops.self_adjoint_eig(mat)
  # Clamp tiny negative eigenvalues (numerical noise) to their magnitude.
  evals = math_ops.abs(evals)  # Should be equivalent to svd approach.

  return evals, evecs
# Registry mapping POSDEF_EIG_METHOD values to their implementations.
posdef_eig_functions = {
    "self_adjoint": posdef_eig_self_adjoint,
    "svd": posdef_eig_svd,
}
class SubGraph(object):
  """Defines a subgraph given by all the dependencies of a given set of outputs.
  """

  def __init__(self, outputs):
    # Set of all ancestor Tensors, Ops to 'outputs'.
    self._members = set()

    self._iter_add(outputs)

  def _iter_add(self, root):
    """Iteratively adds all of nodes' ancestors using depth first search."""
    # The stack holds iterables of nodes: a Tensor expands to the tuple of
    # its producing Op, an Operation expands to its input Tensors.
    stack = [root]
    while stack:
      nodes = stack.pop()
      for node in nodes:
        if node in self._members:
          continue
        self._members.add(node)

        if isinstance(node, ops.Tensor):
          stack.append((node.op,))
        elif isinstance(node, ops.Operation):
          stack.append(node.inputs)

  def is_member(self, node):
    """Check if 'node' is in this subgraph."""
    return node in self._members

  def variable_uses(self, var):
    """Computes number of times a variable is used.

    Args:
      var: Variable or ResourceVariable instance.

    Returns:
      Number of times a variable is used within this subgraph.

    Raises:
      ValueError: If 'var' is not a variable type.
    """
    # Normalize to the tensor whose consumers represent reads of the variable.
    if isinstance(var, resource_variable_ops.ResourceVariable):
      var = var.handle
    elif isinstance(var, variables.Variable):
      var = var.value()
    else:
      raise ValueError("%s does not appear to be a variable." % str(var))

    return len(self._members.intersection(set(var.consumers())))

  def filter_list(self, node_list):
    """Filters 'node_list' to nodes in this subgraph."""
    filtered_list = []
    for node in node_list:
      if self.is_member(node):
        filtered_list.append(node)
    return filtered_list
def generate_random_signs(shape, dtype=dtypes.float32):
  """Generate a random tensor with {-1, +1} entries."""
  # Uniform ints in {0, 1}, mapped affinely onto {-1, +1}.
  ints = random_ops.random_uniform(shape, maxval=2, dtype=dtypes.int32)
  return 2 * math_ops.cast(ints, dtype=dtype) - 1
def fwd_gradients(ys, xs, grad_xs=None, stop_gradients=None):
  """Compute forward-mode gradients."""
  # See b/37888268.

  # This version of forward-mode autodiff is based on code by Tim Cooijmans
  # and handles list arguments and certain special cases such as when the
  # ys doesn't depend on one or more of the xs, and when ops.IndexedSlices are
  # generated by the first gradients_impl.gradients call.

  # Forward-mode JVPs are obtained here with two reverse-mode passes:
  # differentiate ys w.r.t. xs against dummy sensitivities `us`, then
  # differentiate that result w.r.t. `us`. The NaN seeds make any accidental
  # numerical use of `us` immediately visible.
  us = [array_ops.zeros_like(y) + float("nan") for y in ys]
  dydxs = gradients_impl.gradients(
      ys, xs, grad_ys=us, stop_gradients=stop_gradients)

  # Deal with strange types that gradients_impl.gradients returns but can't
  # deal with.
  dydxs = [
      ops.convert_to_tensor(dydx)
      if isinstance(dydx, ops.IndexedSlices) else dydx for dydx in dydxs
  ]
  # A None gradient means y does not depend on that x; substitute zeros.
  dydxs = [
      array_ops.zeros_like(x) if dydx is None else dydx
      for x, dydx in zip(xs, dydxs)
  ]

  dysdx = gradients_impl.gradients(dydxs, us, grad_ys=grad_xs)

  return dysdx
def on_tpu():
  """Returns True when building a TPU computation."""
  # number_of_shards is only populated inside a TPU shard context.
  return tpu_function.get_tpu_context().number_of_shards is not None
def cross_replica_mean(tensor, name=None):
  """Takes mean value of a Tensor across all TPU cores.

  Args:
    tensor: Tensor to be synchronized.
    name: None or string. Name of Op.

  Returns:
    Average of Tensor across all TPU cores.

  Raises:
    ValueError: If called outside of TPU context.
  """
  with ops.name_scope(name, "cross_replica_mean", [tensor]):
    num_shards = tpu_function.get_tpu_context().number_of_shards
    if num_shards is None:
      raise ValueError(
          "Cannot take cross_replica_mean() outside of TPU Context.")
    if num_shards == 1:
      return tensor
    # Each replica contributes tensor / num_shards; summing those across
    # replicas yields the mean.
    return tpu_ops.cross_replica_sum(tensor / num_shards)
def ensure_sequence(obj):
  """Wrap `obj` in a one-element tuple unless it is already a tuple or list."""
  return obj if isinstance(obj, (tuple, list)) else (obj,)
def batch_execute(global_step, thunks, batch_size, name=None):
  """Executes a subset of ops per global step.

  Given a list of thunks, each of which produces a single stateful op,
  ensures that exactly 'batch_size' ops are run per global step. Ops are
  scheduled in a round-robin fashion. For example, with 3 ops

    global_step | op0 | op1 | op2
    ------------+-----+-----+-----
    0           | x   | x   |
    ------------+-----+-----+-----
    1           | x   |     | x
    ------------+-----+-----+-----
    2           |     | x   | x
    ------------+-----+-----+-----
    3           | x   | x   |
    ------------+-----+-----+-----
    4           | x   |     | x

  Does not guarantee order of op execution within a single global step.

  Args:
    global_step: Tensor indicating time. Determines which ops run.
    thunks: List of thunks. Each thunk encapsulates one op. Return values are
      ignored.
    batch_size: int. Number of ops to execute per global_step.
    name: string or None. Name scope for newly added ops.

  Returns:
    List of ops. Exactly 'batch_size' ops are guaranteed to have an effect
    every global step.
  """

  def true_fn(thunk):
    """Ensures thunk is executed and returns an Op (not a Tensor)."""

    def result():
      # Force the thunk's op to run, but return a value-less no_op so both
      # cond branches have the same (op) signature.
      with ops.control_dependencies([thunk()]):
        return control_flow_ops.no_op()

    return result

  def false_fn(_):
    """Executes a no-op."""

    def result():
      return control_flow_ops.no_op()

    return result

  with ops.name_scope(name, "batch_execute"):
    true_fns = [true_fn(thunk) for thunk in thunks]
    false_fns = [false_fn(thunk) for thunk in thunks]
    num_thunks = len(thunks)
    # Thunk j fires when its round-robin slot (mod num_thunks) falls within
    # the current step's window of batch_size slots.
    conditions = [
        math_ops.less(
            math_ops.mod(batch_size - 1 + global_step * batch_size - j,
                         num_thunks), batch_size) for j in range(num_thunks)
    ]
    result = [
        control_flow_ops.cond(condition, true_fn, false_fn)
        for (condition, true_fn,
             false_fn) in zip(conditions, true_fns, false_fns)
    ]
    return result
def matmul_sparse_dense(A, B, name=None):  # pylint: disable=invalid-name
  """Computes matmul(A, B) where A is sparse, B is dense.

  Args:
    A: tf.IndexedSlices with dense shape [m, n].
    B: tf.Tensor with shape [n, k].
    name: str. Name of op.

  Returns:
    tf.IndexedSlices resulting from matmul(A, B).

  Raises:
    ValueError: If A doesn't represent a matrix.
    ValueError: If B is not rank-2.
  """
  with ops.name_scope(name, "matmul_sparse_dense", [A, B]):
    if A.indices.shape.ndims != 1 or A.values.shape.ndims != 2:
      raise ValueError("A must represent a matrix. Found: %s." % A)
    if B.shape.ndims != 2:
      raise ValueError("B must be a matrix.")
    # Only the rows of A that are actually present need multiplying; the
    # result stays sparse with the same row indices.
    new_values = math_ops.matmul(A.values, B)
    return ops.IndexedSlices(
        new_values,
        A.indices,
        dense_shape=array_ops.stack([A.dense_shape[0], new_values.shape[1]]))
def matmul_diag_sparse(A_diag, B, name=None):  # pylint: disable=invalid-name
  """Computes matmul(A, B) where A is a diagonal matrix, B is sparse.

  Args:
    A_diag: diagonal entries of matrix A of shape [m, m].
    B: tf.IndexedSlices. Represents matrix of shape [m, n].
    name: str. Name of op.

  Returns:
    tf.IndexedSlices resulting from matmul(A, B).

  Raises:
    ValueError: If A_diag is not rank-1.
    ValueError: If B doesn't represent a matrix.
  """
  with ops.name_scope(name, "matmul_diag_sparse", [A_diag, B]):
    A_diag = ops.convert_to_tensor(A_diag)
    if A_diag.shape.ndims != 1:
      raise ValueError("A_diag must be a rank-1 Tensor.")
    if B.indices.shape.ndims != 1 or B.values.shape.ndims != 2:
      raise ValueError("B must represent a matrix. Found: %s." % B)
    # Multiplying by a diagonal matrix scales each present row of B by the
    # matching diagonal entry; reshape makes the scale broadcast over rows.
    a = array_ops.gather(A_diag, B.indices)
    a = array_ops.reshape(a, list(a.shape) + [1] * (B.values.shape.ndims - 1))
    return ops.IndexedSlices(a * B.values, B.indices, dense_shape=B.dense_shape)
# TODO(b/69623235): Add a function for finding tensors that share gradients
# to eliminate redundant fisher factor computations.
| [
"pes.carceller@gmail.com"
] | pes.carceller@gmail.com |
8b6857e0576004fd1ba9bedb211c06e86d3844e6 | c0631f0c4f02f2ed750ec4c48341f87885fbe4ff | /GRACE_Loading/grace_loading_driver.py | d906a0bf0d752aa5b3086ef63dbe64ad78016336 | [] | no_license | whigg/GRACE_loading | bdac4ee5a0087d219ef47813f87170d90bde446d | a5ef26c35854d30a3a0e61fd471675ac519d2e3d | refs/heads/master | 2023-03-21T11:00:34.187732 | 2021-03-11T23:00:46 | 2021-03-11T23:00:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,402 | py | #!/usr/bin/env python
"""
EVALUATE GRACE LOADS ON GPS STATIONS
Written by Kathryn Materna, 2017
This program takes all GRACE gravity loads (MASCON or TELLUS) within a certain distance from station,
and computes the loading effect from each cell.
You can choose:
-to compute loads on a PREM spherical earth structure.
-to use which GRACE solution (JPL, GFZ, CSR, Mascons).
-to use the scaling grid (1) or not use scaling grid (0).
Read in station information for your network: name, lon, lat, T1, T2.
For each station,
For each timestep from T1 to T2,
compute 3D loading displacement
"""
import argparse
from GRACE_Loading_Code import parse_configfile
from GRACE_Loading_Code import prem_earth
def welcome_and_parse():
    """Greet the user, parse the command line, and return the parsed args."""
    print("\n\nWelcome to a forward modeling tool for calculating GRACE loading at GPS points. ")
    parser = argparse.ArgumentParser(description='Run GRACE load models in Python',
                                     epilog='\U0001f600 \U0001f600 \U0001f600 ')
    parser.add_argument('configfile', type=str, help='name of config file for calculation. Required.')
    parsed = parser.parse_args()
    print("Config file:", parsed.configfile)
    return parsed
if __name__ == "__main__":
    # The main driver
    # Parse the command line, read the config file, then run the PREM-earth
    # GRACE loading time-series computation for the configured stations.
    args = welcome_and_parse();
    params = parse_configfile.configure_calc(args.configfile);
    prem_earth.prem_earth_grace_timeseries(params);
| [
"kathrynmaterna@gmail.com"
] | kathrynmaterna@gmail.com |
d1c6dd3c5995200c255e74a3e1ba38467121f7af | d2cffc0a371f9e4d587951755c0eb370ca491d2a | /mod_repair_extended/_build_auto.py | af7c6c36ef4f8b58c68390bccfeaf7ed8a06e153 | [
"WTFPL"
] | permissive | Havenard/spoter-mods | 633d06fa6202a8bd390f642f4847f321b7d3bbb8 | a8c8c143d744d4fe5838fbd92e2f188f5dbffe62 | refs/heads/master | 2020-05-01T08:16:12.138415 | 2019-03-21T17:50:08 | 2019-03-21T17:50:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,245 | py | # -*- coding: utf-8 -*-
import codecs
import datetime
import glob
import json
import os
import re
import shutil
import subprocess
import base64
CLIENT_VERSION = '1.4.1.0'
BUILD = 'auto'
NAME = 'spoter.repair_extended_auto'
ADD_LICENSE = True
class Build(object):
    """Builds and packages the mod into a .wotmod archive.

    Windows-only (Python 2): shells out to powershell, robocopy and 7-Zip.
    Pipeline: clean scratch dir, read the VERSION JSON, stamp version/date
    into the listed files, compile and pack, clean up again.
    """
    OUT_PATH = '.out'
    PYC_PATH = os.path.join(OUT_PATH, 'res', 'scripts', 'client', 'gui', 'mods')
    BUILD_PATH = os.path.join('source', BUILD)
    VERSION = None
    RELEASE = '%s.wotmod' % NAME
    DATE = datetime.datetime.now().strftime("%Y-%m-%d")
    # NOTE(review): CONFIG_NAME appears unused; updateFiles() assigns
    # self.configName (different spelling) instead — confirm intent.
    CONFIG_NAME = None

    def __init__(self):
        self.clear()
        if not os.path.exists('release'): subprocess.check_call(['powershell', 'mkdir', 'release'])
        self.readVersion()
        self.createFileDict()
        self.packWotmod()
        self.clear()
        print 'created: %s v%s (%s) to %s' % (self.RELEASE, self.VERSION["version"], self.DATE, CLIENT_VERSION)

    def clear(self):
        # Best-effort removal of the scratch directory; errors are ignored
        # both via rmtree's ignore_errors flag and the OSError guard.
        try:
            shutil.rmtree(self.OUT_PATH, True)
        except OSError:
            pass

    def readVersion(self):
        # VERSION is a UTF-8 JSON file describing the mod layout
        # (source, meta, config, i18n, resources, version).
        filePath = os.path.join(self.BUILD_PATH, 'VERSION')
        with codecs.open(filePath, 'r', encoding='utf-8') as versionFile:
            data = versionFile.read().decode('utf-8')
            versionFile.close()
        self.VERSION = json.loads(data)

    def createFileDict(self):
        """Collect (path, marker, replacement) triples and stamp the current
        version/date into every file listed in VERSION."""
        version = '{:.2f}'.format(float(self.VERSION["version"]))
        files = []
        if self.VERSION["source"]:
            files.append((os.path.join(self.BUILD_PATH, self.VERSION["source"]), 'self.version = ', "'v%s (%s)'" % (version, self.DATE)))
            # version_id is the version with dots/whitespace stripped.
            files.append((os.path.join(self.BUILD_PATH, self.VERSION["source"]), 'self.version_id = ', re.sub('[.\s]', '', '%s' % version)))
        if self.VERSION["meta"]:
            files.append((os.path.join(self.BUILD_PATH, self.VERSION["meta"]), '<version>', '%s</version>' % version))
        if self.VERSION["config"]:
            files.append((os.path.join(self.BUILD_PATH, self.VERSION["config"]), '"version": ', re.sub('[.\s]', '', '%s' % version)))
        if self.VERSION["i18n"]:
            for path in glob.glob(os.path.join(self.BUILD_PATH, self.VERSION["i18n"], "*.json")):
                files.append((path, '"version": ', re.sub('[.\s]', '', '%s' % version)))
        for path in files:
            self.updateFiles(*path)

    def updateFiles(self, path, string, text):
        """Rewrite `path`, replacing the tail of every line containing
        `string` (after the marker) with `text`."""
        # NOTE(review): reading from a file opened 'a+' depends on the initial
        # read position, which varies by platform — confirm before porting.
        with open(path, 'a+') as xfile:
            data = xfile.readlines()
            newData = []
            for line in data:
                if 'self.ids = ' in line:
                    self.configName = re.split('self.ids = ', line)[1]
                if string in line:
                    newData.append('%s%s%s\n' % (re.split(string, line)[0], string, text))
                    continue
                newData.append(line)
            xfile.close()
        with open(path, 'w') as xfile:
            xfile.writelines(newData)
            xfile.close()

    def packWotmod(self):
        """Byte-compile the source, lay out the wotmod tree and zip it
        (stored, -mx0) via a generated powershell script driving 7-Zip."""
        self.RELEASE = '%s_%s.wotmod' % (NAME, '{:.2f}'.format(float(self.VERSION["version"])))
        subprocess.check_call(['powershell', 'mkdir', self.PYC_PATH])
        py = '%s' % os.path.join(self.BUILD_PATH, self.VERSION["source"])
        pyc = '%sc' % self.VERSION["source"]
        ps = '%s\%s' % (os.path.realpath(self.OUT_PATH), 'create-7zip.ps1')
        metaPath = '%s' % os.path.join(self.BUILD_PATH, os.path.dirname(self.VERSION["meta"]))
        metaFile = os.path.basename(self.VERSION["meta"])
        # Compile the module, move the resulting .pyc into the res/scripts
        # layout and copy the meta file next to the res tree.
        subprocess.check_call(['python', '-m', 'compileall', py])
        subprocess.call('powershell robocopy %s %s %s /COPYALL /MOV' % (os.path.realpath(self.BUILD_PATH), os.path.realpath(self.PYC_PATH), pyc))
        subprocess.call('powershell robocopy %s %s %s /COPYALL' % (os.path.realpath(metaPath), os.path.realpath(self.OUT_PATH), metaFile))
        if self.VERSION["resources"]:
            # Recursively copy every declared resource directory that exists.
            for directory in self.VERSION["resources"]:
                if os.path.exists(os.path.join(self.BUILD_PATH, directory)):
                    subprocess.call('powershell robocopy %s %s /COPYALL /E' % (os.path.realpath(os.path.join(self.BUILD_PATH, directory)), os.path.realpath(os.path.join(self.OUT_PATH, 'res', directory))))
        # Generate a one-shot powershell script that zips the res tree, the
        # meta file and (optionally) the license into the release archive.
        with open(ps, 'w') as xfile:
            xfile.write('function create-7zip([String] $aDirectory, [String] $aZipfile){ [string]$pathToZipExe = "C:\Program Files\\7-zip\\7z.exe"; [Array]$arguments = "a", "-tzip", "-ssw", "-mx0", "$aZipfile", "$aDirectory"; & $pathToZipExe $arguments; }\n'
                        'create-7zip "%s" "%s"\n'
                        'create-7zip "%s" "%s"\n' % (os.path.realpath(os.path.join(self.OUT_PATH, 'res')), os.path.realpath(os.path.join('release', self.RELEASE)),
                                                     os.path.realpath(os.path.join(self.OUT_PATH, metaFile)), os.path.realpath(os.path.join('release', self.RELEASE))))
            if ADD_LICENSE:
                xfile.write('create-7zip "%s" "%s"\n' % (self.createLicense(), os.path.realpath(os.path.join('release', self.RELEASE))))
            xfile.close()
        subprocess.call('powershell -executionpolicy bypass -command "& {Set-ExecutionPolicy AllSigned; %s; Set-ExecutionPolicy Undefined}"' % ps)

    def createLicense(self):
        # WTFPL license text, base64-encoded to avoid source-encoding issues;
        # decoded and written next to the packaged files.
        b64 = "DQogICAgICAgIERPIFdIQVQgVEhFIEZVQ0sgWU9VIFdBTlQgVE8gUFVCTElDIExJQ0VOU0UgDQogICAgICAgICAgICAgICAgICAgIFZlcnNpb24gMiwgRGVjZW1iZXIgMjAwNCANCg0KIENvcHlyaWdodCAoQykgMjAwNCBTYW0gSG9jZXZhciA8c2FtQGhvY2V2YXIubmV0PiANCg0KIEV2ZXJ5b25lIGlzIHBlcm1pdHRlZCB0byBjb3B5IGFuZCBkaXN0cmlidXRlIHZlcmJhdGltIG9yIG1vZGlmaWVkIA0KIGNvcGllcyBvZiB0aGlzIGxpY2Vuc2UgZG9jdW1lbnQsIGFuZCBjaGFuZ2luZyBpdCBpcyBhbGxvd2VkIGFzIGxvbmcgDQogYXMgdGhlIG5hbWUgaXMgY2hhbmdlZC4gDQoNCiAgICAgICAgICAgIERPIFdIQVQgVEhFIEZVQ0sgWU9VIFdBTlQgVE8gUFVCTElDIExJQ0VOU0UgDQogICBURVJNUyBBTkQgQ09ORElUSU9OUyBGT1IgQ09QWUlORywgRElTVFJJQlVUSU9OIEFORCBNT0RJRklDQVRJT04gDQoNCiAgMC4gWW91IGp1c3QgRE8gV0hBVCBUSEUgRlVDSyBZT1UgV0FOVCBUTy4NCg=="
        output_name = os.path.realpath(os.path.join(self.OUT_PATH, 'LICENSE'))
        data = base64.b64decode(b64)
        with open(output_name, "wb") as output_file:
            output_file.write(data)
            output_file.close()
        return output_name
build = Build()
| [
"spoter@mail.ru"
] | spoter@mail.ru |
b7cb710949ff1f50d32b039241781affb2175b80 | 8a4933df480d0d834fef74d4e980d31c16a0d05c | /ex002.py | 9663f4186bb5f898599bc41bfee4d7fe3c48a665 | [] | no_license | code-repeat/primeiromundo | e107ae551bf59f53da360ca9d3a1cb756e59756f | 1aaa93c721ad994360ba78a706b00c379b05ec08 | refs/heads/master | 2022-11-23T07:43:12.497109 | 2020-07-28T18:55:30 | 2020-07-28T18:55:30 | 283,300,059 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | nome = input('Qual é o seu nome? ')
print(f"É um prazer te conhecer, {nome}!")
| [
"coderepeat44@gmail.com"
] | coderepeat44@gmail.com |
59473fb7b22ba85c6e7dd56a89249f45908c5ce3 | b4ed708779cab2dc344ca9601ec0d879ab1b6f04 | /indra/tests/test_sif_assembler.py | 911892a93fd959cc6d35cc60252c4792374117b2 | [
"BSD-2-Clause"
] | permissive | budakn/INDRA | e360e17c3de9f2cf9e49f11f003fd2b18ae2cbfc | 393958b2ca7bc1ca5d054885c0634f434ff7496e | refs/heads/master | 2020-03-27T13:15:05.897555 | 2018-08-28T05:24:09 | 2018-08-28T16:31:16 | 146,599,146 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,284 | py | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.assemblers import SifAssembler
from indra.statements import *
def test_simple_assembly():
st1 = Activation(Agent('a'), Agent('b'))
st2 = Inhibition(Agent('a'), Agent('c'))
sa = SifAssembler([st1, st2])
sa.make_model()
assert(len(sa.graph.nodes()) == 3)
assert(len(sa.graph.edges()) == 2)
def test_evidence_assembly():
ev1 = Evidence(pmid='1')
ev2 = Evidence(pmid='2')
ev3 = Evidence(pmid='3')
Evidence(pmid='4')
st1 = Activation(Agent('a'), Agent('b'), evidence=[ev1])
st2 = Inhibition(Agent('a'), Agent('c'), evidence=[ev1, ev2, ev3])
sa = SifAssembler([st1, st2])
sa.make_model()
assert(len(sa.graph.nodes()) == 3)
assert(len(sa.graph.edges()) == 2)
sa.set_edge_weights('support_pmid')
def test_modification():
st1 = Phosphorylation(Agent('BRAF'), Agent('MAP2K1'), 'S', '222')
sa = SifAssembler([st1])
sa.make_model(True, True, True)
assert(len(sa.graph.nodes()) == 2)
assert(len(sa.graph.edges()) == 1)
sa.save_model('test_sif.sif', True)
with open('test_sif.sif', 'rb') as fh:
txt = fh.read().decode('utf-8')
assert txt == 'BRAF 0 MAP2K1\n', txt
| [
"ben.gyori@gmail.com"
] | ben.gyori@gmail.com |
91a2a9ce99746758276cc7f8bafd303e8bdff102 | 1539f86f91ce0ee6150fba7363976d32cd37ece2 | /codes_auto/1544.count-good-nodes-in-binary-tree.py | 4510f4a60ef07892c81e8453597a7c8dca9a27db | [] | no_license | zhpbo/LeetCode_By_Python | fdee0a8b7ea7ed1f61a99f0041e1c748e50f138c | 0017b9db891d36789116f7299d32510a373e68da | refs/heads/master | 2023-07-09T15:38:45.003002 | 2020-08-18T07:04:51 | 2020-08-18T07:04:51 | 281,598,190 | 0 | 0 | null | 2021-08-18T04:58:39 | 2020-07-22T06:47:05 | null | UTF-8 | Python | false | false | 1,169 | py | #
# @lc app=leetcode.cn id=1544 lang=python3
#
# [1544] count-good-nodes-in-binary-tree
#
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def goodNodes(self, root: TreeNode) -> int:
if not root: return 0
self.ans = 0
res = []
dic = set()
def helper(root,tem,dic):
if not root: return
helper(root.left,tem+[root],dic)
helper(root.right,tem+[root],dic)
tem.append(root)
if not root.left and not root.right:
# res.append(tem)
# print(tem)
tem_max = float("-inf")
for i in tem:
# print(tem_max)
if i.val>=tem_max:
tem_max = i.val
if i not in dic:
dic.add(i)
self.ans+=1
return
helper(root,[],dic)
# print(res)
return self.ans
# @lc code=end | [
"liuyang0001@outlook.com"
] | liuyang0001@outlook.com |
c9d6c6348c39a58c856ae3ef6191dcefa82ea589 | bc5dd7be84a43ec53f8e4215761badb9b61a13ad | /kurs_2/newadvito/advito/backend/gallery/models.py | e61ab108c73a146128033a86a8f56c4c15ceb878 | [] | no_license | MaximMak/DL_Academy_Lessons | ef4758be02e43954748031ac95c970077f71cd7e | 427576859657e88fd81683494397af3df920c674 | refs/heads/master | 2023-01-29T19:53:11.650096 | 2020-12-13T21:40:58 | 2020-12-13T21:40:58 | 276,397,551 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,070 | py | from django.db import models
import os
from PIL import Image
from django.db import models
from django.utils import timezone
def get_path_upload_image(file):
"""
Представление для формата сохранения файлов
"""
date = timezone.now().strftime("%Y-%m-%d")
time = timezone.now().strftime("%H-%M-%S")
end_extention = file.split('.')[1]
head = file.split('.')[0]
if len(head) > 10:
head = head[:10]
file_name = head + '_' + time + '.' + end_extention
return os.path.join('photos', '{}', '{}').format(date, file_name)
class Photo(models.Model):
"""
Фото
"""
name = models.CharField("Имя", max_length=50)
image = models.ImageField("Фото", upload_to="gallery/")
created = models.DateTimeField("Дата создания", auto_now_add=True)
slug = models.SlugField("url", max_length=50, unique=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
img = Image.open(self.image.get_path_upload_image())
super().save(*args, **kwargs)
if self.image:
if img.height > 200 or img.width > 200:
output_size = (200, 200)
img.thumbnail(output_size)
img.save(self.avatar.path)
def save(self, *args, **kwargs):
self.image.name = get_path_upload_image(self.image.name)
super().save(*args, **kwargs)
class Meta:
verbose_name = "Изображение"
verbose_name_plural = "Изображения"
class Gallery(models.Model):
"""
Галерея
"""
name = models.CharField("Имя", max_length=50)
photos = models.ManyToManyField(Photo, verbose_name="Фотографии")
created = models.DateTimeField("Дата создания", auto_now_add=True)
slug = models.SlugField("url", max_length=50, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = "Галерея"
verbose_name_plural = "Галереи"
| [
"54116778+MaximMak@users.noreply.github.com"
] | 54116778+MaximMak@users.noreply.github.com |
ef9859b8ef969a947eaf6ee0be55db1b4e92c210 | f3fb672cee2919f5032fc8d1ac2e3444c6404ed2 | /Algorithms/GALE_EAST_WEST/Utilities/to_generate_data.py | fca25d77c60a36ae5c6efde884eaad539424a02b | [] | no_license | vivekaxl/Parallel | 62c1b642d3c0e653b8beff90308538f78d3c1900 | 0df147ac941c39dde9bbab05e07fc6342b46e84e | refs/heads/master | 2021-01-19T21:54:46.643017 | 2017-05-05T17:20:37 | 2017-05-05T17:20:37 | 88,720,586 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 552 | py | def generate_data(problem, number_of_points):
dataset = []
while len(dataset) < number_of_points:
print "# ", len(dataset),
import sys
sys.stdout.flush()
temp_dataset = []
for run in xrange(number_of_points):
temp_dataset.append(problem.generateInput())
import itertools
dataset.sort()
dataset.extend(list(temp_dataset for temp_dataset,_ in itertools.groupby(temp_dataset)))
from random import shuffle
shuffle(dataset)
return dataset[:number_of_points]
| [
"vivekaxl@gmail.com"
] | vivekaxl@gmail.com |
87dd7aaa81fbd84def91cd66a834b7ce2e50409a | f8ffa8ff257266df3de9d20d95b291e393f88434 | /Python - advanced/zajecia15/02_plecak_req/api.py | 15d755d2cb022e1ee4d319b992aca8ee57999179 | [] | no_license | janiszewskibartlomiej/Python_Code_Me_Gda | c0583c068ef08b6130398ddf93c3a3d1a843b487 | 7568de2a9acf80bab1429bb55bafd89daad9b729 | refs/heads/master | 2020-03-30T05:06:26.757033 | 2020-03-02T08:53:28 | 2020-03-02T08:53:28 | 150,781,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | import json
from flask import Blueprint, request
from db import get_connection
api_bp = Blueprint('api_endpoints', __name__)
def _policz_sume(przedmioty):
suma = 0
for przedmiot in przedmioty:
if przedmiot['waga']:
suma += przedmiot['ilosc'] * przedmiot['waga']
return suma
@api_bp.route('/przedmioty', methods=['GET'])
def przedmioty():
conn = get_connection()
c = conn.cursor()
if request.method == 'GET':
result = c.execute('SELECT * FROM plecak')
przedmioty = result.fetchall()
# przepakowanie z obiektów Row na słowniki
przedmioty = [dict(p) for p in przedmioty]
suma = _policz_sume(przedmioty)
wynik = {'przedmioty': przedmioty,
'suma': suma}
return json.dumps(wynik)
| [
"janiszewski.bartlomiej@gmail.com"
] | janiszewski.bartlomiej@gmail.com |
1afa40c15c84ae40bfde8729c259f3e988e61bdd | 55940b1d627768de8ac11387f60559bbb42047a0 | /stoploss.py | c9ac0fa624ee863e89d83f938e65ce3adb9968fe | [] | no_license | fengmm521/bitmextrade | 8920a28d03f406db18d2a5d3fd806b72fb319c2f | 400e3fcd6d1b70eaccad01eab6df2b3e8f674877 | refs/heads/master | 2021-05-03T04:26:10.993003 | 2018-05-26T13:17:22 | 2018-05-26T13:17:22 | 120,615,689 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,751 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# encoding: utf-8
#客户端调用,用于查看API返回结果
from OkcoinSpotAPI import OKCoinSpot
from OkcoinFutureAPI import OKCoinFuture
from magetool import urltool
import json
import sys
import os
import time
f = open('../../btc/okexapikey/okexapikey.txt','r')
tmpstr = f.read()
f.close()
apikeydic = json.loads(tmpstr)
#初始化apikey,secretkey,url
apikey = apikeydic['apikey']
secretkey = apikeydic['secretkey']
okcoinRESTURL = 'www.okex.com'#'www.okcoin.com' #请求注意:国内账号需要 修改为 www.okcoin.cn
def sayMsg(msg):
cmd = 'say %s'%(msg)
os.system(cmd)
print msg
class TradeTool(object):
"""docstring for ClassName"""
def __init__(self):
self.okcoinFuture = OKCoinFuture(okcoinRESTURL,apikey,secretkey)
self.depthSells = []
self.depthBuys = []
def getDepth(self):
turl = 'https://www.okex.com/api/v1/future_depth.do?symbol=ltc_usd&contract_type=quarter&size=20'
data = urltool.getUrl(turl)
ddic = json.loads(data)
buys = ddic['bids']
sells = ddic['asks']
return buys,sells
#1:开多 2:开空 3:平多 4:平空
def openShort(self,pprice,pamount):
print ('期货开空')
print time.ctime()
print self.okcoinFuture.future_trade('ltc_usd','quarter',str(pprice),str(pamount),'2','0','10')
def closeShort(self,pprice,pamount):
print ('期货平空')
print time.ctime()
print self.okcoinFuture.future_trade('ltc_usd','quarter',str(pprice),str(pamount),'4','0','10')
def openLong(self,pprice,pamount):
print ('期货开多')
print time.ctime()
print self.okcoinFuture.future_trade('ltc_usd','quarter',str(pprice),str(pamount),'1','0','10')
def closeLong(self,pprice,pamount):
print ('期货平多')
print self.okcoinFuture.future_trade('ltc_usd','quarter',str(pprice),str(pamount),'3','0','10')
def getBuyAndSell(tradetool):
try:
bs,ss = tradetool.getDepth()
ss = ss[::-1]
return bs[0][0],ss[0][0]
except Exception as e:
return None,None
def main(ptype,cprice,amount):
tradetool = TradeTool()
print 'is run'
while True:
b = None
s = None
if ptype == 'cl':
b,s = getBuyAndSell(tradetool)
if b and b > cprice:
try:
tradetool.closeLong(cprice,amount)
break
except Exception as e:
print 'closelong erro'
elif ptype == 'cs':
b,s = getBuyAndSell(tradetool)
if s and s < cprice:
try:
tradetool.closeShort(cprice,amount)
break
except Exception as e:
print 'closeshort erro'
else:
print 'b=',b,',s=',s,',time=',time.ctime
time.sleep(300) #5分钟测一次止损价
def test():
tradetool = TradeTool()
bs,ss = tradetool.getDepth()
ss = ss[::-1]
for s in ss:
print s
print '------'
for b in bs:
print b
print getBuyAndSell(tradetool)
if __name__ == '__main__':
args = sys.argv
if len(args) == 4:
ptype = args[1]
cprice = args[2]
camount = args[3]
print(ptype,cprice,camount)
if ptype and cprice and camount:
main(ptype,cprice,amount)
else:
test()
print '参数错误,要输入止损类型,数量和价格'
else:
test()
print '参数错误,要输入止损类型,数量和价格'
| [
"fengmm521@gmail.com"
] | fengmm521@gmail.com |
8d9f43f1f26d66d8a307fbfd9842cfa46d348dc8 | 685038d4be188fa72e9dba1d2213a47ee3aa00a2 | /ECOS2021/Sizing and Finding Critical Point/Outputs/BSA/Critical Point NPC.py | 65490fd14590236bdfce02ebdf623eb1a2105682 | [] | no_license | CIE-UMSS/Tradeoff-between-Installed-Capacity-and-Unserved-Energy | e5599e4e4ac60b97f0c4c57c5de95e493b1b5ac4 | 459f31552e3ab57a2e52167ab82f8f48558e173c | refs/heads/master | 2023-06-01T18:09:29.839747 | 2021-06-19T15:56:26 | 2021-06-19T15:56:26 | 343,720,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,368 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 1 20:49:49 2021
@author: alejandrosoto
"""
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
BSA=pd.read_csv('BSA.csv', sep=',', decimal='.', encoding='latin1')
df=pd.concat([BSA['LLC'], BSA['NPC (USD)']], axis=1, keys=['LLP', 'NPC'])
df['NPC']=df['NPC']/1000
from scipy.optimize import curve_fit
def funcbsa(x, a, b, c,d,e):
return (a*x-b)/(c*x**e+d)
xdata = df['LLP']
ydata=df['NPC']
popt, pcov = curve_fit(funcbsa, xdata, ydata)
residuals = ydata- funcbsa(xdata, *popt)
ss_res = np.sum(residuals**2)
ss_tot = np.sum((ydata-np.mean(ydata))**2)
r_squared = 1 - (ss_res / ss_tot)
print(r_squared)
x_1 = df['LLP']
y_1 = df['NPC']
a = Symbol('a')
j = Symbol('j')
x = Symbol('x')
from numpy import ones,vstack
from numpy.linalg import lstsq
points = [df.iloc[0],df.iloc[20]]
x_coords, y_coords = zip(*points)
A = vstack([x_coords,ones(len(x_coords))]).T
m, c = lstsq(A, y_coords)[0]
#print("Line Solution is y = {m}x + {c}".format(m=m,c=c))
y1=m*x+c
z1=np.array([m, c])
p1= np.poly1d(z1)
m1=-1/m
#plt.show()
f=(popt[0]*x-popt[1])/(popt[2]*x**popt[4]+popt[3])
#eq1=Eq(l1-j)
eq3=Eq(f-j)
#puntos criticos
print(df.NPC.iloc[20]-m1*df.LLP.iloc[20])
print(df.NPC.iloc[0]-m1*df.LLP.iloc[0])
#Solucionador iterativo de sistema de ecuaciones no lineales
liminf=-0.5
limsup=1.8
r=list()
for a in np.arange(liminf,limsup,0.01):
l1=m1*x+a
z2=np.array([m1, a])
p2=np.poly1d(z2)
eq1=Eq(l1-j)
eq3=Eq(f-j)
sol1 = nsolve((eq1, eq3), (x,j), (0.0005, 1.1))
r.append([sol1])
r=pd.DataFrame(r)
r['sol'] = r[0].astype(str)
r[['x','y']] = r.sol.str.split(",",expand=True)
r[['g','g1','x1']] = r.x.str.split("[",expand=True)
del r['g']
del r['g1']
r[['x1','g1']] = r.x1.str.split("]",expand=True)
del r['g1']
r[['y1','g','g1']] = r.y.str.split("]",expand=True)
del r['g1']
del r['g']
r[['g','y2']] = r.y1.str.split("[",expand=True)
del r['g']
del r['y1']
del r['x']
del r['y']
del r[0]
del r['sol']
r = r.rename(columns={'y2': 'y1'})
r['x1'] = r['x1'].astype(float)
r['y1'] = r['y1'].astype(float)
r1=r
points = [df.iloc[0],df.iloc[20]]
x_coords, y_coords = zip(*points)
A = vstack([x_coords,ones(len(x_coords))]).T
m, c = lstsq(A, y_coords)[0]
#print("Line Solution is y = {m}x + {c}".format(m=m,c=c))
y1=m*x+c
z1=np.array([m, c])
p1= np.poly1d(z1)
#Solucionador iteritvo ecuaciones lineales
r=list()
for a in np.arange(liminf,limsup,0.01):
l1=m1*x+a
z2=np.array([m1, a])
p2=np.poly1d(z2)
eq1=Eq(l1-j)
sol = solve((l1-j, y1-j),(x, j))
x1_1=float(sol[x])
y1_1=float(sol[j])
r.append([sol])
r=pd.DataFrame(r)
r['sol'] = r[0].astype(str)
r[['x','y']] = r.sol.str.split(",",expand=True)
r[['g','x1']] = r.x.str.split(":",expand=True)
del r['g']
r[['g1','y1']] = r.y.str.split(":",expand=True)
del r['g1']
r[['y1','g2']] = r.y1.str.split("}",expand=True)
del r['g2']
del r['sol']
del r[0]
del r['x']
del r['y']
r = r.rename(columns={'x1': 'x', 'y1': 'y'})
r['x'] = r['x'].astype(float)
r['y'] = r['y'].astype(float)
#print(r)
rt = pd.concat([r, r1], axis=1, join='inner')
rt['step']=np.arange(liminf,limsup,0.01)
rt['d']=((rt['x']-rt['x1'])**2+(rt['y']-rt['y1'])**2)**0.5
print('x de d max:',rt['x1'].iloc[rt['d'].idxmax()])
print('y de d max:',rt['y1'].iloc[rt['d'].idxmax()])
print('Distancia Máxima',rt['d'].max())
a=rt['step'].iloc[rt['d'].idxmax()]
l1=m1*x+a
z2=np.array([m1, a])
p2=np.poly1d(z2)
#plt.show()
BSAf=popt
BSAr2=r_squared
BSAx=rt['x1'].iloc[rt['d'].idxmax()]
BSAy=rt['y1'].iloc[rt['d'].idxmax()]
plt.figure(figsize=(10,6.7))
xp = np.linspace(0,1, 100)
_ = plt.plot(x_1, y_1, '.',label='data', color='blue')
o= plt.plot(xp, funcbsa(xp,*popt), '--', label='fit', color='green')
o1=plt.plot(xp, p1(xp), '-', label='secant', color='red')
_=plt.plot(xp, p2(xp), '-', label='distance', color='black')
plt.plot(rt['x1'].iloc[rt['d'].idxmax()], rt['y1'].iloc[rt['d'].idxmax()], marker='o', markersize=3, color="green")
#plt.plot(x_1, y_1, '-')
plt.plot(BSAx,BSAy, marker='o', markersize=5, color="red", label='critical point')
#escala real
plt.ylabel('NPC [Thousand USD]')
plt.xlabel('LLP')
plt.axis('scaled')
plt.legend()
#plt.savefig('critical point1.png',dpi=600,bbox_inches="tight")
#plt.show()
plt.show()
#Results
print('R2=',r_squared)
print('parameters=',popt)
print('critical point=',BSAx)
| [
"asm19971997@gmail.com"
] | asm19971997@gmail.com |
d8b77b98859c658e649402c87b8854c6c7db676d | e14360f5001f865824206e54de6294b57b4ada48 | /vendor/migrations/0003_userdetails.py | 22c9de593da9d4692db694d28274dd69682660e2 | [] | no_license | adnankattekaden/thatsmestore | cb2b038f7b7e5b0f1487a6cce94b76c354e60073 | 034c05285370594f34b8ae87069029dfc5041765 | refs/heads/master | 2023-02-27T06:56:35.923684 | 2021-01-31T13:51:04 | 2021-01-31T13:51:04 | 311,295,309 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | # Generated by Django 3.1.2 on 2020-11-10 11:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('vendor', '0002_product_image'),
]
operations = [
migrations.CreateModel(
name='Userdetails',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(blank=True, null=True, upload_to='userdata/images')),
],
),
]
| [
"adnankattekaden2020@gmail.com"
] | adnankattekaden2020@gmail.com |
3076a77160acd99a7f01575554874a512bd08b22 | 3fbd28e72606e5358328bfe4b99eb0349ca6a54f | /.history/a_expressions_20210608021929.py | 35da28d21e88e58f0ca2df1b44a62dda7901f6a1 | [] | no_license | Tarun1001/codeforces | f0a2ef618fbd45e3cdda3fa961e249248ca56fdb | 576b505d4b8b8652a3f116f32d8d7cda4a6644a1 | refs/heads/master | 2023-05-13T04:50:01.780931 | 2021-06-07T21:35:26 | 2021-06-07T21:35:26 | 374,399,423 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | a= int(input())
b= int(input())
c= int(input())
if(a==1 and c==1):
return 2+b
elif(a==1):
return (a+b)*c
elif(b==1):
return max((a+b)*c,a*(b+c))
elif(c==1):
return a*(b+c)
else:
return a*b*c | [
"tarunsivasai8@gmail.com"
] | tarunsivasai8@gmail.com |
3c854c44e20adec53e0eb0ab351f336fe02ee038 | 04b1803adb6653ecb7cb827c4f4aa616afacf629 | /third_party/blink/tools/blinkpy/common/system/filesystem_mock.py | b96f310072e21a1919c76670aaabeebba4b11d54 | [
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | permissive | Samsung/Castanets | 240d9338e097b75b3f669604315b06f7cf129d64 | 4896f732fc747dfdcfcbac3d442f2d2d42df264a | refs/heads/castanets_76_dev | 2023-08-31T09:01:04.744346 | 2021-07-30T04:56:25 | 2021-08-11T05:45:21 | 125,484,161 | 58 | 49 | BSD-3-Clause | 2022-10-16T19:31:26 | 2018-03-16T08:07:37 | null | UTF-8 | Python | false | false | 20,672 | py | # Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import errno
import hashlib
import os
import re
import StringIO
import unittest
from blinkpy.common.system.filesystem import _remove_contents, _sanitize_filename
class MockFileSystem(object):
# pylint: disable=unused-argument
sep = '/'
pardir = '..'
def __init__(self, files=None, dirs=None, cwd='/'):
"""Initializes a "mock" filesystem that can be used to replace the
FileSystem class in tests.
Args:
files: A dictionary of filenames to file contents. A file contents
value of None indicates that the file does not exist.
"""
self.files = files or {}
self.executable_files = set()
self.written_files = {}
self.last_tmpdir = None
self.current_tmpno = 0
self.cwd = cwd
self.dirs = set(dirs or [])
self.dirs.add(cwd)
for file_path in self.files:
directory = self.dirname(file_path)
while directory not in self.dirs:
self.dirs.add(directory)
directory = self.dirname(directory)
def clear_written_files(self):
# This function can be used to track what is written between steps in a test.
self.written_files = {}
def _raise_not_found(self, path):
raise IOError(errno.ENOENT, path, os.strerror(errno.ENOENT))
def _split(self, path):
# This is not quite a full implementation of os.path.split; see:
# http://docs.python.org/library/os.path.html#os.path.split
if self.sep in path:
return path.rsplit(self.sep, 1)
return ('', path)
def make_executable(self, file_path):
self.executable_files.add(file_path)
def abspath(self, path):
if os.path.isabs(path):
return self.normpath(path)
return self.abspath(self.join(self.cwd, path))
def realpath(self, path):
return self.abspath(path)
def basename(self, path):
return self._split(path)[1]
def expanduser(self, path):
if path[0] != '~':
return path
parts = path.split(self.sep, 1)
home_directory = self.sep + 'Users' + self.sep + 'mock'
if len(parts) == 1:
return home_directory
return home_directory + self.sep + parts[1]
def path_to_module(self, module_name):
return '/mock-checkout/third_party/blink/tools/' + module_name.replace('.', '/') + '.py'
def chdir(self, path):
path = self.normpath(path)
if not self.isdir(path):
raise OSError(errno.ENOENT, path, os.strerror(errno.ENOENT))
self.cwd = path
def copyfile(self, source, destination):
if not self.exists(source):
self._raise_not_found(source)
if self.isdir(source):
raise IOError(errno.EISDIR, source, os.strerror(errno.EISDIR))
if self.isdir(destination):
raise IOError(errno.EISDIR, destination, os.strerror(errno.EISDIR))
if not self.exists(self.dirname(destination)):
raise IOError(errno.ENOENT, destination, os.strerror(errno.ENOENT))
self.files[destination] = self.files[source]
self.written_files[destination] = self.files[source]
def dirname(self, path):
return self._split(path)[0]
def exists(self, path):
return self.isfile(path) or self.isdir(path)
def files_under(self, path, dirs_to_skip=None, file_filter=None):
dirs_to_skip = dirs_to_skip or []
filter_all = lambda fs, dirpath, basename: True
file_filter = file_filter or filter_all
files = []
if self.isfile(path):
if file_filter(self, self.dirname(path), self.basename(path)) and self.files[path] is not None:
files.append(path)
return files
if self.basename(path) in dirs_to_skip:
return []
if not path.endswith(self.sep):
path += self.sep
dir_substrings = [self.sep + d + self.sep for d in dirs_to_skip]
for filename in self.files:
if not filename.startswith(path):
continue
suffix = filename[len(path) - 1:]
if any(dir_substring in suffix for dir_substring in dir_substrings):
continue
dirpath, basename = self._split(filename)
if file_filter(self, dirpath, basename) and self.files[filename] is not None:
files.append(filename)
return files
def getcwd(self):
return self.cwd
def glob(self, glob_string):
# FIXME: This handles '*', but not '?', '[', or ']'.
glob_string = re.escape(glob_string)
glob_string = glob_string.replace('\\*', '[^\\/]*') + '$'
glob_string = glob_string.replace('\\/', '/')
path_filter = lambda path: re.match(glob_string, path)
# We could use fnmatch.fnmatch, but that might not do the right thing on Windows.
existing_files = [path for path, contents in self.files.items() if contents is not None]
return filter(path_filter, existing_files) + filter(path_filter, self.dirs)
def isabs(self, path):
return path.startswith(self.sep)
def isfile(self, path):
return path in self.files and self.files[path] is not None
def isdir(self, path):
return self.normpath(path) in self.dirs
def _slow_but_correct_join(self, *comps):
return re.sub(re.escape(os.path.sep), self.sep, os.path.join(*comps))
def join(self, *comps):
# This function is called a lot, so we optimize it; there are
# unit tests to check that we match _slow_but_correct_join(), above.
path = ''
sep = self.sep
for comp in comps:
if not comp:
continue
if comp[0] == sep:
path = comp
continue
if path:
path += sep
path += comp
if comps[-1] == '' and path:
path += '/'
path = path.replace(sep + sep, sep)
return path
def listdir(self, path):
_, directories, files = list(self.walk(path))[0]
return directories + files
def walk(self, top):
sep = self.sep
if not self.isdir(top):
raise OSError('%s is not a directory' % top)
if not top.endswith(sep):
top += sep
directories = []
files = []
for file_path in self.files:
if self.exists(file_path) and file_path.startswith(top):
remaining = file_path[len(top):]
if sep in remaining:
directory = remaining[:remaining.index(sep)]
if directory not in directories:
directories.append(directory)
else:
files.append(remaining)
file_system_tuples = [(top[:-1], directories, files)]
for directory in directories:
directory = top + directory
tuples_from_subdirs = self.walk(directory)
file_system_tuples += tuples_from_subdirs
return file_system_tuples
def mtime(self, path):
if self.exists(path):
return 0
self._raise_not_found(path)
def mktemp(self, suffix='', prefix='tmp', dir=None, **_): # pylint: disable=redefined-builtin
if dir is None:
dir = self.sep + '__im_tmp'
curno = self.current_tmpno
self.current_tmpno += 1
self.last_tmpdir = self.join(dir, '%s_%u_%s' % (prefix, curno, suffix))
return self.last_tmpdir
def mkdtemp(self, **kwargs):
class TemporaryDirectory(object):
def __init__(self, fs, **kwargs):
self._kwargs = kwargs
self._filesystem = fs
self._directory_path = fs.mktemp(**kwargs) # pylint: disable=protected-access
fs.maybe_make_directory(self._directory_path)
def __str__(self):
return self._directory_path
def __enter__(self):
return self._directory_path
def __exit__(self, exception_type, exception_value, traceback):
# Only self-delete if necessary.
# FIXME: Should we delete non-empty directories?
if self._filesystem.exists(self._directory_path):
self._filesystem.rmtree(self._directory_path)
return TemporaryDirectory(fs=self, **kwargs)
def maybe_make_directory(self, *path):
norm_path = self.normpath(self.join(*path))
while norm_path and not self.isdir(norm_path):
self.dirs.add(norm_path)
norm_path = self.dirname(norm_path)
def move(self, source, destination):
if not self.exists(source):
self._raise_not_found(source)
if self.isfile(source):
self.files[destination] = self.files[source]
self.written_files[destination] = self.files[destination]
self.files[source] = None
self.written_files[source] = None
return
self.copytree(source, destination)
self.rmtree(source)
def _slow_but_correct_normpath(self, path):
return re.sub(re.escape(os.path.sep), self.sep, os.path.normpath(path))
def normpath(self, path):
# This function is called a lot, so we try to optimize the common cases
# instead of always calling _slow_but_correct_normpath(), above.
if '..' in path or '/./' in path:
# This doesn't happen very often; don't bother trying to optimize it.
return self._slow_but_correct_normpath(path)
if not path:
return '.'
if path == '/':
return path
if path == '/.':
return '/'
if path.endswith('/.'):
return path[:-2]
if path.endswith('/'):
return path[:-1]
return path
def open_binary_tempfile(self, suffix=''):
path = self.mktemp(suffix)
return (WritableBinaryFileObject(self, path), path)
def open_binary_file_for_reading(self, path):
if self.files[path] is None:
self._raise_not_found(path)
return ReadableBinaryFileObject(self, path, self.files[path])
def open_binary_file_for_writing(self, path):
return WritableBinaryFileObject(self, path)
def read_binary_file(self, path):
# Intentionally raises KeyError if we don't recognize the path.
if self.files[path] is None:
self._raise_not_found(path)
return self.files[path]
def write_binary_file(self, path, contents):
# FIXME: should this assert if dirname(path) doesn't exist?
self.maybe_make_directory(self.dirname(path))
self.files[path] = contents
self.written_files[path] = contents
def open_text_tempfile(self, suffix=''):
path = self.mktemp(suffix)
return (WritableTextFileObject(self, path), path)
def open_text_file_for_reading(self, path):
if self.files[path] is None:
self._raise_not_found(path)
return ReadableTextFileObject(self, path, self.files[path])
def open_text_file_for_writing(self, path):
return WritableTextFileObject(self, path)
def read_text_file(self, path):
return self.read_binary_file(path).decode('utf-8')
def write_text_file(self, path, contents):
return self.write_binary_file(path, contents.encode('utf-8'))
def sha1(self, path):
contents = self.read_binary_file(path)
return hashlib.sha1(contents).hexdigest()
def relpath(self, path, start='.'):
# Since os.path.relpath() calls os.path.normpath()
# (see http://docs.python.org/library/os.path.html#os.path.abspath )
# it also removes trailing slashes and converts forward and backward
# slashes to the preferred slash os.sep.
start = self.abspath(start)
path = self.abspath(path)
common_root = start
dot_dot = ''
while not common_root == '':
if path.startswith(common_root):
break
common_root = self.dirname(common_root)
dot_dot += '..' + self.sep
rel_path = path[len(common_root):]
if not rel_path:
return '.'
if rel_path[0] == self.sep:
# It is probably sufficient to remove just the first character
# since os.path.normpath() collapses separators, but we use
# lstrip() just to be sure.
rel_path = rel_path.lstrip(self.sep)
elif not common_root == '/':
# We are in the case typified by the following example:
# path = "/tmp/foobar", start = "/tmp/foo" -> rel_path = "bar"
common_root = self.dirname(common_root)
dot_dot += '..' + self.sep
rel_path = path[len(common_root) + 1:]
return dot_dot + rel_path
def remove(self, path, retry=True):
if self.files[path] is None:
self._raise_not_found(path)
self.files[path] = None
self.written_files[path] = None
    def rmtree(self, path_to_remove, ignore_errors=True, onerror=None):
        """Recursively delete a directory tree in the mock FS.

        *ignore_errors* and *onerror* are accepted for interface compatibility
        but unused: deletion in the mock never fails.
        """
        path_to_remove = self.normpath(path_to_remove)
        for file_path in self.files:
            # We need to add a trailing separator to path_to_remove to avoid matching
            # cases like path_to_remove='/foo/b' and file_path='/foo/bar/baz'.
            if file_path == path_to_remove or file_path.startswith(path_to_remove + self.sep):
                self.files[file_path] = None
        def should_remove(directory):
            # Same exact-match-or-child test as above, applied to directories.
            return directory == path_to_remove or directory.startswith(path_to_remove + self.sep)
        self.dirs = {d for d in self.dirs if not should_remove(d)}
    def remove_contents(self, dirname):
        # Delegate to the module-level helper with sleeping disabled so tests run fast.
        return _remove_contents(self, dirname, sleep=lambda *args, **kw: None)
    def copytree(self, source, destination):
        """Copy every file under *source* into *destination*, creating parent dirs.

        NOTE(review): the match is a bare startswith(source) with no trailing
        separator, so source='/foo/b' would also copy '/foo/bar/...' — unlike
        rmtree() above, which guards against this. Presumably callers always
        pass a full directory path; confirm before relying on it.
        """
        source = self.normpath(source)
        destination = self.normpath(destination)
        # Iterate over a snapshot since we mutate self.files while copying.
        for source_file in list(self.files):
            if source_file.startswith(source):
                destination_path = self.join(destination, self.relpath(source_file, source))
                self.maybe_make_directory(self.dirname(destination_path))
                self.files[destination_path] = self.files[source_file]
def split(self, path):
idx = path.rfind(self.sep)
if idx == -1:
return ('', path)
return (path[:idx], path[(idx + 1):])
def splitext(self, path):
idx = path.rfind('.')
if idx == -1:
idx = len(path)
return (path[0:idx], path[idx:])
    def symlink(self, source, link_name):
        # The mock deliberately has no symlink support.
        raise NotImplementedError('Symlink not expected to be called in tests')
    def sanitize_filename(self, filename, replacement='_'):
        # Delegate to the module-level helper; disallowed characters become *replacement*.
        return _sanitize_filename(filename, replacement)
class WritableBinaryFileObject(object):
    """File-like writer that accumulates writes into a mock filesystem entry.

    Usable as a context manager; closing only flips the *closed* flag, since
    all data is already stored in fs.files / fs.written_files on each write.
    """

    def __init__(self, fs, path):
        self.fs = fs
        self.path = path
        self.closed = False
        # Opening for write truncates any existing contents.
        self.fs.files[path] = ''

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        self.close()

    def close(self):
        self.closed = True

    def write(self, string):
        contents = self.fs.files[self.path] + string
        self.fs.files[self.path] = contents
        # Mirror into the written_files log so tests can assert on writes.
        self.fs.written_files[self.path] = contents
class WritableTextFileObject(WritableBinaryFileObject):
    """Text-mode writer: encodes unicode input to UTF-8 before storing.

    NOTE(review): this is Python 2 era code — under Python 3, write() would
    concatenate bytes onto the str the base class initialised; confirm the
    target interpreter before reuse.
    """
    def write(self, string):
        WritableBinaryFileObject.write(self, string.encode('utf-8'))
    def writelines(self, lines):
        # Unlike write(), this REPLACES the file's contents rather than appending.
        self.fs.files[self.path] = "".join(lines).encode('utf-8')
        self.fs.written_files[self.path] = self.fs.files[self.path]
class ReadableBinaryFileObject(object):
    """Seekable, read-only file-like view over a bytestring held in the mock FS.

    Supports the context-manager protocol; close() only flips the *closed*
    flag because there is no underlying OS resource to release.
    """

    def __init__(self, fs, path, data):
        self.fs = fs
        self.path = path
        self.closed = False
        self.data = data
        self.offset = 0

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        self.close()

    def close(self):
        self.closed = True

    def read(self, num_bytes=None):
        # A falsy num_bytes (None — and also 0) reads everything from the
        # cursor to the end, matching the original implementation.
        if not num_bytes:
            return self.data[self.offset:]
        start, self.offset = self.offset, self.offset + num_bytes
        return self.data[start:self.offset]

    def seek(self, offset, whence=os.SEEK_SET):
        if whence == os.SEEK_SET:
            self.offset = offset
        elif whence == os.SEEK_CUR:
            self.offset += offset
        elif whence == os.SEEK_END:
            self.offset = len(self.data) + offset
        else:
            assert False, "Unknown seek mode %s" % whence
class ReadableTextFileObject(ReadableBinaryFileObject):
    """Text-mode reader backed by a StringIO over the UTF-8-decoded contents.

    Delegation to the StringIO replaces the base class's byte-offset
    machinery. NOTE(review): Python 2 era code — uses the StringIO module
    and the py2 iterator protocol (next()).
    """
    def __init__(self, fs, path, data):
        super(ReadableTextFileObject, self).__init__(fs, path, StringIO.StringIO(data.decode('utf-8')))
    def close(self):
        # Release the StringIO buffer, then mark the object closed.
        self.data.close()
        super(ReadableTextFileObject, self).close()
    def read(self, num_bytes=-1):
        return self.data.read(num_bytes)
    def readline(self, length=None):
        return self.data.readline(length)
    def readlines(self):
        return self.data.readlines()
    def __iter__(self):
        return self.data.__iter__()
    def next(self):
        # Python 2 iterator protocol.
        return self.data.next()
    def seek(self, offset, whence=os.SEEK_SET):
        self.data.seek(offset, whence)
class FileSystemTestCase(unittest.TestCase):
    """unittest.TestCase mix-in adding assertFilesAdded() for mock-FS tests."""
    # pylint: disable=invalid-name
    # Use assertFilesAdded to be consistent with unittest.
    class _AssertFilesAddedContext(object):
        """Internal class used by FileTestCase.assertFilesAdded()."""
        def __init__(self, test_case, mock_filesystem, expected_files):
            self.test_case = test_case
            self.mock_filesystem = mock_filesystem
            self.expected_files = expected_files
        def __enter__(self):
            # Make sure that the expected_files aren't already in the mock
            # file system.
            for filepath in self.expected_files:
                assert filepath not in self.mock_filesystem.files, "%s was already in mock file system (%r)" % (
                    filepath, self.mock_filesystem.files)
            return self
        def __exit__(self, exc_type, exc_value, tb):
            # Exception already occurring, just exit.
            if exc_type is not None:
                return
            # On clean exit, verify every expected file now exists with the
            # expected contents.
            for filepath in sorted(self.expected_files):
                self.test_case.assertIn(filepath, self.mock_filesystem.files)
                self.test_case.assertEqual(self.expected_files[filepath], self.mock_filesystem.files[filepath])
    def assertFilesAdded(self, mock_filesystem, files):
        """Assert that the given files were added to the mock_filesystem.
        Use in a similar manner to self.assertRaises;
        with self.assertFilesAdded(mock_filesystem, {'/newfile': 'contents'}):
            code(mock_filesystem)
        """
        return self._AssertFilesAddedContext(self, mock_filesystem, files)
| [
"sunny.nam@samsung.com"
] | sunny.nam@samsung.com |
34555dc9dc68a57a4729abaf8e0a07d35f25ae21 | 6a8d047b4502507c67120a0a32640c6a3e60d8a5 | /apps/accounts/factories.py | 89d2759f6901b775f25b65129ebcea660fc7e0f4 | [] | no_license | dwebdevcore/BoardDirector_dashboard | 320f110d7581c065920b7607ef06a457851c4bb4 | 7cd2b2abe1c660531a805d84930c8a6183b863b6 | refs/heads/master | 2020-05-26T05:32:37.501642 | 2019-05-22T22:33:25 | 2019-05-22T22:33:25 | 188,122,429 | 10 | 0 | null | null | null | null | UTF-8 | Python | false | false | 549 | py | # -*- coding: utf-8 -*-
import factory
from django.template.defaultfilters import slugify
from accounts.models import Account
from billing.models import Plan
class AccountFactory(factory.DjangoModelFactory):
    """factory_boy factory producing Account test instances with derived defaults."""
    class Meta:
        model = Account
    @factory.lazy_attribute_sequence
    def name(self, n):
        # Unique per-sequence company name, e.g. 'test company 3'.
        return 'test company {0}'.format(n)
    @factory.lazy_attribute
    def url(self):
        # Slug of the name, truncated to its last 25 characters —
        # presumably to respect the url field's max length; confirm on the model.
        return slugify(self.name)[-25:]
    @factory.lazy_attribute
    def plan(self):
        # Every test account starts on the default billing plan.
        return Plan.objects.get(name=Plan.DEFAULT_PLAN)
"dwebdevcore@gmail.com"
] | dwebdevcore@gmail.com |
e6701828639b234f27ab7c6ab310143480cc2991 | bcf11ccd5ec986f461edcf874c2d12e2fad13e76 | /junk/84.py | 31e2338a713ecb18fd50312248969986cb4c64bb | [] | no_license | buxuele/algo_snippet | c1a8a0855e6c2a02f5045d21710baa7fa8b00d13 | 9ab71d523f5079b6d72aef11b09120fee047f66c | refs/heads/master | 2021-05-18T06:45:56.111807 | 2021-02-21T17:13:31 | 2021-02-21T17:13:31 | 251,164,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | # author: fanchuangwater@gmail.com
# date: 2020/5/8 下午8:57
# 目的:
nums = [2,1,5,6,2,3]
| [
"baogebuxuele@163.com"
] | baogebuxuele@163.com |
e29b4d58a41e80e10bad311b3a86b8149f2633e0 | 4f52fc44d00b17eacc402aec1c83a408496432d8 | /addons/print/models/__init__.py | c1570e5074da1442f414cfb1209732f6b260c085 | [] | no_license | sasakuma/odoo-print | cfedd32efb3e937bb077a8ff5454f9b8723f8f4e | bb39c2aad24d2d58c079eb96d3d308562626fc1d | refs/heads/master | 2020-03-29T18:24:58.536633 | 2018-09-24T10:31:02 | 2018-09-24T10:56:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | """Printing models"""
from . import ir_actions_report
from . import res_users
from . import print_printer
| [
"mbrown@fensystems.co.uk"
] | mbrown@fensystems.co.uk |
8af778997ba27021c502b0419516c071f0d68db1 | 30c820b171447ab772e58f04ac0dc55c4d5ffbdf | /archive/code/TransactivePlatform/components/config.py | 709c3f6ad969ad3e22d1ac395380982608426cfe | [] | no_license | TransactiveSCC/TRANSAX | 3b58cff757fb646a825872dc243e04eea3d0b712 | 13c45a1254cb14607d1bfa86267dbde9e61fd538 | refs/heads/main | 2023-05-13T10:50:20.868093 | 2021-05-15T02:45:53 | 2021-05-15T02:45:53 | 316,015,185 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,339 | py | # deployment
DIRECTORY_ADDRESS = 'tcp://127.0.0.1:10001'
TRANSACTION_GAS = "0x100000000"
from bytecode import *
#BYTECODE = "0x606060405260016000806101000a81548167ffffffffffffffff021916908367ffffffffffffffff160217905550640100000000600060086101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055506001600060106101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055506000600360006101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555034156100ba57600080fd5b61187e806100c96000396000f30060606040526004361061008e576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806340d599c0146100935780634bb278f3146100e657806366948833146100fb578063696e89d6146101285780637f49f33c1461017357806383bbb2cc146101a0578063be38aaf01461022c578063e1f9673814610292575b600080fd5b341561009e57600080fd5b6100e4600480803567ffffffffffffffff1690602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff169060200190919050506102bf565b005b34156100f157600080fd5b6100f961033e565b005b341561010657600080fd5b610126600480803567ffffffffffffffff169060200190919050506105e7565b005b341561013357600080fd5b6101716004808035151590602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff169060200190919050506106d8565b005b341561017e57600080fd5b61019e600480803567ffffffffffffffff16906020019091905050610839565b005b34156101ab57600080fd5b61022a600480803567ffffffffffffffff1690602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff16906020019091905050610960565b005b341561023757600080fd5b610290600480803567ffffffffffffffff1690602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff1690602001909190803567ffffffffffffffff16906020019091905050611368565b005b341561029d57600080fd5b6102bd600480803567ffffffffffffffff169060200190919050506115e3565b005b826000806101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555081600060086101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055508060006010610
1000a81548167ffffffffffffffff021916908367ffffffffffffffff160217905550505050565b6000806103496116d4565b600060028054905011151561035d57600080fd5b6002600360009054906101000a900467ffffffffffffffff1667ffffffffffffffff1681548110151561038c57fe5b90600052602060002090600302019250600091505b8260010160009054906101000a900467ffffffffffffffff1667ffffffffffffffff168267ffffffffffffffff1610156105e2578260000160008367ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060a060405190810160405290816000820160009054906101000a900467ffffffffffffffff1667ffffffffffffffff1667ffffffffffffffff1681526020016000820160089054906101000a900467ffffffffffffffff1667ffffffffffffffff1667ffffffffffffffff1681526020016000820160109054906101000a900467ffffffffffffffff1667ffffffffffffffff1667ffffffffffffffff1681526020016000820160189054906101000a900467ffffffffffffffff1667ffffffffffffffff1667ffffffffffffffff1681526020016001820160009054906101000a900467ffffffffffffffff1667ffffffffffffffff1667ffffffffffffffff168152505090507fcfe4c3b38a50e08c5047638e0bee801ce070a1cbac2d78542db971956768cd5c81600001518260200151836040015184606001518560800151604051808667ffffffffffffffff1667ffffffffffffffff1681526020018567ffffffffffffffff1667ffffffffffffffff1681526020018467ffffffffffffffff1667ffffffffffffffff1681526020018367ffffffffffffffff1667ffffffffffffffff1681526020018267ffffffffffffffff1667ffffffffffffffff1681526020019550505050505060405180910390a181806001019250506103a1565b505050565b6001805490508167ffffffffffffffff1610151561060457600080fd5b60018167ffffffffffffffff1681548110151561061d57fe5b906000526020600020906003020160000160099054906101000a900460ff16151561064757600080fd5b600060018267ffffffffffffffff1681548110151561066257fe5b906000526020600020906003020160000160096101000a81548160ff0219169083151502179055507ff88bfed0ff979419f2178a7f8d3d4fc2a25d8923222059f20b0ce1169fd9c3e081604051808267ffffffffffffffff1667ffffffffffffffff16815260200191505060405180910390a150565b600180548060010182816106ec9190611736565b9160005260206000209060030201600060606
040519081016040528087151581526020018567ffffffffffffffff16815260200160001515815250909190915060008201518160000160006101000a81548160ff02191690831515021790555060208201518160000160016101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555060408201518160000160096101000a81548160ff0219169083151502179055505050507f18c55f4911865bc134532d64f15f56413f8ab9b31f15276018bd73fe3e7388f66001808054905003848484604051808567ffffffffffffffff1667ffffffffffffffff168152602001841515151581526020018367ffffffffffffffff1667ffffffffffffffff1681526020018267ffffffffffffffff1667ffffffffffffffff16815260200194505050505060405180910390a1505050565b6002805480600101828161084d9190611768565b916000526020600020906003020160006040805190810160405280600067ffffffffffffffff168152602001600067ffffffffffffffff16815250909190915060008201518160010160006101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555060208201518160010160086101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055505050507f09a6f584fa54b80c174ca819878e0f4e3a6ae2416c51db61c0f275c45df6826260016002805490500382604051808367ffffffffffffffff1667ffffffffffffffff1681526020018267ffffffffffffffff1667ffffffffffffffff1681526020019250505060405180910390a150565b60006002805490508767ffffffffffffffff1610151561097f57600080fd5b6001805490508667ffffffffffffffff1610151561099c57600080fd5b6001805490508567ffffffffffffffff161015156109b957600080fd5b6000809054906101000a900467ffffffffffffffff1667ffffffffffffffff168467ffffffffffffffff161015156109f057600080fd5b60018667ffffffffffffffff16815481101515610a0957fe5b906000526020600020906003020160000160009054906101000a900460ff161515610a3357600080fd5b60018667ffffffffffffffff16815481101515610a4c57fe5b906000526020600020906003020160000160099054906101000a900460ff161515610a7657600080fd5b600060018767ffffffffffffffff16815481101515610a9157fe5b906000526020600020906003020160010160008667ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060009054906101000a900467ffffffffffffffff1667ffffffffffffffff161
11515610af257600080fd5b60018567ffffffffffffffff16815481101515610b0b57fe5b906000526020600020906003020160000160009054906101000a900460ff16151515610b3657600080fd5b60018567ffffffffffffffff16815481101515610b4f57fe5b906000526020600020906003020160000160099054906101000a900460ff161515610b7957600080fd5b600060018667ffffffffffffffff16815481101515610b9457fe5b906000526020600020906003020160010160008667ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060009054906101000a900467ffffffffffffffff1667ffffffffffffffff16111515610bf557600080fd5b60028767ffffffffffffffff16815481101515610c0e57fe5b9060005260206000209060030201905060018667ffffffffffffffff16815481101515610c3757fe5b906000526020600020906003020160010160008567ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060009054906101000a900467ffffffffffffffff1667ffffffffffffffff16600060089054906101000a900467ffffffffffffffff1667ffffffffffffffff168467ffffffffffffffff1602811515610cc257fe5b048160020160008867ffffffffffffffff1667ffffffffffffffff1681526020019081526020016000206000828254019250508190555060018567ffffffffffffffff16815481101515610d1257fe5b906000526020600020906003020160010160008567ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060009054906101000a900467ffffffffffffffff1667ffffffffffffffff16600060089054906101000a900467ffffffffffffffff1667ffffffffffffffff168467ffffffffffffffff1602811515610d9d57fe5b048160020160008767ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060008282540192505081905550600060089054906101000a900467ffffffffffffffff1667ffffffffffffffff168160020160008867ffffffffffffffff1667ffffffffffffffff1681526020019081526020016000205411151515610e2c57600080fd5b600060089054906101000a900467ffffffffffffffff1667ffffffffffffffff168160020160008767ffffffffffffffff1667ffffffffffffffff1681526020019081526020016000205411151515610e8457600080fd5b60018667ffffffffffffffff16815481101515610e9d57fe5b906000526020600020906003020160020160008567ffffffffffffffff1667ffffffffffffffff168152602001908152602
00160002060009054906101000a900467ffffffffffffffff1667ffffffffffffffff168267ffffffffffffffff1610151515610f0a57600080fd5b60018567ffffffffffffffff16815481101515610f2357fe5b906000526020600020906003020160020160008567ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060009054906101000a900467ffffffffffffffff1667ffffffffffffffff168267ffffffffffffffff1611151515610f9057600080fd5b60a0604051908101604052808767ffffffffffffffff1681526020018667ffffffffffffffff1681526020018567ffffffffffffffff1681526020018467ffffffffffffffff1681526020018367ffffffffffffffff168152508160000160008360010160009054906101000a900467ffffffffffffffff1667ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060008201518160000160006101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555060208201518160000160086101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555060408201518160000160106101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555060608201518160000160186101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555060808201518160010160006101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555090505060018160010160008282829054906101000a900467ffffffffffffffff160192506101000a81548167ffffffffffffffff021916908367ffffffffffffffff160217905550828160010160088282829054906101000a900467ffffffffffffffff160192506101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055506002600360009054906101000a900467ffffffffffffffff1667ffffffffffffffff168154811015156111d157fe5b906000526020600020906003020160010160089054906101000a900467ffffffffffffffff1667ffffffffffffffff168160010160089054906101000a900467ffffffffffffffff1667ffffffffffffffff1611156112545786600360006101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055505b7ff9928d5c344a22e3a5b154d6f4a1e29f9af7e9aa7075a79b36e58d67ce54a31e8787878787878760010160089054906101000a900467ffffffffffffffff16604051808867ffffffffffffffff1667ffffffffffffffff1681526020018767fffff
fffffffffff1667ffffffffffffffff1681526020018667ffffffffffffffff1667ffffffffffffffff1681526020018567ffffffffffffffff1667ffffffffffffffff1681526020018467ffffffffffffffff1667ffffffffffffffff1681526020018367ffffffffffffffff1667ffffffffffffffff1681526020018267ffffffffffffffff1667ffffffffffffffff16815260200197505050505050505060405180910390a150505050505050565b6001805490508467ffffffffffffffff1610151561138557600080fd5b60018467ffffffffffffffff1681548110151561139e57fe5b906000526020600020906003020160000160099054906101000a900460ff161515156113c957600080fd5b6000809054906101000a900467ffffffffffffffff1667ffffffffffffffff168367ffffffffffffffff1610151561140057600080fd5b60008267ffffffffffffffff1611151561141957600080fd5b600060109054906101000a900467ffffffffffffffff1667ffffffffffffffff168267ffffffffffffffff161115151561145257600080fd5b8160018567ffffffffffffffff1681548110151561146c57fe5b906000526020600020906003020160010160008567ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060006101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055508060018567ffffffffffffffff168154811015156114e257fe5b906000526020600020906003020160020160008567ffffffffffffffff1667ffffffffffffffff16815260200190815260200160002060006101000a81548167ffffffffffffffff021916908367ffffffffffffffff1602179055507f9359ad5d68dbcec8bfe7a88c5f9514cd0f59c955b96ef711b1280b5510845ced84848484604051808567ffffffffffffffff1667ffffffffffffffff1681526020018467ffffffffffffffff1667ffffffffffffffff1681526020018367ffffffffffffffff1667ffffffffffffffff1681526020018267ffffffffffffffff1667ffffffffffffffff16815260200194505050505060405180910390a150505050565b6001805490508167ffffffffffffffff1610151561160057600080fd5b60018167ffffffffffffffff1681548110151561161957fe5b906000526020600020906003020160000160099054906101000a900460ff1615151561164457600080fd5b6001808267ffffffffffffffff1681548110151561165e57fe5b906000526020600020906003020160000160096101000a81548160ff0219169083151502179055507fa968f6f444b162f3419ffb4304d39bda6a5395e1be22deb
fe04d6d35b24b790f81604051808267ffffffffffffffff1667ffffffffffffffff16815260200191505060405180910390a150565b60a060405190810160405280600067ffffffffffffffff168152602001600067ffffffffffffffff168152602001600067ffffffffffffffff168152602001600067ffffffffffffffff168152602001600067ffffffffffffffff1681525090565b81548183558181151161176357600302816003028360005260206000209182019101611762919061179a565b5b505050565b8154818355818115116117955760030281600302836000526020600020918201910161179491906117fc565b5b505050565b6117f991905b808211156117f557600080820160006101000a81549060ff02191690556000820160016101000a81549067ffffffffffffffff02191690556000820160096101000a81549060ff0219169055506003016117a0565b5090565b90565b61184f91905b8082111561184b5760006001820160006101000a81549067ffffffffffffffff02191690556001820160086101000a81549067ffffffffffffffff021916905550600301611802565b5090565b905600a165627a7a723058202b7f8804d8903590064a589c78504f788712665511c7830091623e9842936a500029"
# 2615231942001
NUM_TYPES =9999999999999;  # NOTE(review): effectively an "unbounded" sentinel; trailing semicolons are redundant in Python
PRECISION = 4294967296;  # fixed-point scaling factor, 2**32
MAX_QUANTITY = 100;
START_INTERVAL = 1;  # first interval index -- TODO confirm semantics
END_INTERVAL = 100;  # last interval index -- TODO confirm semantics
INTERVAL_LENGTH = 60;
SOLVING_INTERVAL = 5;
POLLING_INTERVAL = 1 # seconds Used in actor to determine how often to check for events
| [
"eiselesr@gmail.com"
] | eiselesr@gmail.com |
bcff822abeae3aecb1b72bfc9d1a72c7a4ed788b | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/aad/v20200101/get_ou_container.py | 0ea2a1af3f72f1602f4b31a3d63ec9725f11276b | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,375 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetOuContainerResult',
'AwaitableGetOuContainerResult',
'get_ou_container',
]
@pulumi.output_type
class GetOuContainerResult:
    """
    Resource for OuContainer.

    Auto-generated by the Pulumi SDK Generator (see file header) — do not
    hand-edit; regenerate instead. Holds the fields returned by the
    aad/v20200101 getOuContainer invoke.
    """
    def __init__(__self__, accounts=None, container_id=None, deployment_id=None, distinguished_name=None, domain_name=None, etag=None, id=None, location=None, name=None, provisioning_state=None, service_status=None, tags=None, tenant_id=None, type=None):
        # Generated pattern: validate each field's runtime type, then store it
        # via pulumi.set so pulumi.get can retrieve it in the properties below.
        if accounts and not isinstance(accounts, list):
            raise TypeError("Expected argument 'accounts' to be a list")
        pulumi.set(__self__, "accounts", accounts)
        if container_id and not isinstance(container_id, str):
            raise TypeError("Expected argument 'container_id' to be a str")
        pulumi.set(__self__, "container_id", container_id)
        if deployment_id and not isinstance(deployment_id, str):
            raise TypeError("Expected argument 'deployment_id' to be a str")
        pulumi.set(__self__, "deployment_id", deployment_id)
        if distinguished_name and not isinstance(distinguished_name, str):
            raise TypeError("Expected argument 'distinguished_name' to be a str")
        pulumi.set(__self__, "distinguished_name", distinguished_name)
        if domain_name and not isinstance(domain_name, str):
            raise TypeError("Expected argument 'domain_name' to be a str")
        pulumi.set(__self__, "domain_name", domain_name)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if service_status and not isinstance(service_status, str):
            raise TypeError("Expected argument 'service_status' to be a str")
        pulumi.set(__self__, "service_status", service_status)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if tenant_id and not isinstance(tenant_id, str):
            raise TypeError("Expected argument 'tenant_id' to be a str")
        pulumi.set(__self__, "tenant_id", tenant_id)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def accounts(self) -> Optional[Sequence['outputs.ContainerAccountResponse']]:
        """
        The list of container accounts
        """
        return pulumi.get(self, "accounts")
    @property
    @pulumi.getter(name="containerId")
    def container_id(self) -> str:
        """
        The OuContainer name
        """
        return pulumi.get(self, "container_id")
    @property
    @pulumi.getter(name="deploymentId")
    def deployment_id(self) -> str:
        """
        The Deployment id
        """
        return pulumi.get(self, "deployment_id")
    @property
    @pulumi.getter(name="distinguishedName")
    def distinguished_name(self) -> str:
        """
        Distinguished Name of OuContainer instance
        """
        return pulumi.get(self, "distinguished_name")
    @property
    @pulumi.getter(name="domainName")
    def domain_name(self) -> str:
        """
        The domain name of Domain Services.
        """
        return pulumi.get(self, "domain_name")
    @property
    @pulumi.getter
    def etag(self) -> Optional[str]:
        """
        Resource etag
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource Id
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        Resource location
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        The current deployment or provisioning state, which only appears in the response.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter(name="serviceStatus")
    def service_status(self) -> str:
        """
        Status of OuContainer instance
        """
        return pulumi.get(self, "service_status")
    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """
        Azure Active Directory tenant id
        """
        return pulumi.get(self, "tenant_id")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type
        """
        return pulumi.get(self, "type")
class AwaitableGetOuContainerResult(GetOuContainerResult):
    """Awaitable variant of GetOuContainerResult (generated Pulumi shim)."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this method a generator, which is what
        # allows the object to be awaited; it resolves immediately to a copy.
        if False:
            yield self
        return GetOuContainerResult(
            accounts=self.accounts,
            container_id=self.container_id,
            deployment_id=self.deployment_id,
            distinguished_name=self.distinguished_name,
            domain_name=self.domain_name,
            etag=self.etag,
            id=self.id,
            location=self.location,
            name=self.name,
            provisioning_state=self.provisioning_state,
            service_status=self.service_status,
            tags=self.tags,
            tenant_id=self.tenant_id,
            type=self.type)
def get_ou_container(domain_service_name: Optional[str] = None,
                     ou_container_name: Optional[str] = None,
                     resource_group_name: Optional[str] = None,
                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetOuContainerResult:
    """
    Resource for OuContainer.


    :param str domain_service_name: The name of the domain service.
    :param str ou_container_name: The name of the OuContainer.
    :param str resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
    :return: An awaitable wrapper around the OuContainer lookup result.
    """
    __args__ = dict()
    __args__['domainServiceName'] = domain_service_name
    __args__['ouContainerName'] = ou_container_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Delegate to the Pulumi engine; typ= maps the raw provider response onto
    # the generated result class.
    __ret__ = pulumi.runtime.invoke('azure-native:aad/v20200101:getOuContainer', __args__, opts=opts, typ=GetOuContainerResult).value

    return AwaitableGetOuContainerResult(
        accounts=__ret__.accounts,
        container_id=__ret__.container_id,
        deployment_id=__ret__.deployment_id,
        distinguished_name=__ret__.distinguished_name,
        domain_name=__ret__.domain_name,
        etag=__ret__.etag,
        id=__ret__.id,
        location=__ret__.location,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        service_status=__ret__.service_status,
        tags=__ret__.tags,
        tenant_id=__ret__.tenant_id,
        type=__ret__.type)
| [
"noreply@github.com"
] | MisinformedDNA.noreply@github.com |
442aa6440f1af07f819e78fd08f0264f54413a6a | 4331b28f22a2efb12d462ae2a8270a9f666b0df1 | /.history/dvdstore/webapp/views_20190914161714.py | f4c90cba1304e4416f8ea5fe90884b1402fe6d03 | [] | no_license | ZiyaadLakay/csc312.group.project | ba772a905e0841b17478eae7e14e43d8b078a95d | 9cdd9068b5e24980c59a53595a5d513c2e738a5e | refs/heads/master | 2020-07-26T23:30:22.542450 | 2019-09-16T11:46:41 | 2019-09-16T11:46:41 | 200,703,160 | 0 | 0 | null | 2019-08-05T17:52:37 | 2019-08-05T17:52:37 | null | UTF-8 | Python | false | false | 9,900 | py | from django.shortcuts import render
from .models import DVD, Transaction, Customer
from django.core.paginator import EmptyPage,PageNotAnInteger, Paginator
from django.db.models import Q
from django.contrib.auth.models import User, auth
from django.shortcuts import render, redirect
from django.contrib import messages
from django.core.files.storage import FileSystemStorage
from django.contrib.auth.decorators import login_required, permission_required
from .form import DocumentForm
import datetime
#This is the homepage for the User
def home(request):
    """Public catalogue page: paginated DVD list with title search ('query')
    and genre filter ('gen') taken from the query string."""
    dvds = DVD.objects.all()
    query = request.GET.get("query")
    gen = request.GET.get("gen")
    if query:
        dvds = DVD.objects.filter(Q(Title__icontains=query))  # title search
        # Reuse the queryset just built instead of re-running the same filter.
        if not dvds.exists():
            messages.info(request, 'No search results for : ' + query)
    elif gen:
        dvds = DVD.objects.filter(Q(genre__icontains=gen))  # genre filter
    paginator = Paginator(dvds, 6)  # Show 6 dvds per page
    page = request.GET.get('page')
    dvds = paginator.get_page(page)
    genre = {'Action', 'Comedy', 'Drama', 'Family', 'Romance'}
    # BUG FIX: render() was previously called with two separate dicts, so the
    # second one ({'genre': ...}) was interpreted as the content_type argument
    # and the genre list never reached the template. Merge into one context.
    return render(request, 'home.html', {'dvds': dvds, 'genre': genre})
#This is the page for clerks
@login_required
def clerk(request):
    """Clerk dashboard: DVD inventory (with title search) plus transaction,
    user and customer tables for the templates."""
    dvds = DVD.objects.all()  # full DVD inventory
    trans = Transaction.objects.all()  # all rental transactions
    users = User.objects.all()  # all user accounts
    customer = Customer.objects.all()  # all customer records
    query = request.GET.get("query")
    if query:
        dvds = DVD.objects.filter(Q(Title__icontains=query))  # title search
    paginator = Paginator(dvds, 6)  # Show 6 dvds per page
    page = request.GET.get('page')
    dvds = paginator.get_page(page)
    form=DocumentForm()  # empty upload form for adding new DVDs
    context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
    return render(request, 'clerk.html',context_dict)
@login_required
def userstbl(request):
    """Clerk view of the users table, searchable by username."""
    dvds = DVD.objects.all()
    trans = Transaction.objects.all()
    users = User.objects.all()
    customer = Customer.objects.all()
    query = request.GET.get("query")
    if query:
        users = User.objects.filter(Q(username__icontains=query))  # username search
    # NOTE(review): pagination is applied to dvds even though this page
    # presumably lists users — confirm whether users should be paginated instead.
    paginator = Paginator(dvds, 6)  # Show 6 dvds per page
    page = request.GET.get('page')
    dvds = paginator.get_page(page)
    form=DocumentForm()
    context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
    return render(request, 'userstbl.html',context_dict)
@login_required
def transactions(request):
    """Clerk view of the transactions table, searchable by transaction number."""
    dvds = DVD.objects.all()
    trans = Transaction.objects.all()
    users = User.objects.all()
    customer = Customer.objects.all()
    query = request.GET.get("query")
    if query:
        trans = Transaction.objects.filter(Q(TransactionNumber__icontains=query))  # transaction-number search
    # NOTE(review): pagination is applied to dvds even though this page
    # presumably lists transactions — confirm the intended behaviour.
    paginator = Paginator(dvds, 6)  # Show 6 dvds per page
    page = request.GET.get('page')
    dvds = paginator.get_page(page)
    form=DocumentForm()
    context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
    return render(request, 'transactions.html',context_dict)
def register2(request):
    """Clerk-side user creation.

    The initial password is derived from the name (first initial + last
    name).  Duplicate usernames or emails are rejected with a flash message.
    """
    if request.method == 'POST':
        first_name = request.POST['first_name']
        last_name = request.POST['last_name']
        username = request.POST['username']
        email = request.POST['email']
        # Default password: first initial + last name.
        password1 = first_name[0] + last_name
        if User.objects.filter(username=username).exists():
            messages.info(request, 'Username Taken')
            return redirect('clerk')
        elif User.objects.filter(email=email).exists():
            messages.info(request, 'Email Taken')
            # BUG FIX: previously this branch fell through and the user was
            # created anyway despite the duplicate email.
            return redirect('clerk')
        user = User.objects.create_user(username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
        user.save()
        messages.info(request, 'User Created')
        return redirect('/clerk')
    # BUG FIX: non-POST requests previously returned None (invalid response);
    # send the clerk back to the dashboard instead.
    return redirect('/clerk')
def model_form_upload(request):
    """Handle the DVD/document upload form; always return to the clerk page."""
    if request.method == 'POST':
        form = DocumentForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
    # BUG FIX: GET requests and invalid-form POSTs previously returned None,
    # which makes Django raise ValueError; always return a redirect instead.
    return redirect('/clerk')
def booking(request):
    """Record which user has booked the given DVD for pickup."""
    booked_by = request.POST['username']
    dvd_id = request.POST['dvdID']
    DVD.objects.filter(id=dvd_id).update(BookingPickup=booked_by)
    return redirect('home')
def checkout(request):
    """Rent a DVD out: mark it out of stock and record the transaction."""
    dvd_id = request.POST['dvdID']
    days_booked = request.POST['numDaysBooked']
    price = request.POST['dvdPrice']
    renter_id = request.POST['user_ID']
    title = request.POST['MovieTitle']
    payment_method = request.POST['payment']
    # Total charge for the rental period.
    bill = int(days_booked) * int(price)
    DVD.objects.filter(id=dvd_id).update(NumDaysBooked=days_booked, InStock=False)
    rent_date = datetime.date.today()
    due_date = rent_date + datetime.timedelta(days=int(days_booked))
    # Transaction number: payment method + year + day-of-month + HHMMSS.
    stamp = datetime.datetime.now().strftime("%H%M%S")
    transaction_number = payment_method + str(rent_date)[0:4] + str(rent_date)[8:10] + stamp
    record = Transaction(
        users_ID=renter_id,
        TransactionNumber=transaction_number,
        RentDate=rent_date,
        DueDate=due_date,
        MovieTitle=title,
        Payment_Method=payment_method,
        Amount="R" + str(bill),
        dvdID=dvd_id,
    )
    record.save()
    return redirect('/clerk')
def checkin(request):
    """Return a rented DVD to stock and clear its booking state."""
    returned_id = request.POST['dvdID']
    DVD.objects.filter(id=returned_id).update(
        BookingPickup='None', InStock=True, NumDaysBooked=0)
    return redirect('/clerk')
def deleteMovie(request):
    """Delete the DVD row identified by the posted dvdID."""
    target_id = request.POST['dvdID']
    DVD.objects.filter(id=target_id).delete()
    return redirect('/clerk')
def deleteTransaction(request):
    """Delete the Transaction row identified by the posted transID."""
    target_id = request.POST['transID']
    Transaction.objects.filter(id=target_id).delete()
    return redirect('/transactions')
def deleteUser(request):
    """Delete the User row identified by the posted userID."""
    target_id = request.POST['userID']
    User.objects.filter(id=target_id).delete()
    return redirect('/userstbl')
def user_detail(request):
    """Show the profile page of the currently authenticated user.

    Looks up the Django User row for the logged-in user, then the matching
    Customer record by username (str() of a User is its username).
    """
    # BUG FIX: the original body contained the fragment "for i i", which is a
    # SyntaxError and prevented the whole module from importing.  The broken
    # loop, the debug prints and the dead myString/myarray locals are removed.
    user_id = None
    if request.user.is_authenticated:
        user_id = request.user.id
    detail1 = User.objects.filter(id=user_id)
    # NOTE(review): this raises IndexError for anonymous users (detail1 empty)
    # exactly as the original did -- confirm whether the page should guard.
    detail2 = Customer.objects.filter(Q(username__icontains=str(detail1[0]))).values()
    return render(request, 'user_detail.html', {'detail1': detail1, 'detail2': detail2})
def registerCustomer(request):
    """Self-service customer registration.

    On POST: validates that the two passwords match and that the username and
    email are unused, then creates a Customer (a custom user model with phone,
    address and identification fields).  On GET: renders the signup form.
    """
    if request.method == 'POST':
        first_name= request.POST['first_name']
        last_name= request.POST['last_name']
        phone_number= request.POST['phone_number']
        address= request.POST['address']
        identification= request.POST['identification']
        email= request.POST['email']
        password1= request.POST['password1']
        password2= request.POST['password2']
        username= request.POST['username']
        if password1 == password2 :
            # Reject duplicate usernames/emails before creating the account.
            if Customer.objects.filter(username=username).exists():
                messages.info(request, 'Username Taken')
                # NOTE(review): redirect() is called with template-looking names
                # ('register.html', 'login.html') -- presumably these are URL
                # pattern names; confirm against urls.py.
                return redirect('register.html')
            elif Customer.objects.filter(email=email).exists():
                messages.info(request, 'Email Taken')
                return redirect('register.html')
            user = Customer.objects.create_user(phone_number=phone_number, address=address,identification=identification,username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
            # customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
            user.save()
            # customer.save()
            messages.info(request, 'User Created')
            # messages.info(request, 'Customer Created')
            return redirect('login.html')
        else:
            print('password does not match')
            messages.info(request, 'Password does not match')
            return redirect('register.html')
        # NOTE(review): unreachable -- both branches above already return.
        return redirect('login.html')
    else:
        return render(request, 'register.html')
def updateCustomer(request):
    """Apply edited profile fields to an existing Customer row."""
    if request.method == 'POST':
        customer_id = request.POST['userID']
        Customer.objects.filter(id=customer_id).update(
            phone_number=request.POST['phone_number'],
            address=request.POST['address'],
            identification=request.POST['identification'],
            username=request.POST['username'],
            email=request.POST['email'],
            first_name=request.POST['first_name'],
            last_name=request.POST['last_name'],
        )
        # NOTE(review): non-POST requests fall through and return None, as in
        # the original -- confirm intended.
        return redirect('home')
def updateUser(request):
    """Apply edited account fields to an existing User row."""
    if request.method == 'POST':
        target_id = request.POST['userID']
        User.objects.filter(id=target_id).update(
            username=request.POST['username'],
            email=request.POST['email'],
            first_name=request.POST['first_name'],
            last_name=request.POST['last_name'],
        )
        # NOTE(review): non-POST requests fall through and return None, as in
        # the original -- confirm intended.
        return redirect('home')
| [
"uzairjoneswolf@gmail.com"
] | uzairjoneswolf@gmail.com |
0c31515b7bee2f75d50298d6e5f8034a79cfcdcc | 6f05f7d5a67b6bb87956a22b988067ec772ba966 | /data/train/python/dba09ec8b643897d28ddeb551b50ebe871f56568test_npcdmod.py | dba09ec8b643897d28ddeb551b50ebe871f56568 | [
"MIT"
] | permissive | harshp8l/deep-learning-lang-detection | 93b6d24a38081597c610ecf9b1f3b92c7d669be5 | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | refs/heads/master | 2020-04-07T18:07:00.697994 | 2018-11-29T23:21:23 | 2018-11-29T23:21:23 | 158,597,498 | 0 | 0 | MIT | 2018-11-21T19:36:42 | 2018-11-21T19:36:41 | null | UTF-8 | Python | false | false | 7,988 | py | #!/usr/bin/env python
# Copyright (C) 2009-2010:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test the npcd broker module
#
import os, sys, string, time
from multiprocessing import Queue
from shinken_test import unittest, ShinkenTest
from shinken.objects.module import Module
from shinken.modulesctx import modulesctx
# Resolve the npcd broker module through Shinken's module registry and grab
# its broker class so the tests can instantiate it directly.
npcdmod_broker = modulesctx.get_module('npcdmod')
Npcd_broker = npcdmod_broker.Npcd_broker
# Raise the interpreter's check interval so the test run spends less time on
# thread-switch bookkeeping (Python 2 API).
sys.setcheckinterval(10000)
# Minimal module configuration object used to construct the broker module.
modconf = Module()
modconf.module_name = "ncpd"
modconf.module_type = npcdmod_broker.properties['type']
modconf.modules = []
modconf.properties = npcdmod_broker.properties.copy()
class TestNpcd(ShinkenTest):
    """Integration-style tests (Python 2) that drive a fake scheduler and feed
    its broks into the npcd broker module, checking the perfdata file written."""
    def add(self, b):
        # Broker-side callback: store a brok keyed by its id.
        self.broks[b.id] = b
    def fake_check(self, ref, exit_status, output="OK"):
        """Simulate one check execution on *ref* with the given exit status and
        plugin output, and queue the result for the scheduler to consume."""
        print "fake", ref
        now = time.time()
        ref.schedule()
        check = ref.actions.pop()
        self.sched.add(check) # check is now in sched.checks[]
        # fake execution
        check.check_time = now
        check.output = output
        check.exit_status = exit_status
        check.execution_time = 0.001
        check.status = 'waitconsume'
        self.sched.waiting_results.append(check)
    def scheduler_loop(self, count, reflist):
        """Run *count* fake scheduling iterations over the
        (object, exit_status, output) triples in *reflist*."""
        for ref in reflist:
            (obj, exit_status, output) = ref
            obj.checks_in_progress = []
        for loop in range(1, count + 1):
            print "processing check", loop
            for ref in reflist:
                (obj, exit_status, output) = ref
                obj.update_in_checking()
                self.fake_check(obj, exit_status, output)
            self.sched.consume_results()
            self.worker_loop()
            for ref in reflist:
                (obj, exit_status, output) = ref
                obj.checks_in_progress = []
            self.sched.update_downtimes_and_comments()
            #time.sleep(ref.retry_interval * 60 + 1)
            #time.sleep(60 + 1)
    def worker_loop(self):
        """Pretend to be a poller/reactionner: pick up pending actions and
        report them back to the scheduler as successfully executed."""
        self.sched.delete_zombie_checks()
        self.sched.delete_zombie_actions()
        checks = self.sched.get_to_run_checks(True, False)
        actions = self.sched.get_to_run_checks(False, True)
        #print "------------ worker loop checks ----------------"
        #print checks
        #print "------------ worker loop actions ----------------"
        #self.show_actions()
        #print "------------ worker loop new ----------------"
        for a in actions:
            #print "---> fake return of action", a.id
            a.status = 'inpoller'
            a.exit_status = 0
            self.sched.put_results(a)
        #self.show_actions()
        #print "------------ worker loop end ----------------"
    def update_broker(self):
        """Flush every pending scheduler brok through the npcd broker module,
        in ascending id order, then clear the scheduler's brok store."""
        self.sched.get_new_broks()
        ids = self.sched.brokers['Default-Broker']['broks'].keys()
        ids.sort()
        for i in ids:
            brok = self.sched.brokers['Default-Broker']['broks'][i]
            brok.prepare()
            self.npcdmod_broker.manage_brok(brok)
        self.sched.broks = {}
    def print_header(self):
        # Banner with the current test id, for readable console output.
        print "#" * 80 + "\n" + "#" + " " * 78 + "#"
        print "#" + string.center(self.id(), 78) + "#"
        print "#" + " " * 78 + "#\n" + "#" * 80 + "\n"
    def write_correct_config(self):
        # Write a complete npcd.cfg: perfdata file, spool dir and spool filename.
        file = open("npcd.cfg", "w")
        file.write("perfdata_file = /tmp/pfnerf")
        file.write("perfdata_spool_dir = /tmp/pnp4shinken/var/perfdata")
        file.write("perfdata_spool_filename=pferf")
        file.close()
    def write_incomplete_config(self):
        # Same as write_correct_config but without the spool dir entry.
        file = open("npcd.cfg", "w")
        file.write("perfdata_file = /tmp/pfnerf")
        file.write("perfdata_spool_filename=pferf")
        file.close()
    def test_write_perfdata_file(self):
        """Check results should end up in the ./perfdata file."""
        self.print_header()
        if os.path.exists("./perfdata"):
            os.unlink("./perfdata")
        self.npcdmod_broker = Npcd_broker(modconf, None, './perfdata', '.', 'perfdata-target', 15)
        self.npcdmod_broker.properties['to_queue'] = 0
        self.npcdmod_broker.init()
        self.sched.conf.skip_initial_broks = False
        self.sched.brokers['Default-Broker'] = {'broks' : {}, 'has_full_broks' : False}
        self.sched.fill_initial_broks('Default-Broker')
        print "got initial broks"
        now = time.time()
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = [] # ignore the router
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = [] # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = [] # no hostchecks on critical checkresults
        self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [router, 0, 'UP | rtt=10'], [svc, 2, 'BAD | value1=0 value2=0']])
        self.update_broker()
        # The broker module must have created the perfdata file; clean it up.
        self.assert_(os.path.exists("./perfdata"))
        if os.path.exists("./perfdata"):
            self.npcdmod_broker.logfile.close()
            os.unlink("./perfdata")
    def test_npcd_got_missing_conf(self):
        """A brok for an unknown service should make the module ask the
        scheduler for fresh data (exactly one message on from_q)."""
        self.print_header()
        if os.path.exists("./perfdata"):
            os.unlink("./perfdata")
        self.npcdmod_broker = Npcd_broker(modconf, None, './perfdata', '.', 'perfdata-target', 15)
        self.npcdmod_broker.properties['to_queue'] = 0
        self.npcdmod_broker.from_q = Queue()
        self.npcdmod_broker.init()
        self.sched.conf.skip_initial_broks = False
        self.sched.brokers['Default-Broker'] = {'broks' : {}, 'has_full_broks' : False}
        self.sched.fill_initial_broks('Default-Broker')
        print "got initial broks"
        now = time.time()
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = [] # ignore the router
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = [] # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = [] # no hostchecks on critical checkresults
        # We are a bad guy, and we change the service name
        svc.service_description = "Unkown"
        # and we force it to raise an asking now
        self.npcdmod_broker.last_need_data_send = 0
        self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [router, 0, 'UP | rtt=10'], [svc, 2, 'BAD | value1=0 value2=0']])
        self.update_broker()
        self.assert_(os.path.exists("./perfdata"))
        if os.path.exists("./perfdata"):
            self.npcdmod_broker.logfile.close()
            os.unlink("./perfdata")
        print "Len" * 20, self.npcdmod_broker.from_q.qsize()
        # Exactly one "need data" request must have been queued.
        self.assert_(self.npcdmod_broker.from_q.qsize() == 1)
        self.npcdmod_broker.from_q.get()
        self.npcdmod_broker.from_q.close()
if __name__ == '__main__':
    # Leftover profiling scaffolding: the cProfile invocation below is
    # commented out, so only the plain unittest runner actually executes.
    import cProfile
    command = """unittest.main()"""
    unittest.main()
    #cProfile.runctx( command, globals(), locals(), filename="Thruk.profile" )
| [
"aliostad+github@gmail.com"
] | aliostad+github@gmail.com |
e4a5e7dd98e5f4beb7a02fd74614184293a5c0ea | 7b6377050fba4d30f00e9fb5d56dfacb22d388e1 | /fudge/reactionData/doubleDifferentialCrossSection/chargedParticleElastic/CoulombPlusNuclearElastic.py | ce695017031fcd1bcb851fb190d30deb079de403 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | LLNL/fudge | 0a4fe8e3a68b66d58e42d1f4d209ea3f713c6370 | 6ba80855ae47cb32c37f635d065b228fadb03412 | refs/heads/master | 2023-08-16T21:05:31.111098 | 2023-08-01T22:09:32 | 2023-08-01T22:09:32 | 203,678,373 | 21 | 4 | NOASSERTION | 2023-06-28T20:51:02 | 2019-08-21T23:22:20 | Python | UTF-8 | Python | false | false | 15,845 | py | # <<BEGIN-copyright>>
# Copyright 2022, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
# <<END-copyright>>
"""
This module contains a form for storing the differential cross section for charged-particle elastic scattering.
Internally, data can be represented three ways:
- pure Rutherford scattering
- Rutherford scattering along with Legendre expansions for nuclear scattering
and for real and imaginary nuclear/Coulomb interference
- Rutherford scattering along with effective cross sections and distributions,
obtained by summing the nuclear and interference terms
"""
import math
from pqu import PQU as PQUModule
from xData import enums as xDataEnumsModule
from PoPs.chemicalElements import misc as chemicalElementMiscPoPsModule
from PoPs.families import nuclide as nuclideFamilyModule
from fudge.core.math import fudgemath as fudgemathModule
from fudge.reactionData import crossSection as crossSectionModule
from fudge.productData.distributions import angular as angularModule
from .. import base as baseModule
from . import misc as miscModule
from . import RutherfordScattering as RutherfordScatteringModule
from . import nuclearAmplitudeExpansion as nuclearAmplitudeExpansionModule
from . import nuclearPlusInterference as nuclearPlusInterferenceModule
class Form( baseModule.Form ):
    """Double-differential form for Coulomb + nuclear elastic scattering.
    Holds a mandatory Rutherford-scattering subform and at most one of the two
    optional nuclear representations (see the module docstring)."""
    moniker = "CoulombPlusNuclearElastic"
    keyName = 'label'
    # Names of the subforms this form may carry.
    subformAttributes = ( 'RutherfordScattering', 'nuclearPlusInterference', 'nuclearAmplitudeExpansion' )
    def __init__(self, pid, label, productFrame=xDataEnumsModule.Frame.centerOfMass, RutherfordScattering=None,
            nuclearPlusInterference=None, nuclearAmplitudeExpansion=None, identicalParticles=False ):
        """Validate the subform types and initialise the base Form."""
        # Rutherford scattering is always present; default-construct it if absent.
        if( RutherfordScattering is None) : RutherfordScattering = RutherfordScatteringModule.RutherfordScattering( )
        if( not isinstance( RutherfordScattering, ( RutherfordScatteringModule.RutherfordScattering, type( None ) ) ) ) :
            raise TypeError( "Invalid nuclearPlusInterference type: '%s' not allowed in %s" % ( type( RutherfordScattering ), self.moniker ) )
        if( not isinstance( nuclearPlusInterference, ( nuclearPlusInterferenceModule.NuclearPlusInterference, type( None ) ) ) ) :
            raise TypeError( "Invalid NuclearPlusInterference type: '%s' not allowed in %s" % ( type( nuclearPlusInterference ), self.moniker ) )
        if( not isinstance( nuclearAmplitudeExpansion, ( nuclearAmplitudeExpansionModule.NuclearAmplitudeExpansion, type( None ) ) ) ) :
            raise TypeError( "Invalid nuclearAmplitudeExpansion type: '%s' not allowed in %s" % ( type( nuclearAmplitudeExpansion ), self.moniker ) )
        baseModule.Form.__init__( self, pid, label, productFrame, ( RutherfordScattering, nuclearPlusInterference, nuclearAmplitudeExpansion ),
                identicalParticles = identicalParticles )
        # The non-Rutherford representation (at most one of the two is set).
        self.__data = self.nuclearPlusInterference
        if( self.__data is None ) : self.__data = self.nuclearAmplitudeExpansion
        # Coefficients below are computed lazily by initialize().
        self.__etaCoefficient = None
        self.__spin = None # Only defined if identicalParticles is True.
    @property
    def spin( self ) :
        """Projectile spin (only set when identicalParticles is True)."""
        self.initialize( )
        return( self.__spin )
    @property
    def etaCoefficient( self ) :
        """Coefficient used to compute the Coulomb parameter eta."""
        self.initialize( )
        return( self.__etaCoefficient )
    @property
    def kCoefficient( self ) :
        """Coefficient used to compute the wave number k."""
        self.initialize( )
        return( self.__kCoefficient )
    @property
    def data( self ) :
        """The nuclearPlusInterference or nuclearAmplitudeExpansion subform (may be None)."""
        return self.__data
    @property
    def domainMin( self) :
        # Prefer the nuclear data's domain; fall back to Rutherford scattering.
        if self.data is not None: return self.data.domainMin
        return self.RutherfordScattering.domainMin
    @property
    def domainMax( self ) :
        if self.data is not None: return self.data.domainMax
        return self.RutherfordScattering.domainMax
    @property
    def domainUnit( self ) :
        if self.data is not None: return self.data.domainUnit
        return self.RutherfordScattering.domainUnit
    def check( self, info ):
        """Return a list of warnings about inconsistencies in this form."""
        from fudge import warning
        warnings = []
        RS = info['reactionSuite']
        target = RS.PoPs[ RS.target ]
        projectile = RS.PoPs[ RS.projectile ]
        identicalParticles = target is projectile
        # The stored identicalParticles flag must agree with the actual pair.
        if identicalParticles and not self.identicalParticles:
            warnings.append( warning.MissingCoulombIdenticalParticlesFlag() )
        elif not identicalParticles and self.identicalParticles:
            warnings.append( warning.IncorrectCoulombIdenticalParticlesFlag(
                RS.projectile, RS.target ) )
        if self.data is not None:
            dataWarnings = self.data.check( info )
            if dataWarnings:
                warnings.append( warning.Context('%s:' % self.data.moniker, dataWarnings) )
        return warnings
    def fixDomains(self, domainMin, domainMax, fixToDomain):
        """This method does nothing."""
        return 0
    def initialize( self ):
        """
        Pre-compute some factors used to calculate the Rutherford cross section.
        """
        if( self.__etaCoefficient is not None ) : return # Already initialized.
        reactionSuite = self.rootAncestor
        projectile = reactionSuite.PoPs[reactionSuite.projectile]
        if( isinstance( projectile, nuclideFamilyModule.Particle ) ) : projectile = projectile.nucleus
        targetID = reactionSuite.target
        if( targetID in reactionSuite.PoPs.aliases ) : targetID = reactionSuite.PoPs[targetID].pid
        target = reactionSuite.PoPs[targetID]
        # Charge numbers of projectile and target.
        Z1 = chemicalElementMiscPoPsModule.ZAInfo( projectile )[0]
        Z2 = chemicalElementMiscPoPsModule.ZAInfo( target )[0]
        if( self.data is None ) :
            domainUnit = reactionSuite.reactions[0].domainUnit
        else :
            domainUnit = self.data.domainUnit
        # Masses expressed in energy units (E / c**2) consistent with the domain.
        mass1 = projectile.getMass( '%s / c**2' % domainUnit )
        mass2 = target.getMass( '%s / c**2' % domainUnit )
        if( self.identicalParticles ) : self.__spin = projectile.spin[0].value
        hbar_c = PQUModule.PQU( 1, 'hbar * c' ).getValueAs( 'm * %s' % domainUnit )
        # Fine-structure constant (dimensionless).
        alpha = PQUModule.PQU( '1', 'e * e / ( hbar*c * 4 * pi * eps0 )' ).getValueAs( '' )
        self.__etaCoefficient = Z1 * Z2 * alpha * math.sqrt( mass1 / 2 )
        # A is the target/projectile mass ratio.
        A = mass2 / mass1
        self.__kCoefficient = (A / (A + 1)) * math.sqrt( 2 * mass1 ) / hbar_c * 1e-14 # 1e-14 = sqrt( barn )
    def dSigma_dMu(self, energy, muCutoff, accuracy=1e-3, epsilon=1e-6, excludeRutherfordScattering=False, probability=False):
        """
        Returns d(Sigma)/d(mu) at the specified incident energy if *probability* is **False** and P(mu) otherwise.
        :param energy: Energy of the projectile.
        :param muCutoff: Upper mu limit of the returned data; must lie in (-1, 1).
        :param accuracy: The accuracy of the returned *dSigma_dMu*.
        :param epsilon: Clamped to [1e-15, 0.1]; used by the local dullPoint helper.
        :param excludeRutherfordScattering: If **True**, the Rutherford term is not added.
        :param probability: If **True** P(mu) is returned instead of d(Sigma)/d(mu).
        :return: d(Sigma)/d(mu) at *energy*.
        """
        # NOTE(review): dullPoint (and the clamped epsilon) are not referenced
        # later in this method -- possibly leftover code; confirm.
        def dullPoint( mu, epsilon ) :
            if( mu < 0.0 ) : epsilon *= -1
            return( mu * ( 1 + epsilon ) )
        epsilon = max( 1e-15, min( 0.1, abs( epsilon ) ) )
        if( abs( muCutoff ) >= 1.0 ) : raise ValueError( 'muCutoff = %.17e must be in the range ( -1, 1 ).' % muCutoff )
        muMin = -1.0
        # For identical particles mu is restricted to [0, 1] by symmetry.
        if( self.identicalParticles ) :
            muCutoff = abs( muCutoff )
            muMin = 0.0
        if( ( self.data is None ) or ( energy < self.data.domainMin ) ) :
            # No nuclear data at this energy: start from an all-zero curve.
            _dSigma_dMu = angularModule.XYs1d( [ [ -1.0, 0.0 ], [ 1.0, 0.0 ] ], axes = miscModule.defaultAxes( self.domainUnit ) )
        else :
            _dSigma_dMu = self.data.dSigma_dMu(energy, accuracy=accuracy, muMax=muCutoff, probability=probability)
        if not excludeRutherfordScattering:
            _dSigma_dMu += self.RutherfordScattering.dSigma_dMu(energy, accuracy=accuracy, muMax=muCutoff)
        _dSigma_dMu = _dSigma_dMu.thin( accuracy = 1e-3 )
        return( _dSigma_dMu )
    def evaluate( self, E, mu, phi = 0.0, excludeRutherfordScattering = False ) :
        """
        Compute the cross section at (E, mu), including Coulomb, nuclear and interference terms.
        :param E: incident energy in the lab frame.
        :param mu: scattering angle cosine in the center-of-mass.
        :param phi: scattering azimuthal angle in the center-of-mass.
        :param excludeRutherfordScattering: If True, only the nuclear and interference terms are included in the returned value.
        :return:
        """
        if( excludeRutherfordScattering ) :
            RS = 0.0
        else :
            RS = self.RutherfordScattering.evaluate( E, mu, phi )
        NS = 0
        if( self.data is not None ) : NS = self.data.evaluate( E, mu, phi )
        return RS + NS
    def calculateAverageProductData( self, style, indent = '', **kwargs ) :
        # Average product data is undefined for Coulomb scattering (the
        # Rutherford cross section diverges at mu = 1), so always raise.
        raise CoulombDepositionNotSupported( "Cannot compute average product data for %s distribution" % self.moniker )
    def processCoulombPlusNuclearMuCutoff( self, style, energyMin = None, accuracy = 1e-3, epsilon = 1e-6, excludeRutherfordScattering = False ) :
        """Build the mu-cutoff cross section and normalized angular distribution
        for *style*; returns the tuple (crossSection, angular XYs2d)."""
        # Adapter used by thickenXYList to evaluate the cross section at
        # intermediate energies; closes over muCutoff and
        # excludeRutherfordScattering from the enclosing scope.
        class Tester :
            def __init__( self, dSigma_dMu, muCutoff, relativeTolerance, absoluteTolerance ) :
                self.dSigma_dMu = dSigma_dMu
                self.muCutoff = muCutoff
                self.relativeTolerance = relativeTolerance
                self.absoluteTolerance = absoluteTolerance
            def evaluateAtX( self, energy ) :
                dSigma_dMu = self.dSigma_dMu( energy, muCutoff, accuracy = self.relativeTolerance, excludeRutherfordScattering = excludeRutherfordScattering )
                return dSigma_dMu.integrate()
        nuclearPlusInterferenceCrossSection = None
        # Choose the incident-energy grid from whichever representation exists.
        if( self.nuclearPlusInterference is None ) :
            if( self.nuclearAmplitudeExpansion is None ) :
                if( excludeRutherfordScattering ) : return( None, None )
                energies = [ self.RutherfordScattering.domainMin, self.RutherfordScattering.domainMax ]
                energies.insert( 1, math.sqrt( energies[0] * energies[1] ) )
            else :
                nuclearTerm = self.nuclearAmplitudeExpansion.nuclearTerm.data
                if( isinstance( nuclearTerm, angularModule.XYs2d ) ) :
                    energies = nuclearTerm.domainGrid
                elif( isinstance( nuclearTerm, angularModule.Regions2d ) ) :
                    energies = []
                    for region in nuclearTerm : energies += region.domainGrid
                    energies = sorted( set( energies ) )
                else :
                    raise Exception( 'distribution type "%s" not supported' % type( nuclearTerm ) )
        else :
            energies = self.nuclearPlusInterference.distribution.data.domainGrid
            nuclearPlusInterferenceCrossSection = self.nuclearPlusInterference.crossSection.data.toPointwise_withLinearXYs(lowerEps=1e-6)
        if not excludeRutherfordScattering:
            # Extend the grid downwards (geometric spacing, factor sqrt(2)) so
            # the pure-Rutherford region below the nuclear data is covered.
            RutherfordEnergies = energies.copy( )
            RutherfordEnergyMin = self.RutherfordScattering.domainMin
            if( RutherfordEnergyMin is None ) : RutherfordEnergyMin = PQUModule.PQU( 1e-4, 'MeV' ).getValueAs( self.domainUnit )
            if( energyMin is not None ) :
                if( energyMin < RutherfordEnergyMin ) : RutherfordEnergyMin = energyMin
            nonRutherfordEnergyMin = energies[0]
            index = 0
            while( RutherfordEnergyMin < 0.8 * nonRutherfordEnergyMin ) :
                RutherfordEnergies.insert( index, RutherfordEnergyMin )
                index += 1
                RutherfordEnergyMin *= 1.4142135623731
            energies = RutherfordEnergies
        muCutoff = style.muCutoff
        # First pass: integrated cross section on the energy grid, then refine
        # the grid until the interpolated curve meets the requested accuracy.
        crossSection = []
        for energy in energies :
            dSigma_dMu = self.dSigma_dMu( energy, muCutoff, accuracy = accuracy, excludeRutherfordScattering = excludeRutherfordScattering )
            crossSection.append([ energy, dSigma_dMu.integrate() ])
        _tester = Tester( self.dSigma_dMu, muCutoff, accuracy, accuracy * crossSection[-1][1] )
        crossSection = fudgemathModule.thickenXYList( crossSection, _tester, biSectionMax = 16 )
        crossSectionAxes = crossSectionModule.defaultAxes( self.domainUnit )
        crossSection = crossSectionModule.XYs1d( data = crossSection, axes = crossSectionAxes, label = style.label )
        # Second pass: build the normalized angular distribution P(mu | E).
        xys2d = angularModule.XYs2d( axes = angularModule.defaultAxes( self.domainUnit ) )
        crossSectionData = []
        probability = nuclearPlusInterferenceCrossSection is not None
        for energy in energies :
            data = self.dSigma_dMu(energy, muCutoff, accuracy=accuracy, excludeRutherfordScattering=excludeRutherfordScattering, probability=probability)
            if( excludeRutherfordScattering ) : data = data.clip( rangeMin = 0.0 )
            xSec = data.integrate()
            crossSectionData.append( [ energy, xSec ] )
            if xSec == 0.0:
                # Zero cross section: substitute a flat (or, for identical
                # particles, forward-only) distribution so it stays normalizable.
                data = [[-1.0, 0.5], [1.0, 0.5]]
                if self.identicalParticles:
                    data = [[-1.0, 0.0], [-1e-12, 0.0], [1e-12, 1.0], [1.0, 1.0]]
            else :
                data /= xSec
            xys1d = angularModule.XYs1d(data=data, axes=xys2d.axes, outerDomainValue=energy)
            xys2d.append(xys1d)
        if excludeRutherfordScattering:
            if nuclearPlusInterferenceCrossSection is not None: crossSectionData = nuclearPlusInterferenceCrossSection
            crossSection = crossSectionModule.XYs1d(data=crossSectionData, axes=crossSectionAxes, label=style.label)
        return( crossSection, xys2d )
    def processMultiGroup( self, style, tempInfo, indent ) :
        # Multi-group processing is not implemented for this form.
        print(' processMultiGroup not implemented for distribution form %s.' % self.moniker)
        return( None )
    @classmethod
    def parseNodeUsingClass(cls, element, xPath, linkData, **kwargs):
        """Construct a Form instance from its XML node *element*."""
        xPath.append( element.tag )
        RutherfordScattering = None
        nuclearPlusInterference = None
        nuclearAmplitudeExpansion = None
        for child in element:
            if child.tag == RutherfordScatteringModule.RutherfordScattering.moniker:
                RutherfordScattering = RutherfordScatteringModule.RutherfordScattering.parseNodeUsingClass(child, xPath, linkData, **kwargs)
            elif child.tag == nuclearPlusInterferenceModule.NuclearPlusInterference.moniker:
                nuclearPlusInterference = nuclearPlusInterferenceModule.NuclearPlusInterference.parseNodeUsingClass(child, xPath, linkData, **kwargs)
            elif child.tag == nuclearAmplitudeExpansionModule.NuclearAmplitudeExpansion.moniker:
                nuclearAmplitudeExpansion = nuclearAmplitudeExpansionModule.NuclearAmplitudeExpansion.parseNodeUsingClass(child, xPath, linkData, **kwargs)
            else:
                raise TypeError( "Encountered unexpected element '%s' in %s" % ( child.tag, element.tag ) )
        subForms = ( RutherfordScattering, nuclearPlusInterference, nuclearAmplitudeExpansion )
        identicalParticles = element.get( 'identicalParticles', '' ) == 'true'
        Coul = cls( element.get( 'pid' ), element.get( 'label' ), element.get( 'productFrame' ), identicalParticles = identicalParticles,
                RutherfordScattering = RutherfordScattering, nuclearPlusInterference = nuclearPlusInterference, nuclearAmplitudeExpansion = nuclearAmplitudeExpansion )
        xPath.pop( )
        return Coul
class CoulombDepositionNotSupported( Exception ):
    """
    Raised when calculateAverageProductData() is called on a form holding
    nuclearAmplitudeExpansion or nuclearPlusInterference data, for which
    average product data cannot be computed.
    """
    pass
| [
"mattoon1@llnl.gov"
] | mattoon1@llnl.gov |
c5424efea98053414c6bf63cc096de8998a411cf | 6d233ad2059a941e4ce4c5b5ee3857b8a4a0d212 | /Everyday_alg/2021/09/2021_09_29/super-washing-machines.py | 4dd95588ead44ee7f767566be49366cffbb885ba | [] | no_license | Alexanderklau/Algorithm | 7c38af7debbe850dfc7b99cdadbf0f8f89141fc6 | eac05f637a55bfcc342fa9fc4af4e2dd4156ea43 | refs/heads/master | 2022-06-12T21:07:23.635224 | 2022-06-12T08:12:07 | 2022-06-12T08:12:07 | 83,501,915 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,553 | py | # coding: utf-8
__author__ = 'Yemilice_lau'
"""
假设有 n 台超级洗衣机放在同一排上。开始的时候,每台洗衣机内可能有一定量的衣服,也可能是空的。
在每一步操作中,你可以选择任意 m (1 <= m <= n) 台洗衣机,与此同时将每台洗衣机的一件衣服送到相邻的一台洗衣机。
给定一个整数数组 machines 代表从左至右每台洗衣机中的衣物数量,请给出能让所有洗衣机中剩下的衣物的数量相等的 最少的操作步数 。
如果不能使每台洗衣机中衣物的数量相等,则返回 -1 。
示例 1:
输入:machines = [1,0,5]
输出:3
解释:
第一步: 1 0 <-- 5 => 1 1 4
第二步: 1 <-- 1 <-- 4 => 2 1 3
第三步: 2 1 <-- 3 => 2 2 2
示例 2:
输入:machines = [0,3,0]
输出:2
解释:
第一步: 0 <-- 3 0 => 1 2 0
第二步: 1 2 --> 0 => 1 1 1
示例 3:
输入:machines = [0,2,0]
输出:-1
解释:
不可能让所有三个洗衣机同时剩下相同数量的衣物。
"""
class Solution(object):
    def findMinMoves(self, machines):
        """Minimum number of passes to balance all washing machines, or -1.

        The answer is the maximum over positions of (a) the absolute net flow
        that must cross each boundary and (b) the surplus any single machine
        must give away.

        :type machines: List[int]
        :rtype: int
        """
        total, count = sum(machines), len(machines)
        if total % count != 0:
            return -1  # clothes cannot be split evenly
        target = total // count
        best = 0
        running = 0  # net surplus flowing left-to-right past the current index
        for load in machines:
            surplus = load - target
            running += surplus
            best = max(best, abs(running), surplus)
        return best
"429095816@qq.com"
] | 429095816@qq.com |
5d228ece9d0927cce69c4e5f117d66a84a74b75c | 2b2dc38c581d4313dee547af7f9714df29b9e000 | /tests/clients/test_model.py | 5f2080c485fbb4f707a727549250b41ab93f756b | [
"MIT",
"Apache-2.0"
] | permissive | jkeelan/faculty | 72145791171b3b32ee98c956e36d0f65ca74ff87 | 3cf50f243fba1bfe7a346de88654d3616ac35b15 | refs/heads/master | 2020-11-25T23:12:55.335878 | 2019-12-15T15:34:00 | 2019-12-15T15:34:00 | 228,884,443 | 0 | 0 | Apache-2.0 | 2019-12-18T16:55:28 | 2019-12-18T16:55:27 | null | UTF-8 | Python | false | false | 5,077 | py | # Copyright 2018-2019 Faculty Science Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from datetime import datetime
import pytest
import attr
from dateutil.tz import UTC
from marshmallow import ValidationError
from faculty.clients.model import (
ExperimentModelSource,
ExperimentModelSourceSchema,
Model,
ModelClient,
ModelSchema,
ModelVersion,
ModelVersionSchema,
)
# Randomised identifiers shared by every fixture and test below.
PROJECT_ID = uuid.uuid4()
MODEL_ID = uuid.uuid4()
VERSION_ID = uuid.uuid4()
USER_ID = uuid.uuid4()
# A fixed timestamp and its expected ISO-8601 wire representation.
REGISTERED_AT = datetime(2019, 8, 19, 15, 23, 53, 268000, tzinfo=UTC)
REGISTERED_AT_STRING = "2019-08-19T15:23:53.268Z"
# Each domain object below is paired with the JSON payload that the
# corresponding marshmallow schema is expected to deserialise into it.
EXPERIMENT_MODEL_SOURCE = ExperimentModelSource(
    experiment_id=43, experiment_run_id=uuid.uuid4()
)
EXPERIMENT_MODEL_SOURCE_JSON = {
    "type": "experiment",
    "experimentId": EXPERIMENT_MODEL_SOURCE.experiment_id,
    "experimentRunId": str(EXPERIMENT_MODEL_SOURCE.experiment_run_id),
}
MODEL_VERSION = ModelVersion(
    id=VERSION_ID,
    version_number=23,
    registered_at=REGISTERED_AT,
    registered_by=USER_ID,
    artifact_path="scheme:path/to/artifact",
    source=EXPERIMENT_MODEL_SOURCE,
)
MODEL_VERSION_JSON = {
    "modelVersionId": str(VERSION_ID),
    "modelVersionNumber": MODEL_VERSION.version_number,
    "registeredAt": REGISTERED_AT_STRING,
    "registeredBy": str(USER_ID),
    "artifactPath": MODEL_VERSION.artifact_path,
    "source": EXPERIMENT_MODEL_SOURCE_JSON,
}
MODEL = Model(
    id=MODEL_ID,
    name="model name",
    description="model description",
    user_ids=[USER_ID],
    latest_version=MODEL_VERSION,
)
MODEL_JSON = {
    "modelId": str(MODEL_ID),
    "name": MODEL.name,
    "description": MODEL.description,
    "users": [str(USER_ID)],
    "latestVersion": MODEL_VERSION_JSON,
}
def test_experiment_model_source_schema():
    """Loading the JSON payload yields the matching source object."""
    assert (
        ExperimentModelSourceSchema().load(EXPERIMENT_MODEL_SOURCE_JSON)
        == EXPERIMENT_MODEL_SOURCE
    )
def test_model_version_schema():
    """Loading the JSON payload yields the matching model version."""
    loaded = ModelVersionSchema().load(MODEL_VERSION_JSON)
    assert loaded == MODEL_VERSION
def test_model_schema():
    """Loading the JSON payload yields the matching model."""
    loaded = ModelSchema().load(MODEL_JSON)
    assert loaded == MODEL
def test_model_schema_without_latest_version():
    """Omitting latestVersion loads a model whose latest_version is None."""
    payload = {k: v for k, v in MODEL_JSON.items() if k != "latestVersion"}
    expected = attr.evolve(MODEL, latest_version=None)
    assert ModelSchema().load(payload) == expected
@pytest.mark.parametrize(
    "schema",
    [ExperimentModelSourceSchema, ModelVersionSchema, ModelSchema],
)
def test_schemas_invalid(schema):
    """Every schema rejects an empty payload."""
    with pytest.raises(ValidationError):
        schema().load({})
def test_model_client_get(mocker):
    """get() hits the single-model endpoint and deserialises with ModelSchema."""
    mocker.patch.object(ModelClient, "_get", return_value=MODEL)
    mock_schema = mocker.patch("faculty.clients.model.ModelSchema")
    returned = ModelClient(mocker.Mock()).get(PROJECT_ID, MODEL_ID)
    assert returned == MODEL
    mock_schema.assert_called_once_with()
    ModelClient._get.assert_called_once_with(
        "/project/{}/model/{}".format(PROJECT_ID, MODEL_ID),
        mock_schema.return_value,
    )
def test_model_client_list(mocker):
    """list() hits the model collection endpoint with a many=True schema."""
    mocker.patch.object(ModelClient, "_get", return_value=[MODEL])
    schema_mock = mocker.patch("faculty.clients.model.ModelSchema")

    client = ModelClient(mocker.Mock())
    assert client.list(PROJECT_ID) == [MODEL]

    schema_mock.assert_called_once_with(many=True)
    ModelClient._get.assert_called_once_with(
        # Fixed: the collection URL has one placeholder; the stray MODEL_ID
        # argument previously passed to .format() was a copy-paste leftover
        # (silently ignored by str.format, but misleading).
        "/project/{}/model".format(PROJECT_ID),
        schema_mock.return_value,
    )
def test_model_client_get_version(mocker):
    """get_version() hits the nested version endpoint."""
    mocker.patch.object(ModelClient, "_get", return_value=MODEL_VERSION)
    mock_schema = mocker.patch("faculty.clients.model.ModelVersionSchema")
    client = ModelClient(mocker.Mock())
    returned = client.get_version(PROJECT_ID, MODEL_ID, VERSION_ID)
    assert returned == MODEL_VERSION
    mock_schema.assert_called_once_with()
    endpoint = "/project/{}/model/{}/version/{}".format(
        PROJECT_ID, MODEL_ID, VERSION_ID
    )
    ModelClient._get.assert_called_once_with(endpoint, mock_schema.return_value)
def test_model_client_list_versions(mocker):
    """list_versions() hits the version collection endpoint with many=True."""
    mocker.patch.object(ModelClient, "_get", return_value=[MODEL_VERSION])
    mock_schema = mocker.patch("faculty.clients.model.ModelVersionSchema")
    client = ModelClient(mocker.Mock())
    returned = client.list_versions(PROJECT_ID, MODEL_ID)
    assert returned == [MODEL_VERSION]
    mock_schema.assert_called_once_with(many=True)
    endpoint = "/project/{}/model/{}/version".format(PROJECT_ID, MODEL_ID)
    ModelClient._get.assert_called_once_with(endpoint, mock_schema.return_value)
| [
"wacrozier@gmail.com"
] | wacrozier@gmail.com |
0944e48d424011351fdc9b9140279c65238f531c | 2125593138c50b1fba5e46cd4d88d6c04d0b417a | /06_DJANGO_ADVANCE/03_IMAGE_UPLOAD/sns/migrations/0002_posting_likes.py | 0524e0f4d31ff8c728982f782cc3dca428482c9c | [] | no_license | minkishome/TIL-master | 5f0e6ef61b34a2983961ccf44f7523603ccb5907 | d8edc0ff8abff3b2239a2d751eee263b722013a6 | refs/heads/master | 2023-01-21T00:43:30.165535 | 2020-08-25T14:56:18 | 2020-08-25T14:56:18 | 203,070,283 | 0 | 1 | null | 2023-01-05T01:08:10 | 2019-08-19T00:18:31 | Python | UTF-8 | Python | false | false | 531 | py | # Generated by Django 2.2.6 on 2019-10-21 07:18
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds a many-to-many 'likes' field
    # between Posting and the configured user model. Do not hand-edit the
    # operations; create a follow-up migration instead.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sns', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='posting',
            name='likes',
            # related_name='like_postings' lets a user reach liked postings
            # via user.like_postings.
            field=models.ManyToManyField(blank=True, related_name='like_postings', to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"minkishome@gmail.com"
] | minkishome@gmail.com |
77443ff141c26d56888e06b61fc6591b0dfe0500 | 13b84a8620f928159b5205d89db61df0e5bfb60a | /20200613/prob_2.py | 11416e76908279b28ca8cfa999550e6de97e1875 | [] | no_license | steinstadt/CodeForces | 548c5a5fe23fba512a4b675eaf264bfce9b44c1e | db130008d3bd1a957bcad9ab40f3a9461c534174 | refs/heads/master | 2021-04-16T15:14:19.736276 | 2020-11-21T07:36:58 | 2020-11-21T07:36:58 | 249,365,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | # Problem 1154 A - Restoring Three Numbers
# input: the four sums (a+b, a+c, b+c, a+b+c) in arbitrary order
nums = sorted(map(int, input().split()))
# The largest entry is the total a+b+c; subtracting each of the three
# smaller sums recovers the original three numbers.
answers = [nums[-1] - value for value in nums[:3]]
# output
print(" ".join(map(str, answers)))
| [
"steinstadt@keio.jp"
] | steinstadt@keio.jp |
666e013da80d8695c8333bae27b1d872f86c8955 | a51b1814a9bf2fdcf880772fefaa2ab79e8c7308 | /runestone/chapter-2/proper-python-class.py | 6eda5173d70e4fa37807360532296ec5fc1d76da | [] | no_license | 369geofreeman/MITx_6.00.1x | d38913805168440969034e1d82611b0dbcd7a29a | ba84f70cc4e7cfbd4b685b742aa87d3f85cbbf59 | refs/heads/master | 2023-04-21T01:55:08.538290 | 2021-05-13T13:03:50 | 2021-05-13T13:03:50 | 282,055,845 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,966 | py | # Writing a proper Python class
# A class that works well in the Python ecosystem.
# Each class should have a docstring to provide some level of documentation on how to use the class.
#
# Each class should have a __str__ magic method to give it a meaninigful string representation.
#
# Each class should have a proper __repr__ magic method for representation in the interactive shell, the debugger, and other cases where string conversion does not happen.
#
# Each class should be comparable so it can be sorted and meaningfully compared with other instances. At a minimum this means implementing __eq__ and __lt__.
#
# You should think about access control each instance variable. Which attributes do you want to make public, which attributes do you want to make read only, and which attributes do you want to control or do value checking on before you allow them to be changed.
#
# If the class is a container for other classes then there are some further considerations:
#
# You should be able to find out how many things the container holds using len
#
# You should be able to iterate over the items in the container.
#
# You may want to allow users to access the items in the container using the square bracket index notation.
# In this example we build a basic implementation of the MSDie class:
import random
class MSDie:
    """A die with a configurable number of sides.

    Instance Variables:
        num_sides -- how many faces the die has
        current_value -- the face shown by the most recent roll
    """

    def __init__(self, num_sides):
        self.num_sides = num_sides
        # Start with a freshly rolled value.
        self.current_value = self.roll()

    def roll(self):
        """Roll the die, storing and returning the new face value."""
        self.current_value = random.randrange(1, self.num_sides + 1)
        return self.current_value

    def __str__(self):
        return f"{self.current_value}"

    def __repr__(self):
        return f"MSDie({self.num_sides}) : {self.current_value}"
# Demo: roll a six-sided die five times, printing each value via __str__.
my_die = MSDie(6)
for i in range(5):
    print(my_die)
    my_die.roll()
# Printing a list uses each element's __repr__.
d_list = [MSDie(6), MSDie(20)]
print(d_list)
| [
"geofreeman369@gmail.com"
] | geofreeman369@gmail.com |
33d6324c9769da677644f2434e606ed24d6384b1 | 3539d0e3ddd7849a14876e95f0332428ec28ebf7 | /Data Scientist Career Path/3. Python Fundamentals/6. Python Loop/2. List Comprehension Code Challenge/9. opposite.py | 2f9de791fc0693d1a41850df40514253a3c825e1 | [
"MIT"
] | permissive | DincerDogan/Data-Science-Learning-Path | ff146de2cf4ebc5fedfa9377babf959208dfe7e6 | 2ba0f104bc67ab6ef0f8fb869aa12aa02f5f1efb | refs/heads/main | 2023-05-08T10:53:47.449974 | 2021-06-06T21:27:31 | 2021-06-06T21:27:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68 | py | booleans = [True, False, True]
opposite = [not x for x in booleans] | [
"aristyanto2320@gmail.com"
] | aristyanto2320@gmail.com |
426bcc79c656b5cffc242436a10ed9cfa654f2bb | 2c80605554a75d02d57278a9339217d9e7c37f5d | /bank/tpot/pipeline_gen_5_idx_1_2019.09.10_04-14-24.py | a2b5819cd32b6cf812a35dd23a1e7ebac6f852be | [] | no_license | zhangsheng377/Kesci | e28cbe155d8ff4be3307500a76644ec403dc86ae | 967bb3362ad1c6225eef5ca40baf610e9b0aeb6f | refs/heads/master | 2020-07-07T01:59:17.349753 | 2020-02-02T11:30:28 | 2020-02-02T11:30:28 | 203,208,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 946 | py | import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import RobustScaler
from xgboost import XGBClassifier
# NOTE: Make sure that the class is labeled 'target' in the data file.
# This file is TPOT-generated; replace the placeholder path/separator before use.
tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64)
features = tpot_data.drop('target', axis=1).values
# random_state=None means the split (and thus results) is not reproducible.
training_features, testing_features, training_target, testing_target = \
            train_test_split(features, tpot_data['target'].values, random_state=None)
# Average CV score on the training set was:0.9090338029086228
# Pipeline: robust scaling followed by a gradient-boosted tree classifier.
exported_pipeline = make_pipeline(
    RobustScaler(),
    XGBClassifier(learning_rate=0.1, max_depth=6, min_child_weight=3, n_estimators=100, nthread=1, subsample=0.9000000000000001)
)
exported_pipeline.fit(training_features, training_target)
results = exported_pipeline.predict(testing_features)
| [
"435878393@qq.com"
] | 435878393@qq.com |
cc9e00941de4cb85b76f01ec8444c338862ce15c | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_mooching.py | bb4b21a3d0e13344ce2578feccbf8078202a50f3 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py |
from xai.brain.wordbase.nouns._mooch import _MOOCH
# class header: generated lexicon entry deriving "mooching" from its base word.
class _MOOCHING(_MOOCH, ):
	def __init__(self,):
		_MOOCH.__init__(self)
		self.name = "MOOCHING"
		self.specie = 'nouns'
		self.basic = "mooch"
		self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
24c147e69c88039955887900d3b812f300ecb882 | 1e187e5aa5fad78541a7afaec38fedbd3e9c81c7 | /src/incidents/__init__.py | 3628ecd6a052f4aa0beae7429dc1bc1db97ad1ca | [] | no_license | mattrobenolt/incidents | 0e60bf3d2b792ff4da1cdf6fc4252a245d645559 | 2b59fab8b762138c5adc3a0a65377fee10d41c95 | refs/heads/master | 2020-05-17T02:04:33.725661 | 2014-09-20T07:50:00 | 2014-09-20T07:50:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | def autodiscover():
pass
default_app_config = 'incidents.apps.IncidentsConfig'
from .celery import app as celery_app # noqa
| [
"matt@ydekproductions.com"
] | matt@ydekproductions.com |
5733cd16bf8346d7f4fafeea4b8362e1ec616684 | 5bc5acc7cac75d26312f8b73ed8a4e80f7e144a2 | /admin_interface/templatetags/admin_interface_tags.py | d2e07d2f091f0cf9c9b752083c8d6c3c583425dd | [
"MIT"
] | permissive | gilsonbp/django-admin-interface | 1019b40ef5d09f0e18d4ef55b37d49416e549876 | f6ce51c839be0bfedda5deb440a2588117a28765 | refs/heads/master | 2020-12-28T20:43:13.038317 | 2016-07-12T16:55:01 | 2016-07-12T16:55:01 | 64,975,304 | 0 | 0 | MIT | 2020-04-10T20:00:59 | 2016-08-05T00:09:29 | Python | UTF-8 | Python | false | false | 433 | py | # -*- coding: utf-8 -*-
from django import template
from admin_interface.models import Theme
register = template.Library()
@register.assignment_tag(takes_context = True)
def get_admin_interface_theme(context):
    """Return the active Theme, falling back to the default theme.

    Looks up the first Theme flagged active; when none exists, delegates to
    Theme.get_or_create_default_theme().
    """
    # QuerySet.first() fetches at most one row and returns None when absent,
    # replacing the previous slice-and-list dance.
    obj = Theme.objects.filter(active = True).first()
    if obj is None:
        obj = Theme.get_or_create_default_theme()
    return obj
| [
"fabio.caccamo@gmail.com"
] | fabio.caccamo@gmail.com |
7b98333ac3fd36da58a48b8384faba21df2c93de | 07490c73801dd7d055f852732126506088524725 | /tests/test_extension.py | b3bf510dafc6ce1d4d3a083966ce2359bb25727c | [
"Apache-2.0"
] | permissive | consideRatio/jupyterlab_iframe | 202a3740ff718cf93bdcac8dd3171c92fdf0f8f9 | 3c5f51352225ca9235d6e6d378e22bbdf983912f | refs/heads/master | 2021-02-15T20:40:07.750413 | 2020-02-28T03:17:36 | 2020-02-28T03:17:36 | 244,929,842 | 0 | 0 | Apache-2.0 | 2020-03-04T15:03:33 | 2020-03-04T15:03:32 | null | UTF-8 | Python | false | false | 876 | py | # for Coverage
from mock import patch, MagicMock
from jupyterlab_iframe.extension import load_jupyter_server_extension, IFrameHandler, ProxyHandler, ProxyWSHandler
class TestExtension:
    """Smoke tests exercising the server extension entry points."""

    def test_load_jupyter_server_extension(self):
        nb_app = MagicMock()
        nb_app.web_app.settings = {'base_url': '/test'}
        load_jupyter_server_extension(nb_app)

    def test_handler(self):
        import tornado.web
        application = tornado.web.Application()
        request = MagicMock()
        handler = IFrameHandler(application, request)
        handler._transforms = []
        handler.get()

    def test_proxy_handler(self):
        import tornado.web
        application = tornado.web.Application()
        request = MagicMock()
        handler = ProxyHandler(application, request)
        handler._transforms = []
        with patch('requests.get') as get_mock:
            get_mock.return_value.text = 'test'
            handler.get()
| [
"t.paine154@gmail.com"
] | t.paine154@gmail.com |
bebbf95ab1b8f7d8f39f2fe3fbd89239d4814cae | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/observer/pod.py | 485bcc3a0d84e58e4306ff6341181c4e6af77f6e | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 5,183 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class Pod(Mo):
    """
    A container for node objects to calculate fabric health.
    """
    # NOTE: auto-generated model code; avoid structural edits and regenerate
    # from the model definition instead.
    meta = ClassMeta("cobra.model.observer.Pod")

    meta.moClassName = "observerPod"
    meta.rnFormat = "pod-%(id)s"
    meta.category = MoCategory.REGULAR
    meta.label = "Observer Pod"
    meta.writeAccessMask = 0x1
    meta.readAccessMask = 0x8000e000000c001
    meta.isDomainable = False
    meta.isReadOnly = True
    meta.isConfigurable = False
    meta.isDeletable = False
    meta.isContextRoot = True

    meta.childClasses.add("cobra.model.observer.Node")

    meta.childNamesAndRnPrefix.append(("cobra.model.observer.Node", "node-"))

    meta.parentClasses.add("cobra.model.observer.Topology")

    meta.rnPrefixes = [
        ('pod-', True),
    ]

    # Property: childAction (internal change-propagation flags).
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)

    # Property: dn (distinguished name; fixed at creation).
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)

    # Property: id (naming property, valid range 1-255).
    prop = PropMeta("str", "id", "id", 469, PropCategory.REGULAR)
    prop.label = "Id"
    prop.isConfig = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    prop.isNaming = True
    prop.range = [(1, 255)]
    prop.defaultValue = 1
    prop.defaultValueStr = "1"
    meta.props.add("id", prop)

    # Property: lcOwn (lifecycle ownership of the object).
    prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "local"
    prop._addConstant("implicit", "implicit", 4)
    prop._addConstant("local", "local", 0)
    prop._addConstant("policy", "policy", 1)
    prop._addConstant("replica", "replica", 2)
    prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
    meta.props.add("lcOwn", prop)

    # Property: modTs (last modification timestamp; "never" when unset).
    prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "never"
    prop._addConstant("never", "never", 0)
    meta.props.add("modTs", prop)

    # Property: numSpines (spine count used in the health calculation).
    prop = PropMeta("str", "numSpines", "numSpines", 473, PropCategory.REGULAR)
    prop.label = "Spine Count"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("numSpines", prop)

    # Property: rn (relative name, derived from rnFormat).
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)

    # Property: spineCoeff (spine weighting coefficient).
    prop = PropMeta("str", "spineCoeff", "spineCoeff", 474, PropCategory.REGULAR)
    prop.label = "Spine Coefficient"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("spineCoeff", prop)

    # Property: status (created/deleted/modified flags).
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)

    # Property: sumLeafHealth (aggregated leaf health score).
    prop = PropMeta("str", "sumLeafHealth", "sumLeafHealth", 470, PropCategory.REGULAR)
    prop.label = "Leaf Health Sum"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("sumLeafHealth", prop)

    # Property: sumLeafWeight (aggregated leaf weight).
    prop = PropMeta("str", "sumLeafWeight", "sumLeafWeight", 471, PropCategory.REGULAR)
    prop.label = "Leaf Weight Sum"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("sumLeafWeight", prop)

    # Property: sumSpineHealth (aggregated spine health score).
    prop = PropMeta("str", "sumSpineHealth", "sumSpineHealth", 472, PropCategory.REGULAR)
    prop.label = "Spine Health Sum"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("sumSpineHealth", prop)

    # 'id' is the naming property used to build the rn ("pod-%(id)s").
    meta.namingProps.append(getattr(meta.props, "id"))

    def __init__(self, parentMoOrDn, id, markDirty=True, **creationProps):
        namingVals = [id]
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
52521d978b955e69155b371789c678f9de0fc4a7 | e1d7b5ead35d6e40d63541d02e3320a01a581055 | /notebooks/_solutions/pandas_02_basic_operations128.py | 52fb0231e9924f92702114f3b4b15706f00bad97 | [
"BSD-3-Clause"
] | permissive | drdwitte/DS-python-data-analysis | 900d9f32683e5131bd6657cd6db95b8f774afe5f | 99db9f763411ae9a67ce60f5b8cc522f5e5db85b | refs/heads/master | 2021-06-13T01:13:14.455810 | 2017-01-03T21:29:45 | 2017-01-03T21:29:45 | 110,525,200 | 1 | 0 | null | 2017-11-13T09:08:55 | 2017-11-13T09:08:55 | null | UTF-8 | Python | false | false | 40 | py | df['Survived'].sum()/len(df['Survived']) | [
"jorisvandenbossche@gmail.com"
] | jorisvandenbossche@gmail.com |
9c12235910c2089f308d5efc8451bb32b2a84b3e | 5d25b942873144363546c8b0ccbd2df4fbec0aa0 | /utils/tserial.py | 0c5267fd8d46ea264eddd7f9d4d6cfea9556b728 | [
"Apache-2.0"
] | permissive | Sumalyo/mjmech | 228d23b3690b1244ec0c6825c231c2368d805045 | c222725b9ee799a595c53d1f85195e013a600a04 | refs/heads/master | 2021-04-14T04:59:54.043304 | 2020-03-19T12:57:37 | 2020-03-19T12:57:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,893 | py | #!/usr/bin/env python
# Copyright 2015 Josh Pieper, jjp@pobox.com. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''%prog [options]
Interact with a serial device capable of reporting tlog style
telemetry. Optionally log the data to disk in a tlog file.
'''
import optparse
import serial
import struct
class Serial(object):
    """Thin wrapper over pyserial speaking the tlog telemetry text protocol."""
    def __init__(self, options):
        self.options = options
        self.port = serial.Serial(port=options.serial,
                                  baudrate=options.baudrate)
        # Try to stop anything that might be spewing.
        self.stop()
        # Try to dump anything that is still in the receive queue.
        self.port.setTimeout(0.1)
        result = self.port.read(8192)
        print 'ignored %d bytes on start' % len(result)
    def stop(self):
        """Ask the device to stop emitting telemetry."""
        self.port.write('\ntel stop\n')
    def readline(self):
        # Return the next meaningful line, skipping 'unknown' command echoes
        # and blank lines.
        while True:
            line = self.port.readline()
            if line.startswith('unknown'):
                continue
            if line.strip() == '':
                continue
            return line
    def list(self):
        """Return the telemetry channel names reported by the device."""
        result = []
        self.port.write('\ntel list\n')
        while True:
            line = self.readline()
            if line.startswith("OK"):
                break
            result.append(line.strip())
        return result
    def schema(self, name):
        """Fetch the binary schema blob for channel *name*."""
        self.port.write('\ntel schema %s\n' % name)
        line = self.readline()
        assert line.startswith('schema ' + name), 'got unexpected schema response: ' + line
        # Payload: little-endian uint32 length followed by that many bytes.
        size_str = self.port.read(4)
        assert len(size_str) == 4
        size = struct.unpack('<I', size_str)[0]
        data = self.port.read(size)
        return data
    def rate(self, name, rate):
        """Set the emission rate for *name* (1 = every update, otherwise ms)."""
        self.port.write('\ntel rate %s %d\n' % (name, rate))
    def read_next_data(self):
        """Block until the next 'emit' record; return (channel name, payload)."""
        # Read until we get an "emit" line.
        self.port.setTimeout(None)
        line = ''
        while True:
            line = self.readline()
            if line.startswith('emit '):
                break
        name = line.split(' ')[1].strip()
        # Payload: little-endian uint32 length followed by the record bytes.
        size_str = self.port.read(4)
        assert len(size_str) == 4
        size = struct.unpack('<I', size_str)[0]
        data = self.port.read(size)
        return name, data
class LogWriter(object):
    """Writes a 'TLOG0002' stream: schema blocks first, then data blocks."""
    def __init__(self, name):
        self.fd = open(name, 'wb')
        self.fd.write('TLOG0002')
        self.fd.flush()
        # Identifiers are assigned sequentially as schemas are registered.
        self.next_identifier = 1
        self.names = {}
    # Block type codes used in the block header.
    BLOCK_SCHEMA = 1
    BLOCK_DATA = 2
    def make_pstring(self, data):
        # Length-prefixed string: little-endian uint32 length, then the bytes.
        return struct.pack('<I', len(data)) + data
    def _make_schema_block(self, identifier, name, schema):
        result = ''
        result += struct.pack('<II', identifier, 0)
        result += self.make_pstring(name)
        result += schema
        return result
    def _make_data_block(self, identifier, data):
        # NOTE(review): header here is '<IH' (uint32 id, uint16 flags) while
        # schema blocks use '<II' -- presumably this matches the tlog reader;
        # confirm before changing either format.
        result = struct.pack('<IH', identifier, 0) + data
        return result
    def write_schema(self, name, schema):
        """Assign the next identifier to *name* and emit its schema block."""
        identifier = self.next_identifier
        self.next_identifier += 1
        self.names[name] = identifier
        self.write_block(self.BLOCK_SCHEMA,
                         self._make_schema_block(identifier, name, schema))
    def write_data(self, name, data):
        """Emit a data block for a name already registered via write_schema."""
        identifier = self.names[name]
        self.write_block(self.BLOCK_DATA,
                         self._make_data_block(identifier, data))
    def write_block(self, block_id, data):
        # Each block: uint16 block type, uint32 payload length, then payload.
        self.fd.write(struct.pack('<HI', block_id, len(data)) + data)
        self.fd.flush()
def main():
    """Parse options, subscribe to telemetry channels, and record/print data."""
    usage, description = __doc__.split('\n\n', 1)
    parser = optparse.OptionParser(usage=usage, description=description)
    parser.add_option('--serial', '-s', default='/dev/ttyACM0')
    parser.add_option('--baudrate', '-b', type='int', default=115200)
    parser.add_option('--list', '-l', action='store_true')
    parser.add_option('--name', '-n', action='append', default=[])
    parser.add_option('--rate', '-r', type='int', default=1,
                      help='1 is every update, otherwise ms')
    parser.add_option('--output', '-o', help='output tlog file')
    options, args = parser.parse_args()
    ser = Serial(options)
    # --list just prints the available channels and exits.
    if options.list:
        print '\n'.join(ser.list())
        return
    if len(options.name) == 0:
        # If no names are specified, get everything.
        print 'getting names'
        options.name = ser.list()
    output = None
    if options.output:
        output = LogWriter(options.output)
    print 'getting schemas'
    # Get the schema for all the requested things.
    for name in options.name:
        schema = ser.schema(name)
        if output:
            output.write_schema(name, schema)
        print 'got schema for %s len %d' % (name, len(schema))
    print 'setting rates'
    # Now start everything being sent out.
    for name in options.name:
        ser.rate(name, options.rate)
    print 'starting to record'
    # Now, we just continue reading, looking for more data to come
    # out.
    try:
        records = 0
        while True:
            name, data = ser.read_next_data()
            if output:
                output.write_data(name, data)
            records += 1
            # Trailing comma keeps the count on one updating console line.
            print 'count: %d\r' % records,
    finally:
        pass
        #ser.stop()
if __name__ == '__main__':
    main()
| [
"jjp@pobox.com"
] | jjp@pobox.com |
84ad07d973ff96ef23edea1992f9a7f987eb23ca | dcc0bb6b30ab22a2b5aea3b0f2f5bf403c28dc9b | /awx_collection/test/awx/conftest.py | bdaa0db3bf529a94233f8be6be7abf9d089906cb | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"GPL-3.0-only"
] | permissive | EStork09/awx | e358b8eaecdb59693fa7d9883555aa5259cc1641 | 0c0e172caf7036737a511cbd532b4dc72d900725 | refs/heads/devel | 2020-08-22T01:18:12.674380 | 2019-10-20T00:45:02 | 2019-10-20T00:45:02 | 178,763,134 | 0 | 0 | Apache-2.0 | 2019-04-09T16:38:38 | 2019-04-01T01:24:13 | Python | UTF-8 | Python | false | false | 4,187 | py | import io
import json
import datetime
import importlib
from contextlib import redirect_stdout
from unittest import mock
from requests.models import Response
import pytest
from awx.main.tests.functional.conftest import _request
from awx.main.models import Organization, Project, Inventory, Credential, CredentialType
def sanitize_dict(din):
    '''Sanitize Django response data to purge it of internal types
    so it may be used to cast a requests response object
    '''
    if isinstance(din, (int, str, type(None), bool)):
        # Native JSON scalars pass through untouched.
        return din
    if isinstance(din, datetime.datetime):
        return din.isoformat()
    if isinstance(din, list):
        # Rewrite the list in place so callers holding a reference see it.
        din[:] = [sanitize_dict(item) for item in din]
        return din
    if isinstance(din, dict):
        for key in list(din):
            din[key] = sanitize_dict(din[key])
        return din
    # Anything else (e.g. lazy translation proxies) becomes its string form.
    return str(din)
@pytest.fixture
def run_module():
    """Return a helper that executes an Ansible module in-process, routing its
    HTTP requests to the Django test client instead of a live server."""
    def rf(module_name, module_params, request_user):
        def new_request(self, method, url, **kwargs):
            kwargs_copy = kwargs.copy()
            if 'data' in kwargs:
                kwargs_copy['data'] = json.loads(kwargs['data'])
            # make request
            rf = _request(method.lower())
            django_response = rf(url, user=request_user, expect=None, **kwargs_copy)
            # requests library response object is different from the Django response, but they are the same concept
            # this converts the Django response object into a requests response object for consumption
            resp = Response()
            py_data = django_response.data
            sanitize_dict(py_data)
            resp._content = bytes(json.dumps(django_response.data), encoding='utf8')
            resp.status_code = django_response.status_code
            return resp
        stdout_buffer = io.StringIO()
        # Requires specific PYTHONPATH, see docs
        # Note that a proper Ansiballz explosion of the modules will have an import path like:
        # ansible_collections.awx.awx.plugins.modules.{}
        # We should consider supporting that in the future
        resource_module = importlib.import_module('plugins.modules.{}'.format(module_name))
        # Ansible params can be passed as an invocation argument or over stdin
        # this short circuits within the AnsibleModule interface
        def mock_load_params(self):
            self.params = module_params
        with mock.patch.object(resource_module.TowerModule, '_load_params', new=mock_load_params):
            # Call the test utility (like a mock server) instead of issuing HTTP requests
            with mock.patch('tower_cli.api.Session.request', new=new_request):
                # Ansible modules return data to the mothership over stdout
                with redirect_stdout(stdout_buffer):
                    try:
                        resource_module.main()
                    except SystemExit:
                        pass  # A system exit indicates successful execution
        # The module's JSON result is whatever it printed to stdout.
        module_stdout = stdout_buffer.getvalue().strip()
        result = json.loads(module_stdout)
        return result
    return rf
@pytest.fixture
def organization():
    # Canonical organization the other fixtures hang off of.
    return Organization.objects.create(name='Default')
@pytest.fixture
def project(organization):
    # Minimal git-backed project; the fixed scm_revision mimics a synced repo.
    return Project.objects.create(
        name="test-proj",
        description="test-proj-desc",
        organization=organization,
        playbook_files=['helloworld.yml'],
        local_path='_92__test_proj',
        scm_revision='1234567890123456789012345678901234567890',
        scm_url='localhost',
        scm_type='git'
    )
@pytest.fixture
def inventory(organization):
    # Empty inventory owned by the default organization.
    return Inventory.objects.create(
        name='test-inv',
        organization=organization
    )
@pytest.fixture
def machine_credential(organization):
    # NOTE(review): the 'organization' argument is unused here -- presumably
    # requested only to force fixture ordering; confirm before removing.
    ssh_type = CredentialType.defaults['ssh']()
    ssh_type.save()
    return Credential.objects.create(
        credential_type=ssh_type, name='machine-cred',
        inputs={'username': 'test_user', 'password': 'pas4word'}
    )
| [
"arominge@redhat.com"
] | arominge@redhat.com |
c7f42f7749b90a9c3dd47f55c00466c5b63d3493 | 97884252481ff208519194ecd63dc3a79c250220 | /pyobs/robotic/lco/scripts/script.py | 0c57a0bb4cb505d10961df96b185ca12ba1e1049 | [
"MIT"
] | permissive | pyobs/pyobs-core | a1f30137d7f991bad4e115de38f543e59a6e30d2 | 2d7a06e5485b61b6ca7e51d99b08651ea6021086 | refs/heads/master | 2023-09-01T20:49:07.610730 | 2023-08-29T09:20:05 | 2023-08-29T09:20:05 | 174,351,157 | 9 | 3 | NOASSERTION | 2023-09-14T20:39:48 | 2019-03-07T13:41:27 | Python | UTF-8 | Python | false | false | 2,064 | py | import logging
from typing import Dict, Any, Optional
from pyobs.robotic.scripts import Script
from pyobs.robotic import TaskSchedule, TaskArchive, TaskRunner
log = logging.getLogger(__name__)
class LcoScript(Script):
    """Dispatches LCO configurations to their matching external scripts."""

    def __init__(self, scripts: Dict[str, Script], **kwargs: Any):
        """Initialize a new LCO script dispatcher.

        Args:
            scripts: External scripts to run, keyed by script name.
        """
        Script.__init__(self, **kwargs)
        self.scripts = scripts

    def _get_config_script(self, config: Dict[str, Any]) -> Script:
        """Build the script handler for the given configuration.

        Args:
            config: Config to create a handler for.

        Returns:
            Script for running the config.

        Raises:
            ValueError: If no script is registered for the config's type.
        """
        # The config names which registered script should handle it.
        script_name = config["extra_params"]["script_name"]
        if script_name not in self.scripts:
            raise ValueError('No script found for script type "%s".' % script_name)
        return self.get_object(self.scripts[script_name], Script, configuration=config)

    async def can_run(self) -> bool:
        """Check whether this task could run now.

        Returns:
            True, if the underlying config script can run now.
        """
        handler = self._get_config_script(self.configuration)
        return await handler.can_run()

    async def run(
        self,
        task_runner: TaskRunner,
        task_schedule: Optional[TaskSchedule] = None,
        task_archive: Optional[TaskArchive] = None,
    ) -> None:
        """Run the script matching the current configuration.

        Raises:
            InterruptedError: If interrupted.
        """
        handler = self._get_config_script(self.configuration)
        await handler.run(task_runner=task_runner, task_schedule=task_schedule, task_archive=task_archive)


__all__ = ["LcoScript"]
| [
"thusser@uni-goettingen.de"
] | thusser@uni-goettingen.de |
837f297f47a3d31dca3856ca30dc1c07ea466c51 | 0c469c4100fe9d352e83731688e388062a3c55c7 | /Graphs/1135. Connecting Cities With Minimum Cost.py | 079eb10f622723f7ab046130350ee32299f45381 | [] | no_license | asperaa/back_to_grind | 9e055c7e6561384e5b7ae52f01063e4beb34a298 | 5ea1976b9d5c6d04800e296e45e8ff90fdde5001 | refs/heads/master | 2022-12-16T18:32:01.443743 | 2020-09-05T13:29:39 | 2020-09-05T13:29:39 | 254,910,528 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,081 | py | """We are the captains of oue ships, and we stay 'till the end. We see our stories through.
"""
"""1135. Connecting Cities With Minimum Cost
"""
class Solution:
def minimumCost(self, n ,edges):
parent = [i for i in range(n+1)]
rank = [0] * (n+1)
self.cost = 0
self.connected_components = n
def find(x):
if parent[x] != x:
parent[x] = find(parent[x])
return parent[x]
def union(x, y, w):
rx, ry = find(x), find(y)
if rx != ry:
if rank[rx] < rank[ry]:
parent[rx] = ry
elif rank[ry] < rank[rx]:
parent[ry] = rx
else:
parent[rx] = ry
rank[ry] += 1
self.connected_components -= 1
self.cost += w
edges.sort(key=lambda x: x[2])
for u, v, w in edges:
union(u, v, w)
if self.connected_components == 1:
return self.cost
return -1 | [
"adityaankur44@gmail.com"
] | adityaankur44@gmail.com |
01822b078b05a3660a8aaa3154dd9afeb8922100 | ef605b30b118dbb5902a360c2dc74634f8d8023a | /ve/Lib/site-packages/tests/test_archaism.py | edf8f96ad05816c5b8f19b27af3ad2eee643f92d | [] | no_license | lugnitdgp/avskr2.0 | 691b82e529fba667ebf0885b52f0c58b5076f3cb | 278e4f6f8ce4677e213150716704330d83debf9f | refs/heads/master | 2022-12-06T03:10:38.743582 | 2018-10-29T13:14:38 | 2018-10-29T13:14:38 | 151,551,343 | 4 | 10 | null | 2022-11-22T13:59:07 | 2018-10-04T09:58:45 | Python | UTF-8 | Python | false | false | 620 | py | """Tests for archaism.misc check."""
from __future__ import absolute_import
from .check import Check
from proselint.checks.archaism import misc as chk
class TestCheck(Check):
    """The test class for archaism.misc."""
    __test__ = True
    @property
    def this_check(self):
        """Boilerplate: expose the checker module under test."""
        return chk
    def test_smoke(self):
        """Basic smoke test for archaism.misc."""
        assert self.passes("""Smoke phrase with nothing flagged.""")
        assert self.passes("""I want to sleep, and maybe dream.""")
        assert not self.passes("""I want to sleep, perchance to dream.""")
| [
"divyanshumehta@outlook.com"
] | divyanshumehta@outlook.com |
e717b68e6f98bb33f19c3f68eb602455a799f7f6 | dc80f94c1a244002db468fc7242d5fcaafe439dc | /powerdns_client/models/error.py | f5299db89c3ad43e6f2cbbe70c6da422abc9ead3 | [
"MIT"
] | permissive | sanvu88/python-powerdns-client | f675e1ee162bb76190b41ddf0cfc34e2305a757b | 57dd0460995a5407c6f5c963553b4df0f4859667 | refs/heads/master | 2023-02-04T07:05:31.095951 | 2020-12-15T16:48:15 | 2020-12-15T16:48:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,975 | py | # coding: utf-8
"""
PowerDNS Authoritative HTTP API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 0.0.13
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Error(object):
    """Swagger model for a PowerDNS API error payload.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # attribute name -> swagger type
    swagger_types = {
        'error': 'str',
        'errors': 'list[str]'
    }

    # attribute name -> JSON key in the API definition
    attribute_map = {
        'error': 'error',
        'errors': 'errors'
    }

    def __init__(self, error=None, errors=None):
        """Error - a model defined in Swagger"""
        self._error = None
        self._errors = None
        self.discriminator = None
        # Assign through the property so the not-None constraint is enforced.
        self.error = error
        if errors is not None:
            self.errors = errors

    @property
    def error(self):
        """str: the human readable error message (required)."""
        return self._error

    @error.setter
    def error(self, error):
        """Set the human readable error message; must not be None."""
        if error is None:
            raise ValueError("Invalid value for `error`, must not be `None`")
        self._error = error

    @property
    def errors(self):
        """list[str]: optional list of errors encountered during processing."""
        return self._errors

    @errors.setter
    def errors(self, errors):
        """Set the optional list of processing errors."""
        self._errors = errors

    def to_dict(self):
        """Return the model's properties as a plain dict."""
        def _convert(value):
            # Nested swagger models expose their own to_dict().
            return value.to_dict() if hasattr(value, "to_dict") else value

        result = {}
        for attr in six.iterkeys(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [_convert(item) for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: _convert(val) for key, val in value.items()}
            else:
                result[attr] = value
        # Generated escape hatch for dict-based models; Error is not one,
        # so this branch is never taken.
        if issubclass(Error, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Return True when *other* is an Error with identical state."""
        return isinstance(other, Error) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True when the two objects differ."""
        return not (self == other)
| [
"67791576+underline-bot@users.noreply.github.com"
] | 67791576+underline-bot@users.noreply.github.com |
2784d73cd9183dd106ee06fba4d4c4e10409acc4 | 4fdaee9f2612a8c429991a2042dffcee80e7a641 | /rootfs/qboxhd/rootfs/usr/local/lib/enigma2/python/Screens/Scart.py | 64e4a69e009284ed03b409f8e33f5041d03a4d0e | [] | no_license | OpenSH4/qboxhd | 841072db3b0eaecdcac116b5f96268d47115cdec | 91dd37a5311b5c53fb088ab0ce902ee49552ece0 | refs/heads/master | 2020-09-07T17:55:36.114816 | 2012-01-08T21:33:02 | 2012-01-08T21:33:02 | 220,866,062 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py | from qboxhd import QBOXHD
from Screen import Screen
from MessageBox import MessageBox
from Components.AVSwitch import AVSwitch
class Scart(Screen):
    """Screen that switches AV output to SCART and shows a warning dialog.

    msgVisible states: None = dialog pending on first activation,
    False = hidden, True = currently shown.
    """

    def __init__(self, session, start_visible=True):
        Screen.__init__(self, session)
        self.avswitch = AVSwitch()
        if not start_visible:
            self.msgVisible = False
        else:
            self.msgVisible = None
            # Show the dialog the first time this screen is activated.
            self.onExecBegin.append(self.showMessageBox)

    def showMessageBox(self):
        if self.msgVisible is None:
            # First activation: the hook fires only once, so unregister it.
            self.onExecBegin.remove(self.showMessageBox)
            self.msgVisible = False
        if self.msgVisible:
            return
        self.msgVisible = True
        self.avswitch.setInput("SCART")
        self.msgBox = self.session.openWithCallback(
            self.MsgBoxClosed, MessageBox,
            _("If you see this, something is wrong with\nyour scart connection. Press OK to return."),
            MessageBox.TYPE_ERROR)

    def MsgBoxClosed(self, *val):
        # Clear the reference before switching, so switchToTV's
        # re-entrant call takes the ENCODER branch.
        self.msgBox = None
        self.switchToTV()

    def switchToTV(self, *val):
        if not self.msgVisible:
            return
        if self.msgBox:
            # Closing the box triggers MsgBoxClosed -> switchToTV again,
            # this time with msgBox already cleared.
            self.msgBox.close()
            return
        self.avswitch.setInput("ENCODER")
        self.msgVisible = False
| [
"duopaguilar@0410bcea-ab32-4fec-9f21-c18eae94034e"
] | duopaguilar@0410bcea-ab32-4fec-9f21-c18eae94034e |
37f45c2414abdc4e1e1c66d8ba58d473235b0f3b | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/numenta_nupic/nupic-master/src/nupic/frameworks/viz/network_visualization.py | 6c37b7608dbabfbc809f5fc33aba4327f6ef7336 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 2,674 | py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2017, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import networkx as nx
from nupic.frameworks.viz import DotRenderer as DEFAULT_RENDERER
class NetworkVisualizer(object):
  """
  Network visualization framework entry point.

  Usage:

    NetworkVisualizer(network).render()

  You may optionally specify a specific renderer, e.g.:

    viz = NetworkVisualizer(network)
    viz.render(renderer=GraphVizRenderer)
    viz.render(renderer=NetworkXRenderer)
  """

  def __init__(self, network):
    """
    :param network: nupic.engine.network
    """
    self.network = network

  def export(self):
    """
    Export the network as a networkx MultiDiGraph intermediate representation
    suitable for visualization.

    :return: networkx MultiDiGraph
    """
    graph = nx.MultiDiGraph()

    # One node per region, labelled with the region's name.
    regions = self.network.getRegions()
    for i in xrange(regions.getCount()):
      name = regions.getByIndex(i)[0]
      graph.add_node(name, label=name)

    # One edge per link, annotated with the output/input names it connects.
    for _, link in self.network.getLinks():
      graph.add_edge(link.getSrcRegionName(),
                     link.getDestRegionName(),
                     src=link.getSrcOutputName(),
                     dest=link.getDestInputName())

    return graph

  def render(self, renderer=DEFAULT_RENDERER):
    """
    Render the network.

    :param renderer: constructor of a "renderer" whose instances expose a
      render method accepting a single networkx graph argument.
    """
    renderer().render(self.export())
| [
"659338505@qq.com"
] | 659338505@qq.com |
ca88d83b98155eab6d93550d5db549dcf68549b9 | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | /src/azure-cli/azure/cli/command_modules/servicebus/aaz/latest/servicebus/queue/_create.py | 795920ad6461e68726913b5c980a825f83410405 | [
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 17,863 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "servicebus queue create",
)
class Create(AAZCommand):
    """Create a Service Bus queue. This operation is idempotent.
    """

    # Metadata consumed by the aaz framework: target API version and the
    # ARM resource id template this command operates on.
    _aaz_info = {
        "version": "2022-01-01-preview",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.servicebus/namespaces/{}/queues/{}", "2022-01-01-preview"],
        ]
    }

    def _handler(self, command_args):
        # Framework entry point: run the HTTP operation(s), then shape output.
        super()._handler(command_args)
        self._execute_operations()
        return self._output()

    # Cached argument schema; built lazily on first use.
    _args_schema = None

    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)

        # define Arg Group ""

        _args_schema = cls._args_schema
        _args_schema.namespace_name = AAZStrArg(
            options=["--namespace-name"],
            help="The namespace name",
            required=True,
            fmt=AAZStrArgFormat(
                max_length=50,
                min_length=6,
            ),
        )
        _args_schema.queue_name = AAZStrArg(
            options=["-n", "--name", "--queue-name"],
            help="The queue name.",
            required=True,
            fmt=AAZStrArgFormat(
                min_length=1,
            ),
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )

        # define Arg Group "Properties"

        _args_schema = cls._args_schema
        _args_schema.auto_delete_on_idle = AAZDurationArg(
            options=["--auto-delete-on-idle"],
            arg_group="Properties",
            help="ISO 8061 timeSpan idle interval after which the queue is automatically deleted. The minimum duration is 5 minutes.",
        )
        _args_schema.enable_dead_lettering_on_message_expiration = AAZBoolArg(
            options=["--message-expiration", "--enable-dead-lettering-on-message-expiration"],
            arg_group="Properties",
            help="A value that indicates whether this queue has dead letter support when a message expires.",
        )
        _args_schema.default_message_time_to_live = AAZDurationArg(
            options=["--default-message-time-to-live"],
            arg_group="Properties",
            help="ISO 8601 default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.",
        )
        _args_schema.duplicate_detection_history_time_window = AAZDurationArg(
            options=["-d", "--duplicate-detection-history-time-window"],
            arg_group="Properties",
            help="ISO 8601 timeSpan structure that defines the duration of the duplicate detection history. The default value is 10 minutes.",
        )
        _args_schema.enable_batched_operations = AAZBoolArg(
            options=["--enable-batched-operations"],
            arg_group="Properties",
            help="Value that indicates whether server-side batched operations are enabled.",
        )
        _args_schema.enable_express = AAZBoolArg(
            options=["--enable-express"],
            arg_group="Properties",
            help="A value that indicates whether Express Entities are enabled. An express queue holds a message in memory temporarily before writing it to persistent storage.",
        )
        _args_schema.enable_partitioning = AAZBoolArg(
            options=["--enable-partitioning"],
            arg_group="Properties",
            help="A value that indicates whether the queue is to be partitioned across multiple message brokers.",
        )
        _args_schema.forward_dead_lettered_messages_to = AAZStrArg(
            options=["--forward-dead-lettered-messages-to"],
            arg_group="Properties",
            help="Queue/Topic name to forward the Dead Letter message",
        )
        _args_schema.forward_to = AAZStrArg(
            options=["--forward-to"],
            arg_group="Properties",
            help="Queue/Topic name to forward the messages",
        )
        _args_schema.lock_duration = AAZDurationArg(
            options=["--lock-duration"],
            arg_group="Properties",
            help="ISO 8601 timespan duration of a peek-lock; that is, the amount of time that the message is locked for other receivers. The maximum value for LockDuration is 5 minutes; the default value is 1 minute.",
        )
        _args_schema.max_delivery_count = AAZIntArg(
            options=["--max-delivery-count"],
            arg_group="Properties",
            help="The maximum delivery count. A message is automatically deadlettered after this number of deliveries. default value is 10.",
        )
        _args_schema.max_message_size_in_kilobytes = AAZIntArg(
            options=["--max-message-size", "--max-message-size-in-kilobytes"],
            arg_group="Properties",
            help="Maximum size (in KB) of the message payload that can be accepted by the topic. This property is only used in Premium today and default is 1024.",
        )
        _args_schema.max_size_in_megabytes = AAZIntArg(
            options=["--max-size", "--max-size-in-megabytes"],
            arg_group="Properties",
            help="Maximum size of the topic in megabytes, which is the size of the memory allocated for the topic. Default is 1024.",
        )
        _args_schema.enable_duplicate_detection = AAZBoolArg(
            options=["--duplicate-detection", "--enable-duplicate-detection"],
            arg_group="Properties",
            help="A value indicating if this queue requires duplicate detection.",
        )
        _args_schema.enable_session = AAZBoolArg(
            options=["--enable-session"],
            arg_group="Properties",
            help="A value that indicates whether the queue supports the concept of sessions.",
        )
        _args_schema.status = AAZStrArg(
            options=["--status"],
            arg_group="Properties",
            help="Enumerates the possible values for the status of a messaging entity.",
            enum={"Active": "Active", "Creating": "Creating", "Deleting": "Deleting", "Disabled": "Disabled", "ReceiveDisabled": "ReceiveDisabled", "Renaming": "Renaming", "Restoring": "Restoring", "SendDisabled": "SendDisabled", "Unknown": "Unknown"},
        )
        return cls._args_schema

    def _execute_operations(self):
        # pre/post hooks wrap the single PUT operation.
        self.pre_operations()
        self.QueuesCreateOrUpdate(ctx=self.ctx)()
        self.post_operations()

    @register_callback
    def pre_operations(self):
        # Extension hook: runs before the HTTP operation.
        pass

    @register_callback
    def post_operations(self):
        # Extension hook: runs after the HTTP operation.
        pass

    def _output(self, *args, **kwargs):
        # Deserialize the response stored in ctx by on_200 below.
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
        return result

    class QueuesCreateOrUpdate(AAZHttpOperation):
        # PUT .../queues/{queueName} — create or update a queue.
        CLIENT_TYPE = "MgmtClient"

        def __call__(self, *args, **kwargs):
            # Issue the request; only HTTP 200 is treated as success.
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.on_200(session)

            return self.on_error(session.http_response)

        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/queues/{queueName}",
                **self.url_parameters
            )

        @property
        def method(self):
            return "PUT"

        @property
        def error_format(self):
            return "MgmtErrorFormat"

        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "namespaceName", self.ctx.args.namespace_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "queueName", self.ctx.args.queue_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters

        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2022-01-01-preview",
                    required=True,
                ),
            }
            return parameters

        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Content-Type", "application/json",
                ),
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters

        @property
        def content(self):
            # Build the JSON request body from the parsed command arguments.
            _content_value, _builder = self.new_content_builder(
                self.ctx.args,
                typ=AAZObjectType,
                typ_kwargs={"flags": {"required": True, "client_flatten": True}}
            )
            _builder.set_prop("properties", AAZObjectType, typ_kwargs={"flags": {"client_flatten": True}})

            properties = _builder.get(".properties")
            if properties is not None:
                properties.set_prop("autoDeleteOnIdle", AAZStrType, ".auto_delete_on_idle")
                properties.set_prop("deadLetteringOnMessageExpiration", AAZBoolType, ".enable_dead_lettering_on_message_expiration")
                properties.set_prop("defaultMessageTimeToLive", AAZStrType, ".default_message_time_to_live")
                properties.set_prop("duplicateDetectionHistoryTimeWindow", AAZStrType, ".duplicate_detection_history_time_window")
                properties.set_prop("enableBatchedOperations", AAZBoolType, ".enable_batched_operations")
                properties.set_prop("enableExpress", AAZBoolType, ".enable_express")
                properties.set_prop("enablePartitioning", AAZBoolType, ".enable_partitioning")
                properties.set_prop("forwardDeadLetteredMessagesTo", AAZStrType, ".forward_dead_lettered_messages_to")
                properties.set_prop("forwardTo", AAZStrType, ".forward_to")
                properties.set_prop("lockDuration", AAZStrType, ".lock_duration")
                properties.set_prop("maxDeliveryCount", AAZIntType, ".max_delivery_count")
                properties.set_prop("maxMessageSizeInKilobytes", AAZIntType, ".max_message_size_in_kilobytes")
                properties.set_prop("maxSizeInMegabytes", AAZIntType, ".max_size_in_megabytes")
                properties.set_prop("requiresDuplicateDetection", AAZBoolType, ".enable_duplicate_detection")
                properties.set_prop("requiresSession", AAZBoolType, ".enable_session")
                properties.set_prop("status", AAZStrType, ".status")

            return self.serialize_content(_content_value)

        def on_200(self, session):
            # Stash the deserialized response for Create._output().
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self._build_schema_on_200
            )

        # Cached response schema; built lazily on first 200 response.
        _schema_on_200 = None

        @classmethod
        def _build_schema_on_200(cls):
            if cls._schema_on_200 is not None:
                return cls._schema_on_200

            cls._schema_on_200 = AAZObjectType()

            _schema_on_200 = cls._schema_on_200
            _schema_on_200.id = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.location = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.name = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.properties = AAZObjectType(
                flags={"client_flatten": True},
            )
            _schema_on_200.system_data = AAZObjectType(
                serialized_name="systemData",
                flags={"read_only": True},
            )
            _schema_on_200.type = AAZStrType(
                flags={"read_only": True},
            )

            properties = cls._schema_on_200.properties
            properties.accessed_at = AAZStrType(
                serialized_name="accessedAt",
                flags={"read_only": True},
            )
            properties.auto_delete_on_idle = AAZStrType(
                serialized_name="autoDeleteOnIdle",
            )
            properties.count_details = AAZObjectType(
                serialized_name="countDetails",
            )
            properties.created_at = AAZStrType(
                serialized_name="createdAt",
                flags={"read_only": True},
            )
            properties.dead_lettering_on_message_expiration = AAZBoolType(
                serialized_name="deadLetteringOnMessageExpiration",
            )
            properties.default_message_time_to_live = AAZStrType(
                serialized_name="defaultMessageTimeToLive",
            )
            properties.duplicate_detection_history_time_window = AAZStrType(
                serialized_name="duplicateDetectionHistoryTimeWindow",
            )
            properties.enable_batched_operations = AAZBoolType(
                serialized_name="enableBatchedOperations",
            )
            properties.enable_express = AAZBoolType(
                serialized_name="enableExpress",
            )
            properties.enable_partitioning = AAZBoolType(
                serialized_name="enablePartitioning",
            )
            properties.forward_dead_lettered_messages_to = AAZStrType(
                serialized_name="forwardDeadLetteredMessagesTo",
            )
            properties.forward_to = AAZStrType(
                serialized_name="forwardTo",
            )
            properties.lock_duration = AAZStrType(
                serialized_name="lockDuration",
            )
            properties.max_delivery_count = AAZIntType(
                serialized_name="maxDeliveryCount",
            )
            properties.max_message_size_in_kilobytes = AAZIntType(
                serialized_name="maxMessageSizeInKilobytes",
            )
            properties.max_size_in_megabytes = AAZIntType(
                serialized_name="maxSizeInMegabytes",
            )
            properties.message_count = AAZIntType(
                serialized_name="messageCount",
                flags={"read_only": True},
            )
            properties.requires_duplicate_detection = AAZBoolType(
                serialized_name="requiresDuplicateDetection",
            )
            properties.requires_session = AAZBoolType(
                serialized_name="requiresSession",
            )
            properties.size_in_bytes = AAZIntType(
                serialized_name="sizeInBytes",
                flags={"read_only": True},
            )
            properties.status = AAZStrType()
            properties.updated_at = AAZStrType(
                serialized_name="updatedAt",
                flags={"read_only": True},
            )

            count_details = cls._schema_on_200.properties.count_details
            count_details.active_message_count = AAZIntType(
                serialized_name="activeMessageCount",
                flags={"read_only": True},
            )
            count_details.dead_letter_message_count = AAZIntType(
                serialized_name="deadLetterMessageCount",
                flags={"read_only": True},
            )
            count_details.scheduled_message_count = AAZIntType(
                serialized_name="scheduledMessageCount",
                flags={"read_only": True},
            )
            count_details.transfer_dead_letter_message_count = AAZIntType(
                serialized_name="transferDeadLetterMessageCount",
                flags={"read_only": True},
            )
            count_details.transfer_message_count = AAZIntType(
                serialized_name="transferMessageCount",
                flags={"read_only": True},
            )

            system_data = cls._schema_on_200.system_data
            system_data.created_at = AAZStrType(
                serialized_name="createdAt",
            )
            system_data.created_by = AAZStrType(
                serialized_name="createdBy",
            )
            system_data.created_by_type = AAZStrType(
                serialized_name="createdByType",
            )
            system_data.last_modified_at = AAZStrType(
                serialized_name="lastModifiedAt",
            )
            system_data.last_modified_by = AAZStrType(
                serialized_name="lastModifiedBy",
            )
            system_data.last_modified_by_type = AAZStrType(
                serialized_name="lastModifiedByType",
            )

            return cls._schema_on_200
class _CreateHelper:
    """Helper class for Create"""


# Explicit public API of this generated module.
__all__ = ["Create"]
| [
"noreply@github.com"
] | Azure.noreply@github.com |
05267bb3879a6e0fadb7cb02e558d512ea7128ca | e9538b7ad6d0ce0ccfbb8e10c458f9e0b73926f6 | /tests/unit/modules/remote_management/lxca/test_lxca_cmms.py | 96d4f5c61556d09603513b44da7afb31557e60e3 | [] | no_license | ansible-collection-migration/misc.not_a_real_collection | b3ef8090c59de9ac30aca083c746ec3595d7f5f5 | 7ab1af924a3db4ada2f714b09bb392614344cb1e | refs/heads/master | 2020-12-18T13:48:51.849567 | 2020-01-22T17:39:18 | 2020-01-22T17:39:18 | 235,400,821 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,778 | py | import json
import pytest
from ansible_collections.misc.not_a_real_collection.tests.unit.compat import mock
from ansible_collections.misc.not_a_real_collection.plugins.modules import lxca_cmms
@pytest.fixture(scope='module')
@mock.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.remote_management.lxca.common.close_conn', autospec=True)
def setup_module(close_conn):
    """Module-scoped fixture that stubs out the LXCA close_conn call."""
    close_conn.return_value = True
class TestMyModule():
    """Unit tests for the lxca_cmms Ansible module."""

    @pytest.mark.parametrize('patch_ansible_module',
                             [
                                 {},
                                 {
                                     "auth_url": "https://10.240.14.195",
                                     "login_user": "USERID",
                                 },
                                 {
                                     "auth_url": "https://10.240.14.195",
                                     "login_password": "Password",
                                 },
                                 {
                                     "login_user": "USERID",
                                     "login_password": "Password",
                                 },
                             ],
                             indirect=['patch_ansible_module'])
    @pytest.mark.usefixtures('patch_ansible_module')
    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.remote_management.lxca.common.setup_conn', autospec=True)
    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.lxca_cmms.execute_module', autospec=True)
    def test_without_required_parameters(self, _execute_module, _setup_conn,
                                         mocker, capfd, setup_module):
        """Failure must occurs when all parameters are missing"""
        # NOTE: mock.patch decorators inject mocks bottom-up, so the patch
        # closest to the function (execute_module) is the first argument.
        # The original parameter names were swapped; fixed here.
        with pytest.raises(SystemExit):
            _setup_conn.return_value = "Fake connection"
            _execute_module.return_value = "Fake execution"
            lxca_cmms.main()
        out, err = capfd.readouterr()
        results = json.loads(out)
        assert results['failed']
        assert 'missing required arguments' in results['msg']

    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.remote_management.lxca.common.setup_conn', autospec=True)
    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.lxca_cmms.execute_module', autospec=True)
    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.lxca_cmms.AnsibleModule', autospec=True)
    def test__argument_spec(self, ansible_mod_cls, _execute_module, _setup_conn, setup_module):
        """main() must construct AnsibleModule with the expected argument spec."""
        expected_arguments_spec = dict(
            login_user=dict(required=True),
            login_password=dict(required=True, no_log=True),
            command_options=dict(default='cmms', choices=['cmms', 'cmms_by_uuid',
                                                          'cmms_by_chassis_uuid']),
            auth_url=dict(required=True),
            uuid=dict(default=None),
            chassis=dict(default=None),
        )
        _setup_conn.return_value = "Fake connection"
        _execute_module.return_value = []
        mod_obj = ansible_mod_cls.return_value
        args = {
            "auth_url": "https://10.243.30.195",
            "login_user": "USERID",
            "login_password": "password",
            "command_options": "cmms",
        }
        mod_obj.params = args
        lxca_cmms.main()
        assert(mock.call(argument_spec=expected_arguments_spec,
                         supports_check_mode=False) == ansible_mod_cls.call_args)

    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.remote_management.lxca.common.setup_conn', autospec=True)
    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.lxca_cmms._cmms_by_uuid',
                autospec=True)
    @mock.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.lxca_cmms.AnsibleModule',
                autospec=True)
    def test__cmms_empty_list(self, ansible_mod_cls, _get_cmms, _setup_conn, setup_module):
        """_cmms_by_uuid may return an empty list and should be called with module params.

        NOTE(review): this exercises the patched mock rather than the real
        _cmms_by_uuid implementation — confirm that is the intent.
        """
        mod_obj = ansible_mod_cls.return_value
        args = {
            "auth_url": "https://10.243.30.195",
            "login_user": "USERID",
            "login_password": "password",
            "uuid": "3C737AA5E31640CE949B10C129A8B01F",
            "command_options": "cmms_by_uuid",
        }
        mod_obj.params = args
        _setup_conn.return_value = "Fake connection"
        empty_nodes_list = []
        _get_cmms.return_value = empty_nodes_list
        ret_cmms = _get_cmms(mod_obj, args)
        assert mock.call(mod_obj, mod_obj.params) == _get_cmms.call_args
        assert _get_cmms.return_value == ret_cmms
| [
"ansible_migration@example.com"
] | ansible_migration@example.com |
a023ca5921b4dae35ed7b05846b33c20b9bb7352 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_rimmed.py | 5ba54997dbd01072557b8d6aa85dd5252bdbec4d | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py |
# class header
class _RIMMED():
def __init__(self,):
self.name = "RIMMED"
self.definitions = rim
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['rim']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
cbd5229791354c1ff4eb92cd07ccdc2794be4190 | 9f7512711f78d71a5de7ec54001411cb9c319424 | /contents/serializers.py | 3353c1a830a69e69410f7cedfb9c577b5f4d1ca5 | [] | no_license | charles-co/glc_project | a819ad5b401ba2279901f8f752f7a9331271d376 | ae8c3cba6dcb416d7afa3abbbf439f48003b6e9f | refs/heads/main | 2023-03-29T17:28:05.518810 | 2021-04-03T18:00:16 | 2021-04-03T18:00:16 | 336,250,626 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 841 | py | from rest_framework import serializers
from .models import Audio, Video, Podcast, TV
class AudioSerializer(serializers.ModelSerializer):
    """Serializer for Audio items.

    All listed fields are marked read-only, so this serializer only
    renders existing records; it does not accept writable input.
    """
    class Meta:
        model = Audio
        fields = ['title', 'audio_file', 'created_at']
        read_only_fields = ['created_at', 'audio_file', 'title']
class VideoSerializer(serializers.ModelSerializer):
    """Serializer for Video items; all listed fields are read-only."""
    class Meta:
        model = Video
        fields = ['title', 'file', 'created_at']
        read_only_fields = ['created_at', 'file', 'title']
class PodcastSerializer(serializers.ModelSerializer):
    """Serializer for Podcast items; all listed fields are read-only."""
    class Meta:
        model = Podcast
        fields = ['title', 'file', 'created_at']
        read_only_fields = ['created_at', 'file', 'title']
class TVSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the TV model.

    NOTE(review): unlike the other serializers in this module it declares
    no read_only_fields — confirm that writable access is intended.
    """
    class Meta:
        model = TV
        fields = "__all__"
"charlesboy49@gmail.com"
] | charlesboy49@gmail.com |
75e33e57ef1502b4f1a14c2688c82459ca329830 | 34599596e145555fde0d4264a1d222f951f49051 | /pcat2py/class/215f5b12-5cc5-11e4-af55-00155d01fe08.py | 5f97ccd66399240e21dbbe12dc4b66d9264c974b | [
"MIT"
] | permissive | phnomcobra/PCAT2PY | dc2fcbee142ce442e53da08476bfe4e68619346d | 937c3b365cdc5ac69b78f59070be0a21bdb53db0 | refs/heads/master | 2021-01-11T02:23:30.669168 | 2018-02-13T17:04:03 | 2018-02-13T17:04:03 | 70,970,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,522 | py | #!/usr/bin/python
################################################################################
# 215f5b12-5cc5-11e4-af55-00155d01fe08
#
# Justin Dierking
# justindierking@hardbitsolutions.com
# phnomcobra@gmail.com
#
# 10/24/2014 Original Construction
################################################################################
class Finding:
    """PCAT finding: the Office 15.0 Outlook policy 'DisableOfficeOnline'
    must be set to 1 under HKCU."""

    def __init__(self):
        self.output = []
        self.is_compliant = False
        self.uuid = "215f5b12-5cc5-11e4-af55-00155d01fe08"

    def check(self, cli):
        """Read the policy DWORD via *cli*; compliant iff it equals 1."""
        key = r'HKCU:\Software\Policies\Microsoft\Office\15.0\outlook\options\pubcal'
        dword = cli.get_reg_dword(key, 'DisableOfficeOnline')

        # Keep a human-readable trace of what was inspected.
        self.output = [key, ('DisableOfficeOnline=' + str(dword))]

        self.is_compliant = dword == 1
        return self.is_compliant

    def fix(self, cli):
        """Create the policy key path and force DisableOfficeOnline to 1."""
        cli.powershell(r"New-Item -path 'HKCU:\Software\Policies\Microsoft\Office\15.0\outlook'")
        cli.powershell(r"New-Item -path 'HKCU:\Software\Policies\Microsoft\Office\15.0\outlook\options'")
        cli.powershell(r"New-Item -path 'HKCU:\Software\Policies\Microsoft\Office\15.0\outlook\options\pubcal'")
        cli.powershell(r"Set-ItemProperty -path 'HKCU:\Software\Policies\Microsoft\Office\15.0\outlook\options\pubcal' -name 'DisableOfficeOnline' -value 1 -Type DWord")
| [
"phnomcobra@gmail.com"
] | phnomcobra@gmail.com |
f58631458dc7d58f759d59e1985ada8b6acda70d | c5611d343da32ab98e14beaa5df296c43b39de6c | /fec/version/v6_4/F132.py | 2cf21bbc4cd3db3fe21b36a4e31de26196f89912 | [
"Unlicense"
] | permissive | h4ck3rm1k3/FEC-Field-Documentation | 97973fc9cd919cdb161a416647ae5752ef5815d9 | c2f1f36e14c67ac3656c09f801b9f595d3e9f92e | refs/heads/master | 2018-12-28T16:07:52.499054 | 2014-06-07T18:35:49 | 2014-06-07T18:35:49 | 12,922,288 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,545 | py | import fechbase
class Records(fechbase.RecordsBase):
    """Field layout for FEC form F132 records (fields numbered 1..22)."""

    def __init__(self):
        fechbase.RecordsBase.__init__(self)
        # Field numbers are consecutive strings '1'..'22', so derive them
        # from the position in this ordered name list.
        field_names = [
            'FORM TYPE',
            'FILER COMMITTEE ID NUMBER',
            'TRANSACTION ID NUMBER',
            'BACK REFERENCE TRAN ID NUMBER',
            'BACK REFERENCE SCHED NAME',
            'ENTITY TYPE',
            'CONTRIBUTOR ORGANIZATION NAME',
            'CONTRIBUTOR LAST NAME',
            'CONTRIBUTOR FIRST NAME',
            'CONTRIBUTOR MIDDLE NAME',
            'CONTRIBUTOR PREFIX',
            'CONTRIBUTOR SUFFIX',
            'CONTRIBUTOR STREET 1',
            'CONTRIBUTOR STREET 2',
            'CONTRIBUTOR CITY',
            'CONTRIBUTOR STATE',
            'CONTRIBUTOR ZIP',
            'DONATION DATE',
            'DONATION AMOUNT',
            'DONATION AGGREGATE AMOUNT',
            'MEMO CODE',
            'MEMO TEXT/DESCRIPTION',
        ]
        self.fields = [
            {'name': name, 'number': str(position)}
            for position, name in enumerate(field_names, start=1)
        ]
        self.fields_names = self.hash_names(self.fields)
| [
"jamesmikedupont@googlemail.com"
] | jamesmikedupont@googlemail.com |
78c843f2dea725cfaa78a0907790d59f17b90b98 | 131cf803a1f7b9638ab0a604d61ab2de22906014 | /tests/unit/web/test_send.py | d1f30e09ec65d49ecb4eb2ce59be3271d1f25115 | [
"Apache-2.0"
] | permissive | dimensigon/dimensigon | 757be1e61e57f7ce0a610a9531317761393eaad0 | 079d7c91a66e10f13510d89844fbadb27e005b40 | refs/heads/master | 2023-03-09T06:50:55.994738 | 2021-02-21T11:45:01 | 2021-02-21T11:45:01 | 209,486,736 | 2 | 0 | Apache-2.0 | 2021-02-26T02:59:18 | 2019-09-19T07:11:35 | Python | UTF-8 | Python | false | false | 11,860 | py | import hashlib
import os
from unittest import mock, TestCase
from flask import url_for
from dimensigon.domain.entities import Server, Software, SoftwareServerAssociation
from dimensigon.web import db, errors
from dimensigon.web.network import Response
from tests.base import ValidateResponseMixin, OneNodeMixin
class Test(ValidateResponseMixin, OneNodeMixin, TestCase):
def setUp(self):
"""Create and configure a new app instance for each test."""
# create the app with common test config
super().setUp()
self.source_path = '/software'
self.filename = 'filename.zip'
self.content = b'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
self.size = len(self.content)
self.checksum = hashlib.md5(self.content).hexdigest()
self.dest_path = '/dest_repo'
self.soft = Software(name='test_software', version=1, filename=self.filename, size=self.size,
checksum=self.checksum)
self.ssa = SoftwareServerAssociation(software=self.soft, server=self.s1, path=self.source_path)
self.node2 = Server('node2', port=5000)
db.session.add_all([self.soft, self.ssa, self.node2])
db.session.commit()
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.asyncio.run', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_software(self, mock_post, mock_send_file, mock_exists):
mock_post.return_value = Response(msg={'id': '1'}, code=200)
mock_exists.return_value = True
resp = self.client.post(url_for('api_1_0.send'),
json=dict(software_id=str(self.soft.id), dest_server_id=str(self.node2.id),
dest_path=self.dest_path),
headers=self.auth.header)
mock_post.assert_called_once()
server, view = mock_post.call_args[0]
kwargs = mock_post.call_args[1]
self.assertEqual(self.node2, db.session.merge(server))
self.assertDictEqual({'software_id': str(self.soft.id), 'num_chunks': 1,
'dest_path': self.dest_path}, kwargs['json'])
self.assertEqual(202, resp.status_code)
self.assertDictEqual({'transfer_id': '1'}, resp.get_json())
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.asyncio.run', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_foreground(self, mock_post, mock_send_file, mock_exists):
mock_post.return_value = Response(msg={'id': '1'}, code=200)
mock_exists.return_value = True
resp = self.client.post(url_for('api_1_0.send'),
json=dict(software_id=str(self.soft.id), dest_server_id=str(self.node2.id),
dest_path=self.dest_path, background=False),
headers=self.auth.header)
mock_post.assert_called_once()
server, view = mock_post.call_args[0]
kwargs = mock_post.call_args[1]
self.assertEqual(self.node2, db.session.merge(server))
self.assertDictEqual({'software_id': str(self.soft.id), 'num_chunks': 1,
'dest_path': self.dest_path}, kwargs['json'])
self.assertEqual(201, resp.status_code)
self.assertDictEqual({'transfer_id': '1'}, resp.get_json())
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.md5', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.getsize', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.asyncio.run', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_file(self, mock_post, mock_asyncio_run, mock_exists, mock_getsize, mock_md5):
mock_post.return_value = Response(msg={'id': '1'}, code=200)
mock_exists.return_value = True
mock_getsize.return_value = self.size
mock_md5.return_value = self.checksum
resp = self.client.post(url_for('api_1_0.send'),
json=dict(file=os.path.join(self.source_path, self.filename),
dest_server_id=str(self.node2.id),
dest_path=self.dest_path, background=False),
headers=self.auth.header)
mock_post.assert_called_once()
server, view = mock_post.call_args[0]
kwargs = mock_post.call_args[1]
mock_asyncio_run.assert_called_once()
self.assertEqual(self.node2, db.session.merge(server))
self.assertDictEqual({'filename': self.filename, 'num_chunks': 1, 'dest_path': self.dest_path,
'checksum': self.checksum, 'size': self.size},
kwargs['json'])
self.assertEqual(201, resp.status_code)
self.assertDictEqual({'transfer_id': '1'}, resp.get_json())
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.md5', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.getsize', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.executor.submit', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_file_background(self, mock_post, mock_executor_submit, mock_exists, mock_getsize, mock_md5):
mock_post.return_value = Response(msg={'id': '1'}, code=200)
mock_exists.return_value = True
mock_getsize.return_value = self.size
mock_md5.return_value = self.checksum
resp = self.client.post(url_for('api_1_0.send'),
json=dict(file=os.path.join(self.source_path, self.filename),
dest_server_id=str(self.node2.id),
dest_path=self.dest_path),
headers=self.auth.header)
mock_post.assert_called_once()
server, view = mock_post.call_args[0]
kwargs = mock_post.call_args[1]
mock_executor_submit.assert_called_once()
self.assertEqual(self.node2, db.session.merge(server))
self.assertDictEqual({'filename': self.filename, 'num_chunks': 1, 'dest_path': self.dest_path,
'checksum': self.checksum, 'size': self.size},
kwargs['json'])
self.assertEqual(202, resp.status_code)
self.assertDictEqual({'transfer_id': '1'}, resp.get_json())
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.async_send_file', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_NoSoftwareServer(self, mock_post, mock_send_file, mock_exists):
mock_post.return_value = Response(msg={'id': '1'}, code=200)
mock_exists.return_value = True
db.session.delete(self.ssa)
resp = self.client.post(url_for('api_1_0.send'),
json=dict(software_id=str(self.soft.id), dest_server_id=str(self.node2.id),
dest_path=self.dest_path),
headers=self.auth.header)
self.validate_error_response(resp, errors.NoSoftwareServer(str(self.soft.id)))
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.async_send_file', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_software_FileNotFound(self, mock_post, mock_send_file, mock_exists):
mock_post.return_value = Response(msg={'transfer_id': '1'}, code=200)
mock_exists.return_value = False
resp = self.client.post(url_for('api_1_0.send'),
json=dict(software_id=str(self.soft.id), dest_server_id=str(self.node2.id),
dest_path=self.dest_path),
headers=self.auth.header)
self.validate_error_response(resp, errors.FileNotFound(os.path.join(self.source_path, self.filename)))
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.md5', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.getsize', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.async_send_file', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_file_FileNotFound(self, mock_post, mock_send_file, mock_exists, mock_getsize, mock_md5):
mock_post.return_value = Response(msg={'id': '1'}, code=200)
mock_exists.return_value = False
mock_getsize.return_value = self.size
mock_md5.return_value = self.checksum
resp = self.client.post(url_for('api_1_0.send'),
json=dict(file=os.path.join(self.source_path, self.filename),
dest_server_id=str(self.node2.id),
dest_path=self.dest_path),
headers=self.auth.header)
self.validate_error_response(resp, errors.FileNotFound(os.path.join(self.source_path, self.filename)))
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.get', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.os.path.exists', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.executor.submit', autospec=True)
@mock.patch('dimensigon.web.api_1_0.urls.use_cases.ntwrk.post', autospec=True)
def test_send_software_get_transfer_data(self, mock_post, mock_send_file, mock_exists, mock_get):
mock_post.return_value = Response(msg={'id': '1'}, code=200)
mock_exists.return_value = True
mock_get.return_value = Response(msg={'transfer_id': '1', 'status': 'COMPLETED'}, code=200)
resp = self.client.post(url_for('api_1_0.send'),
json=dict(software_id=str(self.soft.id), dest_server_id=str(self.node2.id),
dest_path=self.dest_path, include_transfer_data=True),
headers=self.auth.header)
mock_get.assert_called_once()
server, view = mock_get.call_args[0]
kwargs = mock_get.call_args[1]
self.assertEqual(self.node2, db.session.merge(server))
self.assertEqual('api_1_0.transferresource', view)
self.assertDictEqual({'transfer_id': '1'}, kwargs['view_data'])
self.assertEqual(202, resp.status_code)
self.assertDictEqual({'transfer_id': '1', 'status': 'COMPLETED'}, resp.get_json())
mock_get.return_value = Response(msg={'error': {'message': 'some error content'}}, code=404)
resp = self.client.post(url_for('api_1_0.send'),
json=dict(software_id=str(self.soft.id), dest_server_id=str(self.node2.id),
dest_path=self.dest_path, include_transfer_data=True),
headers=self.auth.header)
self.validate_error_response(resp, errors.HTTPError(mock_get.return_value))
| [
"joan.prat@knowtrade.eu"
] | joan.prat@knowtrade.eu |
b8978e81877849076f0a1387b6b4004126382019 | 6b8dc095ef6e10c9ccf92e3c6402e80919d747ad | /glad/__main__.py | 4f394de5da9ab3674e0995e5bff47587ea218118 | [
"MIT"
] | permissive | caomw/glad | 5daecc0562e063240d7678ee2e5361e8562f0899 | 1fb8f8e68be000dd8b2c1634083939340ed33b06 | refs/heads/master | 2021-01-17T10:57:14.364606 | 2016-02-16T15:15:36 | 2016-02-16T15:15:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,594 | py | #!/usr/bin/env python
"""
Uses the official Khronos-XML specs to generate a
GL/GLES/EGL/GLX/WGL Loader made for your needs. Glad currently supports
the languages C, D and Volt.
"""
from collections import namedtuple
import logging
import sys
from glad.opener import URLOpener
from glad.spec import SPECS
import glad.lang
Version = namedtuple('Version', ['major', 'minor'])
logger = logging.getLogger('glad')
def main():
import os.path
import argparse
from argparse import ArgumentParser
opener = URLOpener()
def get_spec(value):
if value not in SPECS:
raise argparse.ArgumentTypeError('Unknown specification')
spec_cls = SPECS[value]
if os.path.exists(value + '.xml'):
logger.info('using local specification: \'%s.xml\'', value)
return spec_cls.from_file(value + '.xml')
logger.info('getting \'%s\' specification from SVN', value)
return spec_cls.from_svn(opener=opener)
def ext_file(value):
msg = 'Invalid extensions argument'
if os.path.exists(value):
msg = 'Invalid extensions file'
try:
with open(value, 'r') as f:
return f.read().split()
except IOError:
pass
else:
return [v.strip() for v in value.split(',') if v]
raise argparse.ArgumentTypeError(msg)
def version(value):
if value is None or len(value.strip()) == 0:
return None
v = value
if '.' not in v:
v = '{}.0'.format(v)
try:
return Version(*map(int, v.split('.')))
except ValueError:
pass
raise argparse.ArgumentTypeError('Invalid version: "{}"'.format(value))
def cmdapi(value):
try:
return dict((p[0], version(p[1])) for p in
(list(map(str.strip, e.split('='))) for e in
filter(bool, map(str.strip, value.split(',')))))
except IndexError:
pass
raise argparse.ArgumentTypeError(
'Invalid api-string: "{}"'.format(value)
)
description = __doc__
parser = ArgumentParser(description=description)
parser.add_argument('--profile', dest='profile',
choices=['core', 'compatibility'],
default='compatibility',
help='OpenGL profile (defaults to compatibility)')
parser.add_argument('--out-path', dest='out', required=True,
help='Output path for loader')
parser.add_argument('--api', dest='api', type=cmdapi,
help='API type/version pairs, like "gl=3.2,gles=", '
'no version means latest')
parser.add_argument('--generator', dest='generator', default='d',
choices=['c', 'c-debug', 'd', 'volt'], required=True,
help='Language to generate the binding for')
parser.add_argument('--extensions', dest='extensions',
default=None, type=ext_file,
help='Path to extensions file or comma separated '
'list of extensions, if missing '
'all extensions are included')
parser.add_argument('--spec', dest='spec', default='gl',
choices=['gl', 'egl', 'glx', 'wgl'],
help='Name of the spec')
parser.add_argument('--no-loader', dest='no_loader', action='store_true')
parser.add_argument('--quiet', dest='quiet', action='store_true')
ns = parser.parse_args()
if not ns.quiet:
logging.basicConfig(
format='[%(asctime)s][%(levelname)s\t][%(name)-7s\t]: %(message)s',
datefmt='%m/%d/%Y %H:%M:%S', level=logging.DEBUG
)
spec = get_spec(ns.spec)
if spec.NAME == 'gl':
spec.profile = ns.profile
api = ns.api
if api is None or len(api.keys()) == 0:
api = {spec.NAME: None}
generator_cls, loader_cls = glad.lang.get_generator(
ns.generator, spec.NAME.lower()
)
if loader_cls is None:
return parser.error('API/Spec not yet supported')
loader = loader_cls(api)
loader.disabled = ns.no_loader
logger.info('generating \'%s\' bindings', spec.NAME)
with generator_cls(ns.out, spec, api, ns.extensions, loader=loader, opener=opener) as generator:
generator.generate()
logger.info('generating \'%s\' bindings - done', spec.NAME)
if __name__ == '__main__':
main()
| [
"admin@dav1d.de"
] | admin@dav1d.de |
d851910d19a0d3466e44744e769c7836bf963a17 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /gaussiana/ch3_2020_03_09_19_03_02_961002.py | b6ec6c3e2fc6cb965c085057e28d3e3249ad4873 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | import math
def calcula_gaussiana(x,μ,σ):
a = 1 / (σ * (math.sqrt(2*math.pi)))
e = math.e**(-0.5*((x-μ/σ)**2))
f = a*e
return f | [
"you@example.com"
] | you@example.com |
7d003ce2dd96154be17d0e9c27d616d75141b708 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02664/s139946174.py | a9fab029ec7d95fbc4310c62188b5118675ab097 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 678 | py | s = input()
for i in range(len(s)):
if s[i] == '?':
if i == 0 and len(s) == 1:
s = s.replace('?','D',1)
elif i == 0 and s[1] == 'D':
s = s.replace('?','P',1)
elif i == 0 and s[1] == 'P':
s = s.replace('?','D',1)
elif i == 0 and s[1] == '?':
s = s.replace('?','D',1)
elif s[i-1] =='P':
s = s.replace('?','D',1)
elif s[i-1] =='D' and (i ==len(s)-1):
s = s.replace('?','D',1)
elif s[i-1] =='D' and (i <len(s)-1 and(s[i+1] == 'P' or s[i+1] == '?')):
s = s.replace('?','D',1)
else:
s = s.replace('?','P',1)
print(s) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
387bbbaa243d5506e9160eaeb8c0d8cae9238225 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_contributions.py | f025a05c56b781cdfac70c6760e2ec39d2810e27 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py |
from xai.brain.wordbase.nouns._contribution import _CONTRIBUTION
#calss header
class _CONTRIBUTIONS(_CONTRIBUTION, ):
def __init__(self,):
_CONTRIBUTION.__init__(self)
self.name = "CONTRIBUTIONS"
self.specie = 'nouns'
self.basic = "contribution"
self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
bbe80e141ae120828745a2e0c964fe0706b6d465 | 86ca43076bd78484a32b527308ac15ed19844d81 | /tests/configuration_tests.py | eaca320507e178bf5a30dfc5ff75d25cff363b05 | [] | no_license | simonemmott/k2_core | d393ec5685dbb80f5c9301f6f1f3b4eb17feda24 | ef102ac151b3819714aa5f02d5aab8c1235030d6 | refs/heads/master | 2020-06-10T02:08:51.107293 | 2019-06-28T22:25:58 | 2019-06-28T22:25:58 | 193,552,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | from unittest import TestCase
class ConfigurationTests(TestCase):
def test_pass(self):
self.assertTrue(True, 'Testing is not working') | [
"simon.emmott@yahoo.co.uk"
] | simon.emmott@yahoo.co.uk |
f3b8ddd933238a900cc93cfe07fca3e814be5673 | 4526ed71f39d70111c3787ec90b4932a183c452c | /2016/Pyquen_DYtoMuMu_M_30_TuneZ2_8TeV16_pythia6_cfi.py | f842b4cf0e3b2f718e982fc53fa749012b9573a1 | [] | no_license | CMS-HIN-dilepton/MCRequest | 773f414739efc529dc957a044232478b1c4f1c03 | ff49d22fde2c4a006fe7fa02d4cf53d794f91888 | refs/heads/master | 2021-05-02T12:16:51.891664 | 2020-06-20T18:35:52 | 2020-06-20T18:35:52 | 45,127,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,465 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.PythiaUEZ2Settings_cfi import *
generator = cms.EDFilter("PyquenGeneratorFilter",
comEnergy = cms.double(8160.0),
aBeamTarget = cms.double(208.0),
protonSide = cms.untracked.int32(1),
qgpInitialTemperature = cms.double(1.0), ## initial temperature of QGP; allowed range [0.2,2.0]GeV;
qgpProperTimeFormation = cms.double(0.1), ## proper time of QGP formation; allowed range [0.01,10.0]fm/c;
hadronFreezoutTemperature = cms.double(0.14),
doRadiativeEnLoss = cms.bool(True), ## if true, perform partonic radiative en loss
doCollisionalEnLoss = cms.bool(False),
qgpNumQuarkFlavor = cms.int32(0), ## number of active quark flavors in qgp; allowed values: 0,1,2,3
numQuarkFlavor = cms.int32(0), ## to be removed
doIsospin = cms.bool(True),
angularSpectrumSelector = cms.int32(0), ## angular emitted gluon spectrum :
embeddingMode = cms.bool(False),
backgroundLabel = cms.InputTag("generator"), ## ineffective in no mixing
doQuench = cms.bool(False),
bFixed = cms.double(0.0), ## fixed impact param (fm); valid only if cflag_=0
cFlag = cms.int32(0), ## centrality flag
bMin = cms.double(0.0), ## min impact param (fm); valid only if cflag_!=0
bMax = cms.double(0.0), ## max impact param (fm); valid only if cflag_!=0
pythiaPylistVerbosity = cms.untracked.int32(1),
pythiaHepMCVerbosity = cms.untracked.bool(True),
maxEventsToPrint = cms.untracked.int32(0),
PythiaParameters = cms.PSet(
pythiaUESettingsBlock,
processParameters = cms.vstring('MSEL=0 !User defined processes',
'MSUB(1)=1 !Incl Z0/gamma* production',
'MSTP(43)=3 !Both Z0 and gamma*',
'MDME(174,1)=0 !Z decay into d dbar',
'MDME(175,1)=0 !Z decay into u ubar',
'MDME(176,1)=0 !Z decay into s sbar',
'MDME(177,1)=0 !Z decay into c cbar',
'MDME(178,1)=0 !Z decay into b bbar',
'MDME(179,1)=0 !Z decay into t tbar',
'MDME(182,1)=0 !Z decay into e- e+',
'MDME(183,1)=0 !Z decay into nu_e nu_ebar',
'MDME(184,1)=1 !Z decay into mu- mu+',
'MDME(185,1)=0 !Z decay into nu_mu nu_mubar',
'MDME(186,1)=0 !Z decay into tau- tau+',
'MDME(187,1)=0 !Z decay into nu_tau nu_taubar',
'CKIN(1)=30. !Minimum sqrt(s_hat) value (=Z mass)'),
# This is a vector of ParameterSet names to be read, in this order
parameterSets = cms.vstring('pythiaUESettings',
'processParameters')
)
)
configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('$Revision: 1.1 $'),
name = cms.untracked.string('$Source: /local/reps/CMSSW/CMSSW/Configuration/GenProduction/python/HI/Pyquen_DYtoMuMu_M_30_TuneZ2_5TeV02_pythia6_cfi.py,v $'),
annotation = cms.untracked.string('PYQUEN DYmumu Mass 30 Tune Z2 at 5.023 TeV')
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"emilien.chapon@cern.ch"
] | emilien.chapon@cern.ch |
fe3b3ed217100b8e438eba5edcd8c3e95c0dfa46 | 4b2a333ddb07ba7b5dbbf382eee5851468e69635 | /sharpy/interfaces/unit_cache.py | b01665ffbbab5da46dfcfb35a3d1a203ba25c465 | [
"MIT"
] | permissive | rwill128/sharpy-sc2 | 36810c69d2563b7cc839d353e47c3c3c9cc255cb | 999a101fd2f6ecb8bccb405bf3ace276be08c112 | refs/heads/develop | 2023-01-29T11:52:55.687375 | 2020-12-06T11:26:35 | 2020-12-06T11:26:35 | 319,097,836 | 1 | 0 | MIT | 2020-12-06T18:03:31 | 2020-12-06T18:03:31 | null | UTF-8 | Python | false | false | 1,824 | py | from abc import abstractmethod, ABC
from typing import Optional, List, Union, Iterable, Dict
from sc2 import UnitTypeId
from sc2.position import Point2
from sc2.unit import Unit
from sc2.units import Units
class IUnitCache(ABC):
@property
@abstractmethod
def own_unit_cache(self) -> Dict[UnitTypeId, Units]:
pass
@property
@abstractmethod
def enemy_unit_cache(self) -> Dict[UnitTypeId, Units]:
pass
@property
@abstractmethod
def own_townhalls(self) -> Units:
"""Returns all of our own townhalls."""
pass
@property
@abstractmethod
def enemy_townhalls(self) -> Units:
"""Returns all known enemy townhalls."""
pass
@property
@abstractmethod
def enemy_workers(self) -> Units:
pass
@property
@abstractmethod
def mineral_fields(self) -> Dict[Point2, Unit]:
pass
@property
@abstractmethod
def mineral_wall(self) -> Units:
"""Returns all known mineral wall mineral field units."""
pass
@abstractmethod
def by_tag(self, tag: int) -> Optional[Unit]:
pass
@abstractmethod
def by_tags(self, tags: List[int]) -> Units:
pass
@abstractmethod
def own(self, type_id: Union[UnitTypeId, Iterable[UnitTypeId]]) -> Units:
"""Returns all own units of the specified type(s)."""
pass
@abstractmethod
def enemy(self, type_id: Union[UnitTypeId, Iterable[UnitTypeId]]) -> Units:
"""Returns all enemy units of the specified type(s)."""
pass
@abstractmethod
def own_in_range(self, position: Point2, range: Union[int, float]) -> Units:
pass
@abstractmethod
def enemy_in_range(self, position: Point2, range: Union[int, float], only_targetable=True) -> Units:
pass
| [
"aki.vanttinen@sedgestudios.com"
] | aki.vanttinen@sedgestudios.com |
4aee4411b6209081f04e28197515d86a72e8e17b | 38f15289bd03cef50f1013926a641c789fe338aa | /pythonCodeReference/PythonExamples/Examples/Example2/code1.py | 0d4beb6f70ab9daddd4947171b36dc2f8b6e80c2 | [] | no_license | Recorichardretardo/Python | e9f9a9b8291dc015fe7438329da066754d0ba965 | 5a810b9d53257e321dd15bd105100da89cef7835 | refs/heads/master | 2021-05-26T08:35:23.271610 | 2020-04-10T12:02:46 | 2020-04-10T12:02:46 | 254,060,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | import random
friends = ["Rolf","Bob","Jen"]
print("Jen" in friends)
number = random.randint(2,9)
user_input = input("Enter 'y' if you would like to play: ")
if user_input in ("y","Y"):
user_number = int(input("Guess our number: "))
if user_number == number:
print("you guessed correctly!")
else:
print("Sorry, it's wrong!") | [
"you@example.com"
] | you@example.com |
655dbe890caab2ea8cc40abe15d4af3a98428d0f | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2465/49405/292005.py | 2b387ce901b762548c373be7f86a41affd703a7c | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | a = input()
print("if a == \"%s\":\n print()\n exit()" % a) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
72e3ff38fd3cbb9042e232d08d74054b4ba37d32 | 219d7cf7cf00b778ff1a5709406c144fcf2132f3 | /Conditional Statements - Exercise/07. World Swimming Record.py | 3e853322a0847be5d79e473bc6890c3d9585ab8e | [] | no_license | SilviaKoynova/Softuni-Programming-Basics-Python | e8e175419383815c65c4e110fdb2b752d940e887 | 0dfef0850f2cb8471dfee1af89f137be4e887cb8 | refs/heads/main | 2023-07-13T00:35:09.389302 | 2021-08-27T07:43:45 | 2021-08-27T07:43:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | from math import floor
record = float(input())
distance_in_metres = float(input())
time_in_seconds = float(input())
distance_needed = distance_in_metres * time_in_seconds
slowing = floor(distance_in_metres / 15)
slowing_add = slowing * 12.5
total_time = slowing_add + distance_needed
if record > total_time:
print(f"Yes, he succeeded! The new world record is {total_time:.2f} seconds.")
else:
print(f"No, he failed! He was {total_time - record:.2f} seconds slower.") | [
"noreply@github.com"
] | SilviaKoynova.noreply@github.com |
30f772c9774da896ad9f128a42b7578e18156e0e | 7426522061b222e8d3336b18ff941bb98ff9626c | /qtoggleserver/core/api/funcs/firmware.py | d85ee25f86309a65400e2f535bc0d019072cc04b | [
"Apache-2.0"
] | permissive | DigitEgal/qtoggleserver | 82833aaeb6f0bdad5f28243f132a639f4b406001 | 54b6ac53742af9529fd349d4fc207b0dc8a38d3b | refs/heads/dev | 2023-05-07T14:49:11.273023 | 2021-04-30T20:40:08 | 2021-04-30T20:40:08 | 360,039,836 | 0 | 0 | Apache-2.0 | 2021-04-21T05:18:08 | 2021-04-21T05:13:07 | null | UTF-8 | Python | false | false | 1,739 | py |
import logging
from qtoggleserver.core import api as core_api
from qtoggleserver.core.api import schema as core_api_schema
from qtoggleserver.core.typing import GenericJSONDict
from qtoggleserver.system import fwupdate
logger = logging.getLogger(__name__)
@core_api.api_call(core_api.ACCESS_LEVEL_ADMIN)
async def get_firmware(request: core_api.APIRequest) -> GenericJSONDict:
current_version = await fwupdate.get_current_version()
status = await fwupdate.get_status()
if status == fwupdate.STATUS_IDLE:
try:
latest_version, latest_date, latest_url = await fwupdate.get_latest()
return {
'version': current_version,
'latest_version': latest_version,
'latest_date': latest_date,
'latest_url': latest_url,
'status': status
}
except Exception as e:
logger.error('get latest firmware failed: %s', e, exc_info=True)
return {
'version': current_version,
'status': status
}
else:
return {
'version': current_version,
'status': status
}
@core_api.api_call(core_api.ACCESS_LEVEL_ADMIN)
async def patch_firmware(request: core_api.APIRequest, params: GenericJSONDict) -> None:
core_api_schema.validate(params, core_api_schema.PATCH_FIRMWARE)
status = await fwupdate.get_status()
if status not in (fwupdate.STATUS_IDLE, fwupdate.STATUS_ERROR):
raise core_api.APIError(503, 'busy')
if params.get('url'):
await fwupdate.update_to_url(params['url'])
else: # Assuming params['version']
await fwupdate.update_to_version(params['version'])
| [
"ccrisan@gmail.com"
] | ccrisan@gmail.com |
5109e55b796ea28097133a4580c8e1352d2c68c2 | 751a99e39d358f0d138d582e4c1cfeb76cf58b8c | /catalogdata.py | b7686d2e945f748dfb17b273ca21d61fc0d5b50b | [] | no_license | anikcreative/guitar-catalog | 2981e62ec416f11478e74f59d2d707028385d11e | 05fed2a2341471220fefa285553a33301b6aab7d | refs/heads/master | 2021-09-10T10:03:02.187022 | 2018-03-24T08:20:39 | 2018-03-24T08:20:39 | 126,077,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,880 | py | #!/usr/bin/env python
# coding: utf-8
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from dbsetup import Base, Category, Guitar
print "\n\n------------------------\nWorking...\n\n"
print "==> Establishing database link..."
engine = create_engine('sqlite:///gtrcatalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
print " Database connection established."
print "==> Adding items to database..."
# --- Categories ---------------------------------------------------------
# Each category keeps its own gtrcatN name because the guitar sections
# below reference these objects directly.
gtrcat1 = Category(
    name="Classical Guitars",
    description="Wooden nylon-string acoustic guitars for classical concert, orchestral, and chamber music.",
    color="maroon",
)
gtrcat2 = Category(
    name="Steel-String Acoustic Guitars",
    description="Modern wooden steel-string acoustic guitars.",
    color="saddlebrown",
)
gtrcat3 = Category(
    name="Electric Guitars",
    description="Steel-string guitars with higher string action and electromagnetic pickups, for use with amps.",
    color="midnightblue",
)
gtrcat4 = Category(
    name="Electro-Acoustic Guitars",
    description="Hybrid acoustic guitars with electric pickups.",
    color="indigo",
)
gtrcat5 = Category(
    name="Twelve-String Guitars",
    description="Modern steel-string acoustic guitars with 12 strings, typically used as rhythm instruments.",
    color="darkolivegreen",
)
gtrcat6 = Category(
    name="Resonator Guitars",
    description="Steel-string guitars with a resonator plate, usually played Dobro-style, with a slide.",
    color="slategray",
)
gtrcat7 = Category(
    name="Steel Lap Guitars",
    description="Lap steel guitars (often called Hawaiian guitars) played with pedals and metal slide.",
    color="darkgreen",
)
gtrcat8 = Category(
    name="Electric Bass Guitars",
    description="Four-string electric guitars with lower range and darker timbre, typically used as a rhythm instrument.",
    color="darkslategray",
)
gtrcat9 = Category(
    name="Double-Neck Guitars",
    description="Wh-who... who even plays these things?!",
    color="deeppink",
)
session.add_all([gtrcat1, gtrcat2, gtrcat3, gtrcat4, gtrcat5,
                 gtrcat6, gtrcat7, gtrcat8, gtrcat9])
session.commit()
# --- Classical guitars (gtrcat1) ----------------------------------------
gtr_cls_1 = Guitar(
    name="Cordoba C5",
    slug="cordoba_c5",
    views=232,
    category=gtrcat1,
    description="The Cordoba C5 Classical Guitar is perfect for any aspiring classical guitarist or steel-string/electric wizard looking to take a walk on the wild nylon-string side. The solid cedar top produces an amazingly rich tone while the wide string placement, easy string tension, and low action make it a breeze to play. The C5 comes with mahogany back and sides and a Spanish cedar neck.",
)
gtr_cls_2 = Guitar(
    name="Yamaha C40",
    slug="yamaha_c40",
    views=458,
    category=gtrcat1,
    description="The Yamaha C40 PKG is an attractive package for beginners, featuring the full-size C40 nylon-string classical guitar along with a digital tuner, instructional DVD, and a gig bag for safe storage and transport. The C40 guitar features a spruce top with meranti back and sides, a nato neck with a rosewood fretboard, rosewood bridge, and a gloss finish. It is designed to serve as an affordable, yet high-quality, full-size starter guitar.",
)
gtr_cls_3 = Guitar(
    name="Lucero LC100",
    slug="lucero_lc100",
    views=91,
    category=gtrcat1,
    description="Nicely made with a laminated spruce top, mahogany body, and rosewood fingerboard and bridge. Natural gloss finish. The Lucero LC100 Classical Guitar features a laminated spruce top and mahogany back and sides. An excellent guitar for the beginner.",
)
gtr_cls_4 = Guitar(
    name="Cordoba 45MR",
    slug="cordoba_45mr",
    views=186,
    category=gtrcat1,
    description="The Cordoba 45MR CD/MR nylon-string acoustic guitar is a handmade traditional acoustic guitar featuring a solid cedar top with Madagascar rosewood back and sides, an ebony fingerboard, a Spanish neck heel joint that provides instrument strength and doesn't lose any tone. The guitar's High-gloss PU finish lets the wood's beauty shine through for all to admire.",
)
gtr_cls_5 = Guitar(
    name="Kremona Sofia",
    slug="kremona_sofia",
    views=208,
    category=gtrcat1,
    description="The Kremona Sofia is crafted from highly sustainable West African sapele, offering tonal vibrancy and balance. A red cedar top adds warmth and articulation to Sofia's lyrical timbre. Specifications include solid matte back and sides, solid gloss top, African mahogany neck, adjustable truss rod, Indian rosewood fingerboard, bridge and headstock overlay, bone nut and saddle, wood binding and rosette, gold machines with amber buttons, 650 mm scale length, 52mm neck width at nut, and Kremona Arete medium-high tension strings.",
)
session.add_all([gtr_cls_1, gtr_cls_2, gtr_cls_3, gtr_cls_4, gtr_cls_5])
session.commit()
# --- Steel-string acoustic guitars (gtrcat2) ----------------------------
gtr_acs_1 = Guitar(
    name="Taylor 114ce",
    slug="taylor_114ce",
    views=1853,
    category=gtrcat2,
    description="This cutaway acoustic/electric Grand Auditorium blends layered walnut back and sides with a solid Sitka spruce top, making it a great all-around acoustic that responds well to strumming, flatpicking and fingerpicking. Enjoy the easier playability and accurate intonation with slightly narrower 1-11/16-inch nut width that makes forming barre chords easier on the hands.",
)
gtr_acs_2 = Guitar(
    name="Martin DC-16GTE",
    slug="martin_dc16gte",
    views=1103,
    category=gtrcat2,
    description="The DC-16GTE acoustic-electric guitar features a D-14 platform and a Dreadnought cutaway body design equipped with balanced tonewoods that produce a rich acoustic tone for recording or live performance. The sapele back and sides complement the solid Sitka spruce top finished in a polished gloss.",
)
gtr_acs_3 = Guitar(
    name="Takamine P3DC",
    slug="takamine_p3dc",
    views=985,
    category=gtrcat2,
    description="The P3DC cutaway dreadnought features resonant tonewoods, elegant appointments and state-of-the-art electronics that deliver an exquisite acoustic experience onstage and off. Other premium features include a solid sapele back, mahogany neck, rosewood fingerboard with wood dot-in-dot inlays, gold tuners with amber buttons, natural satin finish, and the highly acclaimed CT4B II preamp system with three-band EQ, volume control and built-in tuner.",
)
gtr_acs_4 = Guitar(
    name="Fender CD-60S",
    slug="fender_cd60s",
    views=851,
    category=gtrcat2,
    description="Fender's redesigned CD-60S features a solid spruce top, mahogany back and sides, a mahogany neck with a comfortable easy-to-play profile, and a smooth rosewood fingerboard with rolled edges. This instrument's large dreadnought body size and classic tonewoods deliver a huge, full-bodied sound with ample projection for vocal accompaniment, solo work, and many other unplugged or live applications.",
)
gtr_acs_5 = Guitar(
    name="Fender FA-100",
    slug="fender_fa100",
    views=1550,
    category=gtrcat2,
    description="This instrument features a protective and glossy finish encompassing the body, time tested quartersawn X bracing, a compensated saddle and laminated Spruce top. This popular budget-conscious acoustic delivers much of the tone that you'd get from more expensive instruments.",
)
gtr_acs_6 = Guitar(
    name="Alvarez AF30",
    slug="alvarez_af30",
    views=344,
    category=gtrcat2,
    description="The Folk body shape of the AF30 has impressive projection for its size. Its tone is enhanced with advanced scalloped bracing and other premium components such as bone saddles and nuts and a bi-level rosewood bridge. The sound is warm, open and powerful, and both the treble and bass registers are clearly present and balanced in relation to each other.",
)
gtr_acs_7 = Guitar(
    name="Yamaha LS6",
    slug="yamaha_ls6",
    views=562,
    category=gtrcat2,
    description="The Yamaha LS6 handcrafted acoustic guitar features a grand auditorium sized body that is small and yet deep enough to deliver excellent volume that cannot normally be found in a compact body. Handcrafted with a solid Englemann Spruce top, the LS6 offers an incredible experience in total exuberance.",
)
gtr_acs_8 = Guitar(
    name="Seagull S6",
    slug="seagull_s6",
    views=705,
    category=gtrcat2,
    description="Wild cherry back and sides provide a unique tone on this S6 Original from Seagull, blending the warmth of mahogany with the crisp definition of maple. Silverleaf maple neck with a rosewood fretboard is easy on the fingers, while specially aligned machine heads make for quick, stable tuning.",
)
gtr_acs_9 = Guitar(
    name="Blueridge BR-70",
    slug="blueridge_br70",
    views=522,
    category=gtrcat2,
    description="With exquisite Santos rosewood back and sides, and a full D-41 style abalone trim, this dreadnought faithfully delivers solid, classic tone with a sharp modern look. Expertly applied white binding adorns the body, neck, and headstock. The abalone and pearl inlay work on the headstock and the back center stripe of intricate wood marquetry are touches of fine art. Aged-tone finish completes the look. The slim mahogany neck and traditional dreadnaught body make it a truly enjoyable guitar to play.",
)
session.add_all([gtr_acs_1, gtr_acs_2, gtr_acs_3, gtr_acs_4, gtr_acs_5,
                 gtr_acs_6, gtr_acs_7, gtr_acs_8, gtr_acs_9])
session.commit()
# --- Electric guitars (gtrcat3) -----------------------------------------
gtr_elc_1 = Guitar(
    name="Epiphone Les Paul Standard",
    slug="epiphone_lespaulstd",
    views=3055,
    category=gtrcat3,
    description="The mahogany body of the Les Paul Standard provides superb resonance while the Alnico Classic humbucker pickups deliver loads of warmth. The set mahogany neck with slim-tapered profile and rosewood fretboard give you the familiar feel and fast action that Les Paul players love so dearly. Neck and body binding and trapezoid inlays produce the classic look seen on stages around the world for decades. The LockTone Tune-O-Matic bridge and stopbar tailpiece provide more sustain and make string changing easier.",
)
gtr_elc_2 = Guitar(
    name="Fender American Stratocaster",
    slug="fender_stratocaster",
    views=3212,
    category=gtrcat3,
    description="This latest iteration of the time-honored classic is the very essence of Strat tone and remains a beauty to see, hear and feel. Features include hand-rolled fingerboard edges, classic 50s pickups, staggered tuners, improved bridge with bent steel saddles and copper-infused high-mass block for increased resonance and sustain, tinted neck, high-gloss maple fretboard, satin neck back for smooth playability, and thin-finish undercoat that lets the body breathe and improves resonance.",
)
gtr_elc_3 = Guitar(
    name="PRS S2 Custom 24",
    slug="prs_s2custom24",
    views=1812,
    category=gtrcat3,
    description="The PRS S2 Custom 24 solidbody electric guitar's comfortable asymmetric bevel-cut double-cutaway body is built from maple-topped mahogany, so it's loaded with warmth, resonance, and copious amounts of bite and sustain. Instill your playing with incredible vintage sweetness, clarity, and extended tonal range, courtesy of the PRS S2 Custom 24's upgraded 85/15 S humbuckers. This guitar's Pattern Regular neck features an extended 24-fret rosewood fingerboard that makes high-register soloing easy.",
)
gtr_elc_4 = Guitar(
    name="Shecter Hellraiser C-1",
    slug="shecter_hellraiserc1",
    views=1255,
    category=gtrcat3,
    description="Featuring the tried-and-true combination of a mahogany body and a quilted maple top, the Hellraiser C-1 sports a big midrange with a sweet top end. All of this tonal muscle is pushed through your amp by an EMG 81TW at the bridge with an EMG 89 at the neck. The 81TW's dual-mode design gives you the classic EMG 81 sound with the addition of a single-coil sound and a fatter tone with punch and clarity.Other cool features on the Hellraiser C-1 include a 3-piece mahogany neck for awesome stability, a TonePros TOM bridge with through-body construction, and Schecter locking tuners to keep you in tune.",
)
gtr_elc_5 = Guitar(
    name="Gibson Flying V 120",
    slug="gibson_flyingv120",
    views=1240,
    category=gtrcat3,
    description="The Flying V is the original rabble-rousing rocker, way ahead of its time in the late 50s and still a major style statement today. Combining time-tested tonewoods, versatile pickups, and unparalleled craftsmanship at an unbeatable price, the Flying V 120 launches your music into the stratosphere, while making the perfect ticket to the party for collector and player alike.",
)
gtr_elc_6 = Guitar(
    name="B.C. Rich MK5 Warlock",
    slug="bcrich_mk5warlock",
    views=769,
    category=gtrcat3,
    description="This unique model is brought to bear with classic electronic layout: Twin covered high output B.C.Rich pickups, each with an individual Volume and Tone control and Master 3 way pickup selector give Mk5 Warlock a familiar feel and control while delivering pure, bewitching tones.",
)
gtr_elc_7 = Guitar(
    name="ESP LTD Elite Eclipse-1",
    slug="esp_eclipse1",
    views=941,
    category=gtrcat3,
    description="This solidbody electric guitar is features a single-cutaway body made from mahogany topped with flame maple, giving the Elite Eclipse-I the same tonal foundation as some of the most-played guitars in rock. Top-notch touches like the Gotoh Magnum Lock tuners and Gotoh TOM bridge/tailpiece provide performance you can count on day in, day out. Factor in a tone-packed pair of Seymour Duncan humbucking pickups, and you've got a ton of potential.",
)
gtr_elc_8 = Guitar(
    name="Fender Standard Telecaster",
    slug="fender_telecaster",
    views=2984,
    category=gtrcat3,
    description="The Standard Telecaster features the best of the old and the new: a fast-action gloss maple neck, cast/sealed machine heads, 2 classic single-coil pickups, and a 6-saddle string-thru-body bridge. Since its introduction in the early '50s, guitarists in all musical genres have relied on the Fender Telecaster guitar for its timeless, powerful tone and smooth playability.",
)
gtr_elc_9 = Guitar(
    name="Yamaha PAC112V",
    slug="yamaha_pac112v",
    views=2510,
    category=gtrcat3,
    description="With solid alder body and pro-level hardware and electronics, this electric guitar is well known for great tone and outstanding playability. It features a comfort-contoured body, bolt-on neck design, vintage-style vibratos, and 5-way switching of the H-S-S pickup configuration. And, to top it off, it's an amazing value in its price range.",
)
gtr_elc_10 = Guitar(
    name="Ibanez RG450DX",
    slug="ibanez_rg450dx",
    views=1012,
    category=gtrcat3,
    description="The RG450DX comes with a super-resonant, lightweight, and balanced mahogany body. This made-for-metal beast also boasts a 3-piece Wizard III neck that's ultra fast and eminently shreddable. For tonal diversity, the RG450DX slams with two humbucking pickups and a single-coil squarely in the middle. If you want an axe that's built for speed, the Ibanez RG450DX belongs in your hands and plugged into your amp.",
)
gtr_elc_11 = Guitar(
    name="Oscar Schmidt OE30",
    slug="oscarschmidt_oe30",
    views=584,
    category=gtrcat3,
    description="From volume and tone controls to the fully adjustable truss rods, this guitar as a whole is even greater than its many great parts. The dot inlay and stop tailpiece combine with a rosewood fingerboard and twin Washburn HH pickups to provide an excellent playground for processing musical ideas from your mind to your fingers to the strings.",
)
session.add_all([gtr_elc_1, gtr_elc_2, gtr_elc_3, gtr_elc_4, gtr_elc_5,
                 gtr_elc_6, gtr_elc_7, gtr_elc_8, gtr_elc_9, gtr_elc_10,
                 gtr_elc_11])
session.commit()
# --- Electro-acoustic guitars (gtrcat4) ---------------------------------
gtr_eac_1 = Guitar(
    name="Fender FA135CE",
    slug="fender_fa135ce",
    views=600,
    category=gtrcat4,
    description="With laminated spruce top, basswood back and sides, and Fishman ION-T preamp system, the Fender FA-135CE is built on the concert-style platform for a sleek, modern design. The laminated spruce top features X-bracing for bright, punchy tone, ideal for lead guitar. The neck is nato, and the back and sides are laminated basswood-both tone woods known for letting the mid and high frequencies sing out.",
)
gtr_eac_2 = Guitar(
    name="Epiphone Hummingbird Pro",
    slug="epiphone_hummingbird",
    views=712,
    category=gtrcat4,
    description="This 6-string acoustic-electric guitar is instantly recognizable, both in look and its warm sound. The Hummingbird Pro features a solid spruce top, a mahogany body, and a mahogany neck for pure tone. What's more, a distinctive hummingbird pickguard sets off the body while beautiful, split parallelogram inlays give the rosewood fingerboard an elegance all its own.",
)
gtr_eac_3 = Guitar(
    name="Kona K2",
    slug="kona_k2",
    views=513,
    category=gtrcat4,
    description="The Kona K2 Series Natural Gloss Thin Body Electric Acoustic Guitars strike the balance between the low profile electric guitar feel, and the acoustic guitar tone. This guitar is the perfect crossover instrument for the electric guitarists looking for a true, balanced acoustic sound without large body adjustment, or the smaller player that is uncomfortable with the deep dreadnought stretch.",
)
gtr_eac_4 = Guitar(
    name="Ibanez V70CE",
    slug="ibanez_v70ce",
    views=455,
    category=gtrcat4,
    description="The V70CE's high-quality electronics and select spruce top deliver articulate amplified tone. It features a rosewood fretboard, chrome tuners, mahogany neck, back, and sides. The V70CE is equipped with a soft cutaway for higher access and is a very responsive instrument with capacity for great dynamic range both acoustically and through the output.",
)
gtr_eac_5 = Guitar(
    name="Yamaha APX500III Thinline",
    slug="yamaha_apx500iii",
    views=206,
    category=gtrcat4,
    description="The APX600 is a thinline acoustic-electric guitar, with a thinner profile that fits up close to your body, so it's easy and comfortable to play. It features a spruce top, a nato body and neck, and rosewood fingerboard and bridge that combine to put out a superior, far-reaching tone. It comes with a built-in tuner and a System 65A preamp piezo pickup system, great for plugging in and playing coffeehouse gigs with a natural acoustic sound.",
)
session.add_all([gtr_eac_1, gtr_eac_2, gtr_eac_3, gtr_eac_4, gtr_eac_5])
session.commit()
# --- Twelve-string guitars (gtrcat5) ------------------------------------
gtr_tst_1 = Guitar(
    name="Takamine EF381SC",
    slug="takamine_ef381sc12",
    views=255,
    category=gtrcat5,
    description="The versatility of this 12-string guitar begins with its solid cedar top that can play sweet mellow passages or power chords with equal ability. The onboard CT4B II preamp system (with three-band EQ, volume control and built-in tuner), paired with the unique Palathetic under-saddle pickup, provide peerless amplified response.",
)
gtr_tst_2 = Guitar(
    name="Martin D12x1ae",
    slug="martin_d12x1ae",
    views=382,
    category=gtrcat5,
    description="The Martin D12X1AE acoustic-electric 12-string guitar adds a modern flair to a classic instrument. With its onboard Fishman Sonitone electronics, this versatile 12-string guitar projects the full, robust sound that can fill the room. The D12X1AE's mahogany grained HPL on the back and sides reflects Martin's environmentally friendly mindset. You'll love the warm, classic tones you hear as you play this modern guitar.",
)
gtr_tst_3 = Guitar(
    name="Yamaha FG820-12",
    slug="yamaha_fg82012",
    views=533,
    category=gtrcat5,
    description="Yamaha's 12 string acoustic model, with simple and traditional looks and outstanding quality, at an affordable price. It's a solid-top guitar with an authentic sound that is well balanced without sacrificing its robust strength, thanks to the newly developed scalloped bracing design. In addition to warmer and stronger sound, the body binding and fingerboard binding are cream plastic, for an upgraded look.",
)
gtr_tst_4 = Guitar(
    name="Gretsch G5422TG-12",
    slug="gretsch_g5422tg12",
    views=259,
    category=gtrcat5,
    description="Profoundly stylish, the G5422TG offers full hollow-body build and electrifying authenticity with Filter Tron humbucking pickups, versatile upgraded controls, oversized bound F holes, aged multi-ply body binding, smaller late- 50s G6120 bound headstock, Graph Tech NuBone nut, pearloid Neo-Classic thumbnail fingerboard inlays and a gold Bigsby B60 vibrato tailpiece.",
)
gtr_tst_5 = Guitar(
    name="Ibanez AS7312 Artcore",
    slug="ibanez_as7312artcore",
    views=448,
    category=gtrcat5,
    description="The AS7312 features an all-maple body, set-neck construction, and a pair of ART humbucking pickups. The Artcore's combination of quality workmanship and affordability has created legions of fans from diverse genres as blues, country, rock and jazz. Musicians can find the purity of an old school style jazz-box to a hybrid semi-hollow rocker.",
)
gtr_tst_6 = Guitar(
    name="Epiphone DR-212",
    slug="epiphone_dr212",
    views=206,
    category=gtrcat5,
    description="The DR-212 features a select spruce top with scalloped bracing and features a mahogany body and a mahogany neck to balance its strong voice with warmth. With its blend of quality woods and superb tone, Epiphone DR-212 is the perfect first 12-string guitar. And thanks to its affordable price, that signature 12-string sound doesn't have to be out of reach.",
)
session.add_all([gtr_tst_1, gtr_tst_2, gtr_tst_3, gtr_tst_4, gtr_tst_5,
                 gtr_tst_6])
session.commit()
# --- Resonator guitars (gtrcat6) ----------------------------------------
gtr_rsn_1 = Guitar(
    name="Gretsch G9200",
    slug="gretsch_g9200",
    views=156,
    category=gtrcat6,
    description="The Gretsch G9200 Roundneck Boxcar standard resonator guitar gives you a whole new tonal palette. The G9200 sports a mahogany top, body, and neck; a rosewood fingerboard; and Gretsch's hand-spun Ampli-Sonic spider cone and bridge - all working in concert to serve up authentic resonator tone. You'll find this great-sounding resonator as easy to play as any other guitar.",
)
gtr_rsn_2 = Guitar(
    name="Gold Tone PBS-D",
    slug="goldtone_pbsd",
    views=269,
    category=gtrcat6,
    description="Designed by legendary maker Paul Beard, the Gold Tone Paul Beard Square Neck Deluxe (PBS-D) signature model guitar is hand made and provides unmatched tone in its price range. The PBS-D features an ebony fingerboard, maple back and sides, and a high gloss tobacco sunburst finish.",
)
gtr_rsn_3 = Guitar(
    name="Rogue Classic Spider Resonator",
    slug="rogue_spiderres",
    views=251,
    category=gtrcat6,
    description="The Classic Spider's die-cast spider bridge and 10.5-inch spun aluminum resonator cone give it exceptional projection and volume. It's constructed with a spruce top; mahogany back, sides, and neck; and rosewood fretboard. Mother-of-pearl diamond fretboard inlays, a chromeplated bell, and brass coverplate and tailpiece give it authentic looks.",
)
gtr_rsn_4 = Guitar(
    name="Dean Resonator Heirloom",
    slug="dean_resheirloom",
    views=402,
    category=gtrcat6,
    description="The newest addition to Dean Guitars' line of resonators, the Heirloom is made of solid distressed copper or solid distressed brass with matching inlays and truss rod cover. Each Heirloom has a unique voice and a truly one-of-a-kind look and feel and unique distressed characteristics such as stains in the finish.",
)
gtr_rsn_5 = Guitar(
    name="Regal RC-55",
    slug="regal_rc55",
    views=366,
    category=gtrcat6,
    description="The new RC-50 features a brass body with an antiqued nickel finish and distinctive tone - ideal for the blues and Hawaiian music - and a volume and carrying power that has to be heard to be believed!",
)
session.add_all([gtr_rsn_1, gtr_rsn_2, gtr_rsn_3, gtr_rsn_4, gtr_rsn_5])
session.commit()
# --- Lap steel guitars (gtrcat7) ----------------------------------------
gtr_stl_1 = Guitar(
    name="Morrell JMPTB-6",
    slug="morrell_jmptb6",
    views=399,
    category=gtrcat7,
    description="The JMPTB-6 steel lap guitar is loaded with a Kent Armstrong HR1R Hot Rails hum cancelling pickup. This is a dual blade mini humbucker is designed for high gain applications with emphasis on bass and midrange frequencies. The maple body yields greater sustain and volume projection while the poplar body ads warmth to the sound.",
)
gtr_stl_2 = Guitar(
    name="Rogue RLS-1",
    slug="rogue_rls1",
    views=176,
    category=gtrcat7,
    description="Slide into some classic country, Hawaiian, and blues tones with the affordable Rogue RLS1 Lap Steel Guitar. It features a hardwood body and neck with position markers, a single-coil pickup, volume and tone controls, chrome hardware, and a stainless steel pickguard.",
)
gtr_stl_3 = Guitar(
    name="Gretsch G5700 Electromatic Lap",
    slug="gretsch_electromaticlap",
    views=279,
    category=gtrcat7,
    description="The G5700 is a value-packed proposition with a solid mahogany body for vibrant, wide ranging tone, and chrome hardware. The Gretsch single-coil pickup puts out lucidly smooth sounds with just the right amount of bite and jangle.",
)
gtr_stl_4 = Guitar(
    name="Epiphone Electar",
    slug="epiphone_electar",
    views=224,
    category=gtrcat7,
    description="The Electar is a reissue of one of the company's most popular vintage designs, with 1-piece mahogany body, Circus Tent control knobs, traditional fretboard markings, and vintage-style metal Epiphone headstock badge. You'll also love the modern conveniences built in, such as a powerful Epiphone 500B Blade humbucking pickup and inset non-slip rubber pads.",
)
gtr_stl_5 = Guitar(
    name="Imperial Royal Hawaiian Teardrop",
    slug="imperial_rhteardrop",
    views=251,
    category=gtrcat7,
    description="This limited-edition lap guitar returns with luxurious features, such as Solid Sapele Mahogany top, back and sides, Fishman Sonicore pickup with Presys preamp system, on-board tuner and notch filter, rosewood fingerboard with inlaid diamond pearl position markers.",
)
gtr_stl_6 = Guitar(
    name="Gold Tone LM Weissenborn",
    slug="goldtone_weissenborn",
    views=357,
    category=gtrcat7,
    description="The Weissenborn Hawaiian steel, a platapus among guitars to the uninitiated, is an instrument brilliantly and specifically conceived for Hawaiian playing. These hollow-neck Hawaiians are enjoying a renaissance with players nearly 60 years after the last one was made. Many session pros now routinely carry along a Weissenborn for steel or Dobro calls.",
)
session.add_all([gtr_stl_1, gtr_stl_2, gtr_stl_3, gtr_stl_4, gtr_stl_5,
                 gtr_stl_6])
session.commit()
# --- Electric bass guitars (gtrcat8) ------------------------------------
gtr_bas_1 = Guitar(
    name="Fender Standard Precision Bass",
    slug="fender_pbass",
    views=2328,
    category=gtrcat8,
    description="Combining traditional design with contemporary features, the Standard Precision Bass is an elegant and affordable classic designed for the bassist who appreciates great style, rich and muscular tone, and excellent value. Time-honored Fender style and performance-minded modern upgrades don't have to break the bank, and this model delivers the best of both in a design ideal for Precision Bass players everywhere at every level.",
)
gtr_bas_2 = Guitar(
    name="ESP F-104",
    slug="esp_f104",
    views=1844,
    category=gtrcat8,
    description="The ESP F-104 bass not only looks nasty, its 35-inch scale and ESP-designed SB-4 pickups with active EQ lend themselves especially well to crushing, de-tuned sounds. Tune to D or C with heavy strings or string up B-E-A-D for brutal low end. Has a wildly sculpted agathis body, bolt-on maple neck, rosewood fingerboard, dot fretboard inlays with the model name at 12th fret, 24 extra-jumbo frets, and chrome hardware.",
)
gtr_bas_3 = Guitar(
    name="Washburn Taurus T24",
    slug="washburn_t24",
    views=1045,
    category=gtrcat8,
    description="The Washburn T24 bass is completely pro in every way, crafted using fine tonewoods, advanced construction techniques, and high-quality components. The multi-laminate neck-thru construction features a mahogany body, maple/mahogany neck, rosewood fingerboard with offset dot inlays, custom JJ pickups, and Grover bass tuners.",
)
gtr_bas_4 = Guitar(
    name="Yamaha TRBX305",
    slug="yamaha_trbx305",
    views=985,
    category=gtrcat8,
    description="The TRBX305's perfectly balanced, ultra-comfortable solid mahogany body with a fast, ultra-comfortable 5-piece maple and mahogany neck and rosewood fingerboard provides the optimum tonal foundation while the Performance EQ active circuitry gives instant access to perfectly dialed-in stage-ready tones coupled with the expressive control you need.",
)
gtr_bas_5 = Guitar(
    name="Ibanez SR400QM",
    slug="ibanez_sr400qm",
    views=996,
    category=gtrcat8,
    description="With its thin five-piece SR4 maple/rosewood neck, the Ibanez SR400QM electric bass stands proudly in the wide line up of the SR family. The lightweight, balanced comfort-contoured mahogany body helps make playing easy and comfortable, while the CAP EXF-N2 pickups provide well-balanced tone from each string. The gorgeous quilted maple top adds a killer look to a bass that does it all.",
)
gtr_bas_6 = Guitar(
    name="Epiphone Thunderbird-IV",
    slug="epiphone_thunderbird",
    views=805,
    category=gtrcat8,
    description="The Thunderbird IV was one of the most radical designs to come out of the Gibson and Epiphone Kalamazoo factory in the early 60s, thanks to legendary automotive designer Ray Dietrich, who was asked to put a new twist on solidbody guitars and basses. The sound of the Thunderbird IV was as cutting edge as its design and now the Thunderbird Classic-IV PRO returns with all of Epiphone's first-class quality and a lifetime guarantee, but without the hassles of owning (or hiding) a vintage instrument.",
)
gtr_bas_7 = Guitar(
    name="Squier Jaguar Bass V",
    slug="squier_jaguarv",
    views=664,
    category=gtrcat8,
    description="Squier's most versatile Jaguar bass model is even more versatile now, with the extended range of the Vintage Modified Jaguar Bass V Special five-string model. You get all the sharp looks, fantastic tone and great features of its four-string brother, now with the addition of an earth-shaking low B string.",
)
gtr_bas_8 = Guitar(
    name="Epiphone EB-3",
    slug="epiphone_eb3",
    views=2041,
    category=gtrcat8,
    description="The EB-3 Bass quickly became one of the most attractive and distinctive basses in rock and players like Bill Wyman of The Rolling Stones, Cream's Jack Bruce, and The Who's John Entwistle made rock history on SG-style basses. This latest iteration of a legendary mainstay is a stunning recreation of the vintage marvel with all the tone and feel of the original without the vintage price tag and vintage problems.",
)
gtr_bas_9 = Guitar(
    name="Rogue LX200BF",
    slug="rogue_lx200bf",
    views=1225,
    category=gtrcat8,
    description="The LX200BF fretless bass guitar features an extended maple neck, rosewood fingerboard, covered traditional-style split and single-coil pickups, 2 volume and 2 tone controls, die-cast machine heads, and black hardware. Rogue priced the 4-string LX200BF bass to make it easy to add a fretless to your arsenal.",
)
session.add_all([gtr_bas_1, gtr_bas_2, gtr_bas_3, gtr_bas_4, gtr_bas_5,
                 gtr_bas_6, gtr_bas_7, gtr_bas_8, gtr_bas_9])
session.commit()
# --- Double-neck guitars (gtrcat9) --------------------------------------
gtr_dbl_1 = Guitar(
    name="Epiphone LE G-1275 Custom",
    slug="epiphone_g1275",
    views=36,
    category=gtrcat9,
    description="The G-1275 is based on the vintage original 1235 Doubleneck that was first produced at the legendary Gibson/Epiphone Kalamazoo, Michigan factory in 1963. It quickly became one of the most sought-after and original guitars in rock. The 6-string and 12-string each have their own Epiphone all-metal 3-way toggle switch to select each guitar's pickups. There is also a second master all-metal 3-way toggle switch located between the 6-string and 12-string bridge.",
)
gtr_dbl_2 = Guitar(
    name="Ovation CSE225-RRB",
    slug="ovation_cse225rrb",
    views=102,
    category=gtrcat9,
    description="With its specially designed Super-Shallow composite body-a favorite of stage performers-Ovation's Celebrity double neck combines 6- and 12-string guitars into a single, easy-to-play instrument. With a figured maple top and matched, lightweight bracing that's designed to enhance punch and projection, this guitar covers all the bases. Ovation's pioneering multi-sound hole design enhances string vibration and sustain by improving soundboard efficiency. Both slim necks are easy to play, and twin cutaways insure easy access to the either fret board.",
)
gtr_dbl_3 = Guitar(
    name="Dean Gran Sport DBL WBS",
    slug="dean_gsdblwbs",
    views=87,
    category=gtrcat9,
    description="This guitar sports a more traditional form that's easily recognizable with its double-cutaway mahogany body and set mahogany C-shape necks. The rosewood fingerboards have pearl GS inlays and single-ply neck bindings. Chrome hardware includes Grover tuners and Tune-O-Matic bridges. Sound is what counts and the GS DBL is equipped with DMT Series pickups wired to a 3-way switch and dual master Volume and Tone controls.",
)
gtr_dbl_4 = Guitar(
    name="Zenison Double-Neck Electric",
    slug="zenison_dblneck",
    views=75,
    category=gtrcat9,
    description="The Zenison double-neck guitar features custom-painted neck and headstock, pearloid white pickguard, and 22-fret necks with a fixed bridge on the 12-string neck and a vintage-style tremolo on the six-string neck. Unique, affordable option for anyone looking for that double-neck look, feel, and sound without having to break the bank.",
)
session.add_all([gtr_dbl_1, gtr_dbl_2, gtr_dbl_3, gtr_dbl_4])
session.commit()
print "    All items were added successfully!"
# Release the session's connection back to the pool before exiting.
session.close()
print "==> Database connection terminated; exiting application...\n\n"
| [
"vagrant@vagrant.vm"
] | vagrant@vagrant.vm |
8810778d919db3e759f97b0b6e2e03e245363908 | 8fd4a35c61532f98e4e3888b1ca013ca3f7a2072 | /tests/test_configdict.py | 1f4e317e1ddb7aed6ebb704a3231691b9ea1c8c5 | [
"MIT"
] | permissive | menchant/bio96 | c785e0e2b1634f0f9df8645266139e62a4e2b3f0 | 97db6f7ae7b8f247c08ade1021c2906f71fdf1a5 | refs/heads/master | 2022-03-30T09:12:30.901537 | 2019-12-09T20:32:05 | 2019-12-09T20:32:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,157 | py | #!/usr/bin/env python3
from bio96 import *
def test_empty():
    """An empty input dict yields an empty section for every attribute."""
    config = configdict({})
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == {}
def test_user():
    """Unrecognized top-level keys land in the ``user`` section only."""
    config = configdict({'x': 1})
    filled = {'user': {'x': 1}}
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == filled.get(section, {})
def test_meta():
    """A 'meta' table populates the meta section; other keys go to user."""
    config = configdict({'x': 1, 'meta': {'y': 2}})
    filled = {'meta': {'y': 2}, 'user': {'x': 1}}
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == filled.get(section, {})
def test_rows():
    """A 'row' table populates only the rows section (plus user)."""
    config = configdict({'x': 1, 'row': {'y': 2}})
    filled = {'rows': {'y': 2}, 'user': {'x': 1}}
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == filled.get(section, {})
def test_irows():
    """An 'irow' table populates only the irows section (plus user)."""
    config = configdict({'x': 1, 'irow': {'y': 2}})
    filled = {'irows': {'y': 2}, 'user': {'x': 1}}
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == filled.get(section, {})
def test_cols():
    """A 'col' table populates only the cols section (plus user)."""
    config = configdict({'x': 1, 'col': {'y': 2}})
    filled = {'cols': {'y': 2}, 'user': {'x': 1}}
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == filled.get(section, {})
def test_icols():
    """An 'icol' table populates only the icols section (plus user)."""
    config = configdict({'x': 1, 'icol': {'y': 2}})
    filled = {'icols': {'y': 2}, 'user': {'x': 1}}
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == filled.get(section, {})
def test_wells():
    """A 'well' table populates only the wells section (plus user)."""
    config = configdict({'x': 1, 'well': {'y': 2}})
    filled = {'wells': {'y': 2}, 'user': {'x': 1}}
    for section in ('meta', 'rows', 'irows', 'cols', 'icols', 'wells', 'user'):
        assert getattr(config, section) == filled.get(section, {})
def test_getattr():
    """Mutating the dict returned by each attribute is visible on re-read."""
    config = configdict({})
    sections = ('meta', 'rows', 'irows', 'cols', 'icols', 'wells')
    for value, section in enumerate(sections, start=1):
        getattr(config, section)['x'] = value
        assert getattr(config, section) == {'x': value}
def test_setattr():
    """Assigning an attribute writes through to the matching config key."""
    config = configdict({})
    pairs = [('meta', 'meta'), ('rows', 'row'), ('irows', 'irow'),
             ('cols', 'col'), ('icols', 'icol'), ('wells', 'well')]
    for value, (attr, key) in enumerate(pairs, start=1):
        setattr(config, attr, {'x': value})
        assert config[key]['x'] == value
| [
"kale@thekunderts.net"
] | kale@thekunderts.net |
51c9734e2bb76d57a89179adee9869b3f01dc271 | acf7457d3a799cb9bff12686d2d616688bcd4b5b | /packages/python/plotly/plotly/validators/image/legendgrouptitle/font/_size.py | 0703f95a46c6524e5ae4e1e051274c52c66992be | [
"MIT"
] | permissive | plotly/plotly.py | f4f61639f08160f16195efc95b5901dc5a937346 | 975a704074f01c078e0fdfa32bdf17130bf89e69 | refs/heads/master | 2023-09-06T06:15:08.340035 | 2023-08-24T12:28:14 | 2023-08-24T12:28:14 | 14,579,099 | 14,751 | 2,989 | MIT | 2023-09-08T19:55:32 | 2013-11-21T05:53:08 | Python | UTF-8 | Python | false | false | 461 | py | import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for ``image.legendgrouptitle.font.size``."""

    def __init__(
        self, plotly_name="size", parent_name="image.legendgrouptitle.font", **kwargs
    ):
        # Pull overridable defaults out of kwargs before delegating.
        edit_type = kwargs.pop("edit_type", "style")
        minimum = kwargs.pop("min", 1)
        super(SizeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            min=minimum,
            **kwargs,
        )
| [
"nicolas@plot.ly"
] | nicolas@plot.ly |
adaf7e6ef72f64e19e496a1b8f02b930b702595a | ae6189642a07fd789f51caadb924328a54919cac | /abc154/b.py | ecd4dfa9aa2e83c93f514ac89647c165b4f3743c | [] | no_license | d-matsui/atcorder | 201e32403653b2fdf0d42188faf095eb8b793b86 | 22ec1af8206827e10a986cb24cf12acc52ab1d6a | refs/heads/master | 2020-09-27T23:15:27.281877 | 2020-09-01T13:24:34 | 2020-09-01T13:24:34 | 226,632,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | #!/usr/bin/env python3
S = str(input())
len_S = len(S)
for i in range(len_S):
print('x', end="")
| [
"mti.daiki@gmail.com"
] | mti.daiki@gmail.com |
ebc358fe1d3d3c0c6443b2922f596ffa70817af4 | 1f44c056f79b0b8f2f32cdf417c80efe1913ed77 | /ABC60C.py | 0d759ee33dd5d4caef3293fe2a219e0c801f9d8f | [] | no_license | saki-engineering/PyAtCoder | 4cc88d2a43991a5202cd71b48be0e936fb32137e | b008e86523c6d500beec53344172311872d50ff4 | refs/heads/master | 2020-09-15T08:39:17.698396 | 2020-04-24T05:55:02 | 2020-04-24T05:55:02 | 223,397,683 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 324 | py | #coding: utf-8
import math
import heapq
import bisect
import numpy as np
from collections import Counter, deque
#from scipy.misc import comb
# Total sprinkler running time: each gap between consecutive activations
# contributes at most T seconds, and the last activation always runs for T.
N, T = map(int, input().split())
t = list(map(int, input().split()))
ans = T
for prev, cur in zip(t, t[1:]):
    ans += min(cur - prev, T)
print(ans)
"sakiharu31415@yahoo.co.jp"
] | sakiharu31415@yahoo.co.jp |
28e05c285d256dc10661cfb3388091b923e9de6d | 7d45be20297033536b16a139a017bcec0e8880dc | /13.py | db9a4c6a5a31b3d4c73ad3543b8f6f9b8a9ecd56 | [] | no_license | BhagyashreeKarale/dichackathon | f6e199e65eaddaaa5f1594c444c469d9359e94c3 | 3aba9caaf26f96f70f8b5315358a3dfcf5264da3 | refs/heads/main | 2023-08-10T11:18:31.101163 | 2021-09-11T18:56:17 | 2021-09-11T18:56:17 | 405,456,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | # Q16.Write a Python program to map two lists into a dictionary.
l1=[1,2,3,4,5,6,7]
l2=["riya","ankita","rahul","priya","anshika","rose","aarti"]
dic={}
for i in range(len(l1)):
dic[(l1[i])]=l2[i]
print(dic)
#using zip function
dic=dict(zip(l1,l2))
print(dic) | [
"noreply@github.com"
] | BhagyashreeKarale.noreply@github.com |
37c6d17dfd9ccc0f2d0c1c121cb6154050ccd519 | 22251b1ada3681204a1f4e75ce6f42ca94b9b939 | /api/migrations/0001_initial.py | 9fdce3d738c81ba66205e2a797fe1cf73bde3d12 | [] | no_license | vipin-s0106/Basic_Django_Angular_RestAPI | 755dc79216642306205aad2cdb2f47f310407437 | ce0afb721c8b04fc932a076b0509e36583d3728c | refs/heads/master | 2022-04-14T16:48:57.779977 | 2020-04-12T11:57:43 | 2020-04-12T11:57:43 | 255,039,751 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 581 | py | # Generated by Django 3.0.4 on 2020-04-11 10:28
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Movie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('movie_name', models.CharField(max_length=150)),
('caption_image', models.FileField(blank=True, null=True, upload_to='')),
],
),
]
| [
"vipin.s0106@gmail.com"
] | vipin.s0106@gmail.com |
2612ca728d330306f8e47ea5564a10249735f8b0 | b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1 | /tensorflow/python/estimator/canned/dnn_linear_combined.py | 103503382500fff9b56683b255782ae3d189f6e1 | [
"Apache-2.0"
] | permissive | uve/tensorflow | e48cb29f39ed24ee27e81afd1687960682e1fbef | e08079463bf43e5963acc41da1f57e95603f8080 | refs/heads/master | 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 | Apache-2.0 | 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null | UTF-8 | Python | false | false | 1,383 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""dnn_linear_combined python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_estimator.python.estimator.canned import dnn_linear_combined
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
# Re-export every attribute of the tensorflow_estimator implementation
# (public and single-underscore names; only dunder names are excluded)
# so this module acts as a thin compatibility alias.
dnn_linear_combined.__all__ = [
    s for s in dir(dnn_linear_combined) if not s.startswith('__')
]
from tensorflow_estimator.python.estimator.canned.dnn_linear_combined import *
| [
"v-grniki@microsoft.com"
] | v-grniki@microsoft.com |
2529cc61a7d88aa268a9a9af8cf9b21c8ad0bc54 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_118/2411.py | a9582de00a37f3252cf64204b04fe4ab4faf23f1 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | #!/usr/bin/python
# Open the Code Jam input file; the first line is the number of test cases.
f = open("C-small-attempt0.in", "r")
T = int(f.readline())
def palindrome(num):
    """Return True when the decimal digits of ``num`` read the same reversed."""
    digits = str(num)
    return digits == digits[::-1]
# Count "fair and square" numbers in [lb, ub]: palindromes that are the
# square of an integer palindrome.  (Python 2 script: print statement below.)
for t in range(1, T+1):
    lb, ub = map(int, f.readline().strip().split())
    c = 0
    for i in range(lb, ub+1):
        if palindrome(i):
            # NOTE(review): int(i**0.5) can be off by one for very large i
            # due to float rounding — fine for the "small" dataset, but
            # verify (e.g. adjust root by +/-1) before reuse on large inputs.
            root = int(i**0.5)
            if root*root == i and palindrome(root):
                c += 1
    print "Case #{0}: {1}".format(t, c)
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
a0219b69915ddc78aefd7ec3b24915e0dbaa14fd | 08a415f16ce22a6d351181aebaa402753a41588a | /antgo/automl/suggestion/searchspace/branch.py | d7a6a4a2e4a82844cfa5bbf4c18b2d2e8879a65c | [] | no_license | Hislocked/antgo | a054c4cdaab00bf71ca6130de9e9f8b0be049d01 | 77cbd07cfecea48359361fa258139a2de57e1647 | refs/heads/master | 2020-04-19T10:26:11.363330 | 2019-01-28T10:04:12 | 2019-01-28T10:04:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45,249 | py | # -*- coding: UTF-8 -*-
# @Time : 2019/1/11 1:47 PM
# @File : branch.py
# @Author : jian<jian@mltalker.com>
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from antgo.automl.basestublayers import *
import random
class Branch(StubLayer):
  """Base class for all search-space branches (named groups of stub layers)."""

  def __init__(self, input=None, output=None, **kwargs):
    super(Branch, self).__init__(input, output, **kwargs)
    # Optional human-readable name identifying this branch in the search space.
    self.branch_name = kwargs.get('branch_name', '')
class DummyNode(object):
  """Minimal stand-in for a graph node, carrying only a shape and an id."""

  def __init__(self, shape, id=-1):
    self.id = id
    self.shape = shape
class ConvBnBranch(Branch):
  """Branch applying 1x1 Conv -> BatchNorm -> ReLU to reach `output_channel`.

  Occupies the [0, 0.1) band of the layer-type encoding.
  """

  def __init__(self, output_channel, input=None, output=None, **kwargs):
    super(ConvBnBranch, self).__init__(input, output, **kwargs)
    self.layer_name = 'convbn_branch'
    self.output_channel = output_channel
    self.layer_1 = BaseStubConv2d(None,
                                  self.output_channel,
                                  1,
                                  1,
                                  cell_name=self.cell_name,
                                  block_name=self.block_name,
                                  group=self)
    self.layer_2 = BaseStubBatchNormalization2d(cell_name=self.cell_name,
                                                block_name=self.block_name,
                                                group=self)
    self.layer_3 = BaseStubReLU(cell_name=self.cell_name,
                                block_name=self.block_name,
                                group=self)

  def _propagate_shapes(self):
    # Wire the current `self.input` through the stub chain so that
    # `output_shape` and `flops` reflect the latest input shape.  Shared
    # helper replacing the wiring that was duplicated in both accessors.
    self.layer_1.input = self.input
    self.layer_2.input = DummyNode(self.layer_1.output_shape)
    self.layer_3.input = DummyNode(self.layer_2.output_shape)

  @property
  def output_shape(self):
    self._propagate_shapes()
    return self.layer_3.output_shape

  def flops(self):
    self._propagate_shapes()
    return self.layer_1.flops() + self.layer_2.flops() + self.layer_3.flops()

  def __call__(self, *args, **kwargs):
    # Apply conv -> bn -> relu to the incoming tensor.
    tensor = self.layer_1(*args, **kwargs)
    tensor = self.layer_2(tensor)
    return self.layer_3(tensor)

  @property
  def layer_type_encoder(self):
    # Fixed mid-point of this branch type's [0, 0.1) encoding band.
    return 0.05
class SeperableConvBranch(Branch):
  """Branch applying 3x3 atrous separable conv -> BatchNorm -> ReLU.

  The dilation rates are sampled independently per axis from
  {1, 3, 6, 9, 12, 15, 18, 21} unless supplied via kwargs.  Occupies the
  [0.1, 0.2) band of the layer-type encoding.
  """

  def __init__(self, output_channel, input=None, output=None, **kwargs):
    super(SeperableConvBranch, self).__init__(input, output, **kwargs)
    self.layer_name = 'seperableconv_branch'
    if 'rate_h' not in kwargs:
      # Sample the per-axis dilation rates and remember their indices
      # (the indices are what layer_type_encoder encodes).
      rate_list = [1, 3, 6, 9, 12, 15, 18, 21]
      self.rate_h_index = random.randint(0, len(rate_list) - 1)
      self.rate_h = rate_list[self.rate_h_index]
      self.rate_w_index = random.randint(0, len(rate_list) - 1)
      self.rate_w = rate_list[self.rate_w_index]
    else:
      self.rate_h = kwargs['rate_h']
      self.rate_w = kwargs['rate_w']
      self.rate_h_index = kwargs['rate_h_index']
      self.rate_w_index = kwargs['rate_w_index']

    self.output_channel = output_channel
    self.layer_1 = BaseStubSeparableConv2d(input_channel=None,
                                           filters=self.output_channel,
                                           kernel_size_h=3,
                                           kernel_size_w=3,
                                           rate_h=self.rate_h,
                                           rate_w=self.rate_w,
                                           cell_name=self.cell_name,
                                           block_name=self.block_name,
                                           group=self)
    self.layer_2 = BaseStubBatchNormalization2d(cell_name=self.cell_name,
                                                block_name=self.block_name,
                                                group=self)
    self.layer_3 = BaseStubReLU(cell_name=self.cell_name,
                                block_name=self.block_name,
                                group=self)

  def _propagate_shapes(self):
    # Shared wiring for output_shape/flops (previously duplicated).
    self.layer_1.input = self.input
    self.layer_2.input = DummyNode(self.layer_1.output_shape)
    self.layer_3.input = DummyNode(self.layer_2.output_shape)

  @property
  def output_shape(self):
    self._propagate_shapes()
    return self.layer_3.output_shape

  def flops(self):
    self._propagate_shapes()
    return self.layer_1.flops() + self.layer_2.flops() + self.layer_3.flops()

  def __call__(self, *args, **kwargs):
    # Apply separable conv -> bn -> relu to the incoming tensor.
    tensor = self.layer_1(*args, **kwargs)
    tensor = self.layer_2(tensor)
    return self.layer_3(tensor)

  @property
  def layer_type_encoder(self):
    # Encode the (rate_h, rate_w) index pair into the [0.1, 0.2) band:
    # 8 choices per axis, so the pair index is at most 63 and 63/720 < 0.1.
    return (self.rate_h_index * 8 + self.rate_w_index) / 720.0 + 0.1
class SPPBranch(Branch):
  """Spatial-pyramid-pooling branch with a (possibly sampled) pooling grid.

  Occupies the [0.2, 0.3) band of the layer-type encoding.
  """

  def __init__(self, input=None, output=None, **kwargs):
    super(SPPBranch, self).__init__(input, output, **kwargs)
    self.layer_name = 'spp_branch'
    if 'grid_h' not in kwargs:
      # Sample the pooling grid per axis.  NOTE(review): the original
      # carried commented-out code filtering the candidates by the input's
      # min(h, w); re-enable that if very small feature maps appear.
      gh = [1, 2, 4, 8]
      self.grid_h_index = random.randint(0, len(gh) - 1)
      self.grid_h = gh[self.grid_h_index]
      gw = [1, 2, 4, 8]
      self.grid_w_index = random.randint(0, len(gw) - 1)
      self.grid_w = gw[self.grid_w_index]
    else:
      self.grid_h = kwargs['grid_h']
      self.grid_w = kwargs['grid_w']
      self.grid_h_index = kwargs['grid_h_index']
      self.grid_w_index = kwargs['grid_w_index']

    self.layer_1 = BaseStubSPP(grid_h=self.grid_h,
                               grid_w=self.grid_w,
                               cell_name=self.cell_name,
                               block_name=self.block_name,
                               group=self)

  @property
  def output_shape(self):
    self.layer_1.input = self.input
    return self.layer_1.output_shape

  def flops(self):
    self.layer_1.input = self.input
    return self.layer_1.flops()

  def __call__(self, *args, **kwargs):
    # The SPP stub reads its input node explicitly, so bind it first.
    self.layer_1.input = self.input
    return self.layer_1(*args, **kwargs)

  @property
  def layer_type_encoder(self):
    # Encode the (grid_h, grid_w) index pair into the [0.2, 0.3) band:
    # 4 choices per axis, pair index at most 15 and 15/200 < 0.1.
    return (self.grid_h_index * 4 + self.grid_w_index) / 200.0 + 0.2
class FocusBranch(Branch):
  """Three stacked 3x3 atrous separable convolutions with residual fusion.

  The three dilation rates are a sorted random sample of size 3 from
  {1, 2, 4, 6, 8} (or supplied via ``rate_list``).  Stage outputs are fused
  with element-wise additions.  Occupies the (0.3, 0.4] band of the
  layer-type encoding.
  """

  def __init__(self, output_channel, input=None, output=None, **kwargs):
    super(FocusBranch, self).__init__(input, output, **kwargs)
    self.layer_name = 'focus_branch'
    self.output_channel = output_channel
    if 'rate_list' not in kwargs:
      candidate_rate_list = [1, 2, 4, 6, 8]
      self.rate_list = sorted(random.sample(candidate_rate_list, 3))
    else:
      self.rate_list = kwargs['rate_list']

    # Stage 1: separable conv at the smallest rate.
    self.group_1_conv = BaseStubSeparableConv2d(input_channel=None,
                                                filters=self.output_channel,
                                                kernel_size_h=3,
                                                kernel_size_w=3,
                                                rate_h=self.rate_list[0],
                                                rate_w=self.rate_list[0],
                                                cell_name=self.cell_name,
                                                block_name=self.block_name,
                                                group=self)
    self.group_1_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_1_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)

    # Stage 2: separable conv at the middle rate.
    self.group_2_conv = BaseStubSeparableConv2d(input_channel=None,
                                                filters=self.output_channel,
                                                kernel_size_h=3,
                                                kernel_size_w=3,
                                                rate_h=self.rate_list[1],
                                                rate_w=self.rate_list[1],
                                                cell_name=self.cell_name,
                                                block_name=self.block_name,
                                                group=self)
    self.group_2_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_2_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_12_add = BaseStubAdd(group=self)

    # Stage 3: separable conv at the largest rate.
    self.group_3_conv = BaseStubSeparableConv2d(input_channel=None,
                                                filters=self.output_channel,
                                                kernel_size_h=3,
                                                kernel_size_w=3,
                                                rate_h=self.rate_list[2],
                                                rate_w=self.rate_list[2],
                                                cell_name=self.cell_name,
                                                block_name=self.block_name,
                                                group=self)
    self.group_3_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_3_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)

    # Final fusion adds: (stage1 + stage2) + stage3.
    self.group_4_12_add = BaseStubAdd(group=self)
    self.group_4_123_add = BaseStubAdd(group=self)

  @property
  def output_shape(self):
    # Spatial size is preserved throughout; only channels change.
    return (self.input.shape[0], self.input.shape[1], self.input.shape[2], self.output_channel)

  def flops(self):
    # Wire the whole graph from the current input, then sum per-layer flops.
    self.group_1_conv.input = self.input
    self.group_1_bn.input = DummyNode(self.group_1_conv.output_shape)
    self.group_1_relu.input = DummyNode(self.group_1_bn.output_shape)

    self.group_2_conv.input = DummyNode(self.group_1_relu.output_shape)
    self.group_2_bn.input = DummyNode(self.group_2_conv.output_shape)
    self.group_2_relu.input = DummyNode(self.group_2_bn.output_shape)
    self.group_12_add.input = [DummyNode(self.group_1_relu.output_shape), DummyNode(self.group_2_relu.output_shape)]

    self.group_3_conv.input = DummyNode(self.group_12_add.output_shape)
    self.group_3_bn.input = DummyNode(self.group_3_conv.output_shape)
    self.group_3_relu.input = DummyNode(self.group_3_bn.output_shape)

    self.group_4_12_add.input = [DummyNode(self.group_1_relu.output_shape), DummyNode(self.group_2_relu.output_shape)]
    self.group_4_123_add.input = [DummyNode(self.group_3_relu.output_shape), DummyNode(self.group_4_12_add.output_shape)]

    return self.group_1_conv.flops() + \
           self.group_1_bn.flops() + \
           self.group_1_relu.flops() + \
           self.group_2_conv.flops() + \
           self.group_2_bn.flops() + \
           self.group_2_relu.flops() + \
           self.group_12_add.flops() + \
           self.group_3_conv.flops() + \
           self.group_3_bn.flops() + \
           self.group_3_relu.flops() + \
           self.group_4_12_add.flops() + \
           self.group_4_123_add.flops()

  def __call__(self, *args, **kwargs):
    # Stage 1 on the raw input.
    stage_1 = self.group_1_relu(self.group_1_bn(self.group_1_conv(*args, **kwargs)))
    # Stage 2 on stage 1's output.
    stage_2 = self.group_2_relu(self.group_2_bn(self.group_2_conv(stage_1)))
    fused_12 = self.group_12_add(*[[stage_1, stage_2]])
    # Stage 3 on the fused stages 1+2.
    stage_3 = self.group_3_relu(self.group_3_bn(self.group_3_conv(fused_12)))
    # Dense fusion: (stage1 + stage2) + stage3.
    sum_12 = self.group_4_12_add(*[[stage_1, stage_2]])
    return self.group_4_123_add(*[[sum_12, stage_3]])

  @property
  def layer_type_encoder(self):
    # Encode the chosen rates by their positions in the candidate rate list
    # (base-5 digits; max value 69/690 -> upper bound 0.4).
    # BUGFIX: the original looked the rates up in `self.rate_list` itself,
    # which always yielded indices 0, 1, 2 and made the encoding constant
    # for every FocusBranch instance.
    candidate_rate_list = [1, 2, 4, 6, 8]
    indices = [candidate_rate_list.index(rate) if rate in candidate_rate_list else -1
               for rate in self.rate_list]
    a_i, b_i, c_i = indices
    return (a_i * 25 + b_i * 5 + c_i) / 690.0 + 0.3
class SEBranch(Branch):
  """Squeeze-and-excite branch: global pooling -> bottleneck -> channel gate.

  Occupies the [0.4, 0.5) band of the layer-type encoding (one value per
  candidate squeeze width).
  """

  def __init__(self, input=None, output=None, **kwargs):
    super(SEBranch, self).__init__(input, output, **kwargs)
    self.layer_name = 'se_branch'
    if 'squeeze_channels' not in kwargs:
      # Bottleneck width of the squeeze step, sampled when not supplied.
      candidate_squeeze_channels = [4, 8, 16]
      self.squeeze_channels = random.choice(candidate_squeeze_channels)
    else:
      self.squeeze_channels = kwargs['squeeze_channels']

    # Global average pool; the kernel size is bound to the input H/W lazily
    # (in _propagate_shapes / __call__) because the input is not known yet.
    self.group_1 = BaseStubAvgPooling2d(kernel_size_h=None, kernel_size_w=None, group=self)
    self.group_1_conv = BaseStubConv2d(input_channel=None,
                                       filters=self.squeeze_channels,
                                       kernel_size_h=1,
                                       kernel_size_w=1,
                                       rate_h=1,
                                       rate_w=1,
                                       cell_name=self.cell_name,
                                       block_name=self.block_name,
                                       group=self)
    self.group_1_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_1_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
    # Excite conv; `filters` is bound lazily to the input's channel count.
    self.group_2_conv = BaseStubConv2d(input_channel=None,
                                       filters=None,
                                       kernel_size_h=1,
                                       kernel_size_w=1,
                                       rate_h=1,
                                       rate_w=1,
                                       cell_name=self.cell_name,
                                       block_name=self.block_name,
                                       group=self)
    self.group_2_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_3_sigmoid = BaseStubSigmoid(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_4_multiply = BaseStubDot(cell_name=self.cell_name, block_name=self.block_name, group=self)

  def _propagate_shapes(self):
    # Shared wiring for output_shape/flops (previously duplicated): global
    # average pool squeezes to 1x1, two 1x1 convs form the squeeze/excite
    # bottleneck, and the sigmoid gate is multiplied back onto the input.
    self.group_1.input = self.input
    self.group_1.kernel_size_h = self.input.shape[1]
    self.group_1.kernel_size_w = self.input.shape[2]
    self.group_1_conv.input = DummyNode(self.group_1.output_shape)
    self.group_1_bn.input = DummyNode(self.group_1_conv.output_shape)
    self.group_1_relu.input = DummyNode(self.group_1_bn.output_shape)
    self.group_2_conv.input = DummyNode(self.group_1_relu.output_shape)
    self.group_2_conv.filters = self.input.shape[-1]
    self.group_2_bn.input = DummyNode(self.group_2_conv.output_shape)
    self.group_3_sigmoid.input = DummyNode(self.group_2_bn.output_shape)
    self.group_4_multiply.input = [self.input, DummyNode(self.group_3_sigmoid.output_shape)]

  @property
  def output_shape(self):
    self._propagate_shapes()
    return self.group_4_multiply.output_shape

  def flops(self):
    self._propagate_shapes()
    return self.group_1.flops() + \
           self.group_1_conv.flops() + \
           self.group_1_bn.flops() + \
           self.group_1_relu.flops() + \
           self.group_2_conv.flops() + \
           self.group_2_bn.flops() + \
           self.group_3_sigmoid.flops() + \
           self.group_4_multiply.flops()

  def __call__(self, *args, **kwargs):
    # Bind the lazy parameters from the current input, then run the chain.
    self.group_1.kernel_size_h = self.input.shape[1]
    self.group_1.kernel_size_w = self.input.shape[2]
    squeezed = self.group_1(*args, **kwargs)
    bottleneck = self.group_1_relu(self.group_1_bn(self.group_1_conv(squeezed)))
    self.group_2_conv.filters = self.input.shape[-1]
    gate = self.group_3_sigmoid(self.group_2_bn(self.group_2_conv(bottleneck)))
    # Scale the original input channel-wise by the learned gate.
    return self.group_4_multiply(*[[gate, args[0]]], **kwargs)

  @property
  def layer_type_encoder(self):
    # One code point per candidate squeeze width, inside [0.4, 0.5).
    if self.squeeze_channels == 4:
      return 0.43
    elif self.squeeze_channels == 8:
      return 0.46
    else:
      return 0.49
class RegionSEBranch(Branch):
  def __init__(self, input=None, output=None, **kwargs):
    """Region-wise squeeze-and-excite branch.

    Like SEBranch, but pools over ``region_size`` x ``region_size`` regions
    instead of globally, producing a spatial gate that is bilinearly resized
    back to the input resolution before the multiply.
    """
    super(RegionSEBranch, self).__init__(input, output, **kwargs)
    self.layer_name = 'regionse_branch'
    if 'squeeze_channels' not in kwargs:
      # Bottleneck width of the squeeze step, sampled when not supplied.
      candidate_squeeze_channels = [4, 8, 16]
      self.squeeze_channels = random.choice(candidate_squeeze_channels)
    else:
      self.squeeze_channels = kwargs['squeeze_channels']

    if 'region_size' not in kwargs:
      # Side length of the average-pooled regions, sampled when not supplied.
      candidate_region_sizes = [2, 4, 6, 8]
      self.region_size = random.choice(candidate_region_sizes)
    else:
      self.region_size = kwargs['region_size']

    # Region pooling followed by a 3x3 squeeze/excite bottleneck.
    self.group_1 = BaseStubAvgPooling2d(kernel_size_h=self.region_size, kernel_size_w=self.region_size, group=self)
    self.group_1_conv = BaseStubConv2d(input_channel=None,
                                       filters=self.squeeze_channels,
                                       kernel_size_h=3,
                                       kernel_size_w=3,
                                       cell_name=self.cell_name,
                                       block_name=self.block_name,
                                       group=self)
    self.group_1_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_1_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
    # Excite conv; `filters` is bound lazily to the input's channel count.
    self.group_2_conv = BaseStubConv2d(input_channel=None,
                                       filters=None,
                                       kernel_size_h=3,
                                       kernel_size_w=3,
                                       cell_name=self.cell_name,
                                       block_name=self.block_name,
                                       group=self)
    self.group_2_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
    self.group_3_sigmoid = BaseStubSigmoid(cell_name=self.cell_name, block_name=self.block_name, group=self)
    # Gate is resized back to the input resolution (height/width bound lazily).
    self.group_resize = BaseStubBilinearResize(height=None, width=None, group=self)
    self.group_4_multiply = BaseStubDot(cell_name=self.cell_name, block_name=self.block_name, group=self)
  @property
  def output_shape(self):
    # Wire shapes through pool -> squeeze conv -> excite conv -> sigmoid,
    # resize the gate back to the input resolution, then multiply with input.
    self.group_1.input = self.input
    self.group_1_conv.input = DummyNode(self.group_1.output_shape)
    self.group_1_bn.input = DummyNode(self.group_1_conv.output_shape)
    self.group_1_relu.input = DummyNode(self.group_1_bn.output_shape)
    self.group_2_conv.input = DummyNode(self.group_1_relu.output_shape)
    self.group_2_conv.filters = self.input.shape[-1]
    self.group_2_bn.input = DummyNode(self.group_2_conv.output_shape)
    self.group_3_sigmoid.input = DummyNode(self.group_2_bn.output_shape)
    self.group_resize.input = DummyNode(self.group_3_sigmoid.output_shape)
    self.group_resize.height = self.input.shape[1]
    self.group_resize.width = self.input.shape[2]
    self.group_4_multiply.input = [self.input, DummyNode(self.group_resize.output_shape)]
    return self.group_4_multiply.output_shape
  def flops(self):
    # Same wiring as output_shape (kept in sync manually), then sum the
    # per-layer flop estimates of every stub in the branch.
    self.group_1.input = self.input
    self.group_1_conv.input = DummyNode(self.group_1.output_shape)
    self.group_1_bn.input = DummyNode(self.group_1_conv.output_shape)
    self.group_1_relu.input = DummyNode(self.group_1_bn.output_shape)
    self.group_2_conv.input = DummyNode(self.group_1_relu.output_shape)
    self.group_2_conv.filters = self.input.shape[-1]
    self.group_2_bn.input = DummyNode(self.group_2_conv.output_shape)
    self.group_3_sigmoid.input = DummyNode(self.group_2_bn.output_shape)
    self.group_resize.input = DummyNode(self.group_3_sigmoid.output_shape)
    self.group_resize.height = self.input.shape[1]
    self.group_resize.width = self.input.shape[2]
    self.group_4_multiply.input = [self.input, DummyNode(self.group_resize.output_shape)]
    return self.group_1.flops() + \
           self.group_1_conv.flops() + \
           self.group_1_bn.flops() + \
           self.group_1_relu.flops() + \
           self.group_2_conv.flops() + \
           self.group_2_bn.flops() + \
           self.group_3_sigmoid.flops() + \
           self.group_resize.flops() + \
           self.group_4_multiply.flops()
def __call__(self, *args, **kwargs):
# group_1_layer_c = self.layer_factory.avg_pool2d(kernel_size_h=self.region_size, kernel_size_w=self.region_size)
# group_1_layer = group_1_layer_c(*args, **kwargs)
#
# group_1_conv_c = self.layer_factory.conv2d(None,
# filters=self.squeeze_channels,
# kernel_size_h=3,
# kernel_size_w=3,
# cell_name=self.cell_name,
# block_name=self.block_name
# )
# group_1_conv = group_1_conv_c(group_1_layer)
#
# group_1_bn_c = self.layer_factory.bn2d(cell_name=self.cell_name, block_name=self.block_name)
# group_1_bn = group_1_bn_c(group_1_conv)
#
# group_1_relu_c = self.layer_factory.relu(cell_name=self.cell_name, block_name=self.block_name)
# group_1_relu = group_1_relu_c(group_1_bn)
#
# group_2_conv_c = self.layer_factory.conv2d(None,
# filters=self.input.shape[-1],
# kernel_size_h=3,
# kernel_size_w=3,
# cell_name=self.cell_name,
# block_name=self.block_name)
# group_2_conv = group_2_conv_c(group_1_relu)
#
# group_2_bn_c = self.layer_factory.bn2d(cell_name=self.cell_name, block_name=self.block_name)
# group_2_bn = group_2_bn_c(group_2_conv)
#
# group_3_sigmoid_c = self.layer_factory.sigmoid(cell_name=self.cell_name, block_name=self.block_name)
# group_3_sigmoid = group_3_sigmoid_c(group_2_bn)
#
# group_resize_c = self.layer_factory.bilinear_resize(height=self.input.shape[1], width=self.input.shape[2])
# group_resize = group_resize_c(group_3_sigmoid)
#
# group_4_multiply_c = self.layer_factory.dot(cell_name=self.cell_name, block_name=self.block_name)
# group_4_multiply = group_4_multiply_c(*[[group_resize, args[0]]], **kwargs)
group_1_tensor = self.group_1(*args, **kwargs)
group_1_conv_tensor = self.group_1_conv(group_1_tensor)
group_1_bn_tensor = self.group_1_bn(group_1_conv_tensor)
group_1_relu_tensor = self.group_1_relu(group_1_bn_tensor)
self.group_2_conv.filters = self.input.shape[-1]
group_2_conv_tensor = self.group_2_conv(group_1_relu_tensor)
group_2_bn_tensor = self.group_2_bn(group_2_conv_tensor)
group_3_sigmoid_tensor = self.group_3_sigmoid(group_2_bn_tensor)
self.group_resize.height = self.input.shape[1]
self.group_resize.width = self.input.shape[2]
group_resize_tensor = self.group_resize(group_3_sigmoid_tensor)
group_4_multiply_tensor = self.group_4_multiply(*[[group_resize_tensor, args[0]]], **kwargs)
return group_4_multiply_tensor
@property
def layer_type_encoder(self):
# 0.4 ~ 0.5
# region_size: 2, 4, 6, 8; squeeze_channels: 4, 8, 16
sc_i = -1
for i, s in enumerate([4,8,16]):
if self.squeeze_channels == s:
sc_i = i
rs_i = -1
for j,r in enumerate([2,4,6,8]):
if self.region_size == r:
rs_i = j
return (sc_i * 4 + rs_i) / 160.0 + 0.4
class ResBranch(Branch):
def __init__(self, output_channel, input=None, output=None, **kwargs):
super(ResBranch, self).__init__(input, output, **kwargs)
self.layer_name = 'res_branch'
self.output_channel = output_channel
self.group_0_short_cut = None
self.group_1_conv = BaseStubConv2d(None,
self.output_channel,
3,
3,
cell_name=self.cell_name,
block_name=self.block_name,
group=self)
self.group_1_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
self.group_1_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
self.group_2_conv = BaseStubConv2d(None,
self.output_channel,
3,
3,
cell_name=self.cell_name,
block_name=self.block_name,
group=self)
self.group_2_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name, group=self)
self.group_3 = BaseStubAdd(cell_name=self.cell_name, block_name=self.block_name, group=self)
self.group_4 = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
@property
def output_shape(self):
return (self.input.shape[0], self.input.shape[1], self.input.shape[2], self.output_channel)
def flops(self):
self.group_0_short_cut = None
if self.input.shape[-1] != self.output_channel:
self.group_0_short_cut = BaseStubConv2d(None, self.output_channel, 1, 1, cell_name=self.cell_name, block_name=self.block_name)
self.group_0_short_cut.input = self.input
self.group_1_conv.input = self.input
self.group_1_bn.input = DummyNode(self.group_1_conv.output_shape)
self.group_1_relu.input = DummyNode(self.group_1_bn.output_shape)
self.group_2_conv.input = DummyNode(self.group_1_relu.output_shape)
self.group_2_bn.input = DummyNode(self.group_2_conv.output_shape)
if self.input.shape[-1] != self.output_channel:
self.group_3.input = [DummyNode(self.group_0_short_cut.output_shape), DummyNode(self.group_2_bn.output_shape)]
else:
self.group_3.input = [self.input, DummyNode(self.group_2_bn.output_shape)]
self.group_4.input = DummyNode(self.group_3.output_shape)
total_flops = self.group_1_conv.flops() + \
self.group_1_bn.flops() +\
self.group_1_relu.flops() +\
self.group_2_conv.flops() +\
self.group_2_bn.flops() + \
self.group_3.flops() + \
self.group_4.flops()
if self.group_0_short_cut is not None:
total_flops += self.group_0_short_cut.flops()
return total_flops
def __call__(self, *args, **kwargs):
group_0_short_cut = None
if args[0].shape[-1] != self.output_channel:
group_0_short_cut_c = self.layer_factory.conv2d(None, self.output_channel, 1, 1, cell_name=self.cell_name, block_name=self.block_name)
group_0_short_cut = group_0_short_cut_c(*args, **kwargs)
group_1_conv_tensor = self.group_1_conv(*args, **kwargs)
group_1_bn_tensor = self.group_1_bn(group_1_conv_tensor)
group_1_relu_tensor = self.group_1_relu(group_1_bn_tensor)
group_2_conv_tensor = self.group_2_conv(group_1_relu_tensor)
group_2_bn_tensor = self.group_2_bn(group_2_conv_tensor)
group_3_tensor = None
if group_0_short_cut is None:
group_3_tensor = self.group_3(*[[group_2_bn_tensor, args[0]]], **kwargs)
else:
group_3_tensor = self.group_3(*[[group_2_bn_tensor, group_0_short_cut]], **kwargs)
group_4_tensor = self.group_4(group_3_tensor)
return group_4_tensor
@property
def layer_type_encoder(self):
# 0.5 ~ 0.6
return 0.55
class BottleNeckResBranch(Branch):
def __init__(self, output_channel, input=None, output=None, **kwargs):
super(BottleNeckResBranch, self).__init__(input, output, **kwargs)
self.layer_name = 'bottleneck_res_branch'
self.output_channel = output_channel
self.candidate_bottleneck = [8, 16, 32, 64]
if 'bottleneck' not in kwargs:
self.bottleneck = random.choice(self.candidate_bottleneck)
else:
self.bottleneck = kwargs['bottleneck']
self.group_0_short_cut = None
self.group_1_conv = BaseStubConv2d(None,
self.bottleneck,
1,
1,
cell_name=self.cell_name,
block_name=self.block_name,
group=self)
self.group_1_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name,group=self)
self.group_1_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
self.group_2_conv = BaseStubConv2d(None,
self.bottleneck,
3,
3,
cell_name=self.cell_name,
block_name=self.block_name,
group=self)
self.group_2_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name,group=self)
self.group_2_relu = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
self.group_3_conv = BaseStubConv2d(None,
self.output_channel,
1,
1,
cell_name=self.cell_name,
block_name=self.block_name,
group=self)
self.group_3_bn = BaseStubBatchNormalization2d(cell_name=self.cell_name, block_name=self.block_name,group=self)
self.group_4 = BaseStubAdd(cell_name=self.cell_name, block_name=self.block_name, group=self)
self.group_5 = BaseStubReLU(cell_name=self.cell_name, block_name=self.block_name, group=self)
@property
def output_shape(self):
return (self.input.shape[0], self.input.shape[1], self.input.shape[2], self.output_channel)
def flops(self):
self.group_0_short_cut = None
if self.input.shape[-1] != self.output_channel:
self.group_0_short_cut = BaseStubConv2d(None,
self.output_channel,
1,
1,
cell_name=self.cell_name,
block_name=self.block_name)
self.group_0_short_cut.input = self.input
self.group_1_conv.input = self.input
self.group_1_bn.input = DummyNode(self.group_1_conv.output_shape)
self.group_1_relu.input = DummyNode(self.group_1_bn.output_shape)
self.group_2_conv.input = DummyNode(self.group_1_relu.output_shape)
self.group_2_bn.input = DummyNode(self.group_2_conv.output_shape)
self.group_2_relu.input = DummyNode(self.group_2_bn.output_shape)
self.group_3_conv.input = DummyNode(self.group_2_relu.output_shape)
self.group_3_bn.input = DummyNode(self.group_3_conv.output_shape)
if self.group_0_short_cut is not None:
self.group_4.input = [DummyNode(self.group_0_short_cut.output_shape), DummyNode(self.group_3_bn.output_shape)]
else:
self.group_4.input = [self.input, DummyNode(self.group_3_bn.output_shape)]
self.group_5.input = DummyNode(self.group_4.output_shape)
total_flops = self.group_1_conv.flops() + \
self.group_1_bn.flops() + \
self.group_1_relu.flops() + \
self.group_2_conv.flops() + \
self.group_2_bn.flops() + \
self.group_2_relu.flops() + \
self.group_3_conv.flops() + \
self.group_3_bn.flops() +\
self.group_4.flops() + \
self.group_5.flops()
if self.group_0_short_cut is not None:
total_flops += self.group_0_short_cut.flops()
return total_flops
def __call__(self, *args, **kwargs):
group_0_short_cut = None
if args[0].shape[-1] != self.output_channel:
group_0_short_cut_c = self.layer_factory.conv2d(None,
self.output_channel,
1,
1,
cell_name=self.cell_name,
block_name=self.block_name)
group_0_short_cut = group_0_short_cut_c(*args, **kwargs)
group_1_conv_tensor = self.group_1_conv(*args, **kwargs)
group_1_bn_tensor = self.group_1_bn(group_1_conv_tensor)
group_1_relu_tensor = self.group_1_relu(group_1_bn_tensor)
group_2_conv_tensor = self.group_2_conv(group_1_relu_tensor)
group_2_bn_tensor = self.group_2_bn(group_2_conv_tensor)
group_2_relu_tensor = self.group_2_relu(group_2_bn_tensor)
group_3_conv_tensor = self.group_3_conv(group_2_relu_tensor)
group_3_bn_tensor = self.group_3_bn(group_3_conv_tensor)
group_4_tensor = None
if group_0_short_cut is None:
group_4_tensor = self.group_4(*[[group_3_bn_tensor, args[0]]], **kwargs)
else:
group_4_tensor = self.group_4(*[[group_3_bn_tensor, group_0_short_cut]], **kwargs)
group_5_tensor = self.group_5(group_4_tensor)
return group_5_tensor
@property
def layer_type_encoder(self):
# 0.5 ~ 0.6
mi = -1
for i, m in enumerate(self.candidate_bottleneck):
if self.bottleneck == m:
mi = i
return 0.5 + 0.1 / len(self.candidate_bottleneck) * mi
class PoolBranch(Branch):
def __init__(self, input=None, output=None, **kwargs):
super(PoolBranch, self).__init__(input, output, **kwargs)
# spatial pyramid pooling
# shape = clone_graph.node_list[output_node_id].shape
# min_hw = min(shape[1], shape[2])
self.layer_name = 'avg_branch'
self.is_avg_pool = False
if 'is_avg_pool' in kwargs:
self.is_avg_pool = kwargs['is_avg_pool']
if self.is_avg_pool:
self.layer_1 = BaseStubAvgPooling2d(kernel_size_h=3,
kernel_size_w=3,
cell_name=self.cell_name,
block_name=self.block_name)
else:
self.layer_1 = BaseStubMaxPooling2d(kernel_size_h=3,
kernel_size_w=3,
cell_name=self.cell_name,
block_name=self.block_name)
else:
if random.random() < 0.5:
self.layer_1 = BaseStubAvgPooling2d(kernel_size_h=3,
kernel_size_w=3,
cell_name=self.cell_name,
block_name=self.block_name)
self.is_avg_pool = True
else:
self.layer_1 = BaseStubMaxPooling2d(kernel_size_h=3,
kernel_size_w=3,
cell_name=self.cell_name,
block_name=self.block_name)
self.is_avg_pool = False
@property
def output_shape(self):
self.layer_1.input = self.input
return self.layer_1.output_shape
def flops(self):
self.layer_1.input = self.input
return self.layer_1.flops()
def __call__(self, *args, **kwargs):
if self.is_avg_pool:
layer_1_c = self.layer_factory.avg_pool2d(kernel_size_h=3,
kernel_size_w=3,
cell_name=self.cell_name,
block_name=self.block_name)
layer_1_c.input = self.input
layer_1 = layer_1_c(*args, **kwargs)
return layer_1
else:
layer_1_c = self.layer_factory.max_pool2d(kernel_size_h=3,
kernel_size_w=3,
cell_name=self.cell_name,
block_name=self.block_name)
layer_1_c.input = self.input
layer_1 = layer_1_c(*args, **kwargs)
return layer_1
@property
def layer_type_encoder(self):
if self.is_avg_pool:
return 1 / 300.0
else:
return 2 / 300.0 | [
"jian.fbehind@gmail.com"
] | jian.fbehind@gmail.com |
ba4984a720c6e1306f796c62fb2630c6a574f05f | dcd3a08831759b4458a9bac4e44a7bbfac626dc1 | /python/protobufs/services/team/actions/join_team_pb2.py | 871af1cf3126313e5c2df2090091876645a639bf | [
"MIT"
] | permissive | getcircle/protobuf-registry | 433b6ad788831b34ccd86e2b42a3ec6606adc698 | 20ad8463b7ac6e2cf279c08bcd3e953993fe9153 | refs/heads/master | 2021-05-01T00:11:04.763067 | 2016-12-05T04:46:44 | 2016-12-05T04:46:44 | 27,981,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 3,602 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: protobufs/services/team/actions/join_team.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from protobufs.services.team import containers_pb2 as protobufs_dot_services_dot_team_dot_containers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='protobufs/services/team/actions/join_team.proto',
package='services.team.actions.join_team',
syntax='proto3',
serialized_pb=b'\n/protobufs/services/team/actions/join_team.proto\x12\x1fservices.team.actions.join_team\x1a(protobufs/services/team/containers.proto\"\x1c\n\tRequestV1\x12\x0f\n\x07team_id\x18\x01 \x01(\t\"D\n\nResponseV1\x12\x36\n\x06member\x18\x01 \x01(\x0b\x32&.services.team.containers.TeamMemberV1b\x06proto3'
,
dependencies=[protobufs_dot_services_dot_team_dot_containers__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_REQUESTV1 = _descriptor.Descriptor(
name='RequestV1',
full_name='services.team.actions.join_team.RequestV1',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='team_id', full_name='services.team.actions.join_team.RequestV1.team_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=126,
serialized_end=154,
)
_RESPONSEV1 = _descriptor.Descriptor(
name='ResponseV1',
full_name='services.team.actions.join_team.ResponseV1',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='member', full_name='services.team.actions.join_team.ResponseV1.member', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=156,
serialized_end=224,
)
_RESPONSEV1.fields_by_name['member'].message_type = protobufs_dot_services_dot_team_dot_containers__pb2._TEAMMEMBERV1
DESCRIPTOR.message_types_by_name['RequestV1'] = _REQUESTV1
DESCRIPTOR.message_types_by_name['ResponseV1'] = _RESPONSEV1
RequestV1 = _reflection.GeneratedProtocolMessageType('RequestV1', (_message.Message,), dict(
DESCRIPTOR = _REQUESTV1,
__module__ = 'protobufs.services.team.actions.join_team_pb2'
# @@protoc_insertion_point(class_scope:services.team.actions.join_team.RequestV1)
))
_sym_db.RegisterMessage(RequestV1)
ResponseV1 = _reflection.GeneratedProtocolMessageType('ResponseV1', (_message.Message,), dict(
DESCRIPTOR = _RESPONSEV1,
__module__ = 'protobufs.services.team.actions.join_team_pb2'
# @@protoc_insertion_point(class_scope:services.team.actions.join_team.ResponseV1)
))
_sym_db.RegisterMessage(ResponseV1)
# @@protoc_insertion_point(module_scope)
| [
"mwhahn@gmail.com"
] | mwhahn@gmail.com |
0e66bd19449a34a92382dfade04ca2dd9697c3f2 | f1a8e308c76866e2fba20401e6f1d5842dd60c46 | /Algorithms and Data Structures Practice/LeetCode Questions/Sorting/88. Merge Sorted Array.py | f673b376d6a5371cbd0f842e740c2aeb8490f3ec | [] | no_license | harman666666/Algorithms-Data-Structures-and-Design | 6e5da0c1f701e7dfc7b045ecd1209463131d3fc7 | 483f0c93faca8ccaf038b77ebe2fa712f6b0c6bc | refs/heads/master | 2021-07-14T10:11:27.588838 | 2021-07-07T01:47:42 | 2021-07-07T01:47:42 | 101,330,760 | 3 | 1 | null | 2018-10-15T04:52:07 | 2017-08-24T19:32:03 | Python | UTF-8 | Python | false | false | 2,274 | py | '''
88. Merge Sorted Array
Easy
2197
4153
Add to List
Share
Given two sorted integer arrays nums1 and nums2, merge nums2 into nums1 as one sorted array.
Note:
The number of elements initialized in nums1 and nums2 are m and n respectively.
You may assume that nums1 has enough space (size that is equal to m + n) to hold additional elements from nums2.
Example:
Input:
nums1 = [1,2,3,0,0,0], m = 3
nums2 = [2,5,6], n = 3
Output: [1,2,2,3,5,6]
Constraints:
-10^9 <= nums1[i], nums2[i] <= 10^9
nums1.length == m + n
nums2.length == n
Accepted
592,599
Submissions
1,514,076
'''
'''
MEMORIZE THE BEAUTIFUL WAY:
'''
def merge(self, nums1, m, nums2, n):
while m > 0 and n > 0:
if nums1[m-1] >= nums2[n-1]:
nums1[m+n-1] = nums1[m-1]
m -= 1
else:
nums1[m+n-1] = nums2[n-1]
n -= 1
if n > 0:
nums1[:n] = nums2[:n]
class Solution:
def merge(self, nums1: List[int], m: int, nums2: List[int], n: int) -> None:
"""
Do not return anything, modify nums1 in-place instead.
"""
# SWAP ALL THE ELEMENTS IN NUMS1 TO THE END FIRST!!!
end = len(nums1) - 1
sizeNums1 = len(nums1) - len(nums2)
swapPtr = sizeNums1 - 1
while swapPtr != -1:
nums1[swapPtr], nums1[end] = nums1[end], nums1[swapPtr]
swapPtr -= 1
end -= 1
print(nums1)
inPtr = 0
l = end + 1
r = 0
if len(nums2) == 0:
return nums1
while inPtr != len(nums1):
if r == len(nums2) and l == len(nums1):
return nums1
elif l == len(nums1):
nums2[r], nums1[inPtr] = nums1[inPtr], nums2[r]
r += 1
elif r == len(nums2):
nums1[l], nums1[inPtr] = nums1[inPtr], nums1[l]
l += 1
elif nums2[r] < nums1[l]:
nums2[r], nums1[inPtr] = nums1[inPtr], nums2[r]
r += 1
else:
nums1[l], nums1[inPtr] = nums1[inPtr], nums1[l]
l += 1
inPtr += 1
| [
"harman.j.singh@hotmail.com"
] | harman.j.singh@hotmail.com |
530dfcf02bcc8d889a76b628309b95f3fec8528f | 3a39e879fb2901207afcfc238b169ddefa104055 | /Chapter05/Docs/errbackspider/errbackspider/spiders/errback_spider.py | 9b55761a795c9081c4c163df0d07bfd92348f856 | [] | no_license | Synapses/Web_Scraping_with_Python | cb32ddd468250b9f11ad16d3576d0920693e708c | 3bb8cd47d0e1e182bb8ee800d32e24f45bf13ab0 | refs/heads/master | 2023-03-15T09:19:02.754593 | 2020-06-16T02:17:11 | 2020-06-16T02:17:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,852 | py | import scrapy
from scrapy.spidermiddlewares.httperror import HttpError
from twisted.internet.error import DNSLookupError
from twisted.internet.error import TimeoutError, TCPTimedOutError
class ErrbackSpider(scrapy.Spider):
name = "errback_example"
start_urls = [
"http://www.httpbin.org", # HTTP 200 expected
"http://www.httpbin.org/status/404", # Not found error
"http://www.httpbin.org/status/500", # server issue
"http://www.httpbin.org:12345", # non-responding host, timeout expected
"http://www.httphttpbinbin.org", # DNS error expected
]
def start_request(self):
for u in self.start_urls:
yield scrapy.Request(u, callback=self.parse_httpbin,
errback=self.errback_httpbin,
dont_filter=True)
def parse_httpbin(self, response):
self.logger.info('Got successful response from {}'.format(response.url))
# do something useful here...
def errback_httpbin(self, failure):
# Log all failures
self.logger.error(repr(failure))
# in case you want to do something special for some errors,
# you may need the failure's type:
if failure.check(HttpError):
# these exceptions come from HttpError spider middleware
# you can get the non-200 response
response = failure.value.response
self.logger.error('HttpError on %s', response.url)
elif failure.check(DNSLookupError):
# this is the original request
request = failure.request
self.logger.error('DNSLookupError on %s', request.url)
elif failure.check(TimeoutError, TCPTimedOutError):
request = failure.request
self.logger.error('TimeoutError on %s', request.url)
| [
"philip.dongfei@gmail.com"
] | philip.dongfei@gmail.com |
fcd82eee8042da29dd0e74e9c8fca7af2e9bcb0f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03015/s715773814.py | 9cd7cb639cfb7486945fee440b90b3bdbe4a21eb | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | def main():
MOD = 10 ** 9 + 7
EQ = 0 # 等しくなり得る
SMALL = 1 # 未満確定
S = map(int, input())
dp = [1, 0]
for x in S:
ndp = [0] * 2
if x == 0:
ndp[EQ] = dp[EQ] # (0,0)
ndp[SMALL] = dp[SMALL] * 3 # (0,0),(0,1),(1,0)
elif x == 1:
ndp[EQ] = dp[EQ] * 2 # (0,1),(1,0)
ndp[SMALL] = dp[EQ] + dp[SMALL] * 3 # EQ->(0,0), SMALL->(0,0),(0,1),(1,0)
*dp, = map(lambda x: x % MOD, ndp)
ans = sum(dp) % MOD # 取り忘れ
print(ans)
if __name__ == '__main__':
main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
df675a7f387998c1c4b576a51598655b2afe8cf6 | 953a94489cd1970bfde33310c5796e6e3a2477c4 | /scripts/mgear/maya/shifter/component/leg_2jnt_freeTangents_01/guide.py | 041bdd863a1c284622123e7c9cead3d8270c14b1 | [
"MIT"
] | permissive | mottosso/mgear | 10c1d65dc0554cf03ac093089deeee84141fe30a | 4ec4e15484f33aa77c04d16a93485dcf5c80df41 | refs/heads/codeclimate | 2021-01-19T19:29:08.660736 | 2017-11-13T16:56:51 | 2017-11-13T16:56:51 | 101,194,256 | 4 | 3 | null | 2017-11-13T16:56:52 | 2017-08-23T15:10:28 | Python | UTF-8 | Python | false | false | 10,675 | py | # MGEAR is under the terms of the MIT License
# Copyright (c) 2016 Jeremie Passerin, Miquel Campos
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Author: Jeremie Passerin geerem@hotmail.com www.jeremiepasserin.com
# Author: Miquel Campos hello@miquel-campos.com www.miquel-campos.com
# Date: 2016 / 10 / 10
#############################################
# GLOBAL
#############################################
from functools import partial
# mgear
from mgear.maya.shifter.component.guide import ComponentGuide
import mgear.maya.transform as tra
#Pyside
from mgear.maya.shifter.component.guide import componentMainSettings
import mgear.maya.pyqt as gqt
from maya.app.general.mayaMixin import MayaQWidgetDockableMixin
from maya.app.general.mayaMixin import MayaQDockWidget
import settingsUI as sui
QtGui, QtCore, QtWidgets, wrapInstance = gqt.qt_import()
# guide info
AUTHOR = "Jeremie Passerin, Miquel Campos"
URL = "www.jeremiepasserin.com, www.miquel-campos.com"
EMAIL = "geerem@hotmail.com, hello@miquel-campos.com"
VERSION = [1,2,0]
TYPE = "leg_2jnt_freeTangents_01"
NAME = "leg"
DESCRIPTION = "Auto UPV. 2 bones leg with stretch, roundess, ik/fk...with classic maya roll. With Knee pin and only one central tangent"
##########################################################
# CLASS
##########################################################
class Guide(ComponentGuide):
compType = TYPE
compName = NAME
description = DESCRIPTION
author = AUTHOR
url = URL
email = EMAIL
version = VERSION
# =====================================================
##
# @param self
def postInit(self):
self.save_transform = ["root", "knee", "ankle", "eff"]
# =====================================================
## Add more object to the object definition list.
# @param self
def addObjects(self):
self.root = self.addRoot()
vTemp = tra.getOffsetPosition( self.root, [0,-3,0.1])
self.knee = self.addLoc("knee", self.root, vTemp)
vTemp = tra.getOffsetPosition( self.root, [0,-6,0])
self.ankle = self.addLoc("ankle", self.knee, vTemp)
vTemp = tra.getOffsetPosition( self.root, [0,-6, .5])
self.eff = self.addLoc("eff", self.ankle, vTemp)
centers = [self.root, self.knee, self.ankle, self.eff]
self.dispcrv = self.addDispCurve("crv", centers)
# =====================================================
## Add more parameter to the parameter definition list.
# @param self
def addParameters(self):
# Default Values
self.pBlend = self.addParam("blend", "double", 1, 0, 1)
self.pIkRefArray = self.addParam("ikrefarray", "string", "")
self.pUpvRefArray = self.addParam("upvrefarray", "string", "")
self.pUpvRefArray = self.addParam("pinrefarray", "string", "")
self.pMaxStretch = self.addParam("maxstretch", "double", 1.5 , 1, None)
self.pMirrorMid = self.addParam("mirrorMid", "bool", False)
# Divisions
self.pDiv0 = self.addParam("div0", "long", 2, 1, None)
self.pDiv1 = self.addParam("div1", "long", 2, 1, None)
# FCurves
self.pSt_profile = self.addFCurveParam("st_profile", [[0,0],[.5,-1],[1,0]])
self.pSq_profile = self.addFCurveParam("sq_profile", [[0,0],[.5,1],[1,0]])
self.pUseIndex = self.addParam("useIndex", "bool", False)
self.pParentJointIndex = self.addParam("parentJointIndex", "long", -1, None, None)
##########################################################
# Setting Page
##########################################################
class settingsTab(QtWidgets.QDialog, sui.Ui_Form):
def __init__(self, parent=None):
super(settingsTab, self).__init__(parent)
self.setupUi(self)
class componentSettings(MayaQWidgetDockableMixin, componentMainSettings):
def __init__(self, parent = None):
self.toolName = TYPE
# Delete old instances of the componet settings window.
gqt.deleteInstances(self, MayaQDockWidget)
super(self.__class__, self).__init__(parent = parent)
self.settingsTab = settingsTab()
self.setup_componentSettingWindow()
self.create_componentControls()
self.populate_componentControls()
self.create_componentLayout()
self.create_componentConnections()
def setup_componentSettingWindow(self):
self.mayaMainWindow = gqt.maya_main_window()
self.setObjectName(self.toolName)
self.setWindowFlags(QtCore.Qt.Window)
self.setWindowTitle(TYPE)
self.resize(280, 780)
def create_componentControls(self):
return
def populate_componentControls(self):
"""
Populate the controls values from the custom attributes of the component.
"""
#populate tab
self.tabs.insertTab(1, self.settingsTab, "Component Settings")
#populate component settings
self.settingsTab.ikfk_slider.setValue(int(self.root.attr("blend").get()*100))
self.settingsTab.ikfk_spinBox.setValue(int(self.root.attr("blend").get()*100))
self.settingsTab.maxStretch_spinBox.setValue(self.root.attr("maxstretch").get())
self.populateCheck(self.settingsTab.mirrorMid_checkBox, "mirrorMid")
self.settingsTab.div0_spinBox.setValue(self.root.attr("div0").get())
self.settingsTab.div1_spinBox.setValue(self.root.attr("div1").get())
ikRefArrayItems = self.root.attr("ikrefarray").get().split(",")
for item in ikRefArrayItems:
self.settingsTab.ikRefArray_listWidget.addItem(item)
upvRefArrayItems = self.root.attr("upvrefarray").get().split(",")
for item in upvRefArrayItems:
self.settingsTab.upvRefArray_listWidget.addItem(item)
pinRefArrayItems = self.root.attr("pinrefarray").get().split(",")
for item in pinRefArrayItems:
self.settingsTab.pinRefArray_listWidget.addItem(item)
def create_componentLayout(self):
self.settings_layout = QtWidgets.QVBoxLayout()
self.settings_layout.addWidget(self.tabs)
self.settings_layout.addWidget(self.close_button)
self.setLayout(self.settings_layout)
def create_componentConnections(self):
self.settingsTab.ikfk_slider.valueChanged.connect(partial(self.updateSlider, self.settingsTab.ikfk_slider, "blend"))
self.settingsTab.ikfk_spinBox.valueChanged.connect(partial(self.updateSlider, self.settingsTab.ikfk_spinBox, "blend"))
self.settingsTab.maxStretch_spinBox.valueChanged.connect(partial(self.updateSpinBox, self.settingsTab.maxStretch_spinBox, "maxstretch"))
self.settingsTab.div0_spinBox.valueChanged.connect(partial(self.updateSpinBox, self.settingsTab.div0_spinBox, "div0"))
self.settingsTab.div1_spinBox.valueChanged.connect(partial(self.updateSpinBox, self.settingsTab.div1_spinBox, "div1"))
self.settingsTab.squashStretchProfile_pushButton.clicked.connect(self.setProfile)
self.settingsTab.mirrorMid_checkBox.stateChanged.connect(partial(self.updateCheck, self.settingsTab.mirrorMid_checkBox, "mirrorMid"))
self.settingsTab.ikRefArrayAdd_pushButton.clicked.connect(partial(self.addItem2listWidget, self.settingsTab.ikRefArray_listWidget, "ikrefarray"))
self.settingsTab.ikRefArrayRemove_pushButton.clicked.connect(partial(self.removeSelectedFromListWidget, self.settingsTab.ikRefArray_listWidget, "ikrefarray"))
self.settingsTab.ikRefArray_copyRef_pushButton.clicked.connect(partial(self.copyFromListWidget, self.settingsTab.upvRefArray_listWidget, self.settingsTab.ikRefArray_listWidget, "ikrefarray"))
self.settingsTab.ikRefArray_listWidget.installEventFilter(self)
self.settingsTab.upvRefArrayAdd_pushButton.clicked.connect(partial(self.addItem2listWidget, self.settingsTab.upvRefArray_listWidget, "upvrefarray"))
self.settingsTab.upvRefArrayRemove_pushButton.clicked.connect(partial(self.removeSelectedFromListWidget, self.settingsTab.upvRefArray_listWidget, "upvrefarray"))
self.settingsTab.upvRefArray_copyRef_pushButton.clicked.connect(partial(self.copyFromListWidget, self.settingsTab.ikRefArray_listWidget, self.settingsTab.upvRefArray_listWidget, "upvrefarray"))
self.settingsTab.upvRefArray_listWidget.installEventFilter(self)
self.settingsTab.pinRefArrayAdd_pushButton.clicked.connect(partial(self.addItem2listWidget, self.settingsTab.pinRefArray_listWidget, "pinrefarray"))
self.settingsTab.pinRefArrayRemove_pushButton.clicked.connect(partial(self.removeSelectedFromListWidget, self.settingsTab.pinRefArray_listWidget, "pinrefarray"))
self.settingsTab.pinRefArray_copyRef_pushButton.clicked.connect(partial(self.copyFromListWidget, self.settingsTab.ikRefArray_listWidget, self.settingsTab.pinRefArray_listWidget, "pinrefarray"))
self.settingsTab.pinRefArray_listWidget.installEventFilter(self)
def eventFilter(self, sender, event):
if event.type() == QtCore.QEvent.ChildRemoved:
if sender == self.settingsTab.ikRefArray_listWidget:
self.updateListAttr(sender, "ikrefarray")
elif sender == self.settingsTab.upvRefArray_listWidget:
self.updateListAttr(sender, "upvrefarray")
elif sender == self.settingsTab.pinRefArray_listWidget:
self.updateListAttr(sender, "pinrefarray")
return True
else:
return QtWidgets.QDialog.eventFilter(self, sender, event)
def dockCloseEventTriggered(self):
gqt.deleteInstances(self, MayaQDockWidget)
| [
"miquel.campos@gmail.com"
] | miquel.campos@gmail.com |
fa890b0f406730ad6d9f53bbbeb35a89a1eba5c9 | a25acab883494fa90cccc7255cac67251b40a21d | /specific/anchor/utils.py | 9e18add0d1f25afe548b7ec6e5abe9ed6f489f2f | [] | no_license | csliuchang/PupaDetector | cd8d85ca0cdb236dae28b82cdac144e17ce8f76f | b88dfdfd3f52e1df7cd44b5e1d7086acbe1ec046 | refs/heads/master | 2023-08-12T13:05:19.796420 | 2021-09-17T08:54:28 | 2021-09-17T08:54:28 | 397,140,426 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,914 | py | import torch
def images_to_levels(target, num_levels):
    """Regroup per-image targets into per-level targets.

    ``[target_img0, target_img1] -> [target_level0, target_level1, ...]``
    Each output entry holds, for every image, the slice of targets that
    belongs to one feature level (level sizes given by ``num_levels``).
    """
    stacked = torch.stack(target, 0)
    per_level = []
    offset = 0
    for count in num_levels:
        per_level.append(stacked[:, offset:offset + count])
        offset += count
    return per_level
def anchor_inside_flags(flat_anchors,
                        valid_flags,
                        img_shape,
                        allowed_border=0):
    """Check whether the anchors are inside the border.

    Parameters
    ----------
    flat_anchors : torch.Tensor
        Flattened anchors of shape ``(n, 4)`` as ``(x1, y1, x2, y2)``.
    valid_flags : torch.Tensor
        Pre-existing validity flags, one per anchor.
    img_shape : tuple(int)
        ``(height, width, ...)`` of the current image.
    allowed_border : int, optional
        Extra border slack permitted around the image. A negative value
        disables the border test entirely. Defaults to 0.

    Returns
    -------
    torch.Tensor
        Flags that are True only for anchors both previously valid and
        inside the (padded) image bounds.
    """
    img_h, img_w = img_shape[:2]
    if allowed_border < 0:
        # Border checking disabled: keep the incoming flags untouched.
        return valid_flags
    x1, y1, x2, y2 = (flat_anchors[:, i] for i in range(4))
    inside = (x1 >= -allowed_border) & (y1 >= -allowed_border)
    inside &= (x2 < img_w + allowed_border) & (y2 < img_h + allowed_border)
    return valid_flags & inside
def calc_region(bbox, ratio, featmap_size=None):
    """Calculate a proportional bbox region.

    The bbox center is kept fixed while the new height and width become
    ``h * ratio`` and ``w * ratio``.

    Parameters
    ----------
    bbox : Tensor
        Bbox to shrink, as ``(x1, y1, x2, y2)``.
    ratio : float
        Ratio of the output region.
    featmap_size : tuple, optional
        ``(height, width)`` used to clip the result to the feature map.

    Returns
    -------
    tuple
        ``(x1, y1, x2, y2)`` as long tensors.
    """
    inv = 1 - ratio
    x1 = torch.round(inv * bbox[0] + ratio * bbox[2]).long()
    y1 = torch.round(inv * bbox[1] + ratio * bbox[3]).long()
    x2 = torch.round(ratio * bbox[0] + inv * bbox[2]).long()
    y2 = torch.round(ratio * bbox[1] + inv * bbox[3]).long()
    if featmap_size is not None:
        h, w = featmap_size
        x1, x2 = (v.clamp(min=0, max=w) for v in (x1, x2))
        y1, y2 = (v.clamp(min=0, max=h) for v in (y1, y2))
    return (x1, y1, x2, y2)
def meshgrid(y, x):
    """Generate a 2-D mesh grid from 1-D coordinate tensors.

    Parameters
    ----------
    y : torch.Tensor
        Grid coordinates along the first (row) dimension.
    x : torch.Tensor
        Grid coordinates along the second (column) dimension.

    Returns
    -------
    tuple[torch.Tensor]
        ``(yy, xx)`` of shape ``(len(y), len(x))``; ``yy`` varies down the
        rows and ``xx`` across the columns.
    """
    rows = y.shape[0]
    cols = x.shape[0]
    xx = x.repeat(rows).view(rows, cols)
    yy = y.view(-1, 1).repeat(1, cols)
    return yy, xx
| [
"598306303@qq.com"
] | 598306303@qq.com |
f3485f6e457870ef4a389aca1ae87f74e7857980 | 7b72e319c16fa66644a29a930e46a10c943ac533 | /flopy/modflow/mfpcg.py | 2eb540900fead397aebfcd83147bd61134f82845 | [
"BSD-3-Clause"
] | permissive | kwilcox/flopy | 58e9297ee6cb4cf95de8a57a5b338f9ff1b1cc61 | 527c4ee452ea779bdebd6c1c540452d145e26943 | refs/heads/master | 2020-12-01T11:42:01.608949 | 2015-01-28T19:03:55 | 2015-01-28T19:03:55 | 28,347,156 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,967 | py | from flopy.mbase import Package
class ModflowPcg(Package):
    '''MODFLOW Preconditioned Conjugate-Gradient (PCG) solver package.

    Only programmed to work with the default values; may need work for
    other options.

    Parameters (mirroring the PCG input file)
    -----------------------------------------
    mxiter, iter1 : int
        Maximum number of outer and inner iterations.
    npcond : int
        Preconditioner selection flag.
    hclose, rclose : float
        Head-change and residual closure criteria.
    relax : float
        Relaxation parameter.
    nbpol, iprpcg, mutpcg : int
        Polynomial / printout / message-control flags.
    damp, dampt : float
        Damping factors; a negative ``damp`` signals that the separate
        transient value ``dampt`` is also written/read.
    ihcofadd : int
        Dewatered-cell head-assignment flag.
    '''
    def __init__(self, model, mxiter=50, iter1=30, npcond=1,
                 hclose=1e-5, rclose=1e-5, relax=1.0, nbpol=0, iprpcg=0, mutpcg=3,
                 damp=1.0, dampt=1.0, ihcofadd=0,
                 extension='pcg', unitnumber=27):
        Package.__init__(self, model, extension, 'PCG', unitnumber) # Call ancestor's init to set self.parent, extension, name and unit number
        self.heading = '# PCG for MODFLOW, generated by Flopy.'
        self.url = 'pcg.htm'
        self.mxiter = mxiter
        self.iter1 = iter1
        self.npcond = npcond
        self.hclose = hclose
        self.rclose = rclose
        self.relax = relax
        self.nbpol = nbpol
        self.iprpcg = iprpcg
        self.mutpcg = mutpcg
        self.damp = damp
        self.dampt = dampt
        self.ihcofadd = ihcofadd
        self.parent.add_package(self)
    def __repr__( self ):
        return 'Preconditioned conjugate gradient solver package class'
    def write_file(self):
        """Write the PCG input file, free- or fixed-format depending on the
        parent model's IFREFM setting."""
        # Open file for writing
        f_pcg = open(self.fn_path, 'w')
        f_pcg.write('%s\n' % self.heading)
        ifrfm = self.parent.get_ifrefm()
        if ifrfm:
            # Free format: whitespace-delimited values.
            f_pcg.write('{0} '.format(self.mxiter))
            f_pcg.write('{0} '.format(self.iter1))
            f_pcg.write('{0} '.format(self.npcond))
            f_pcg.write('{0} '.format(self.ihcofadd))
            f_pcg.write('\n')
            f_pcg.write('{0} '.format(self.hclose))
            f_pcg.write('{0} '.format(self.rclose))
            f_pcg.write('{0} '.format(self.relax))
            f_pcg.write('{0} '.format(self.nbpol))
            f_pcg.write('{0} '.format(self.iprpcg))
            f_pcg.write('{0} '.format(self.mutpcg))
            f_pcg.write('{0} '.format(self.damp))
            if self.damp < 0:
                # Negative damp means the transient damping factor follows.
                f_pcg.write('{0} '.format(self.dampt))
            f_pcg.write('\n')
        else:
            # Fixed format: 10-character fields.
            f_pcg.write('{0:10d}'.format(self.mxiter))
            f_pcg.write('{0:10d}'.format(self.iter1))
            f_pcg.write('{0:10d}'.format(self.npcond))
            f_pcg.write('{0:10d}'.format(self.ihcofadd))
            f_pcg.write('\n')
            f_pcg.write('{0:9.4e} '.format(self.hclose))
            f_pcg.write('{0:9.4e} '.format(self.rclose))
            f_pcg.write('{0:9.4e} '.format(self.relax))
            f_pcg.write('{0:10d}'.format(self.nbpol))
            f_pcg.write('{0:10d}'.format(self.iprpcg))
            f_pcg.write('{0:10d}'.format(self.mutpcg))
            f_pcg.write('{0:9.4e} '.format(self.damp))
            if self.damp < 0:
                f_pcg.write('{0:9.4e} '.format(self.dampt))
            f_pcg.write('\n')
        f_pcg.close()
    @staticmethod
    def load(f, model, ext_unit_dict=None):
        """Build a ModflowPcg instance from an open file object or a path.

        Fix: the original tested ``type(f) is not file`` — the ``file`` type
        only exists on Python 2, so passing a path name raised NameError on
        Python 3. Duck-typing on ``read`` behaves identically on Python 2
        and also accepts StringIO-like objects.
        """
        if not hasattr(f, 'read'):
            filename = f
            f = open(filename, 'r')
        #dataset 0 -- header
        while True:
            line = f.readline()
            if line[0] != '#':
                break
        #dataset 1
        ifrfm = model.get_ifrefm()
        if model.version != 'mf2k':
            # Fixed format is only honoured for the mf2k model version.
            ifrfm = True
        ihcofadd = 0
        dampt = 0.
        if ifrfm:
            t = line.strip().split()
            mxiter = int(t[0])
            iter1 = int(t[1])
            npcond = int(t[2])
            try:
                ihcofadd = int(t[3])
            except:
                # ihcofadd is optional; keep the default when absent/invalid.
                pass
            line = f.readline()
            t = line.strip().split()
            hclose = float(t[0])
            rclose = float(t[1])
            relax = float(t[2])
            nbpol = int(t[3])
            iprpcg = int(t[4])
            mutpcg = int(t[5])
            damp = float(t[6])
            if damp < 0.:
                dampt = float(t[7])
        else:
            # Fixed format: slice 10-character fields.
            mxiter = int(line[0:10].strip())
            iter1 = int(line[10:20].strip())
            npcond = int(line[20:30].strip())
            try:
                ihcofadd = int(line[30:40].strip())
            except:
                pass
            line = f.readline()
            hclose = float(line[0:10].strip())
            rclose = float(line[10:20].strip())
            relax = float(line[20:30].strip())
            nbpol = int(line[30:40].strip())
            iprpcg = int(line[40:50].strip())
            mutpcg = int(line[50:60].strip())
            damp = float(line[60:70].strip())
            if damp < 0.:
                dampt = float(line[70:80].strip())
        pcg = ModflowPcg(model, mxiter=mxiter, iter1=iter1, npcond=npcond, ihcofadd=ihcofadd,
                         hclose=hclose, rclose=rclose, relax=relax, nbpol=nbpol,
                         iprpcg=iprpcg, mutpcg=mutpcg, damp=damp, dampt=dampt)
        return pcg
| [
"langevin@usgs.gov"
] | langevin@usgs.gov |
15ca14dda5bc43ba7ac49e7f50ec9a4f71acc1a8 | f3b233e5053e28fa95c549017bd75a30456eb50c | /ptp1b_input/L67/67-77_wat_20Abox/set_5.py | e11d081f50b40e893e218ff492bdaf3f17ca53ce | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 739 | py | import os
# Per-lambda TI window setup: copy the production-input and PBS templates
# into each existing lambda directory and substitute the lambda value in
# place of the XXX placeholder.
base_dir = '/mnt/scratch/songlin3/run/ptp1b/L67/wat_20Abox/ti_one-step/67_77/'  # renamed from 'dir', which shadowed the builtin
filesdir = base_dir + 'files/'
temp_prodin = filesdir + 'temp_prod_5.in'
temp_pbs = filesdir + 'temp_5.pbs'
# The 12 lambda window values of the transformation.
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
    os.chdir("%6.5f" %(j))
    workdir = base_dir + "%6.5f" %(j) + '/'
    #prodin
    prodin = workdir + "%6.5f_prod_5.in" %(j)
    os.system("cp %s %s" %(temp_prodin, prodin))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
    #PBS
    pbs = workdir + "%6.5f_5.pbs" %(j)
    os.system("cp %s %s" %(temp_pbs, pbs))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
    #submit pbs
    #os.system("qsub %s" %(pbs))
    os.chdir(base_dir)
| [
"songlin3@msu.edu"
] | songlin3@msu.edu |
dc56fca9c3f0400c2c546ec88a36f50896da3a5b | e8ae11e5017507da59e2e92d423b6a1994490de4 | /env/lib/python2.7/site-packages/azure/mgmt/network/models/express_route_circuit_routes_table_summary.py | 202ba003885f50452d9a8f36be1ac4f62ebc3c8a | [] | no_license | teopeurt/ansible-ubuntu-server | 613d00cea28bc6531acf4a39aeeb9cd0baa2a391 | b5b6127d2ee9723c5088443efe2ffb8ae30cfea7 | refs/heads/master | 2021-06-28T12:49:50.935753 | 2017-07-31T17:34:33 | 2017-07-31T17:34:33 | 98,912,808 | 0 | 1 | null | 2020-07-24T00:05:31 | 2017-07-31T17:32:56 | Makefile | UTF-8 | Python | false | false | 2,276 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ExpressRouteCircuitRoutesTableSummary(Model):
    """
    The routes table associated with the ExpressRouteCircuit.

    :param neighbor: Neighbor.
    :type neighbor: str
    :param v: BGP version number spoken to the neighbor.
    :type v: int
    :param as_property: Autonomous system number.
    :type as_property: int
    :param up_down: The length of time that the BGP session has been in the
     Established state, or the current status if not in the Established state.
    :type up_down: str
    :param state_pfx_rcd: Current state of the BGP session, and the number of
     prefixes that have been received from a neighbor or peer group.
    :type state_pfx_rcd: str
    """
    # Maps each Python attribute to its wire (JSON) key and msrest type for
    # (de)serialization. 'as' is a Python keyword, hence 'as_property'.
    _attribute_map = {
        'neighbor': {'key': 'neighbor', 'type': 'str'},
        'v': {'key': 'v', 'type': 'int'},
        'as_property': {'key': 'as', 'type': 'int'},
        'up_down': {'key': 'upDown', 'type': 'str'},
        'state_pfx_rcd': {'key': 'statePfxRcd', 'type': 'str'},
    }
    def __init__(self, neighbor=None, v=None, as_property=None, up_down=None, state_pfx_rcd=None):
        # Plain value holders; serialization is driven by _attribute_map.
        self.neighbor = neighbor
        self.v = v
        self.as_property = as_property
        self.up_down = up_down
        self.state_pfx_rcd = state_pfx_rcd
| [
"me@teopeurt.com"
] | me@teopeurt.com |
4e7414a3a31a6dc0edb60e6832a9aea5fff43856 | dacb257a90310eba03f3128221120a7d54b894ba | /_templates/component_template.py | 17012640af10b7c3b7946c401e3122b013cd8f93 | [
"MIT"
] | permissive | SiChiTong/pysmartnode | 92351efa02e52aa84185a53896957c453b12540a | a0998ad6582a28fe5a0529fb15dd4f61e254d25f | refs/heads/master | 2023-01-05T10:00:14.907988 | 2020-09-01T10:07:45 | 2020-09-01T10:07:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,938 | py | # Author: Kevin Köck
# Copyright Kevin Köck 2018-2020 Released under the MIT license
# Created on 2018-06-22
"""
example config for MyComponent:
{
package: <package_path>
component: MyComponent
constructor_args: {
my_value: "hi there"
# mqtt_topic: sometopic # optional, defaults to home/<controller-id>/<component_name>/<component-count>/set
# mqtt_topic2: sometopic # optional, defautls to home/sometopic
# friendly_name: null # optional, friendly name shown in homeassistant gui with mqtt discovery
# discover: true # optional, if false no discovery message for homeassistant will be sent.
}
}
"""
__updated__ = "2020-03-29"
__version__ = "1.91"
import uasyncio as asyncio
from pysmartnode import config
from pysmartnode import logging
from pysmartnode.utils.component import ComponentBase, DISCOVERY_SWITCH
import gc
####################
# choose a component name that will be used for logging (not in leightweight_log),
# a default mqtt topic that can be changed by received or local component configuration
# as well as for the component name in homeassistant.
COMPONENT_NAME = "MyComponent"
# define the type of the component according to the homeassistant specifications
_COMPONENT_TYPE = "switch"
####################
_log = logging.getLogger(COMPONENT_NAME)
_mqtt = config.getMQTT()
gc.collect()
_unit_index = -1
# This template is for a very general component.
# It might be better to either use the templates for a specific type of
# component like a sensor or a switch.
class MyComponent(ComponentBase):
    def __init__(self, my_value,  # extend or shrink according to your sensor
                 mqtt_topic=None, mqtt_topic2=None,
                 friendly_name=None, discover=True, **kwargs):
        """Create the component and subscribe its mqtt topics.

        :param my_value: example payload stored on the instance.
        :param mqtt_topic: command topic; defaults to a device topic built
            from COMPONENT_NAME and the instance count, ending in ``/set``.
        :param mqtt_topic2: second subscribed topic, defaults to "home/sometopic".
        :param friendly_name: name shown in the homeassistant GUI.
        :param discover: if False, no homeassistant discovery message is sent.
        """
        # This makes it possible to use multiple instances of MyComponent
        # It is needed for every default value for mqtt.
        # Initialize before super()__init__(...) to not pass the wrong value.
        global _unit_index
        _unit_index += 1
        super().__init__(COMPONENT_NAME, __version__, _unit_index, discover=discover, **kwargs)
        # discover: boolean, if this component should publish its mqtt discovery.
        # This can be used to prevent combined Components from exposing underlying
        # hardware components like a power switch
        # This will generate a topic like: home/31f29s/MyComponent0/set
        self._command_topic = mqtt_topic or _mqtt.getDeviceTopic(
            "{!s}{!s}".format(COMPONENT_NAME, self._count), is_request=True)
        # These calls subscribe the topics.
        _mqtt.subscribeSync(self._command_topic, self.on_message1, self, check_retained_state=True)
        # check_retained_state will subscribe to the state topic (home/31f29s/MyComponent0)
        # first, so the original state of the device can be restored.
        # The state topic will then be unsubscribed and the requested command topic subscribed.
        _mqtt.subscribeSync(mqtt_topic2 or "home/sometopic", self.on_message2, self)
        self.my_value = my_value
        self._frn = friendly_name  # will default to unique name in discovery if None
        self._loop_task = asyncio.create_task(self._loop())
        # the component might get removed in which case it should be able to locate and stop
        # any running loops it created (otherwise the component will create Exceptions and
        # won't be able to be fully removed from RAM)
        gc.collect()
    async def _init_network(self):
        """Network-time initialization hook; this template only defers to the base."""
        await super()._init_network()
        # All _init_network methods of every component will be called after each other.
        # Therefore every _init_network of previously registered components will have
        # run when this one is running.
        # NEVER start loops here because it will block the _init_network of all other components!
        # Start a new uasyncio task in __init__() if you need additional loops.
        # This method is only used for subscribing topics, publishing discovery and logging.
        # It can be used for similar network oriented initializations.
    async def _loop(self):
        """Background task: publish "ON" to the state topic every 5 seconds."""
        # A loop should either only do network oriented tasks or only
        # non-network oriented tasks to ensure that the device works
        # even when the network is unavailable. A compromise could be
        # to use network oriented tasks with timeouts if those delays
        # aren't a problem for the device functionality.
        while True:
            await asyncio.sleep(5)
            await _mqtt.publish(self._command_topic[:-4], "ON", qos=1)  # publishing to state_topic
    async def _remove(self):
        """Will be called if the component gets removed"""
        # Cancel any loops/asyncio coroutines started by the component
        self._loop_task.cancel()
        await super()._remove()
    async def _discovery(self, register=True):
        """
        Send discovery messages
        :param register: if True send discovery message, if False send empty discovery message
        to remove the component from homeassistant.
        :return:
        """
        name = "{!s}{!s}".format(COMPONENT_NAME, self._count)
        component_topic = _mqtt.getDeviceTopic(name)
        # component topic could be something completely user defined.
        # No need to follow the pattern:
        component_topic = self._command_topic[:-4]  # get the state topic of custom component topic
        friendly_name = self._frn  # define a friendly name for the homeassistant gui.
        # Doesn't need to be unique
        if register:
            await self._publishDiscovery(_COMPONENT_TYPE, component_topic, name, DISCOVERY_SWITCH,
                                         friendly_name)
        else:
            await self._deleteDiscovery(_COMPONENT_TYPE, name)
        # free the temporaries eagerly — this template targets micropython RAM limits
        del name, component_topic, friendly_name
        gc.collect()
    async def on_message1(self, topic, message, retained):
        """
        MQTTHandler is calling this subscribed async method whenever a message is received for the subscribed topic.
        :param topic: str
        :param message: str/dict/list (json converted)
        :param retained: bool
        :return:
        """
        print("Do something")
        return True  # When returning True, the value of arg "message" will be
        # published to the state topic as a retained message
    async def on_message2(self, topic, message, retained):
        """
        MQTTHandler is calling this subscribed async method whenever a message is received for the subscribed topic.
        :param topic: str
        :param message: str/dict/list (json converted)
        :param retained: bool
        :return:
        """
        print("Do something else")
        return True  # When returning True, the value of arg "message" will be
        # published to the state topic as a retained message
| [
"kevinkk525@users.noreply.github.com"
] | kevinkk525@users.noreply.github.com |
642c338f2025399808ac2d1a89087670e21e1dd9 | 365558f4e8ddc829f0ddca3f7d44ba62da27542a | /updates/api/mixins.py | 508f98039750b944973f416e4ae53a5ae150c9e5 | [] | no_license | paulitstep/restapi | 9f1a1458c85cccad1a51a1f00f9a948ccca90f5e | 465775f166b342bb416973335585225e16cb0ac4 | refs/heads/master | 2022-12-11T05:28:29.495535 | 2020-01-03T12:07:14 | 2020-01-03T12:07:14 | 229,033,566 | 0 | 0 | null | 2022-11-22T04:55:07 | 2019-12-19T10:44:52 | Python | UTF-8 | Python | false | false | 264 | py | from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
class CSRFExemptMixin(object):
    """View mixin that exempts every request handled by the view from CSRF checks.

    ``method_decorator`` adapts the function decorator ``csrf_exempt`` so it
    can be applied to the bound ``dispatch`` method, which covers all HTTP
    verbs at once.
    """

    @method_decorator(csrf_exempt)
    def dispatch(self, *args, **kwargs):
        handler = super().dispatch
        return handler(*args, **kwargs)
| [
"pasha-mo1@rambler.ru"
] | pasha-mo1@rambler.ru |
502205acb9cda35045cccadbb6e2af22d5604de8 | d75fbceb28ad14b07ae4057a8b23ec0bd3682628 | /code/chap01/GeometryDemo.py | 1894209492bd0745a775c3814309fabaf5b7aac6 | [] | no_license | wubinbai/pygame-book | 0707a0b36f41bc6f0b1282707e6c4f6cbed9c87a | 9de1f7516a2aec940ffa97f9686cc0520bad2deb | refs/heads/master | 2020-12-21T15:51:08.397619 | 2020-01-30T12:37:52 | 2020-01-30T12:37:52 | 236,478,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | # OOP Geometry Demo
class Point():
    """A 2-D point; every construction prints a trace line."""

    # Class-level defaults, kept for parity with the original demo.
    x = 0.0
    y = 0.0

    def __init__(self, x, y):
        self.x = x
        self.y = y
        print("Point constructor")

    def ToString(self):
        """Render as ``{X:<x>,Y:<y>}``."""
        return f"{{X:{self.x},Y:{self.y}}}"
class Size():
    """A width/height pair; every construction prints a trace line."""

    # Class-level defaults, kept for parity with the original demo.
    width = 0.0
    height = 0.0

    def __init__(self, width, height):
        self.width = width
        self.height = height
        print("Size constructor")

    def ToString(self):
        """Render as ``{WIDTH=<w>,HEIGHT=<h>}``."""
        return f"{{WIDTH={self.width},HEIGHT={self.height}}}"
class Circle(Point):
    """A circle: an inherited center point plus a radius."""

    radius = 0.0

    def __init__(self, x, y, radius):
        super().__init__(x, y)
        self.radius = radius
        print("Circle constructor")

    def ToString(self):
        """Render as the point string followed by ``,{RADIUS=<r>}``."""
        center = super().ToString()
        return f"{center},{{RADIUS={self.radius}}}"
class Rectangle(Point, Size):
    """A rectangle: a corner point plus a size, via multiple inheritance."""

    def __init__(self, x, y, width, height):
        # Call each base initializer explicitly; both print their trace line.
        Point.__init__(self, x, y)
        Size.__init__(self, width, height)
        print("Rectangle constructor")

    def ToString(self):
        """Render as the point string and size string joined by a comma."""
        return ",".join((Point.ToString(self), Size.ToString(self)))
# Exercise each class once; every constructor prints its own trace line.
demo_point = Point(10, 20)
print(demo_point.ToString())
demo_size = Size(80, 70)
print(demo_size.ToString())
demo_circle = Circle(100, 100, 50)
print(demo_circle.ToString())
demo_rect = Rectangle(200, 250, 40, 50)
print(demo_rect.ToString())
| [
"wubinbai@yahoo.com"
] | wubinbai@yahoo.com |
7783c08daad08e45d2628bdba38bd60685bafafa | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /gaussiana/ch3_2020_03_07_23_12_34_972398.py | 1961ce12d71b8ef3172faaed1cc7b3049bb38b32 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | import math
def calcula_gaussiana(x, y, z):
    """Return the normal pdf with mean ``y`` and standard deviation ``z`` at ``x``.

    Bug fix: the original computed ``b = math.e ** (-0.5 * c)`` *before*
    assigning ``c``, which raised ``NameError`` on every call. The squared
    z-score is now computed first.
    """
    a = 1 / (z * (2 * math.pi) ** (1 / 2))  # normalisation constant
    c = ((x - y) / z) ** 2                  # squared z-score
    b = math.e ** (-0.5 * c)
    gaussiana = a * b
    return gaussiana
"you@example.com"
] | you@example.com |
7a8871f3e6f1d6ffbb7082df68f5813e5b281528 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/cloud/aapicsubnetpool.py | b8476311897c66bef3a0147220099b199efa5dcf | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,396 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class AApicSubnetPool(Mo):
    """Auto-generated ACI managed-object model: abstract cloud subnet pool.

    The class-level statements below are emitted by the cobra code generator
    ("written by zen warriors, do not modify!") and declare the MO schema:
    the ``meta`` descriptor plus five properties (childAction, createdBy,
    dn, rn, status, subnet). Do not hand-edit the values.
    """
    meta = ClassMeta("cobra.model.cloud.AApicSubnetPool")
    meta.isAbstract = True
    meta.moClassName = "cloudAApicSubnetPool"
    meta.moClassName = "cloudAApicSubnetPool"  # NOTE(review): duplicated by the generator; harmless
    meta.rnFormat = ""
    meta.category = MoCategory.REGULAR
    meta.label = "Abstract Apic Subnet Pool"
    meta.writeAccessMask = 0xc00000001
    meta.readAccessMask = 0xc00000001
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = True
    meta.isContextRoot = False
    meta.concreteSubClasses.add("cobra.model.cloud.ApicSubnetPool")
    meta.rnPrefixes = [
    ]
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)
    prop = PropMeta("str", "createdBy", "createdBy", 52857, PropCategory.REGULAR)
    prop.label = "Created By"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "USER"
    prop._addConstant("SYSTEM", "system-internal", 1)
    prop._addConstant("USER", "user", 0)
    meta.props.add("createdBy", prop)
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)
    prop = PropMeta("str", "subnet", "subnet", 52856, PropCategory.REGULAR)
    prop.label = "Subnet"
    prop.isConfig = True
    prop.isAdmin = True
    meta.props.add("subnet", prop)
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        """Construct the MO under the given parent; no naming properties."""
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"bkhoward@live.com"
] | bkhoward@live.com |
32b236c7d226d86c465cc89e6ca3ee8726a36cf5 | 0d15b6706d1016b604b351d57204852ff6613600 | /articles/admin.py | 1e1ea65ea50180b8980335603648ca7861f27e91 | [] | no_license | yoonmyunghoon/Django-prac | e431b00482e2b5dde5b6555b862c658f86ec6328 | 848275f43f514b7d84b6bcec1e7fee90f9b1a378 | refs/heads/master | 2023-03-03T23:21:57.607503 | 2021-02-22T08:46:58 | 2021-02-22T08:46:58 | 339,078,445 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 777 | py | from django.contrib import admin
from .models import Article, Comment, Hashtag
@admin.register(Article)
class ArticleAdmin(admin.ModelAdmin):
    # Columns shown on the article changelist page.
    list_display = ("id", "title", "content", "image", "created_at", "updated_at")
    # "content" is the column that links through to the edit form.
    list_display_links = ("content",)
    list_filter = ("created_at",)
    # "title" can be edited directly from the changelist.
    list_editable = ("title",)
    list_per_page = 2
# admin.site.register(Article, ArticleAdmin)
@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
    # Columns shown on the comment changelist page.
    list_display = ("id", "article_id", "content", "created_at", "updated_at")
    list_filter = ("created_at",)
    # "content" can be edited directly from the changelist.
    list_editable = ("content",)
    list_per_page = 2
# admin.site.register(Comment, CommentAdmin)
@admin.register(Hashtag)
class HashtagAdmin(admin.ModelAdmin):
    # Fix: the original assigned "liset_display", a typo Django silently
    # ignores, so the changelist fell back to the default __str__ column.
    list_display = ("content",)
| [
"youn1791472@gmail.com"
] | youn1791472@gmail.com |
fbfd4b1e2fa77069d31dc9861da0475a94f7c072 | 7832e7dc8f1583471af9c08806ce7f1117cd228a | /aliyun-python-sdk-rds/aliyunsdkrds/request/v20140815/CloneDBInstanceRequest.py | cb2978c432125971753d5407b3917c60679c46d8 | [
"Apache-2.0"
] | permissive | dianplus/aliyun-openapi-python-sdk | d6494850ddf0e66aaf04607322f353df32959725 | 6edf1ed02994245dae1d1b89edc6cce7caa51622 | refs/heads/master | 2023-04-08T11:35:36.216404 | 2017-11-02T12:01:15 | 2017-11-02T12:01:15 | 109,257,597 | 0 | 0 | NOASSERTION | 2023-03-23T17:59:30 | 2017-11-02T11:44:27 | Python | UTF-8 | Python | false | false | 4,506 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CloneDBInstanceRequest(RpcRequest):
    """RPC request for the Aliyun RDS ``CloneDBInstance`` API (version 2014-08-15).

    Each ``get_X``/``set_X`` pair below simply proxies the query parameter
    ``X``; the accessors are SDK-generated boilerplate with no extra logic.
    """
    def __init__(self):
        RpcRequest.__init__(self, 'Rds', '2014-08-15', 'CloneDBInstance','rds')
    def get_ResourceOwnerId(self):
        return self.get_query_params().get('ResourceOwnerId')
    def set_ResourceOwnerId(self,ResourceOwnerId):
        self.add_query_param('ResourceOwnerId',ResourceOwnerId)
    def get_RestoreTime(self):
        return self.get_query_params().get('RestoreTime')
    def set_RestoreTime(self,RestoreTime):
        self.add_query_param('RestoreTime',RestoreTime)
    def get_Period(self):
        return self.get_query_params().get('Period')
    def set_Period(self,Period):
        self.add_query_param('Period',Period)
    def get_DBInstanceStorage(self):
        return self.get_query_params().get('DBInstanceStorage')
    def set_DBInstanceStorage(self,DBInstanceStorage):
        self.add_query_param('DBInstanceStorage',DBInstanceStorage)
    def get_ResourceOwnerAccount(self):
        return self.get_query_params().get('ResourceOwnerAccount')
    def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
        self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
    def get_ClientToken(self):
        return self.get_query_params().get('ClientToken')
    def set_ClientToken(self,ClientToken):
        self.add_query_param('ClientToken',ClientToken)
    def get_BackupId(self):
        return self.get_query_params().get('BackupId')
    def set_BackupId(self,BackupId):
        self.add_query_param('BackupId',BackupId)
    def get_OwnerAccount(self):
        return self.get_query_params().get('OwnerAccount')
    def set_OwnerAccount(self,OwnerAccount):
        self.add_query_param('OwnerAccount',OwnerAccount)
    def get_OwnerId(self):
        return self.get_query_params().get('OwnerId')
    def set_OwnerId(self,OwnerId):
        self.add_query_param('OwnerId',OwnerId)
    def get_UsedTime(self):
        return self.get_query_params().get('UsedTime')
    def set_UsedTime(self,UsedTime):
        self.add_query_param('UsedTime',UsedTime)
    def get_DBInstanceClass(self):
        return self.get_query_params().get('DBInstanceClass')
    def set_DBInstanceClass(self,DBInstanceClass):
        self.add_query_param('DBInstanceClass',DBInstanceClass)
    def get_VSwitchId(self):
        return self.get_query_params().get('VSwitchId')
    def set_VSwitchId(self,VSwitchId):
        self.add_query_param('VSwitchId',VSwitchId)
    def get_PrivateIpAddress(self):
        return self.get_query_params().get('PrivateIpAddress')
    def set_PrivateIpAddress(self,PrivateIpAddress):
        self.add_query_param('PrivateIpAddress',PrivateIpAddress)
    def get_ResourceGroupId(self):
        return self.get_query_params().get('ResourceGroupId')
    def set_ResourceGroupId(self,ResourceGroupId):
        self.add_query_param('ResourceGroupId',ResourceGroupId)
    def get_VPCId(self):
        return self.get_query_params().get('VPCId')
    def set_VPCId(self,VPCId):
        self.add_query_param('VPCId',VPCId)
    def get_DBInstanceDescription(self):
        return self.get_query_params().get('DBInstanceDescription')
    def set_DBInstanceDescription(self,DBInstanceDescription):
        self.add_query_param('DBInstanceDescription',DBInstanceDescription)
    def get_DBInstanceId(self):
        return self.get_query_params().get('DBInstanceId')
    def set_DBInstanceId(self,DBInstanceId):
        self.add_query_param('DBInstanceId',DBInstanceId)
    def get_PayType(self):
        return self.get_query_params().get('PayType')
    def set_PayType(self,PayType):
        self.add_query_param('PayType',PayType)
    def get_InstanceNetworkType(self):
        return self.get_query_params().get('InstanceNetworkType')
    def set_InstanceNetworkType(self,InstanceNetworkType):
        self.add_query_param('InstanceNetworkType',InstanceNetworkType)
"haowei.yao@alibaba-inc.com"
] | haowei.yao@alibaba-inc.com |
c1298158bf73240b9c238c14aac0733c0bc1b59d | 29c476c037a05170ff2ddef8edd07014d3751614 | /0x06-python-classes/4-square.py | 88ef31648dc094728817a3acd69b73ce8832f99e | [] | no_license | hacheG/holbertonschool-higher_level_programming | a0aaddb30665833bd260766dac972b7f21dda8ea | 535b1ca229d7cf61124a128bb5725e5200c27fbc | refs/heads/master | 2020-07-22T23:09:27.486886 | 2020-02-13T19:41:34 | 2020-02-13T19:41:34 | 207,360,462 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | #!/usr/bin/python3
class Square():
    """A square whose side length is validated through the ``size`` property."""

    def __init__(self, size=0):
        # Assign directly, bypassing the setter's validation — this matches
        # the original behaviour exactly.
        self.__size = size

    @property
    def size(self):
        """Current side length."""
        return self.__size

    @size.setter
    def size(self, x):
        if not isinstance(x, int):
            raise TypeError("size must be an integer")
        if x < 0:
            raise ValueError("size must be >= 0")
        self.__size = x

    def area(self):
        """Return the surface area (side squared)."""
        side = self.__size
        return side * side
| [
"943@holbertonschool.com"
] | 943@holbertonschool.com |
a53f41a47bb29cda9a8831a17ee8ca50f996caea | 8616892b6541602b53fdd94d1552d8e96b7ab722 | /dessn/configurations/old/combined_simple_nolowz.py | bc61fe17df88b12a52d7a966b4534f1e4c36302e | [
"MIT"
] | permissive | dessn/sn-bhm | 7c436877832ec10e4af318a6befff9fb8ffcbf3a | f320a41f9a4f2be49073437e98addca79e938160 | refs/heads/master | 2020-08-28T03:34:38.180148 | 2019-05-01T04:23:23 | 2019-05-01T04:23:23 | 45,723,864 | 1 | 0 | null | 2017-04-25T02:54:43 | 2015-11-07T05:27:44 | Python | UTF-8 | Python | false | false | 2,485 | py | import os
import logging
import socket
from dessn.framework.fitter import Fitter
from dessn.framework.models.approx_model import ApproximateModelW, ApproximateModel, ApproximateModelOl
from dessn.framework.simulations.snana_bulk import SNANACombinedBulk
from dessn.framework.simulations.selection_effects import lowz_sel, des_sel
from dessn.planck.planck import get_planck
if __name__ == "__main__":
    # Entry point: fit the approximate BHM model (ApproximateModelW -- presumably
    # the variant with w free, since the plots below show Omega_m and w; confirm)
    # to combined low-z + DES SNANA bulk simulations, or plot finished chains.
    logging.basicConfig(level=logging.DEBUG)
    # Output locations are derived from this file's own name; [:-3] strips ".py".
    plot_dir = os.path.dirname(os.path.abspath(__file__)) + "/plots/%s/" % os.path.basename(__file__)[:-3]
    dir_name = plot_dir + "output/"
    pfn1 = plot_dir + os.path.basename(__file__)[:-3]
    file = os.path.abspath(__file__)
    print(dir_name)
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    models = ApproximateModelW()
    # Turn off mass and skewness for easy test
    # Two bulk simulations: a combined low-z sample and a combined DES sample.
    # NOTE(review): the first argument (152 / 208) presumably is a supernova
    # count per simulation -- TODO confirm against SNANACombinedBulk's signature.
    simulation = [SNANACombinedBulk(152, ["SHINTON_LOWZ_MATRIX_G10_SKEWC_SKEWX1", "SHINTON_LOWZ_MATRIX_C11_SKEWC_SKEWX1"],
                                    "CombinedLowZ", manual_selection=lowz_sel(cov_scale=0.01), num_calib=50),
                  SNANACombinedBulk(208, ["SHINTON_DES_MATRIX_G10_SKEWC_SKEWX1", "SHINTON_DES_MATRIX_C11_SKEWC_SKEWX1"],
                                    "CombinedDES", manual_selection=des_sel(), num_calib=21)]
    # Configure the fitter: 100 cosmologies, 1 walker each, capped at 3000 steps.
    fitter = Fitter(dir_name)
    fitter.set_models(models)
    fitter.set_simulations(simulation)
    fitter.set_num_cosmologies(100)
    fitter.set_num_walkers(1)
    fitter.set_max_steps(3000)
    # Anywhere except the author's laptop: run the actual fit.
    h = socket.gethostname()
    if h != "smp-hk5pn72": # The hostname of my laptop. Only will work for me, ha!
        fitter.fit(file)
    else:
        # On the laptop: load the finished chains and produce plots + tables.
        from chainconsumer import ChainConsumer
        # results = fitter.load()
        # print("Data loaded")
        m, s, chain, truth, weight, old_weight, posterior = fitter.load()
        # Corner plot of Omega_m vs w, plus a LaTeX summary table on stdout.
        c = ChainConsumer()
        c.add_chain(chain, weights=weight, posterior=posterior, name="Approx")
        c.configure(spacing=1.0)
        parameters = [r"$\Omega_m$", "$w$"]
        print(c.analysis.get_latex_table(transpose=True))
        c.plotter.plot(filename=pfn1 + ".png", truth=truth, parameters=parameters)
        print("Plotting distributions")
        # Marginal distributions for every parameter in the chain.
        c = ChainConsumer()
        c.add_chain(chain, weights=weight, posterior=posterior, name="Approx")
        c.configure(label_font_size=10, tick_font_size=10, diagonal_tick_labels=False)
        c.plotter.plot_distributions(filename=pfn1 + "_dist.png", truth=truth, col_wrap=8)
| [
"samuelreay@gmail.com"
] | samuelreay@gmail.com |
57bce43d697126c0d1fdf8886d4be50f39e8e18a | 34599596e145555fde0d4264a1d222f951f49051 | /pcat2py/class/26048566-5cc5-11e4-af55-00155d01fe08.py | f2d5557e83a298ecbd3b520346c5dd7d68781382 | [
"MIT"
] | permissive | phnomcobra/PCAT2PY | dc2fcbee142ce442e53da08476bfe4e68619346d | 937c3b365cdc5ac69b78f59070be0a21bdb53db0 | refs/heads/master | 2021-01-11T02:23:30.669168 | 2018-02-13T17:04:03 | 2018-02-13T17:04:03 | 70,970,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,019 | py | #!/usr/bin/python
################################################################################
# 26048566-5cc5-11e4-af55-00155d01fe08
#
# Justin Dierking
# justindierking@hardbitsolutions.com
# phnomcobra@gmail.com
#
# 10/24/2014 Original Construction
################################################################################
class Finding:
    """Compliance finding: the system must not reference the usb-storage
    kernel module anywhere under /etc/modprobe.conf or /etc/modprobe.d.

    ``check`` records its evidence in ``output`` and its verdict in
    ``is_compliant``.
    """
    def __init__(self):
        # Captured output lines from the most recent check() run.
        self.output = []
        # Verdict of the most recent check(); False until a check passes.
        self.is_compliant = False
        # Stable identifier for this finding.
        self.uuid = "26048566-5cc5-11e4-af55-00155d01fe08"
    def check(self, cli):
        """Run the grep through *cli* and return the compliance verdict.

        Compliant only when the command produced no non-blank output,
        i.e. usb-storage is not mentioned in the modprobe configuration.
        """
        stdout = cli.system("grep -r usb-storage /etc/modprobe.conf /etc/modprobe.d")
        self.output = stdout.split('\n')
        self.is_compliant = not any(line.strip() for line in self.output)
        return self.is_compliant
| [
"phnomcobra@gmail.com"
] | phnomcobra@gmail.com |
25414baa7b52f3f2d2f1fbc697b602bd893b8ab7 | 1a31dfb66512aa66c407484f2ea8b0fb370669a4 | /account/urls.py | 74fd4189f5be454d9fbca20ea9f45a0439c5e14d | [] | no_license | nisha-eng/dstt | 790129f2918e0210421039baba0a4e8c877a7627 | bab89000242aec3a1a6fb05447ec52b14722809f | refs/heads/main | 2023-03-01T02:50:20.767421 | 2021-02-05T05:23:08 | 2021-02-05T05:23:08 | 336,171,818 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 172 | py | from django.contrib import admin
from django.urls import path,include
from . import views
# URL routes for the account app: '/signup/' is served by the class-based
# SignUpView and is reversible under the name 'signup'.
urlpatterns = [
    path('signup/',views.SignUpView.as_view(), name='signup'),
] | [
"mdipakpatidar@gmail.com"
] | mdipakpatidar@gmail.com |
27a02432307f60e2349e5f62217bdc229641b3a0 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_9_v3/I_w_Mgt_to_Wxyz_focus_to_Cxy_focus_series_Pyramid/pyramid_1side/bce_s001_tv_s0p1_L3/step12_L2345678.py | 65d732c2f60394ea9ab9e20e7ad6db3c04b3fa69 | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,245 | py | '''
目前只有 step12 一定需要切換資料夾到 該komg_model所在的資料夾 才能執行喔!
'''
if(__name__ == "__main__"):
    ############################################################################################################
    ### Add kong_model2 to sys.path so the step11/step12 modules below can be imported.
    import os
    code_exe_path = os.path.realpath(__file__) ### absolute path of the script currently being executed
    code_exe_path_element = code_exe_path.split("\\") ### split the path to find which level kong_model2 sits at
    # NOTE(review): splitting on "\\" assumes Windows-style paths -- confirm before running on other platforms.
    kong_layer = code_exe_path_element.index("kong_model2") + 1 ### find which level kong_model2 sits at
    kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer]) ### locate the kong_model2 dir
    import sys ### add kong_model2 to sys.path
    sys.path.append(kong_model2_dir)
    # print(__file__.split("\\")[-1])
    # print("    code_exe_path:", code_exe_path)
    # print("    code_exe_path_element:", code_exe_path_element)
    # print("    kong_layer:", kong_layer)
    # print("    kong_model2_dir:", kong_model2_dir)
    ############################################################################################################
    from step12_result_analyzer import Row_col_exps_analyzer
    from step11_L2345678 import *
    ############################################################################################################
    ### Derive the analysis sub-directory (template_dir) from how many levels this
    ### script sits below the kong_model2 root.
    kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer ### the middle -1 converts a length into an index
    # print("    kong_to_py_layer:", kong_to_py_layer)
    if (kong_to_py_layer == 0): template_dir = ""
    elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:] ### [7:] once stripped the "step1x_" prefix; keeping the meaningful name later seemed fine, so it became [0:]
    elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] ### [5:] once stripped the "mask_" prefix (added because a python module name cannot start with a digit); the automatic ordering turned out acceptable, so it became [0:]
    elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
    # print("    template_dir:", template_dir)
    ############################################################################################################
    ana_dir = template_dir
    ############################################################################################################
    """
    以下留下一些example這樣子
    core_amount == 7 是因為 目前 see_amount == 7 ,想 一個core 一個see
    task_amount == 7 是因為 目前 see_amount == 7
    single_see_multiprocess == True 代表 see內 還要 切 multiprocess,
    single_see_core_amount == 2 代表切2分
    所以總共會有 7*2 = 14 份 process 要同時處理,
    但建議不要用,已經測過,爆記憶體了
    """
    ############################################################################################################
    ############################################################################################################
    # Analyze the "rec" results of every experiment in the ch032 1side/2side grid,
    # then gather each see's final image via Gather_all_see_final_img().
    ana_name = "ch032_1side_all__2side_all"
    analyzer = Row_col_exps_analyzer(ana_describe=f"{ana_dir}/0_ana_{ana_name}",
                                     ana_what_sees="see",
                                     ana_what="rec",
                                     row_col_results=ch032_1side_all__2side_all, show_in_img=False, show_gt_img=False, bgr2rgb=True, add_loss=False, img_h=512, img_w=512)\
        .analyze_row_col_results_all_single_see(single_see_multiprocess=False, single_see_core_amount=6)\
        .Gather_all_see_final_img()
    '''
    analyzer = Col_exps_analyzer(ana_describe=f"{ana_dir}/0_ana_{ana_name}",
                                 ana_what_sees="see",
                                 ana_what="wc",
                                 col_results=ch032_1side_all__2side_all, show_in_img=False, show_gt_img=False, bgr2rgb=True, add_loss=False, img_h=512, img_w=512)\
        .analyze_col_results_all_single_see(single_see_multiprocess=True, single_see_core_amount=6)\
        .Gather_all_see_final_img()
    '''
| [
"s89334roy@yahoo.com.tw"
] | s89334roy@yahoo.com.tw |
afd99a73aa5211d3722e521ba63eeacc2c19e7b9 | 642526009a434c2a6e04fe0293279a151b216d0a | /dkube/sdk/internal/dkube_client/models/git_commit_details.py | 136441f2723ae8ca83f5e11383723fe023b5aa91 | [] | no_license | mak-454/dkube-sdk | d4b8e7f7b1d8c0b0f64b10940ae42ab9d62f4654 | d2ba78a0abbda589efc0dbd957d9a8f6fd227464 | refs/heads/master | 2022-12-26T03:17:55.627379 | 2020-05-09T17:29:08 | 2020-05-09T17:29:08 | 262,622,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,283 | py | # coding: utf-8
"""
Dkube api server
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 0.2.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class GitCommitDetails(object):
    """Swagger model describing a single git commit: id, message and date.

    Hand-restyled from swagger-codegen output; the public contract
    (constructor signature, properties, ``to_dict``/``to_str``, equality)
    is unchanged.
    """

    # attribute name -> swagger type of that attribute
    swagger_types = {
        'id': 'str',
        'message': 'str',
        '_date': 'str'
    }

    # attribute name -> key used in the JSON definition
    attribute_map = {
        'id': 'id',
        'message': 'message',
        '_date': 'date'
    }

    def __init__(self, id=None, message=None, _date=None):  # noqa: E501
        """Create the model; every field is optional and defaults to None."""
        self._id = None
        self._message = None
        self.__date = None
        self.discriminator = None
        # Route any supplied values through the property setters.
        for attr, supplied in (('id', id), ('message', message), ('_date', _date)):
            if supplied is not None:
                setattr(self, attr, supplied)

    @property
    def id(self):
        """Commit identifier of this GitCommitDetails."""
        return self._id

    @id.setter
    def id(self, value):
        """Set the commit identifier."""
        self._id = value

    @property
    def message(self):
        """Commit message of this GitCommitDetails."""
        return self._message

    @message.setter
    def message(self, value):
        """Set the commit message."""
        self._message = value

    @property
    def _date(self):
        """Commit date of this GitCommitDetails."""
        return self.__date

    @_date.setter
    def _date(self, value):
        """Set the commit date."""
        self.__date = value

    def to_dict(self):
        """Return the model properties as a dict.

        Nested swagger models are converted one level deep via their own
        ``to_dict``; plain values pass through unchanged.
        """
        def plain(item):
            # One-level conversion: models become dicts, primitives pass through.
            return item.to_dict() if hasattr(item, "to_dict") else item

        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [plain(element) for element in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: plain(item) for key, item in value.items()}
            else:
                result[attr] = value
        # Generated models may subclass dict; fold those entries in as well.
        if issubclass(GitCommitDetails, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return a pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is a GitCommitDetails with equal state."""
        return isinstance(other, GitCommitDetails) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
| [
"ahmed.khan@oneconvergence.com"
] | ahmed.khan@oneconvergence.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.