hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8288f27e4f9f25b68044be3e4af91d23dfa24bd3 | 1,923 | py | Python | model.py | luqifeng/CVND---Image-Captioning-Project | 6564b72222d962f8e1acdcdcf3d8ac5874ad9ab8 | [
"MIT"
] | null | null | null | model.py | luqifeng/CVND---Image-Captioning-Project | 6564b72222d962f8e1acdcdcf3d8ac5874ad9ab8 | [
"MIT"
] | null | null | null | model.py | luqifeng/CVND---Image-Captioning-Project | 6564b72222d962f8e1acdcdcf3d8ac5874ad9ab8 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torchvision.models as models
import numpy as np
| 36.980769 | 126 | 0.631825 |
828aa1df6ebc3553389f760e5439ccc3f6c4765d | 981 | py | Python | App/items/models/items.py | fmgar/BlackMarker-API | a185d61d518ad505d2fd8882f0e8cd15474786cb | [
"MIT"
] | null | null | null | App/items/models/items.py | fmgar/BlackMarker-API | a185d61d518ad505d2fd8882f0e8cd15474786cb | [
"MIT"
] | null | null | null | App/items/models/items.py | fmgar/BlackMarker-API | a185d61d518ad505d2fd8882f0e8cd15474786cb | [
"MIT"
] | null | null | null | """Items model. """
# Django
from django.db import models
# Utilities
from App.utils.models import BlackMarketModel
# Models
from .category import Category
from .unit import Unit
from .owner import Owner
| 32.7 | 92 | 0.734964 |
828b34c1c1112e8cd47750832efbc80f1a49fc80 | 2,241 | py | Python | run_all.py | yuriisthebest/Advent-of-Code | 1a4b3d6e57b0751dec097ccfc83472c458605e37 | [
"MIT"
] | null | null | null | run_all.py | yuriisthebest/Advent-of-Code | 1a4b3d6e57b0751dec097ccfc83472c458605e37 | [
"MIT"
] | null | null | null | run_all.py | yuriisthebest/Advent-of-Code | 1a4b3d6e57b0751dec097ccfc83472c458605e37 | [
"MIT"
] | null | null | null | import json
import time
from multiprocessing import Process
from utils.paths import PATHS
from years.AoC2021.tasks import TASKS2021
# Constants
# When True, each task is assessed in its own Process; when False, tasks run sequentially.
PARALLEL_COMPUTATION = True
# Maps an Advent-of-Code year to the list of task classes registered for that year.
TASKS = {
    2021: TASKS2021
}
def asses_task(task: type, answers: dict, year: int) -> None:
    """
    Run a task 4 times (part 1 test, part 1 task, part 2 test, part 2 task)
    and check that each result matches the known correct answer.

    An expected answer of 0 is treated as "unknown" and accepted unconditionally.

    :param task: Task class whose run_all() returns [[p1_test, p1_real], [p2_test, p2_real]]
    :param answers: Mapping from task class name to its 4 correct answers
    :param year: The year where this task was asked (used in failure messages)
    :raises AssertionError: If any known answer does not match the task's output
    """
    t = task()
    pred = t.run_all()
    true = answers[task.__name__]
    # Flatten the four (part, dataset) combinations so a single assert covers all cases.
    checks = [
        (1, "test", pred[0][0], true[0]),
        (1, "real", pred[0][1], true[1]),
        (2, "test", pred[1][0], true[2]),
        (2, "real", pred[1][1], true[3]),
    ]
    for part, dataset, predicted, expected in checks:
        assert predicted == expected or expected == 0, \
            f"({year}, {task.__name__}) Part {part} has failed on the {dataset} data. " \
            f"Expected: {expected}, got: {predicted}"
if __name__ == "__main__":
    # Assess every registered task of every year, timing the whole run.
    start = time.perf_counter()
    num_tests = 0
    processes = []
    for year_num in TASKS.keys():
        # Find the answers of the current year
        # NOTE(review): the "\\" separator is Windows-specific - consider os.path.join for portability.
        with open(f"{PATHS[year_num]}\\answers.json") as f:
            year_answers = json.load(f)
        # Compute task results (unknown answers have a value of -1)
        # NOTE(review): asses_task actually skips checks when the stored answer is 0, not -1 -
        # confirm which sentinel answers.json really uses.
        for i, current_task in enumerate(TASKS[year_num]):
            num_tests += 1
            if PARALLEL_COMPUTATION:
                # One child process per task; a failing assertion surfaces as a traceback in the child.
                p = Process(target=asses_task, args=[current_task, year_answers, year_num])
                p.start()
                processes.append(p)
            else:
                asses_task(current_task, year_answers, year_num)
    # Wait for processes to stop and report success
    for process in processes:
        process.join()
    print(f"\n*** All {num_tests} tests completed successfully in {time.perf_counter() - start:.2f} sec***")
828cb262d3250d0e1b3f07edb7bc92fd873589c5 | 1,467 | py | Python | python/edl/tests/unittests/master_client_test.py | WEARE0/edl | f065ec02bb27a67c80466103e298bd6f37494048 | [
"Apache-2.0"
] | 90 | 2020-04-21T01:46:10.000Z | 2022-02-10T09:09:34.000Z | python/edl/tests/unittests/master_client_test.py | WEARE0/edl | f065ec02bb27a67c80466103e298bd6f37494048 | [
"Apache-2.0"
] | 37 | 2018-03-02T22:41:15.000Z | 2020-04-22T16:48:36.000Z | python/edl/tests/unittests/master_client_test.py | WEARE0/edl | f065ec02bb27a67c80466103e298bd6f37494048 | [
"Apache-2.0"
] | 34 | 2018-03-02T23:28:25.000Z | 2020-03-25T08:50:29.000Z | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import paddle_edl.utils.master_pb2 as master_pb2
import unittest
from edl.utils.master_client import Client
from edl.utils.utils import get_file_list, get_logger
# Clear any configured HTTP(S) proxies so requests go directly to the local master.
os.environ["https_proxy"] = ""
os.environ["http_proxy"] = ""
# Entry point: set up logging and run every unittest case defined in this module.
if __name__ == "__main__":
    logger = get_logger(10)  # 10 presumably maps to logging.DEBUG - confirm in edl.utils.utils
    unittest.main()
| 32.6 | 74 | 0.712338 |
828ccbf87f380dbc253cd5ac125a944fc9a7bd55 | 4,262 | py | Python | src/commercetools/services/types.py | BramKaashoek/commercetools-python-sdk | 4a4191d7816c921401b782d8ae37626cb32791a1 | [
"MIT"
] | null | null | null | src/commercetools/services/types.py | BramKaashoek/commercetools-python-sdk | 4a4191d7816c921401b782d8ae37626cb32791a1 | [
"MIT"
] | null | null | null | src/commercetools/services/types.py | BramKaashoek/commercetools-python-sdk | 4a4191d7816c921401b782d8ae37626cb32791a1 | [
"MIT"
] | null | null | null | import typing
from commercetools import schemas, types
from commercetools.services import abstract
from commercetools.typing import OptionalListStr
__all__ = ["TypeService"]
| 30.22695 | 87 | 0.585171 |
828db07d0c0f0f1db466402e002749cf071a28f8 | 3,454 | py | Python | augraphy/augmentations/noisetexturize.py | RyonSayer/augraphy | be1e8dcf0f129ac3fc30ba1cad0d8de02443f67f | [
"MIT"
] | 36 | 2021-06-25T02:17:57.000Z | 2022-03-29T02:36:09.000Z | augraphy/augmentations/noisetexturize.py | shaheryar1/augraphy | 5dd52fdd3b497312606c6d3afa4003f94a8cbcc4 | [
"MIT"
] | 136 | 2021-06-25T07:39:46.000Z | 2022-03-31T13:00:30.000Z | augraphy/augmentations/noisetexturize.py | shaheryar1/augraphy | 5dd52fdd3b497312606c6d3afa4003f94a8cbcc4 | [
"MIT"
] | 24 | 2021-06-27T21:15:11.000Z | 2022-03-08T03:28:17.000Z | import random
import cv2
import numpy as np
from augraphy.base.augmentation import Augmentation
| 33.533981 | 118 | 0.606543 |
828dc1f2bed1b15e7518a1fcf0598cc4397058a0 | 50,333 | py | Python | plugins/modules/bigip_sslo_config_ssl.py | kevingstewart/f5_sslo_ansible | 13001a8eab514b5f1ea374abdfc7dd2383655a86 | [
"Apache-2.0"
] | 7 | 2021-06-25T15:39:49.000Z | 2022-02-28T10:58:53.000Z | plugins/modules/bigip_sslo_config_ssl.py | kevingstewart/f5_sslo_ansible | 13001a8eab514b5f1ea374abdfc7dd2383655a86 | [
"Apache-2.0"
] | 6 | 2021-06-29T18:18:45.000Z | 2021-09-17T12:04:24.000Z | plugins/modules/bigip_sslo_config_ssl.py | kevingstewart/f5_sslo_ansible | 13001a8eab514b5f1ea374abdfc7dd2383655a86 | [
"Apache-2.0"
] | 3 | 2021-06-28T23:25:38.000Z | 2022-02-28T10:57:32.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2021, kevin-dot-g-dot-stewart-at-gmail-dot-com
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Version: 1.0.1
#### Updates:
#### 1.0.1 - added 9.0 support
# - changed max version
# - added clientssl "alpn" proxy support
# - added clientssl logPublisher support
# - added serverssl logPublisher support
# - updated version and previousVersion keys to match target SSLO version
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigip_sslo_config_ssl
short_description: Manage an SSL Orchestrator SSL configuration
description:
- Manage an SSL Orchestrator SSL configuration
version_added: "1.0.0"
options:
name:
description:
- Specifies the name of the SSL configuration. Configuration auto-prepends "ssloT_" to service.
Service name should be less than 14 characters and not contain dashes "-".
type: str
required: True
clientSettings:
description:
- Specifies the client-side SSL settings
suboptions:
cipherType:
description:
- Defines the type of cipher used, either "string" (for cipher strings), or "group" (an existing cipher group).
type: str
choices:
- string
- group
default: string
cipher:
description:
- Defines the actual cipher string (ex. "DEFAULT"), or existing cipher group (ex. /Common/f5-default) to use.
type: str
default: DEFAULT
enableTLS1_3:
description:
- Defines whether or not to enable client-side TLSv1.3 support. When enabled, the cipherType must be "group" and cipher must indicate an existing cipher group.
type: bool
default: False
cert:
description:
- Defines the certificate applied in the client side settings. For a forward proxy this is the template certificate and (ex. /Common/default.crt). For a reverse proxy, this is the client-facing server certificate.
type: str
default: /Common/default.crt
key:
description:
- Defines the private key applied in the client side settings. For a forward proxy this is the template key and (ex. /Common/default.key). For a reverse proxy, this is the client-facing server private key.
type: str
default: /Common/default.key
chain:
description:
- Defines the certificate keychain in the client side settings.
type: str
default: None
caCert:
description:
- Defines the CA certificate applied in the client side settings. This is the signing/forging CA certificate used for forward proxy TLS handling. This setting is not applicable in reverse proxy SSL.
type: str
default: None
caKey:
description:
- Defines the CA private key applied in the client side settings. This is the signing/forging CA private key used for forward proxy TLS handling. This setting is not applicable in reverse proxy SSL.
type: str
default: None
caChain:
description:
- Defines the CA certificate keychain in the client side settings. This would contain any CA subordinated in the trust chain between the signing CA and explicitly-trusted root certificate. If required, it should contain any intermediate CA certificates, up to but not including the self-signed root CA.
type: str
default: None
alpn:
description:
- Requires 9.0+. Enables or disables ALPN HTTP/2 full proxy in an outbound (forward proxy) topology.
type: bool
default: False
logPublisher:
description:
- Requires 9.0+. Defines a specific log publisher to use for client-side SSL-related events.
type: str
default: /Common/sys-ssl-publisher
serverSettings:
description:
- Specifies the server-side SSL settings
suboptions:
cipherType:
description:
- Defines the type of cipher used, either "string" (for cipher strings), or "group" (an existing cipher group).
type: str
choices:
- string
- group
default: string
cipher:
description:
- Defines the actual cipher string (ex. "DEFAULT"), or existing cipher group (ex. /Common/f5-default) to use.
type: str
default: DEFAULT
enableTLS1_3:
description:
- Defines whether or not to enable server-side TLSv1.3 support. When enabled, the cipherType must be "group" and cipher must indicate an existing cipher group.
type: bool
default: False
caBundle:
description:
- Defines the certificate authority bundle used to validate remote server certificates. This setting is most applicable in the forward proxy use case to validate remote (Internat) server certificates.
type: str
default: /Common/ca-bundle.crt
blockExpired:
description:
- Defines the action to take if an expired remote server certificate is encountered. For forward proxy the default is to ignore expired certificates (False). For reverse proxy the default is to drop expired certificates (True).
type: bool
default: False
blockUntrusted:
description:
- Defines the action to take if an untrusted remote server certificate is encountered, based on the defined caBundle. For forward proxy the default is to ignore untrusted certificates (False). For reverse proxy the default is to drop untrusted certificates (True).
type: bool
default: False
ocsp:
description:
- Defines an OCSP configuration to use to perform certificate revocation checking again remote server certificates.
type: str
default: None
crl:
description:
- Defines a CRL configuration to use to perform certificate revocation checking again remote server certificates.
type: str
default: None
logPublisher:
description:
- Requires 9.0+. Defines a specific log publisher to use for server-side SSL-related events.
type: str
default: /Common/sys-ssl-publisher
bypassHandshakeFailure:
description:
- Defines the action to take if a server side TLS handshake failure is detected. A value of False will cause the connection to fail. A value of True will shutdown TLS decryption and allow the connection to proceed un-decrypted.
type: bool
default: False
bypassClientCertFailure:
description:
- Defines the action to take if a server side TLS handshake client certificate request is detected. A value of False will cause the connection to fail. A value of True will shutdown TLS decryption and allow the connection to proceed un-decrypted.
type: bool
default: False
mode:
description:
- Defines how this task is handled. With the default setting of 'update', the module performs the tasks required to update the target resource. With the 'output' setting, the resulting JSON object blocks are returned without updating the target resource. This option is useful for debugging, and when subordinate objects (ex. SSL, services, service chains, policy, resolver) are created in the same playbook, and their respectice output JSON referenced in a single Topology create task.
type: str
choices:
- update
- output
default: update
state:
description:
- Specifies the present/absent state required.
type: str
choices:
- absent
- present
default: present
extends_documentation_fragment: f5networks.f5_modules.f5
author:
- Kevin Stewart (kevin-dot-g-dot-stewart-at-gmail-dot-com)
'''
EXAMPLES = r'''
- name: Create SSLO SSL Forward Proxy Settings (simple)
hosts: localhost
gather_facts: False
connection: local
collections:
- kevingstewart.f5_sslo_ansible
vars:
provider:
server: 172.16.1.77
user: admin
password: admin
validate_certs: no
server_port: 443
tasks:
- name: SSLO SSL forward proxy settings
bigip_sslo_config_ssl:
provider: "{{ provider }}"
name: "demo_ssl"
clientSettings:
caCert: "/Common/subrsa.f5labs.com"
caKey: "/Common/subrsa.f5labs.com"
delegate_to: localhost
- name: Create SSLO SSL Forward Proxy Settings
hosts: localhost
gather_facts: False
connection: local
collections:
- kevingstewart.f5_sslo_ansible
vars:
provider:
server: 172.16.1.77
user: admin
password: admin
validate_certs: no
server_port: 443
tasks:
- name: SSLO SSL settings
bigip_sslo_config_ssl:
provider: "{{ provider }}"
name: "demo_ssl"
clientSettings:
cipherType: "group"
cipher: "/Common/f5-default"
enableTLS1_3: True
cert: "/Common/default.crt"
key: "/Common/default.key"
caCert: "/Common/subrsa.f5labs.com"
caKey: "/Common/subrsa.f5labs.com"
caChain: "/Common/my-ca-chain"
alpn: True
logPublisher: "/Common/my-ssl-publisher"
serverSettings:
cipherType: "group"
cipher: "/Common/f5-default"
enableTLS1_3: True
caBundle: "/Common/local-ca-bundle.crt"
blockExpired: False
blockUntrusted: False
ocsp: "/Common/my-ocsp"
crl: "/Common/my-crl"
logPublisher: "/Common/my-ssl-publisher"
bypassHandshakeFailure: True
bypassClientCertFailure: True
delegate_to: localhost
- name: Create SSLO SSL Reverse Proxy Settings (simple)
hosts: localhost
gather_facts: False
connection: local
collections:
- kevingstewart.f5_sslo_ansible
vars:
provider:
server: 172.16.1.77
user: admin
password: admin
validate_certs: no
server_port: 443
tasks:
- name: SSLO SSL settings
bigip_sslo_config_ssl:
provider: "{{ provider }}"
name: "demo_ssl"
clientSettings:
cert: "/Common/myserver.f5labs.com"
key: "/Common/myserver.f5labs.com"
delegate_to: localhost
- name: Create SSLO SSL Reverse Proxy Settings
hosts: localhost
gather_facts: False
connection: local
collections:
- kevingstewart.f5_sslo_ansible
vars:
provider:
server: 172.16.1.77
user: admin
password: admin
validate_certs: no
server_port: 443
tasks:
- name: SSLO SSL settings
bigip_sslo_config_ssl:
provider: "{{ provider }}"
name: "demo5"
clientSettings:
cipherType: "group"
cipher: "/Common/f5-default"
enableTLS1_3: True
cert: "/Common/myserver.f5labs.com"
key: "/Common/myserver.f5labs.com"
chain: "/Common/my-ca-chain"
serverSettings:
cipherType: "group"
cipher: "/Common/f5-default"
enableTLS1_3: True
caBundle: "/Common/local-ca-bundle.crt"
blockExpired: False
blockUntrusted: False
delegate_to: localhost
'''
RETURN = r'''
name:
description:
- Changed name of SSL configuration.
type: str
sample: demo_ssl
clientSettings:
description: client-side SSL settings
type: complex
contains:
cipherType:
description: defines "string" for cipher string, or "group" for cipher group
type: str
sample: string
cipher:
description: defines the cipher string or an existing cipher group
type: str
sample: DEFAULT or /Common/f5-default
enableTLS1_3:
description: enables or disables client-side TLSv1.3
type: bool
sample: True
cert:
description: defines the client-facing certificate. For forward proxy this is the template certificate. For reverse proxy this is the server certificate.
type: str
sample: /Common/default.crt
key:
description: defines the client-facing private key. For forward proxy this is the template key. For reverse proxy this is the server private key.
type: str
sample: /Common/default.key
chain:
description: defines the client-facing CA certificate chain. For reverse proxy this is the server certificate's CA chain.
type: str
sample: /Common/local-ca-chain.crt
caCert:
description: defines the issuing CA certificate for a forward proxy.
type: str
sample: /Common/default.crt
caKey:
description: defines the issuing CA private key for a forward proxy.
type: str
sample: /Common/default.key
caChain:
description: defines the CA certificate chain for the issuing CA in a forward proxy.
type: str
sample: /Common/local-ca-chain.crt
alpn:
description: requires 9.0+. Enables or disables ALPN HTTP/2 full proxy through a forward proxy topology.
type: bool
sample: True
logPublisher:
description: requires 9.0+. Defines a specific log publisher for client-side SSL-related events.
type: str
sample: /Common/sys-ssl-publisher
serverSettings:
description: network settings for for-service configuration
type: complex
contains:
cipherType:
description: defines "string" for cipher string, or "group" for cipher group
type: str
sample: string
cipher:
description: defines the cipher string or an existing cipher group
type: str
sample: DEFAULT or /Common/f5-default
enableTLS1_3:
description: enables or disables server-side TLSv1.3
type: bool
sample: True
caBundle:
description: defines a CA bundle used to valdate remote server certificates.
type: str
sample: /Common/ca-bundle.crt
blockExpired:
description: defines the action to take on receiving an expired remote server certificate, True = block, False = ignore.
type: bool
sample: True
blockUntrusted:
description: defines the action to take on receiving an untrusted remote server certificate, True = block, False = ignore.
type: bool
sample: True
ocsp:
description: defines aan existing OCSP configuration to validate revocation of remote server certificates.
type: str
sample: /Common/my-ocsp
crl:
description: defines aan existing CRL configuration to validate revocation of remote server certificates.
type: str
sample: /Common/my-crl
logPublisher:
description: requires 9.0+. Defines a specific log publisher for server-side SSL-related events.
type: str
sample: /Common/sys-ssl-publisher
bypassHandshakeFailure:
description:
- Defines the action to take on receiving a TLS handshake alert from a server. True = bypass decryption and allow through, False = block
type: bool
sample: True
bypassClientCertFailure:
description:
- Defines the action to take on receiving a TLS handshake client certificate request from a server. True = bypass decryption and allow through, False = block
type: bool
sample: True
mode:
description: describes the action to take on the task.
type: str
sample: update
state:
description:
- Changed state.
type: str
sample: present
'''
from datetime import datetime
from ansible.module_utils.basic import (
AnsibleModule, env_fallback
)
from ansible_collections.f5networks.f5_modules.plugins.module_utils.bigip import (
F5RestClient
)
from ansible_collections.f5networks.f5_modules.plugins.module_utils.common import (
F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec
)
from ansible_collections.f5networks.f5_modules.plugins.module_utils.icontrol import (
tmos_version
)
from ipaddress import (
ip_network, ip_interface
)
import json, time, re
## NOTE(review): 'global' statements at module scope are no-ops - the names below are
## module-level regardless. Kept as-is to avoid any behavior-affecting edit.
global print_output
global json_template
global obj_attempts
global min_version
global max_version
## buffer of messages collected during a run and returned to Ansible in the result
print_output = []
## define object creation attempts count (with 1 seconds pause between each attempt)
obj_attempts = 20
## define minimum supported tmos version - min(SSLO 5.x)
min_version = 5.0
## define maximum supported tmos version - max(SSLO 9.x), per the 1.0.1 update notes above
max_version = 9.0
## Base guided-configuration (REST) payload used to deploy SSL settings to SSL Orchestrator.
## NOTE(review): "TEMPLATE_NAME" values are placeholders, presumably substituted with the
## user-supplied configuration name before the payload is posted - confirm in ModuleManager.
json_template = {
   "name":"f5-ssl-orchestrator-gc",
   "inputProperties":[
      {
         "id":"f5-ssl-orchestrator-operation-context",
         "type":"JSON",
         "value":{
            "operationType":"CREATE",
            "deploymentType":"SSL_SETTINGS",
            "deploymentName":"TEMPLATE_NAME",
            "deploymentReference":"",
            "partition":"Common",
            "strictness":False
         }
      },
      {
         "id":"f5-ssl-orchestrator-tls",
         "type":"JSON",
         "value":{
            "sslSettingsReference":"",
            "sslSettingsName":"",
            "description":"",
            "previousVersion":"7.2",
            "version":"7.2",
            "generalSettings":{
               "isForwardProxy":True,
               "bypassHandshakeAlert":False,
               "bypassClientCertFailure":False
            },
            "clientSettings":{
               "ciphers":{
                  "isCipherString":True,
                  "cipherString":"DEFAULT",
                  "cipherGroup":"/Common/f5-default"
               },
               "certKeyChain":[
                  {
                     "cert":"/Common/default.crt",
                     "key":"/Common/default.key",
                     "chain":"",
                     "passphrase":"",
                     "name":"CERT_KEY_CHAIN_0"
                  }
               ],
               "caCertKeyChain":[],
               "forwardByPass":True,
               "enabledSSLProcessingOptions":[]
            },
            "serverSettings":{
               "ciphers":{
                  "isCipherString":True,
                  "cipherString":"DEFAULT",
                  "cipherGroup":"/Common/f5-default"
               },
               "caBundle":"/Common/ca-bundle.crt",
               "expiredCertificates":False,
               "untrustedCertificates":False,
               "ocsp":"",
               "crl":"",
               "enabledSSLProcessingOptions":[]
            },
            "name":"TEMPLATE_NAME",
            "advancedMode":"off",
            "strictness":False,
            "partition":"Common"
         }
      },
      {
         "id":"f5-ssl-orchestrator-topology",
         "type":"JSON"
      }
   ],
   "configurationProcessorReference":{
      "link":"https://localhost/mgmt/shared/iapp/processors/f5-iappslx-ssl-orchestrator-gc"
   },
   "configProcessorTimeoutSeconds": 120,
   "statsProcessorTimeoutSeconds": 60,
   "configProcessorAffinity": {
      "processorPolicy": "LOCAL",
      "affinityProcessorReference": {
         "link": "https://localhost/mgmt/shared/iapp/affinity/local"
      }
   },
   "state":"BINDING",
   "presentationHtmlReference":{
      "link":"https://localhost/iapps/f5-iappslx-ssl-orchestrator/sgc/sgcIndex.html"
   },
   "operation":"CREATE"
}
## Template entry appended to clientSettings.caCertKeyChain when forward-proxy
## signing CA material (caCert/caKey/caChain) is supplied.
## NOTE(review): "defaut.key" looks like a typo for "default.key" - harmless if the
## value is always overwritten before use, but confirm against the consuming code.
json_ca_cert_template = {
   "cert":"/Common/default.crt",
   "key":"/Common/defaut.key",
   "chain":"",
   "isCa":True,
   "usage":"CA",
   "port":"0",
   "passphrase":"",
   "certKeyChainMismatch":False,
   "isDuplicateVal":False,
   "name":"CA_CERT_KEY_CHAIN_0"
}
## Option entry added to enabledSSLProcessingOptions when TLSv1.3 support is requested.
json_enable_tls13 = {
   "name":"TLSv1.3",
   "value":"TLSv1.3"
}
def main():
    """Module entry point: build the AnsibleModule and hand off to the ModuleManager.

    Resets the module-level ``print_output`` buffer, executes the SSL-settings
    workflow, and reports the outcome back to Ansible via exit_json/fail_json.
    """
    # Start each run with an empty output buffer.
    global print_output
    print_output = []

    # Build the Ansible module from the declared argument specification.
    arg_spec = ArgumentSpec()
    ansible_module = AnsibleModule(
        argument_spec=arg_spec.argument_spec,
        supports_check_mode=arg_spec.supports_check_mode,
    )

    # Run the workflow; any F5ModuleError is converted into an Ansible failure.
    try:
        manager = ModuleManager(module=ansible_module)
        task_results = manager.exec_module()
        outcome = dict(
            print_output = print_output,
            **task_results
        )
        ansible_module.exit_json(**outcome)
    except F5ModuleError as err:
        ansible_module.fail_json(msg=str(err))
if __name__ == '__main__':
main() | 36.955213 | 492 | 0.580573 |
828ee62b4ffbbb1d5dea12315eaccdf681093a66 | 52,854 | py | Python | nemo/pipelines.py | simonsobs/nemo | ab72fa1c5ea878fcb63eaf31642b3d7bdd6ac636 | [
"BSD-3-Clause"
] | 2 | 2021-01-11T13:10:27.000Z | 2022-03-09T16:31:48.000Z | nemo/pipelines.py | simonsobs/nemo | ab72fa1c5ea878fcb63eaf31642b3d7bdd6ac636 | [
"BSD-3-Clause"
] | 3 | 2020-11-11T10:44:47.000Z | 2022-01-05T07:28:58.000Z | nemo/pipelines.py | simonsobs/nemo | ab72fa1c5ea878fcb63eaf31642b3d7bdd6ac636 | [
"BSD-3-Clause"
] | 1 | 2021-03-05T18:31:00.000Z | 2021-03-05T18:31:00.000Z | """
This module defines pipelines - sets of tasks in nemo that we sometimes want to do on different inputs
(e.g., real data or simulated data).
"""
import os
import sys
import glob
import shutil
import time
import astropy.io.fits as pyfits
import astropy.table as atpy
from astLib import astWCS
import numpy as np
from scipy import ndimage, interpolate
import copy
from pixell import enmap
import nemo
from . import startUp
from . import filters
from . import photometry
from . import catalogs
from . import maps
from . import signals
from . import completeness
from . import MockSurvey
import nemoCython
#------------------------------------------------------------------------------------------------------------
def filterMapsAndMakeCatalogs(config, rootOutDir = None, copyFilters = False, measureFluxes = True,
                              invertMap = False, verbose = True, useCachedMaps = True):
    """Runs the map filtering and catalog construction steps according to the given configuration.

    Args:
        config (:obj: 'startup.NemoConfig'): Nemo configuration object.
        rootOutDir (str): If None, use the default given by config. Otherwise, use this to override where the
            output filtered maps and catalogs are written.
        copyFilters (bool, optional): If True, and rootOutDir is given (not None), then filters will be
            copied from the default output location (from a pre-existing nemo run) to the appropriate
            directory under rootOutDir. This is used by, e.g., contamination tests based on sky sims, where
            the same kernels as used on the real data are applied to simulated maps. If rootOutDir = None,
            setting copyKernels = True has no effect.
        measureFluxes (bool, optional): If True, measure fluxes. If False, just extract S/N values for
            detected objects.
        invertMap (bool, optional): If True, multiply all maps by -1; needed by
            :meth:maps.estimateContaminationFromInvertedMaps).
        verbose (bool, optional): Passed through to the single-pass pipeline; the two-pass branch always
            runs its internal passes with verbose = False.
        useCachedMaps (bool, optional): Passed through to the single-pass pipeline only.

    Returns:
        Optimal catalog (keeps the highest S/N detection when filtering at multiple scales).

    Note:
        See bin/nemo for how this pipeline is applied to real data, and maps.sourceInjectionTest
        for how this is applied to source-free sims that are generated on the fly.

    """
    if config.parDict['twoPass'] == False:
        # Single pass: delegate straight to the worker routine with all options forwarded.
        catalog=_filterMapsAndMakeCatalogs(config, rootOutDir = rootOutDir, copyFilters = copyFilters,
                                           measureFluxes = measureFluxes, invertMap = invertMap,
                                           verbose = verbose, useCachedMaps = useCachedMaps)
    else:
        # Two pass pipeline
        # On 1st pass, find sources (and maybe clusters) with canned settings, masking nothing.
        # On 2nd pass, the 1st pass catalog will be used to mask or subtract sources from maps used for
        # noise estimation only.
        # NOTE(review): rootOutDir, copyFilters, measureFluxes, invertMap and useCachedMaps are NOT
        # forwarded to the internal passes below - confirm that is intended.
        # No point doing this if we're not using the map itself for the noise term in the filter
        for f in config.parDict['mapFilters']:
            for key in f.keys():
                if key == 'noiseParams' and f['noiseParams']['method'] != 'dataMap':
                    raise Exception("There is no point running if filter noise method != 'dataMap'.")
        # Pass 1 - find point sources, save nothing
        # NOTE: We need to do this for each map in the list, if we have a multi-frequency filter
        pass1PtSrcSettings={'label': "Beam",
                            'class': "BeamMatchedFilter",
                            'params': {'noiseParams': {'method': "model",
                                                       'noiseGridArcmin': 40.0,
                                                       'numNoiseBins': 2},
                                       'saveFilteredMaps': False,
                                       'outputUnits': 'uK',
                                       'edgeTrimArcmin': 0.0}}
        config.parDict['mapFilters']=[pass1PtSrcSettings]
        config.parDict['photFilter']=None
        config.parDict['maskPointSourcesFromCatalog']=[] # This is only applied on the 2nd pass
        config.parDict['measureShapes']=True # Double-lobed extended source at f090 causes havoc in one tile
        orig_unfilteredMapsDictList=list(config.unfilteredMapsDictList)
        config.parDict['forcedPhotometryCatalog']=None # If in this mode, only wanted on 2nd pass
        pass1CatalogsList=[]
        surveyMasksList=[] # ok, these should all be the same, otherwise we have problems...
        for mapDict in orig_unfilteredMapsDictList:
            # We use whole tile area (i.e., don't trim overlaps) so that we get everything if under MPI
            # Otherwise, powerful sources in overlap regions mess things up under MPI
            # Serial mode doesn't have this issue as it can see the whole catalog over all tiles
            # But since we now use full area, we may double subtract ovelap sources when in serial mode
            # So the removeDuplicates call fixes that, and doesn't impact anything else here
            surveyMasksList.append(mapDict['surveyMask'])
            mapDict['surveyMask']=None
            config.unfilteredMapsDictList=[mapDict]
            catalog=_filterMapsAndMakeCatalogs(config, verbose = False, writeAreaMasks = False)
            if len(catalog) > 0 :
                catalog, numDuplicatesFound, names=catalogs.removeDuplicates(catalog)
            pass1CatalogsList.append(catalog)
        # Pass 2 - subtract point sources in the maps used for noise term in filter only
        # To avoid ringing in the pass 2, we siphon off the super bright things found in pass 1
        # We subtract those from the maps used in pass 2 - we then need to add them back at the end
        config.restoreConfig()
        config.parDict['measureShapes']=True # We'll keep this for pass 2 as well
        siphonSNR=50
        for mapDict, catalog, surveyMask in zip(orig_unfilteredMapsDictList, pass1CatalogsList, surveyMasksList):
            #catalogs.catalog2DS9(catalog[catalog['SNR'] > siphonSNR], config.diagnosticsDir+os.path.sep+"pass1_highSNR_siphoned.reg")
            mapDict['noiseMaskCatalog']=catalog[catalog['SNR'] < siphonSNR]
            mapDict['subtractPointSourcesFromCatalog']=[catalog[catalog['SNR'] > siphonSNR]]
            mapDict['maskSubtractedPointSources']=True
            mapDict['surveyMask']=surveyMask
        config.unfilteredMapsDictList=orig_unfilteredMapsDictList
        catalog=_filterMapsAndMakeCatalogs(config, verbose = False)
        # Merge back in the bright sources that were subtracted in pass 1
        # (but we don't do that in forced photometry mode)
        mergeList=[catalog]
        if config.parDict['forcedPhotometryCatalog'] is None:
            for pass1Catalog in pass1CatalogsList:
                mergeList.append(pass1Catalog[pass1Catalog['SNR'] > siphonSNR])
        catalog=atpy.vstack(mergeList)
    return catalog
#------------------------------------------------------------------------------------------------------------
def _filterMapsAndMakeCatalogs(config, rootOutDir = None, copyFilters = False, measureFluxes = True,
                               invertMap = False, verbose = True, useCachedMaps = True,
                               writeAreaMasks = True):
    """Runs the map filtering and catalog construction steps according to the given configuration.

    Args:
        config (:obj: 'startup.NemoConfig'): Nemo configuration object.
        rootOutDir (str): If None, use the default given by config. Otherwise, use this to override where the
            output filtered maps and catalogs are written.
        copyFilters (bool, optional): If True, and rootOutDir is given (not None), then filters will be
            copied from the default output location (from a pre-existing nemo run) to the appropriate
            directory under rootOutDir. This is used by, e.g., contamination tests based on sky sims, where
            the same kernels as used on the real data are applied to simulated maps. If rootOutDir = None,
            setting copyKernels = True has no effect.
        measureFluxes (bool, optional): If True, measure fluxes. If False, just extract S/N values for
            detected objects.
        invertMap (bool, optional): If True, multiply all maps by -1; needed by
            :meth:maps.estimateContaminationFromInvertedMaps).
        verbose (bool, optional): If True, print progress messages per tile.
        useCachedMaps (bool, optional): Passed through to filters.filterMaps; allows re-use of
            previously cached preprocessed maps.
        writeAreaMasks (bool, optional): If True, write per-tile survey area masks under selFnDir
            (skipped when the files already exist).

    Returns:
        Optimal catalog (keeps the highest S/N detection when filtering at multiple scales).

    Note:
        See bin/nemo for how this pipeline is applied to real data, and maps.sourceInjectionTest
        for how this is applied to source-free sims that are generated on the fly.

    """
    # If running on sims (source-free or with injected sources), this ensures we use the same kernels for
    # filtering the sim maps as was used on the real data, by copying kernels to the sims dir. The kernels
    # will then be loaded automatically when filterMaps is called. Yes, this is a bit clunky...
    if rootOutDir is not None:
        filteredMapsDir=rootOutDir+os.path.sep+"filteredMaps"
        diagnosticsDir=rootOutDir+os.path.sep+"diagnostics"
        dirList=[rootOutDir, filteredMapsDir, diagnosticsDir]
        for d in dirList:
            os.makedirs(d, exist_ok = True)
        if copyFilters == True:
            for tileName in config.tileNames:
                fileNames=glob.glob(config.diagnosticsDir+os.path.sep+tileName+os.path.sep+"filter*#%s*.fits" % (tileName))
                if len(fileNames) == 0:
                    raise Exception("Could not find pre-computed filters to copy - you need to add 'saveFilter: True' to the filter params in the config file (this is essential for doing source injection sims quickly).")
                kernelCopyDestDir=diagnosticsDir+os.path.sep+tileName
                os.makedirs(kernelCopyDestDir, exist_ok = True)
                for f in fileNames:
                    dest=kernelCopyDestDir+os.path.sep+os.path.split(f)[-1]
                    # Only copy once - filters don't change between runs on the same data
                    if os.path.exists(dest) == False:
                        shutil.copyfile(f, dest)
                        print("... copied filter %s to %s ..." % (f, dest))
    else:
        rootOutDir=config.rootOutDir
        filteredMapsDir=config.filteredMapsDir
        diagnosticsDir=config.diagnosticsDir
    # We re-sort the filters list here - in case we have photFilter defined
    # The photFilter (reference scale) must be run first, so its maps are available for fixed_ columns
    photFilter=config.parDict['photFilter']
    filtersList=[]
    if photFilter is not None:
        for f in config.parDict['mapFilters']:
            if f['label'] == photFilter:
                filtersList.append(f)
    for f in config.parDict['mapFilters']:
        if photFilter is not None:
            if f['label'] == photFilter:
                continue
        filtersList.append(f)
    if photFilter is not None:
        assert(filtersList[0]['label'] == photFilter)
    photFilteredMapDict=None
    # Make filtered maps for each filter and tile
    catalogDict={}
    for tileName in config.tileNames:
        # Now have per-tile directories (friendlier for Lustre)
        tileFilteredMapsDir=filteredMapsDir+os.path.sep+tileName
        tileDiagnosticsDir=diagnosticsDir+os.path.sep+tileName
        for d in [tileFilteredMapsDir, tileDiagnosticsDir]:
            os.makedirs(d, exist_ok = True)
        if verbose == True: print(">>> Making filtered maps - tileName = %s ..." % (tileName))
        # We could load the unfiltered map only once here?
        # We could also cache 'dataMap' noise as it will always be the same
        for f in filtersList:
            # Catalogs are keyed by filter label and tile, e.g. "Arnaud_M2e14_z0p4#1_0_0"
            label=f['label']+"#"+tileName
            catalogDict[label]={}
            if 'saveDS9Regions' in f['params'] and f['params']['saveDS9Regions'] == True:
                # NOTE(review): this uses config.filteredMapsDir rather than the (possibly
                # rootOutDir-overridden) tileFilteredMapsDir - confirm this is intended
                DS9RegionsPath=config.filteredMapsDir+os.path.sep+tileName+os.path.sep+"%s_filteredMap.reg" % (label)
            else:
                DS9RegionsPath=None
            filteredMapDict=filters.filterMaps(config.unfilteredMapsDictList, f, tileName,
                                               filteredMapsDir = tileFilteredMapsDir,
                                               diagnosticsDir = tileDiagnosticsDir, selFnDir = config.selFnDir,
                                               verbose = True, undoPixelWindow = True,
                                               useCachedMaps = useCachedMaps)
            # Keep the reference-scale filtered map for the fixed_ flux measurements below
            if f['label'] == photFilter:
                photFilteredMapDict={}
                photFilteredMapDict['SNMap']=filteredMapDict['SNMap']
                photFilteredMapDict['data']=filteredMapDict['data']
            # Forced photometry on user-supplied list of objects, or detect sources
            if 'forcedPhotometryCatalog' in config.parDict.keys() and config.parDict['forcedPhotometryCatalog'] is not None:
                catalog=photometry.makeForcedPhotometryCatalog(filteredMapDict,
                                                               config.parDict['forcedPhotometryCatalog'],
                                                               useInterpolator = config.parDict['useInterpolator'],
                                                               DS9RegionsPath = DS9RegionsPath)
            else:
                # Normal mode
                catalog=photometry.findObjects(filteredMapDict, threshold = config.parDict['thresholdSigma'],
                                               minObjPix = config.parDict['minObjPix'],
                                               findCenterOfMass = config.parDict['findCenterOfMass'],
                                               removeRings = config.parDict['removeRings'],
                                               ringThresholdSigma = config.parDict['ringThresholdSigma'],
                                               rejectBorder = config.parDict['rejectBorder'],
                                               objIdent = config.parDict['objIdent'],
                                               longNames = config.parDict['longNames'],
                                               useInterpolator = config.parDict['useInterpolator'],
                                               measureShapes = config.parDict['measureShapes'],
                                               invertMap = invertMap,
                                               DS9RegionsPath = DS9RegionsPath)
            # We write area mask here, because it gets modified by findObjects if removing rings
            # NOTE: condition added to stop writing tile maps again when running nemoMass in forced photometry mode
            maskFileName=config.selFnDir+os.path.sep+"areaMask#%s.fits" % (tileName)
            surveyMask=np.array(filteredMapDict['surveyMask'], dtype = int)
            if writeAreaMasks == True:
                if os.path.exists(maskFileName) == False and os.path.exists(config.selFnDir+os.path.sep+"areaMask.fits") == False:
                    maps.saveFITS(maskFileName, surveyMask, filteredMapDict['wcs'], compressed = True,
                                  compressionType = 'PLIO_1')
            if measureFluxes == True:
                photometry.measureFluxes(catalog, filteredMapDict, config.diagnosticsDir,
                                         photFilteredMapDict = photFilteredMapDict,
                                         useInterpolator = config.parDict['useInterpolator'])
            else:
                # Get S/N only - if the reference (fixed) filter scale has been given
                # This is (probably) only used by maps.estimateContaminationFromInvertedMaps
                if photFilter is not None:
                    photometry.getSNRValues(catalog, photFilteredMapDict['SNMap'],
                                            filteredMapDict['wcs'], prefix = 'fixed_',
                                            useInterpolator = config.parDict['useInterpolator'],
                                            invertMap = invertMap)
            catalogDict[label]['catalog']=catalog
    # Merged/optimal catalogs
    optimalCatalog=catalogs.makeOptimalCatalog(catalogDict, constraintsList = config.parDict['catalogCuts'])
    return optimalCatalog
#------------------------------------------------------------------------------------------------------------
def makeSelFnCollection(config, mockSurvey):
    """Makes a collection of selection function dictionaries (one per footprint specified in selFnFootprints
    in the config file, plus the full survey mask), that contain information on noise levels, area covered,
    and completeness.

    Returns a dictionary (keys: 'full' - corresponding to whole survey, plus other keys named by footprint).

    """
    # The filter mismatch function Q varies from tile to tile
    qFit=signals.QFit(config)

    # Only the reference (photometric) filter matters here - it defines the fixed_ columns
    photFilterLabel=config.parDict['photFilter']
    selFnOptions=config.parDict['selFnOptions']
    massOptions=config.parDict['massOptions']

    # Completeness is calculated for this selection only
    snrCut=selFnOptions['fixedSNRCut']

    # Defaults for calcCompleteness options (not used by the default fast method anyway)
    selFnOptions.setdefault('numDraws', 2000000)
    selFnOptions.setdefault('numIterations', 100)

    # Stats can also be gathered in extra areas (e.g., inside optical survey footprints)
    footprints=list(config.parDict.get('selFnFootprints', []))

    # One entry per tile under 'full' (whole survey) and under each footprint label
    collection={'full': []}
    for footprintDict in footprints:
        collection.setdefault(footprintDict['label'], [])

    for tileName in config.tileNames:
        RMSTab=completeness.getRMSTab(tileName, photFilterLabel, config.selFnDir)
        compMz=completeness.calcCompleteness(RMSTab, snrCut, tileName, mockSurvey, massOptions, qFit,
                                             numDraws = selFnOptions['numDraws'],
                                             numIterations = selFnOptions['numIterations'],
                                             method = selFnOptions['method'])
        collection['full'].append({'tileName': tileName,
                                   'RMSTab': RMSTab,
                                   'tileAreaDeg2': RMSTab['areaDeg2'].sum(),
                                   'compMz': compMz})
        # Footprint intersection masks (e.g., with HSC) and RMS tables are cached on disk -
        # doing this here (in parallel) lets the output be assembled cheaply later
        for footprintDict in footprints:
            completeness.makeIntersectionMask(tileName, config.selFnDir, footprintDict['label'],
                                              masksList = footprintDict['maskList'])
            tileAreaDeg2=completeness.getTileTotalAreaDeg2(tileName, config.selFnDir,
                                                           footprintLabel = footprintDict['label'])
            if tileAreaDeg2 > 0:
                RMSTab=completeness.getRMSTab(tileName, photFilterLabel, config.selFnDir,
                                              footprintLabel = footprintDict['label'])
                compMz=completeness.calcCompleteness(RMSTab, snrCut, tileName, mockSurvey, massOptions, qFit,
                                                     numDraws = selFnOptions['numDraws'],
                                                     numIterations = selFnOptions['numIterations'],
                                                     method = selFnOptions['method'])
                collection[footprintDict['label']].append({'tileName': tileName,
                                                           'RMSTab': RMSTab,
                                                           'tileAreaDeg2': RMSTab['areaDeg2'].sum(),
                                                           'compMz': compMz})
        # Optional mass-limit maps
        if 'massLimitMaps' in selFnOptions:
            for massLimitDict in selFnOptions['massLimitMaps']:
                completeness.makeMassLimitMap(snrCut, massLimitDict['z'], tileName, photFilterLabel,
                                              mockSurvey, massOptions, qFit, config.diagnosticsDir,
                                              config.selFnDir)

    return collection
#------------------------------------------------------------------------------------------------------------
def makeMockClusterCatalog(config, numMocksToMake = 1, combineMocks = False, writeCatalogs = True,
                           writeInfo = True, verbose = True):
    """Generate a mock cluster catalog using the given nemo config.

    Args:
        config (:obj:`startup.NemoConfig`): Nemo configuration object (a completed nemo run is
            assumed: RMSMap_*.fits and RMSTab.fits must exist under config.selFnDir).
        numMocksToMake (int, optional): Number of independent mock catalogs to generate.
        combineMocks (bool, optional): If True, also write a single FITS table stacking all mocks.
        writeCatalogs (bool, optional): If True, write each mock as .csv, .fits and DS9 .reg files
            under config.mocksDir.
        writeInfo (bool, optional): If True, write mockParameters.txt recording the settings used.
        verbose (bool, optional): If True, print progress messages.

    Returns:
        List of catalogs (each is an astropy Table object)

    """
    # Having changed nemoMock interface, we may need to make output dir
    if os.path.exists(config.mocksDir) == False:
        os.makedirs(config.mocksDir, exist_ok = True)

    # Noise sources in mocks - all on by default
    applyPoissonScatter=config.parDict.get('applyPoissonScatter', True)
    applyIntrinsicScatter=config.parDict.get('applyIntrinsicScatter', True)
    applyNoiseScatter=config.parDict.get('applyNoiseScatter', True)
    if verbose: print(">>> Mock noise sources (Poisson, intrinsic, measurement noise) = (%s, %s, %s) ..." % (applyPoissonScatter, applyIntrinsicScatter, applyNoiseScatter))

    # Q varies across tiles
    Q=signals.QFit(config)

    # We only care about the filter used for fixed_ columns
    photFilterLabel=config.parDict['photFilter']

    # The same as was used for detecting objects
    thresholdSigma=config.parDict['thresholdSigma']

    # We need an assumed scaling relation for mock observations
    scalingRelationDict=config.parDict['massOptions']

    if verbose: print(">>> Setting up mock survey ...")
    # NOTE: Sanity check is possible here: area in RMSTab should equal area from areaMask.fits
    # If it isn't, there is a problem...
    # Also, we're skipping the individual tile-loading routines here for speed
    checkAreaConsistency=False
    wcsDict={}
    RMSMap=pyfits.open(config.selFnDir+os.path.sep+"RMSMap_%s.fits" % (photFilterLabel))
    RMSTab=atpy.Table().read(config.selFnDir+os.path.sep+"RMSTab.fits")
    totalAreaDeg2=0
    RMSMapDict={}
    areaDeg2Dict={}
    if checkAreaConsistency == True:
        areaMap=pyfits.open(config.selFnDir+os.path.sep+"areaMask.fits")
    t0=time.time()
    for tileName in config.tileNames:
        if tileName == 'PRIMARY':
            # Single-tile case: the data may live in the primary HDU, or in whichever
            # extension actually holds an image
            if tileName in RMSMap:
                extName=tileName
                data=RMSMap[extName].data
            else:
                data=None
            if data is None:
                # Fall back to the first HDU that contains data. NOTE: iterating an
                # HDUList yields HDU objects (not extension names), so we must not
                # index RMSMap with the loop variable - the previous code did, and
                # would have raised whenever this branch was reached.
                for hdu in RMSMap:
                    if hdu.data is not None:
                        extName=hdu.name
                        data=hdu.data
                        break
            RMSMapDict[tileName]=RMSMap[extName].data
            wcsDict[tileName]=astWCS.WCS(RMSMap[extName].header, mode = 'pyfits')
        else:
            RMSMapDict[tileName]=RMSMap[tileName].data
            wcsDict[tileName]=astWCS.WCS(RMSMap[tileName].header, mode = 'pyfits')
        # Area from RMS table
        areaDeg2=RMSTab[RMSTab['tileName'] == tileName]['areaDeg2'].sum()
        areaDeg2Dict[tileName]=areaDeg2
        totalAreaDeg2=totalAreaDeg2+areaDeg2
        # Area from map (slower)
        if checkAreaConsistency == True:
            areaMask, wcsDict[tileName]=completeness.loadAreaMask(tileName, config.selFnDir)
            areaMask=areaMap[tileName].data
            map_areaDeg2=(areaMask*maps.getPixelAreaArcmin2Map(areaMask.shape, wcsDict[tileName])).sum()/(60**2)
            if abs(map_areaDeg2-areaDeg2) > 1e-4:
                raise Exception("Area from areaMask.fits doesn't agree with area from RMSTab.fits")
    RMSMap.close()
    if checkAreaConsistency == True:
        areaMap.close()
    t1=time.time()
    if verbose: print("... took %.3f sec ..." % (t1-t0))

    # Useful for testing: fix the random seed, if given in the config
    seed=config.parDict.get('seed', None)
    if seed is not None:
        np.random.seed(seed)

    # We're now using one MockSurvey object for the whole survey
    massOptions=config.parDict['massOptions']
    minMass=5e13
    zMin=0.0
    zMax=2.0
    # Fill in cosmology defaults for anything not given in the config
    defCosmo={'H0': 70.0, 'Om0': 0.30, 'Ob0': 0.05, 'sigma8': 0.80, 'ns': 0.95, 'delta': 500, 'rhoType': 'critical'}
    for key in defCosmo:
        if key not in massOptions.keys():
            massOptions[key]=defCosmo[key]
    H0=massOptions['H0']
    Om0=massOptions['Om0']
    Ob0=massOptions['Ob0']
    sigma8=massOptions['sigma8']
    ns=massOptions['ns']
    delta=massOptions['delta']
    rhoType=massOptions['rhoType']
    mockSurvey=MockSurvey.MockSurvey(minMass, totalAreaDeg2, zMin, zMax, H0, Om0, Ob0, sigma8, ns,
                                     delta = delta, rhoType = rhoType, enableDrawSample = True)
    print("... mock survey parameters:")
    for key in defCosmo.keys():
        print("    %s = %s" % (key, str(massOptions[key])))
    for key in ['tenToA0', 'B0', 'Mpivot', 'sigma_int']:
        print("    %s = %s" % (key, str(scalingRelationDict[key])))
    print("    total area = %.1f square degrees" % (totalAreaDeg2))
    print("    random seed = %s" % (str(seed)))

    if verbose: print(">>> Making mock catalogs ...")
    catList=[]
    for i in range(numMocksToMake):
        mockTabsList=[]
        t0=time.time()
        for tileName in config.tileNames:
            # It's possible (depending on tiling) that blank tiles were included - so skip
            # We may also have some tiles that are almost but not quite blank
            if RMSMapDict[tileName].sum() == 0 or areaDeg2Dict[tileName] < 0.5:
                continue
            mockTab=mockSurvey.drawSample(RMSMapDict[tileName], scalingRelationDict, Q, wcs = wcsDict[tileName],
                                          photFilterLabel = photFilterLabel, tileName = tileName, makeNames = True,
                                          SNRLimit = thresholdSigma, applySNRCut = True,
                                          areaDeg2 = areaDeg2Dict[tileName],
                                          applyPoissonScatter = applyPoissonScatter,
                                          applyIntrinsicScatter = applyIntrinsicScatter,
                                          applyNoiseScatter = applyNoiseScatter)
            if mockTab is not None:
                mockTabsList.append(mockTab)
        # NOTE(review): if every tile was blank, mockTabsList is empty and vstack will raise
        tab=atpy.vstack(mockTabsList)
        catList.append(tab)
        t1=time.time()
        if verbose: print("... making mock catalog %d took %.3f sec ..." % (i+1, t1-t0))
        # Write catalog and .reg file
        if writeCatalogs == True:
            mockCatalogFileName=config.mocksDir+os.path.sep+"mockCatalog_%d.csv" % (i+1)
            catalogs.writeCatalog(tab, mockCatalogFileName)
            catalogs.writeCatalog(tab, mockCatalogFileName.replace(".csv", ".fits"))
            addInfo=[{'key': 'fixed_SNR', 'fmt': '%.1f'}]
            catalogs.catalog2DS9(tab, mockCatalogFileName.replace(".csv", ".reg"), constraintsList = [],
                                 addInfo = addInfo, color = "cyan")
    if combineMocks == True:
        tab=None
        for i in range(numMocksToMake):
            mockCatalogFileName=config.mocksDir+os.path.sep+"mockCatalog_%d.fits" % (i+1)
            stackTab=atpy.Table().read(mockCatalogFileName)
            # Must be an identity test: '== None' on an astropy Table triggers
            # element-wise comparison rather than a None check
            if tab is None:
                tab=stackTab
            else:
                tab=atpy.vstack([tab, stackTab])
        outFileName=config.mocksDir+os.path.sep+"mockCatalog_combined.fits"
        tab.meta['NEMOVER']=nemo.__version__
        tab.write(outFileName, overwrite = True)

    # Write a small text file with the parameters used to generate the mocks into the mocks dir (easier than using headers)
    if writeInfo == True:
        mockKeys=['massOptions', 'makeMockCatalogs', 'applyPoissonScatter', 'applyIntrinsicScatter', 'applyNoiseScatter']
        with open(config.mocksDir+os.path.sep+"mockParameters.txt", "w") as outFile:
            for m in mockKeys:
                if m in config.parDict.keys():
                    outFile.write("%s: %s\n" % (m, config.parDict[m]))

    return catList
#------------------------------------------------------------------------------------------------------------
def extractSpec(config, tab, method = 'CAP', diskRadiusArcmin = 4.0, highPassFilter = False,
                estimateErrors = True, saveFilteredMaps = False):
    """Returns a table containing the spectral energy distribution, extracted using either compensated
    aperture photometry (CAP) at each object location in the input catalog, or using a matched filter.
    Maps at different frequencies will first be matched to the lowest resolution beam, using a Gaussian
    kernel.

    For the CAP method, at each object location, the temperature fluctuation is measured within a disk of
    radius diskRadiusArcmin, after subtracting the background measured in an annulus between
    diskRadiusArcmin < r < sqrt(2) * diskRadiusArcmin (i.e., this should be similar to the method
    described in Schaan et al. 2020).

    For the matched filter method, the catalog must contain a `template` column, as produced by the main
    `nemo` script, with template names in the format Arnaud_M2e14_z0p4 (for example). This will be used to
    set the signal scale used for each object. All definitions of filters in the config will be ignored,
    in favour of a filter using a simple CMB + white noise model. Identical filters will be used for all
    maps (i.e., the method of Saro et al. 2014).

    Args:
        config (:obj:`startup.NemoConfig`): Nemo configuration object.
        tab (:obj:`astropy.table.Table`): Catalog containing input object positions. Must contain columns
            'name', 'RADeg', 'decDeg'.
        method (str, optional): Either 'CAP' or 'matchedFilter' (see above); anything else raises.
        diskRadiusArcmin (float, optional): If using CAP method: disk aperture radius in arcmin, within
            which the signal is measured. The background will be estimated in an annulus between
            diskRadiusArcmin < r < sqrt(2) * diskRadiusArcmin.
        highPassFilter (bool, optional): If using CAP method: if set, subtract the large scale
            background using maps.subtractBackground, with the smoothing scale set to
            2 * sqrt(2) * diskRadiusArcmin.
        estimateErrors (bool, optional): If used CAP method: if set, estimate uncertainties by placing
            random apertures throughout the map. For now, this is done on a tile-by-tile basis, and
            doesn't take into account inhomogeneous noise within a tile.
        saveFilteredMaps (bool, optional): If using matchedFilter method: save the filtered maps under
            the `nemoSpecCache` directory (which is created in the current working directory, if it
            doesn't already exist).

    Returns:
        Catalog containing spectral energy distribution measurements for each object.
        For the CAP method, units of extracted signals are uK arcmin^2.
        For the matchedFilter method, extracted signals are deltaT CMB amplitude in uK.

    """
    diagnosticsDir=config.diagnosticsDir

    # Choose lowest resolution as the reference beam - we match to that
    refBeam=None
    refFWHMArcmin=0
    refIndex=0
    beams=[]
    for i in range(len(config.unfilteredMapsDictList)):
        mapDict=config.unfilteredMapsDictList[i]
        beam=signals.BeamProfile(mapDict['beamFileName'])
        if beam.FWHMArcmin > refFWHMArcmin:
            refBeam=beam
            refFWHMArcmin=beam.FWHMArcmin
            refIndex=i
        beams.append(beam)

    # Sort the list of beams and maps so that the one with the reference beam is in index 0
    # NOTE: this mutates config.unfilteredMapsDictList in place
    config.unfilteredMapsDictList.insert(0, config.unfilteredMapsDictList.pop(refIndex))
    beams.insert(0, beams.pop(refIndex))

    # Figure out how much we need to Gaussian blur to match the reference beam
    # NOTE: This was an alternative to proper PSF-matching that wasn't good enough for ACT beams
    #for i in range(1, len(config.unfilteredMapsDictList)):
        #mapDict=config.unfilteredMapsDictList[i]
        #beam=beams[i]
        #degPerPix=np.mean(np.diff(beam.rDeg))
        #assert(abs(np.diff(beam.rDeg).max()-degPerPix) < 0.001)
        #resMin=1e6
        #smoothPix=0
        #attFactor=1.0
        #for j in range(1, 100):
            #smoothProf=ndimage.gaussian_filter1d(beam.profile1d, j)
            #smoothProf=smoothProf/smoothProf.max()
            #res=np.sum(np.power(refBeam.profile1d-smoothProf, 2))
            #if res < resMin:
                #resMin=res
                #smoothPix=j
                #attFactor=1/smoothProf.max()
        #smoothScaleDeg=smoothPix*degPerPix
        #mapDict['smoothScaleDeg']=smoothScaleDeg
        #mapDict['smoothAttenuationFactor']=1/ndimage.gaussian_filter1d(beam.profile1d, smoothPix).max()

    # For testing on CMB maps here
    refMapDict=config.unfilteredMapsDictList[0]

    # PSF matching via a convolution kernel
    kernelDict={}   # keys: tile, obsFreqGHz
    for tileName in config.tileNames:
        if tileName not in kernelDict.keys():
            kernelDict[tileName]={}
        # Index 0 is the reference map, so it needs no kernel - start from 1
        for i in range(1, len(config.unfilteredMapsDictList)):
            mapDict=config.unfilteredMapsDictList[i]
            beam=beams[i]
            degPerPix=np.mean(np.diff(beam.rDeg))
            assert(abs(np.diff(beam.rDeg).max()-degPerPix) < 0.001)
            # Calculate convolution kernel
            # Build a symmetric 1d profile so the FFT-based deconvolution is well-defined
            sizePix=beam.profile1d.shape[0]*2
            if sizePix % 2 == 0:
                sizePix=sizePix+1
            symRDeg=np.linspace(-0.5, 0.5, sizePix)
            assert((symRDeg == 0).sum())
            symProf=interpolate.splev(abs(symRDeg), beam.tck)
            symRefProf=interpolate.splev(abs(symRDeg), refBeam.tck)
            # Kernel = ref beam / this beam in Fourier space, with small modes zeroed
            fSymRef=np.fft.fft(np.fft.fftshift(symRefProf))
            fSymBeam=np.fft.fft(np.fft.fftshift(symProf))
            fSymConv=fSymRef/fSymBeam
            fSymConv[fSymBeam < 1e-1]=0 # Was 1e-2; this value avoids ringing, smaller values do not
            symMatched=np.fft.ifft(fSymBeam*fSymConv).real
            symConv=np.fft.ifft(fSymConv).real
            # This allows normalization in same way as Gaussian smooth method
            symConv=symConv/symConv.sum()
            convedProf=ndimage.convolve(symProf, np.fft.fftshift(symConv))
            attenuationFactor=1/convedProf.max() # norm
            # Make profile object
            peakIndex=np.argmax(np.fft.fftshift(symConv))
            convKernel=signals.BeamProfile(profile1d = np.fft.fftshift(symConv)[peakIndex:], rDeg = symRDeg[peakIndex:])
            ## Check plots
            #import pylab as plt
            #plt.figure(figsize=(10,8))
            #plt.plot(abs(symRDeg*60), symRefProf, label = 'ref', lw = 3)
            #plt.plot(abs(symRDeg*60), convedProf*attenuationFactor, label = 'kernel convolved')
            #integralRatio=np.trapz(symRefProf)/np.trapz(convedProf*attenuationFactor)
            #plt.title("%.3f" % (integralRatio))
            #plt.semilogy()
            #plt.legend()
            #ratio=(convedProf*attenuationFactor)/symRefProf
            #plt.figure(figsize=(10,8))
            #plt.plot(abs(symRDeg*60), ratio, label = 'ratio')
            #plt.plot(abs(symRDeg*60), [1.0]*len(symRDeg), 'r-')
            #plt.legend()

            # Fudging 2d kernel to match (fix properly later)
            # NOTE: Now done at higher res but doesn't make much difference
            # (but DOES blow up in some tiles if you use e.g. have the resolution)
            # A small WCS at the beam profile's resolution, used only to build 2d beam images
            wcs=astWCS.WCS(config.tileCoordsDict[tileName]['header'], mode = 'pyfits').copy()
            wcs.header['CDELT1']=np.diff(refBeam.rDeg)[0]
            wcs.header['CDELT2']=np.diff(refBeam.rDeg)[0]
            wcs.header['NAXIS1']=int(np.ceil(2*refBeam.rDeg.max()/wcs.header['CDELT1']))
            wcs.header['NAXIS2']=int(np.ceil(2*refBeam.rDeg.max()/wcs.header['CDELT2']))
            wcs.updateFromHeader()
            shape=(wcs.header['NAXIS2'], wcs.header['NAXIS1'])
            degreesMap=np.ones([shape[0], shape[1]], dtype = float)*1e6
            RADeg, decDeg=wcs.pix2wcs(int(degreesMap.shape[1]/2), int(degreesMap.shape[0]/2))
            degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs, RADeg, decDeg, 1.0)
            beamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, beam, amplitude = None)
            refBeamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, refBeam, amplitude = None)
            matchedBeamMap=maps.convolveMapWithBeam(beamMap*attenuationFactor, wcs, convKernel, maxDistDegrees = 1.0)

            # Find and apply radial fudge factor
            # Ratio of the reference beam to the kernel-matched beam along a row through the peak
            yRow=np.where(refBeamMap == refBeamMap.max())[0][0]
            rowValid=np.logical_and(degreesMap[yRow] < refBeam.rDeg.max(), matchedBeamMap[yRow] != 0)
            ratio=refBeamMap[yRow][rowValid]/matchedBeamMap[yRow][rowValid]
            zeroIndex=np.argmin(degreesMap[yRow][rowValid])
            assert(degreesMap[yRow][rowValid][zeroIndex] == 0)
            tck=interpolate.splrep(degreesMap[yRow][rowValid][zeroIndex:], ratio[zeroIndex:])
            fudge=interpolate.splev(convKernel.rDeg, tck)
            #fudge[fudge < 0.5]=1.0
            #fudge[fudge > 1.5]=1.0
            fudgeKernel=signals.BeamProfile(profile1d = convKernel.profile1d*fudge, rDeg = convKernel.rDeg)
            ## Check plot
            #import pylab as plt
            #plt.figure(figsize=(10,8))
            #plt.plot(convKernel.rDeg, fudge, lw = 3, label = 'fudge')
            #plt.plot(convKernel.rDeg, [1.0]*len(fudge), 'r-')
            #plt.title("fudge")
            ##plt.ylim(0, 2)
            #plt.legend()
            #plt.show()

            # 2nd fudge factor - match integrals of 2d kernels
            fudgeMatchedBeamMap=maps.convolveMapWithBeam(beamMap*attenuationFactor, wcs, fudgeKernel, maxDistDegrees = 1.0)
            attenuationFactor=refBeamMap.sum()/fudgeMatchedBeamMap.sum()

            # Check at map pixelization that is actually used
            #shape=(config.tileCoordsDict[tileName]['header']['NAXIS2'],
                   #config.tileCoordsDict[tileName]['header']['NAXIS1'])
            #wcs=astWCS.WCS(config.tileCoordsDict[tileName]['header'], mode = 'pyfits').copy()
            #degreesMap=np.ones([shape[0], shape[1]], dtype = float)*1e6
            #RADeg, decDeg=wcs.pix2wcs(int(degreesMap.shape[1]/2), int(degreesMap.shape[0]/2))
            #degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs, RADeg, decDeg, 1.0)
            #beamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, beam, amplitude = None)
            #refBeamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, refBeam, amplitude = None)
            #fudgeMatchedBeamMap=maps.convolveMapWithBeam(beamMap*attenuationFactor, wcs, fudgeKernel, maxDistDegrees = 1.0)
            ## Check plot
            #import pylab as plt
            #yRow=np.where(refBeamMap == refBeamMap.max())[0][0]
            #rowValid=np.logical_and(degreesMap[yRow] < refBeam.rDeg.max(), fudgeMatchedBeamMap[yRow] != 0)
            #plt.figure(figsize=(10,8))
            #plt.plot(degreesMap[yRow][rowValid]*60, refBeamMap[yRow][rowValid], lw = 3, label = 'ref')
            #plt.plot(degreesMap[yRow][rowValid]*60, fudgeMatchedBeamMap[yRow][rowValid], label = 'fudged')
            #integralRatio=np.trapz(fudgeMatchedBeamMap[yRow][rowValid])/np.trapz(refBeamMap[yRow][rowValid])
            #plt.title("native map res - %.3f" % (integralRatio))
            #plt.semilogy()
            #plt.ylim(1e-5)
            #plt.legend()
            #plt.show()
            #from astLib import astImages
            #astImages.saveFITS("ref.fits", refBeamMap, wcs)
            #astImages.saveFITS("fudgematched.fits", fudgeMatchedBeamMap, wcs)
            #astImages.saveFITS("diff.fits", refBeamMap-fudgeMatchedBeamMap, wcs)
            #import IPython
            #IPython.embed()
            #sys.exit()

            # NOTE: If we're NOT passing in 2d kernels, don't need to organise by tile
            kernelDict[tileName][mapDict['obsFreqGHz']]={'smoothKernel': fudgeKernel,
                                                         'smoothAttenuationFactor': attenuationFactor}

    if method == 'CAP':
        catalog=_extractSpecCAP(config, tab, kernelDict, diskRadiusArcmin = 4.0, highPassFilter = False,
                                estimateErrors = True)
    elif method == 'matchedFilter':
        catalog=_extractSpecMatchedFilter(config, tab, kernelDict, saveFilteredMaps = saveFilteredMaps)
    else:
        raise Exception("'method' should be 'CAP' or 'matchedFilter'")

    return catalog
#------------------------------------------------------------------------------------------------------------
def _extractSpecMatchedFilter(config, tab, kernelDict, saveFilteredMaps = False, noiseMethod = 'dataMap'):
    """See extractSpec.

    Filters each tile once per template at the reference frequency, then re-uses that filter on the
    other (PSF-matched) frequency maps, doing forced photometry at the catalog positions in each.
    Per-frequency measurements are added to the catalog as columns with a _<freqGHz> suffix.

    """
    cacheDir="nemoSpecCache"+os.path.sep+os.path.basename(config.rootOutDir)
    os.makedirs(cacheDir, exist_ok = True)

    # Build filter configs
    # A simple CMB + white noise matched filter; the config file's own filter definitions are ignored
    allFilters={'class': 'ArnaudModelMatchedFilter',
                'params': {'noiseParams': {'method': noiseMethod, 'noiseGridArcmin': 40.0},
                           'saveFilteredMaps': False,
                           'saveRMSMap': False,
                           'savePlots': False,
                           'saveDS9Regions': False,
                           'saveFilter': False,
                           'outputUnits': 'yc',
                           'edgeTrimArcmin': 0.0,
                           'GNFWParams': 'default'}}

    filtersList=[]
    # One filter per unique template, with M500 / z parsed from names like Arnaud_M2e14_z0p4
    templatesUsed=np.unique(tab['template']).tolist()
    for t in templatesUsed:
        newDict=copy.deepcopy(allFilters)
        M500MSun=float(t.split("_M")[-1].split("_")[0])
        z=float(t.split("_z")[-1].replace("p", "."))
        newDict['params']['M500MSun']=M500MSun
        newDict['params']['z']=z
        newDict['label']=t
        filtersList.append(newDict)

    # Filter and extract
    # NOTE: We assume index 0 of the unfiltered maps list is the reference for which the filter is made
    catalogList=[]
    for tileName in config.tileNames:
        print("... rank %d: tileName = %s ..." % (config.rank, tileName))
        diagnosticsDir=cacheDir+os.path.sep+tileName
        os.makedirs(diagnosticsDir, exist_ok = True)
        for f in filtersList:
            tempTileTab=None    # catalogs are organised by tile and template
            filterObj=None
            for mapDict in config.unfilteredMapsDictList:
                if tempTileTab is None:
                    # First pass: restrict the input catalog to this tile and template
                    shape=(config.tileCoordsDict[tileName]['header']['NAXIS2'],
                           config.tileCoordsDict[tileName]['header']['NAXIS1'])
                    wcs=astWCS.WCS(config.tileCoordsDict[tileName]['header'], mode = 'pyfits')
                    tempTileTab=catalogs.getCatalogWithinImage(tab, shape, wcs)
                    tempTileTab=tempTileTab[tempTileTab['template'] == f['label']]
                if tempTileTab is None or len(tempTileTab) == 0:
                    continue
                if mapDict['obsFreqGHz'] == config.unfilteredMapsDictList[0]['obsFreqGHz']:
                    # Reference frequency: build the filter here; it is re-used below
                    # (assumes the reference map comes first in unfilteredMapsDictList)
                    filteredMapDict, filterObj=filters.filterMaps([mapDict], f, tileName,
                                                                  filteredMapsDir = cacheDir,
                                                                  diagnosticsDir = diagnosticsDir,
                                                                  selFnDir = cacheDir,
                                                                  verbose = True,
                                                                  undoPixelWindow = True,
                                                                  returnFilter = True)
                else:
                    # Other frequencies: PSF-match to the reference beam, then apply the same filter
                    mapDict['smoothKernel']=kernelDict[tileName][mapDict['obsFreqGHz']]['smoothKernel']
                    mapDict['smoothAttenuationFactor']=kernelDict[tileName][mapDict['obsFreqGHz']]['smoothAttenuationFactor']
                    mapDictToFilter=maps.preprocessMapDict(mapDict.copy(), tileName = tileName)
                    filteredMapDict['data']=filterObj.applyFilter(mapDictToFilter['data'])
                    RMSMap=filterObj.makeNoiseMap(filteredMapDict['data'])
                    filteredMapDict['SNMap']=np.zeros(filterObj.shape)
                    mask=np.greater(filteredMapDict['surveyMask'], 0)
                    filteredMapDict['SNMap'][mask]=filteredMapDict['data'][mask]/RMSMap[mask]
                    # Undo the map pixel window (the reference branch does this inside filterMaps)
                    filteredMapDict['data']=enmap.apply_window(filteredMapDict['data'], pow=-1.0)
                if saveFilteredMaps == True:
                    outFileName=cacheDir+os.path.sep+'%d_' % (mapDict['obsFreqGHz'])+f['label']+'#'+tileName+'.fits'
                    # Add conversion to delta T in here?
                    maps.saveFITS(outFileName, filteredMapDict['data'], filteredMapDict['wcs'])
                freqTileTab=photometry.makeForcedPhotometryCatalog(filteredMapDict,
                                                                   tempTileTab,
                                                                   useInterpolator = config.parDict['useInterpolator'])
                photometry.measureFluxes(freqTileTab, filteredMapDict, cacheDir,
                                         useInterpolator = config.parDict['useInterpolator'],
                                         ycObsFreqGHz = mapDict['obsFreqGHz'])
                # We don't take tileName from the catalog, some objects in overlap areas may only get cut here
                if len(freqTileTab) == 0:
                    tempTileTab=None
                    continue
                # Copy the per-frequency measurements into suffixed columns of the tile catalog
                tempTileTab, freqTileTab, rDeg=catalogs.crossMatch(tempTileTab, freqTileTab, radiusArcmin = 2.5)
                colNames=['deltaT_c', 'y_c', 'SNR']
                suff='_%d' % (mapDict['obsFreqGHz'])
                for colName in colNames:
                    tempTileTab[colName+suff]=freqTileTab[colName]
                    if 'err_'+colName in freqTileTab.keys():
                        tempTileTab['err_'+colName+suff]=freqTileTab['err_'+colName]
            if tempTileTab is not None and len(tempTileTab) > 0:
                catalogList.append(tempTileTab)

    # NOTE: returns an empty list (not an empty Table) when nothing was extracted
    if len(catalogList) > 0:
        catalog=atpy.vstack(catalogList)
    else:
        catalog=[]

    return catalog
#------------------------------------------------------------------------------------------------------------
def _extractSpecCAP(config, tab, kernelDict, method = 'CAP', diskRadiusArcmin = 4.0, highPassFilter = False,
                    estimateErrors = True):
    """See extractSpec.

    Compensated aperture photometry (CAP): for each object in ``tab`` and
    each frequency map, sums data*pixelArea within a disk of radius
    ``diskRadiusArcmin`` and subtracts the same sum over the surrounding
    annulus out to ``diskRadiusArcmin*sqrt(2)`` (equal-area ring). Writes
    per-frequency ``diskT_uKArcmin2_<freq>`` columns; if ``estimateErrors``
    is True, also fills ``err_`` and ``diskSNR_`` columns using the scatter
    of the same measurement at 1000 random positions per tile.

    NOTE(review): the ``method`` and ``kernelDict`` parameters are unused in
    this function as written - presumably kept for signature compatibility
    with the matched-filter variant; confirm before relying on them.
    """
    # Disk plus compensating annulus; outer radius = sqrt(2) * inner makes
    # the annulus area equal to the disk area (Schaan et al. style CAP filter).
    innerRadiusArcmin=diskRadiusArcmin
    outerRadiusArcmin=diskRadiusArcmin*np.sqrt(2)
    catalogList=[]
    for tileName in config.tileNames:
        # This loads the maps, applies any masks, and smooths to approx. same scale
        mapDictList=[]
        freqLabels=[]
        for mapDict in config.unfilteredMapsDictList:
            mapDict=maps.preprocessMapDict(mapDict.copy(), tileName = tileName)
            if highPassFilter == True:
                # Remove large-scale background on scales > 2x the outer aperture
                mapDict['data']=maps.subtractBackground(mapDict['data'], mapDict['wcs'],
                                                        smoothScaleDeg = (2*outerRadiusArcmin)/60)
            freqLabels.append(int(round(mapDict['obsFreqGHz'])))
            mapDictList.append(mapDict)
        # Geometry taken from the last map in the loop - assumes all maps in
        # a tile share the same WCS and pixelization (TODO confirm).
        wcs=mapDict['wcs']
        shape=mapDict['data'].shape
        # Extract spectra
        pixAreaMap=maps.getPixelAreaArcmin2Map(shape, wcs)
        maxSizeDeg=(outerRadiusArcmin*1.2)/60
        tileTab=catalogs.getCatalogWithinImage(tab, shape, wcs)
        # Pre-create the per-frequency output columns
        for label in freqLabels:
            tileTab['diskT_uKArcmin2_%s' % (label)]=np.zeros(len(tileTab))
            tileTab['err_diskT_uKArcmin2_%s' % (label)]=np.zeros(len(tileTab))
            tileTab['diskSNR_%s' % (label)]=np.zeros(len(tileTab))
        for row in tileTab:
            degreesMap=np.ones(shape, dtype = float)*1e6 # NOTE: never move this
            degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs,
                                                                           row['RADeg'], row['decDeg'],
                                                                           maxSizeDeg)
            innerMask=degreesMap < innerRadiusArcmin/60
            outerMask=np.logical_and(degreesMap >= innerRadiusArcmin/60, degreesMap < outerRadiusArcmin/60)
            for mapDict, label in zip(mapDictList, freqLabels):
                d=mapDict['data']
                # Disk sum minus annulus sum, weighted by pixel area
                diskFlux=(d[innerMask]*pixAreaMap[innerMask]).sum()-(d[outerMask]*pixAreaMap[outerMask]).sum()
                row['diskT_uKArcmin2_%s' % (label)]=diskFlux
        # Estimate noise in every measurement (on average) from spatting down on random positions
        # This will break if noise is inhomogeneous though. But at least it's done separately for each tile.
        # We can later add something that scales / fits using the weight map?
        if estimateErrors == True:
            randTab=catalogs.generateRandomSourcesCatalog(mapDict['surveyMask'], wcs, 1000)
            for label in freqLabels:
                randTab['diskT_uKArcmin2_%s' % (label)]=np.zeros(len(randTab))
            # Repeat the identical CAP measurement at the random positions
            for row in randTab:
                degreesMap=np.ones(shape, dtype = float)*1e6 # NOTE: never move this
                degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs,
                                                                               row['RADeg'], row['decDeg'],
                                                                               maxSizeDeg)
                innerMask=degreesMap < innerRadiusArcmin/60
                outerMask=np.logical_and(degreesMap >= innerRadiusArcmin/60, degreesMap < outerRadiusArcmin/60)
                for mapDict, label in zip(mapDictList, freqLabels):
                    d=mapDict['data']
                    diskFlux=(d[innerMask]*pixAreaMap[innerMask]).sum()-(d[outerMask]*pixAreaMap[outerMask]).sum()
                    row['diskT_uKArcmin2_%s' % (label)]=diskFlux
            noiseLevels={}
            for label in freqLabels:
                # Flip the SNR sign where the SZ spectral response is negative
                # (decrement frequencies), so detections come out positive.
                if signals.fSZ(float(label)) < 0:
                    SNRSign=-1
                else:
                    SNRSign=1
                # 68.3rd percentile of |flux| at random positions ~ 1-sigma noise
                noiseLevels[label]=np.percentile(abs(randTab['diskT_uKArcmin2_%s' % (label)]), 68.3)
                tileTab['err_diskT_uKArcmin2_%s' % (label)]=noiseLevels[label]
                tileTab['diskSNR_%s' % (label)]=SNRSign*(tileTab['diskT_uKArcmin2_%s' % (label)]/noiseLevels[label])
        catalogList.append(tileTab)
    catalog=atpy.vstack(catalogList)
    return catalog
| 54.941788 | 220 | 0.592235 |
828f0e49b2ff5b08550d07840cd6144c5f6a6f99 | 5,026 | py | Python | pypkg-gen.py | GameMaker2k/Neo-Hockey-Test | 5737bfedf0d83f69964e85ac1dbf7e6a93c13f44 | [
"BSD-3-Clause"
] | 1 | 2020-04-04T10:25:42.000Z | 2020-04-04T10:25:42.000Z | pypkg-gen.py | GameMaker2k/Neo-Hockey-Test | 5737bfedf0d83f69964e85ac1dbf7e6a93c13f44 | [
"BSD-3-Clause"
] | null | null | null | pypkg-gen.py | GameMaker2k/Neo-Hockey-Test | 5737bfedf0d83f69964e85ac1dbf7e6a93c13f44 | [
"BSD-3-Clause"
] | 3 | 2021-09-07T08:44:33.000Z | 2021-12-07T23:49:39.000Z | #!/usr/bin/env python
'''
This program is free software; you can redistribute it and/or modify
it under the terms of the Revised BSD License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Revised BSD License for more details.
Copyright 2011-2016 Game Maker 2k - https://github.com/GameMaker2k
Copyright 2011-2016 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
$FileInfo: pypkg-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $
'''
from __future__ import absolute_import, division, print_function, unicode_literals;
import re, os, sys, time, platform, datetime, argparse, subprocess;
__version_info__ = (0, 2, 0, "rc1");
if(__version_info__[3]!=None):
__version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])+"+"+str(__version_info__[3]);
if(__version_info__[3]==None):
__version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]);
proname = "pypkg-gen";
prover = __version__;
profullname = proname+" "+prover;
linuxdist = [None];
try:
linuxdist = platform.linux_distribution();
except AttributeError:
linuxdist = [None];
getlinuxdist = linuxdist;
setdistroname = "debian";
setdistrocname = "jessie";
if(getlinuxdist[0] is not None and (getlinuxdist[0].lower()=="debian" or getlinuxdist[0].lower()=="ubuntu" or getlinuxdist[0].lower()=="linuxmint")):
setdistroname = getlinuxdist[0].lower();
setdistrocname = getlinuxdist[2].lower();
if(setdistrocname==""):
lsblocatout = which_exec("lsb_release");
pylsblistp = subprocess.Popen([lsblocatout, "-c"], stdout=subprocess.PIPE, stderr=subprocess.PIPE);
pylsbout, pylsberr = pylsblistp.communicate();
if(sys.version[0]=="3"):
pylsbout = pylsbout.decode("utf-8");
pylsb_esc = re.escape("Codename:")+'([a-zA-Z\t+\s+]+)';
pylsbname = re.findall(pylsb_esc, pylsbout)[0].lower();
setdistrocname = pylsbname.strip();
if(getlinuxdist[0] is not None and getlinuxdist[0].lower()=="archlinux"):
setdistroname = getlinuxdist[0].lower();
setdistrocname = None;
parser = argparse.ArgumentParser(conflict_handler = "resolve", add_help = True);
parser.add_argument("-v", "--version", action = "version", version = profullname);
parser.add_argument("-s", "--source", default = os.path.realpath(os.getcwd()), help = "source dir");
parser.add_argument("-d", "--distro", default = setdistroname, help = "enter linux distribution name");
parser.add_argument("-c", "--codename", default = setdistrocname, help = "enter release code name");
parser.add_argument("-p", "--pyver", default = sys.version[0], help = "enter version of python to use");
getargs = parser.parse_args();
bashlocatout = which_exec("bash");
getargs.source = os.path.realpath(getargs.source);
getargs.codename = getargs.codename.lower();
getargs.distro = getargs.distro.lower();
if(getargs.pyver=="2"):
getpyver = "python2";
if(getargs.pyver=="3"):
getpyver = "python3";
if(getargs.pyver!="2" and getargs.pyver!="3"):
if(sys.version[0]=="2"):
getpyver = "python2";
if(sys.version[0]=="3"):
getpyver = "python3";
get_pkgbuild_dir = os.path.realpath(getargs.source+os.path.sep+"pkgbuild");
get_pkgbuild_dist_pre_list = [d for d in os.listdir(get_pkgbuild_dir) if os.path.isdir(os.path.join(get_pkgbuild_dir, d))];
get_pkgbuild_dist_list = [];
for dists in get_pkgbuild_dist_pre_list:
tmp_pkgbuild_python = os.path.realpath(get_pkgbuild_dir+os.path.sep+dists+os.path.sep+getpyver);
if(os.path.exists(tmp_pkgbuild_python) and os.path.isdir(tmp_pkgbuild_python)):
get_pkgbuild_dist_list.append(dists);
if(not getargs.distro in get_pkgbuild_dist_list):
print("Could not build for "+getargs.distro+" distro.");
sys.exit();
if(getargs.distro=="debian" or getargs.distro=="ubuntu" or getargs.distro=="linuxmint"):
pypkgpath = os.path.realpath(getargs.source+os.path.sep+"pkgbuild"+os.path.sep+getargs.distro+os.path.sep+getpyver+os.path.sep+"pydeb-gen.sh");
pypkgenlistp = subprocess.Popen([bashlocatout, pypkgpath, getargs.source, getargs.codename], stdout=subprocess.PIPE, stderr=subprocess.PIPE);
pypkgenout, pypkgenerr = pypkgenlistp.communicate();
if(sys.version[0]=="3"):
pypkgenout = pypkgenout.decode("utf-8");
print(pypkgenout);
pypkgenlistp.wait();
if(getargs.distro=="archlinux"):
pypkgpath = os.path.realpath(getargs.source+os.path.sep+"pkgbuild"+os.path.sep+getargs.distro+os.path.sep+getpyver+os.path.sep+"pypac-gen.sh");
pypkgenlistp = subprocess.Popen([bashlocatout, pypkgpath, getargs.source, getargs.codename], stdout=subprocess.PIPE, stderr=subprocess.PIPE);
pypkgenout, pypkgenerr = pypkgenlistp.communicate();
if(sys.version[0]=="3"):
pypkgenout = pypkgenout.decode("utf-8");
print(pypkgenout);
pypkgenlistp.wait();
| 45.279279 | 149 | 0.729208 |
829007d1ff44f42bdcbdcc5f79b823572db44839 | 194 | py | Python | 10/testtime.py | M0nica/python-foundations | fe5065d3af71511bdd0fcf437d1d9f15f9faf1ee | [
"MIT"
] | null | null | null | 10/testtime.py | M0nica/python-foundations | fe5065d3af71511bdd0fcf437d1d9f15f9faf1ee | [
"MIT"
] | null | null | null | 10/testtime.py | M0nica/python-foundations | fe5065d3af71511bdd0fcf437d1d9f15f9faf1ee | [
"MIT"
] | null | null | null | import time
print (time.strftime("%B %e, %Y"))
# Guides:
# how to formate date:
# http://strftime.net/
# how to use time:
# http://www.cyberciti.biz/faq/howto-get-current-date-time-in-python/
| 19.4 | 69 | 0.680412 |
8292fb356f36b5d5f890f807991392f40a46cdec | 514 | py | Python | 2020/02/Teil 2 - V01.py | HeWeMel/adventofcode | 90acb10f03f21ef388673bbcf132d04972175970 | [
"MIT"
] | 1 | 2020-12-12T19:34:59.000Z | 2020-12-12T19:34:59.000Z | 2020/02/Teil 2 - V01.py | HeWeMel/adventofcode | 90acb10f03f21ef388673bbcf132d04972175970 | [
"MIT"
] | null | null | null | 2020/02/Teil 2 - V01.py | HeWeMel/adventofcode | 90acb10f03f21ef388673bbcf132d04972175970 | [
"MIT"
] | null | null | null | import re
with open('input.txt', 'r') as f:
pw_ok=0
for line in f:
(rule,s,space_and_pw) = line.partition(':')
(lowhigh,s,c) = rule.partition(' ')
(low,s,high) = lowhigh.partition('-')
pw=space_and_pw[1:-1]
c1=pw[int(low)-1]
c2=pw[int(high)-1]
if (c1==c and c2!=c) or (c1!=c and c2==c):
print(low, high, c, pw, c1, c2, 'ok')
pw_ok+=1
else:
print(low, high, c, pw, c1, c2, 'falsch')
print (pw_ok)
#737 | 27.052632 | 53 | 0.486381 |
82959d9cf1c7742a4ce7e67d8116a609f7ef7317 | 8,399 | py | Python | slides_manager/openslide_engine.py | crs4/ome_seadragon | e2a7a2178c4abdff1b0a98bc194c672b2476e9a2 | [
"MIT"
] | 31 | 2016-02-16T15:11:25.000Z | 2021-06-21T15:58:58.000Z | slides_manager/openslide_engine.py | crs4/ome_seadragon | e2a7a2178c4abdff1b0a98bc194c672b2476e9a2 | [
"MIT"
] | 11 | 2017-06-23T17:23:47.000Z | 2022-03-31T14:19:27.000Z | slides_manager/openslide_engine.py | crs4/ome_seadragon | e2a7a2178c4abdff1b0a98bc194c672b2476e9a2 | [
"MIT"
] | 4 | 2016-12-15T22:08:04.000Z | 2019-10-24T23:12:53.000Z | # Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import openslide
from openslide import OpenSlide
from openslide.deepzoom import DeepZoomGenerator
from io import BytesIO
from PIL import Image
from .rendering_engine_interface import RenderingEngineInterface
from .. import settings
from ome_seadragon_cache import CacheDriverFactory
| 46.921788 | 120 | 0.654245 |
8296d1c9102045de4d1df9fbc075b8f844636279 | 4,772 | py | Python | pyppy/config.py | maehster/pyppy | 10aadd7ace210cb32c51cdd64060a3337d89324b | [
"MIT"
] | 5 | 2021-01-25T09:52:09.000Z | 2022-01-29T14:35:41.000Z | pyppy/config.py | maehster/pyppy | 10aadd7ace210cb32c51cdd64060a3337d89324b | [
"MIT"
] | 7 | 2021-01-23T10:49:01.000Z | 2021-01-30T08:17:38.000Z | pyppy/config.py | maehster/pyppy | 10aadd7ace210cb32c51cdd64060a3337d89324b | [
"MIT"
] | 1 | 2021-05-25T05:42:10.000Z | 2021-05-25T05:42:10.000Z | """Global config management
This module provides functions for initializing, accessing and destroying
a global config object. You can initialize a global config from any object.
However, in the context of pyppy, only the instance attributes of the
object are used and work with the decorators ``fill_args`` and ``condition``.
But you can use any object you like. The config management methods are
just a convenience reference to the original object.
Initialization
--------------
In this example, we initialize a global config from a ``NameSpace`` parsed
with a custom ``ArgumentParser``. For demonstration purposes, the parser
will not parse args from the commandline but from a list::
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument("--message")
# parse_args returns an argparse.Namespace
args = parser.parse_args(["--message", "hello!"])
To initialize a global config object, import the function ``initialize_config``
and pass the args variable::
from pyppy.config import initialize_config
initialize_config(args)
You can also create an empty global config (which just holds a reference
to an empty ``object``) and change it afterwards by
accessing the global config object (see the Access section)::
    from pyppy.config import initialize_config
    initialize_config()
Access
------
Now that you have initialized the global config, you can use it
throughout your code::
from pyppy.config import config
print(config().message)
# "hello!"
Note
----
The original object that you used to initialize the global config
is returned any time you call ``config()``, so you can do everything
with the object that you could also do before.
Modification
------------
It is possible to change the global config object during time, e.g. to pass
information between objects in your code. We know that the term 'config'
is not ideal for these use cases and we're working on functionality to
handle these use cases in a better way. Here's an example of config
modification::
config().message = "bye!"
print(config().message)
Reset
-----
There can be only one global config object. So whenever you have
initialized a config, you cannot initialize a new one. If you try to,
an exception is raised. In the rare cases where you might want to have
a new global config you can explicitly destroy the current one and
initialize a new one::
from pyppy.config import destroy_config
destroy_config()
initialize_config(args2)
"""
from types import SimpleNamespace
from pyppy.exc import ConfigAlreadyInitializedException
# Attribute name under which the global config object is stashed on the
# ``config`` function object itself (the function doubles as the
# singleton holder).
_CONFIG = "pyppy-config"
def initialize_config(obj: object = None) -> None:
    """
    Initialize the global config with the specified object, or with a
    fresh empty ``SimpleNamespace`` if no object is given.

    Parameters
    ----------
    obj : object, optional
        Object to initialize the global config with. Whenever you call
        ``pyppy.config.config()`` afterwards you will get a reference to
        this exact object. If omitted (or ``None``), a new empty
        ``SimpleNamespace`` is created per initialization. ``None`` is
        used as a sentinel here to avoid the mutable-default-argument
        trap: previously a single ``SimpleNamespace()`` default was
        evaluated once at definition time and shared by every
        no-argument initialization.

    Raises
    ------
    ConfigAlreadyInitializedException
        If a global config is already initialized; call
        ``destroy_config()`` first.

    Returns
    -------
    None

    Examples
    --------
    >>> destroy_config()
    >>> c = SimpleNamespace()
    >>> c.option = "say_hello"
    >>> initialize_config(c)
    >>> config().option
    'say_hello'
    >>> destroy_config()
    """
    if hasattr(config, _CONFIG):
        raise ConfigAlreadyInitializedException(
            (
                "Config has already been initialized. "
                "If you want to initialize a new config call "
                f"{destroy_config.__name__}()."
            )
        )
    if obj is None:
        # Create the default lazily so repeated no-argument
        # initializations never share a single module-level instance.
        obj = SimpleNamespace()
    config(obj)
def config(_obj: object = None) -> object:
    """
    Access the previously initialized global config.

    Parameters
    ----------
    _obj : object, optional
        Internal hook used by ``initialize_config``: on the first call,
        a truthy ``_obj`` is stored as the global config object.

    Returns
    -------
    object:
        The object that was used to initialize the global config.

    Examples
    --------
    >>> destroy_config()
    >>> c = SimpleNamespace()
    >>> c.option = "say_hello"
    >>> initialize_config(c)
    >>> config().option
    'say_hello'
    >>> destroy_config()
    """
    uninitialized = not hasattr(config, _CONFIG)
    if uninitialized and _obj:
        setattr(config, _CONFIG, _obj)
        uninitialized = False
    if uninitialized:
        raise Exception("Please initialize config first!")
    return getattr(config, _CONFIG)
def destroy_config() -> None:
    """
    Delete the global reference to the object that the config was
    initialized with, so that a new config can be initialized later.

    This is a no-op when no config is currently initialized.

    Examples
    --------
    >>> destroy_config()
    >>> c = SimpleNamespace()
    >>> c.option = "say_hello"
    >>> initialize_config(c)
    >>> config().option
    'say_hello'
    >>> destroy_config()
    >>> config().option
    Traceback (most recent call last):
    ...
    Exception: Please initialize config first!
    """
    try:
        delattr(config, _CONFIG)
    except AttributeError:
        # Nothing was initialized; destroying is idempotent.
        pass
| 28.404762 | 79 | 0.676027 |
82974de24f0cc3cfa731bcef6d90cc11159650a2 | 878 | py | Python | LeetCode/3_sum.py | milkrong/Basic-Python-DS-Algs | e3accd22d8cf25546f33883aac634a9bfe108b34 | [
"MIT"
] | null | null | null | LeetCode/3_sum.py | milkrong/Basic-Python-DS-Algs | e3accd22d8cf25546f33883aac634a9bfe108b34 | [
"MIT"
] | null | null | null | LeetCode/3_sum.py | milkrong/Basic-Python-DS-Algs | e3accd22d8cf25546f33883aac634a9bfe108b34 | [
"MIT"
] | null | null | null | def three_sum(nums):
"""
Given an array nums of n integers, are there elements a, b, c in nums such that a + b + c = 0?
Find all unique triplets in the array which gives the sum of zero.
:param nums: list[int]
:return: list[list[int]]
"""
if len(nums) < 3:
return []
nums.sort()
res = []
for i in range(len(nums) - 2):
if i > 0 and nums[i - 1] == nums[i]: continue
l, r = i + 1, len(nums) - 1
while l < r:
s = nums[i] + nums[l] + nums[r]
if s == 0:
res.append([nums[i], nums[l], nums[r]])
l += 1;
r -= 1
while l < r and nums[l] == nums[l - 1]: l += 1
while l < r and nums[r] == nums[r + 1]: r -= 1
elif s < 0:
l += 1
else:
r -= 1
return res | 30.275862 | 98 | 0.425968 |
8297797069048f1e64c87757d3ccf7043bd8704b | 3,690 | py | Python | src/tests/dao_test/guild_roles_dao_test.py | Veloxization/likahbot | 24e22711f514fc0878cf6fb9e516ad44425ea6a7 | [
"MIT"
] | null | null | null | src/tests/dao_test/guild_roles_dao_test.py | Veloxization/likahbot | 24e22711f514fc0878cf6fb9e516ad44425ea6a7 | [
"MIT"
] | null | null | null | src/tests/dao_test/guild_roles_dao_test.py | Veloxization/likahbot | 24e22711f514fc0878cf6fb9e516ad44425ea6a7 | [
"MIT"
] | null | null | null | import unittest
import os
from dao.guild_roles_dao import GuildRolesDAO
from dao.guild_role_categories_dao import GuildRoleCategoriesDAO
| 52.714286 | 103 | 0.746612 |
82995e877d2337617c9148dbf6692f9969d5a1fd | 1,115 | py | Python | qcic.py | milkllc/qcic | dfa8eae928689e3cb114587f62947b7d8397fdef | [
"MIT"
] | null | null | null | qcic.py | milkllc/qcic | dfa8eae928689e3cb114587f62947b7d8397fdef | [
"MIT"
] | null | null | null | qcic.py | milkllc/qcic | dfa8eae928689e3cb114587f62947b7d8397fdef | [
"MIT"
] | null | null | null | import picamera
import datetime
import os
delcount = 2
with picamera.PiCamera() as camera:
try:
check_fs()
tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
print "recording", tstamp
camera.start_recording(tstamp + '.h264')
camera.wait_recording(60)
while True:
check_fs()
tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
print "recording", tstamp
camera.split_recording(tstamp + '.h264')
camera.wait_recording(60)
except KeyboardInterrupt:
print "quitting"
camera.stop_recording()
| 25.340909 | 74 | 0.574888 |
8299ba8eed08b051c1bd7e22979a2992369a89ff | 4,398 | py | Python | forge/mock_handle.py | ujjwalsh/pyforge | 454d7df39f6d6cc7531d3f87e7b7f7d83ae6e66e | [
"BSD-3-Clause"
] | 7 | 2015-01-01T18:40:53.000Z | 2021-10-20T14:13:08.000Z | forge/mock_handle.py | ujjwalsh/pyforge | 454d7df39f6d6cc7531d3f87e7b7f7d83ae6e66e | [
"BSD-3-Clause"
] | 6 | 2016-03-31T16:40:30.000Z | 2020-12-23T07:24:53.000Z | forge/mock_handle.py | ujjwalsh/pyforge | 454d7df39f6d6cc7531d3f87e7b7f7d83ae6e66e | [
"BSD-3-Clause"
] | 9 | 2016-03-31T15:21:29.000Z | 2021-03-20T06:29:09.000Z | from .handle import ForgeHandle
| 48.32967 | 116 | 0.705548 |
8299d63942c82469cfa51d90a39b4e86d506709d | 4,599 | py | Python | RecoBTag/PerformanceDB/python/measure/Pool_mistag110118.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | RecoBTag/PerformanceDB/python/measure/Pool_mistag110118.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | RecoBTag/PerformanceDB/python/measure/Pool_mistag110118.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
from CondCore.DBCommon.CondDBCommon_cfi import *
PoolDBESSourceMistag110118 = cms.ESSource("PoolDBESSource",
CondDBCommon,
toGet = cms.VPSet(
#
# working points
#
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJBPLtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPLtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJBPLwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPLwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJBPMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJBPMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJBPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJBPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPTwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJPLtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPLtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJPLwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPLwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJPMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJPMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPTwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGSSVHEMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHEMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGSSVHEMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHEMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGSSVHPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGSSVHPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHPTwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGTCHELtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHELtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGTCHELwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHELwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGTCHEMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHEMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGTCHEMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHEMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGTCHPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGTCHPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHPTwp_v5_offline')
),
))
PoolDBESSourceMistag110118.connect = 'frontier://FrontierProd/CMS_COND_31X_PHYSICSTOOLS'
| 37.390244 | 88 | 0.704718 |
829b4d9d2ba83ae6309dbbbee76b950d8044a7f9 | 7,423 | py | Python | src/ScheduleEvaluation.py | franTarkenton/replication_health_check | 61d9197c6e007650437789ef7780da422af6b7fe | [
"Apache-2.0"
] | null | null | null | src/ScheduleEvaluation.py | franTarkenton/replication_health_check | 61d9197c6e007650437789ef7780da422af6b7fe | [
"Apache-2.0"
] | 3 | 2020-04-17T21:52:43.000Z | 2022-03-01T21:47:25.000Z | src/ScheduleEvaluation.py | franTarkenton/replication_health_check | 61d9197c6e007650437789ef7780da422af6b7fe | [
"Apache-2.0"
] | 3 | 2018-11-26T17:44:09.000Z | 2021-04-14T22:10:38.000Z | '''
Created on Nov 22, 2018
@author: kjnether
methods that evaluate the given schedule
'''
import logging
import FMEUtil.FMEServerApiData
import re
| 43.409357 | 78 | 0.555166 |
829c52d86cde3835b9fe8363fe095b5e95155b81 | 3,319 | py | Python | podcastista/ListenNowTab.py | andrsd/podcastista | c05a1de09d2820899aebe592d3d4b01d64d1e5fe | [
"MIT"
] | null | null | null | podcastista/ListenNowTab.py | andrsd/podcastista | c05a1de09d2820899aebe592d3d4b01d64d1e5fe | [
"MIT"
] | 17 | 2021-09-22T12:21:46.000Z | 2022-02-26T12:26:40.000Z | podcastista/ListenNowTab.py | andrsd/podcastista | c05a1de09d2820899aebe592d3d4b01d64d1e5fe | [
"MIT"
] | null | null | null | from PyQt5 import QtWidgets, QtCore
from podcastista.ShowEpisodeWidget import ShowEpisodeWidget
from podcastista.FlowLayout import FlowLayout
| 31.913462 | 77 | 0.617957 |
829d0c9553bb774075d15e5e3d5751bc89e20c32 | 866 | py | Python | ggpy/cruft/prolog_pyparser.py | hobson/ggpy | 4e6e6e876c3a4294cd711647051da2d9c1836b60 | [
"MIT"
] | 1 | 2015-01-26T19:07:45.000Z | 2015-01-26T19:07:45.000Z | ggpy/cruft/prolog_pyparser.py | hobson/ggpy | 4e6e6e876c3a4294cd711647051da2d9c1836b60 | [
"MIT"
] | null | null | null | ggpy/cruft/prolog_pyparser.py | hobson/ggpy | 4e6e6e876c3a4294cd711647051da2d9c1836b60 | [
"MIT"
] | null | null | null | import pyparsing as pp
#relationship will refer to 'track' in all of your examples
relationship = pp.Word(pp.alphas).setResultsName('relationship')
number = pp.Word(pp.nums + '.')
variable = pp.Word(pp.alphas)
# an argument to a relationship can be either a number or a variable
argument = number | variable
# arguments are a delimited list of 'argument' surrounded by parenthesis
arguments= (pp.Suppress('(') + pp.delimitedList(argument) +
pp.Suppress(')')).setResultsName('arguments')
# a fact is composed of a relationship and it's arguments
# (I'm aware it's actually more complicated than this
# it's just a simplifying assumption)
fact = (relationship + arguments).setResultsName('facts', listAllMatches=True)
# a sentence is a fact plus a period
sentence = fact + pp.Suppress('.')
# self explanatory
prolog_sentences = pp.OneOrMore(sentence) | 37.652174 | 78 | 0.743649 |
829d9d6e41067c52d752f4bdf77ffcbc9b8f2f17 | 4,496 | py | Python | Imaging/Core/Testing/Python/ReslicePermutations.py | inviCRO/VTK | a2dc2e79d4ecb8f6da900535b32e1a2a702c7f48 | [
"BSD-3-Clause"
] | null | null | null | Imaging/Core/Testing/Python/ReslicePermutations.py | inviCRO/VTK | a2dc2e79d4ecb8f6da900535b32e1a2a702c7f48 | [
"BSD-3-Clause"
] | null | null | null | Imaging/Core/Testing/Python/ReslicePermutations.py | inviCRO/VTK | a2dc2e79d4ecb8f6da900535b32e1a2a702c7f48 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# this script tests vtkImageReslice with various axes permutations,
# in order to cover a nasty set of "if" statements that check
# the intersections of the raster lines with the input bounding box.
# Image pipeline
reader = vtk.vtkImageReader()
reader.ReleaseDataFlagOff()
reader.SetDataByteOrderToLittleEndian()
reader.SetDataExtent(0,63,0,63,1,93)
reader.SetDataSpacing(3.2,3.2,1.5)
reader.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter")
reader.SetDataMask(0x7fff)
transform = vtk.vtkTransform()
# rotate about the center of the image
transform.Translate(+100.8,+100.8,+69.0)
transform.RotateWXYZ(10,1,1,0)
transform.Translate(-100.8,-100.8,-69.0)
reslice1 = vtk.vtkImageReslice()
reslice1.SetInputConnection(reader.GetOutputPort())
reslice1.SetResliceAxesDirectionCosines([1,0,0,0,1,0,0,0,1])
reslice1.SetResliceTransform(transform)
reslice1.SetOutputSpacing(3.2,3.2,3.2)
reslice1.SetOutputExtent(0,74,0,74,0,0)
reslice2 = vtk.vtkImageReslice()
reslice2.SetInputConnection(reader.GetOutputPort())
reslice2.SetResliceAxesDirectionCosines([0,1,0,0,0,1,1,0,0])
reslice2.SetResliceTransform(transform)
reslice2.SetOutputSpacing(3.2,3.2,3.2)
reslice2.SetOutputExtent(0,74,0,74,0,0)
reslice3 = vtk.vtkImageReslice()
reslice3.SetInputConnection(reader.GetOutputPort())
reslice3.SetResliceAxesDirectionCosines([0,0,1,1,0,0,0,1,0])
reslice3.SetResliceTransform(transform)
reslice3.SetOutputSpacing(3.2,3.2,3.2)
reslice3.SetOutputExtent(0,74,0,74,0,0)
reslice4 = vtk.vtkImageReslice()
reslice4.SetInputConnection(reader.GetOutputPort())
reslice4.SetResliceAxesDirectionCosines([-1,0,0,0,-1,0,0,0,-1])
reslice4.SetResliceTransform(transform)
reslice4.SetOutputSpacing(3.2,3.2,3.2)
reslice4.SetOutputExtent(0,74,0,74,0,0)
reslice5 = vtk.vtkImageReslice()
reslice5.SetInputConnection(reader.GetOutputPort())
reslice5.SetResliceAxesDirectionCosines([0,-1,0,0,0,-1,-1,0,0])
reslice5.SetResliceTransform(transform)
reslice5.SetOutputSpacing(3.2,3.2,3.2)
reslice5.SetOutputExtent(0,74,0,74,0,0)
reslice6 = vtk.vtkImageReslice()
reslice6.SetInputConnection(reader.GetOutputPort())
reslice6.SetResliceAxesDirectionCosines([0,0,-1,-1,0,0,0,-1,0])
reslice6.SetResliceTransform(transform)
reslice6.SetOutputSpacing(3.2,3.2,3.2)
reslice6.SetOutputExtent(0,74,0,74,0,0)
mapper1 = vtk.vtkImageMapper()
mapper1.SetInputConnection(reslice1.GetOutputPort())
mapper1.SetColorWindow(2000)
mapper1.SetColorLevel(1000)
mapper1.SetZSlice(0)
mapper2 = vtk.vtkImageMapper()
mapper2.SetInputConnection(reslice2.GetOutputPort())
mapper2.SetColorWindow(2000)
mapper2.SetColorLevel(1000)
mapper2.SetZSlice(0)
mapper3 = vtk.vtkImageMapper()
mapper3.SetInputConnection(reslice3.GetOutputPort())
mapper3.SetColorWindow(2000)
mapper3.SetColorLevel(1000)
mapper3.SetZSlice(0)
mapper4 = vtk.vtkImageMapper()
mapper4.SetInputConnection(reslice4.GetOutputPort())
mapper4.SetColorWindow(2000)
mapper4.SetColorLevel(1000)
mapper4.SetZSlice(0)
mapper5 = vtk.vtkImageMapper()
mapper5.SetInputConnection(reslice5.GetOutputPort())
mapper5.SetColorWindow(2000)
mapper5.SetColorLevel(1000)
mapper5.SetZSlice(0)
mapper6 = vtk.vtkImageMapper()
mapper6.SetInputConnection(reslice6.GetOutputPort())
mapper6.SetColorWindow(2000)
mapper6.SetColorLevel(1000)
mapper6.SetZSlice(0)
actor1 = vtk.vtkActor2D()
actor1.SetMapper(mapper1)
actor2 = vtk.vtkActor2D()
actor2.SetMapper(mapper2)
actor3 = vtk.vtkActor2D()
actor3.SetMapper(mapper3)
actor4 = vtk.vtkActor2D()
actor4.SetMapper(mapper4)
actor5 = vtk.vtkActor2D()
actor5.SetMapper(mapper5)
actor6 = vtk.vtkActor2D()
actor6.SetMapper(mapper6)
imager1 = vtk.vtkRenderer()
imager1.AddActor2D(actor1)
imager1.SetViewport(0.0,0.0,0.3333,0.5)
imager2 = vtk.vtkRenderer()
imager2.AddActor2D(actor2)
imager2.SetViewport(0.0,0.5,0.3333,1.0)
imager3 = vtk.vtkRenderer()
imager3.AddActor2D(actor3)
imager3.SetViewport(0.3333,0.0,0.6667,0.5)
imager4 = vtk.vtkRenderer()
imager4.AddActor2D(actor4)
imager4.SetViewport(0.3333,0.5,0.6667,1.0)
imager5 = vtk.vtkRenderer()
imager5.AddActor2D(actor5)
imager5.SetViewport(0.6667,0.0,1.0,0.5)
imager6 = vtk.vtkRenderer()
imager6.AddActor2D(actor6)
imager6.SetViewport(0.6667,0.5,1.0,1.0)
imgWin = vtk.vtkRenderWindow()
imgWin.AddRenderer(imager1)
imgWin.AddRenderer(imager2)
imgWin.AddRenderer(imager3)
imgWin.AddRenderer(imager4)
imgWin.AddRenderer(imager5)
imgWin.AddRenderer(imager6)
imgWin.SetSize(225,150)
imgWin.Render()
# --- end of script --
| 35.125 | 70 | 0.803158 |
829dd3506bffa917743930aa6c0983eab6866732 | 2,916 | py | Python | neuronlp2/nn/utils.py | ntunlp/ptrnet-depparser | 61cb113327ede02996b16ea4b9e19311062603c3 | [
"MIT"
] | 9 | 2019-09-03T11:03:45.000Z | 2021-09-19T05:38:25.000Z | neuronlp2/nn/utils.py | danifg/BottomUp-Hierarchical-PtrNet | 2b6ebdb63825eafd63d86700bbbc278cabfafeb2 | [
"MIT"
] | null | null | null | neuronlp2/nn/utils.py | danifg/BottomUp-Hierarchical-PtrNet | 2b6ebdb63825eafd63d86700bbbc278cabfafeb2 | [
"MIT"
] | 1 | 2019-09-24T06:19:25.000Z | 2019-09-24T06:19:25.000Z | import collections
from itertools import repeat
import torch
import torch.nn as nn
import torch.nn.utils.rnn as rnn_utils
_single = _ntuple(1)
_pair = _ntuple(2)
_triple = _ntuple(3)
_quadruple = _ntuple(4)
def prepare_rnn_seq(rnn_input, lengths, hx=None, masks=None, batch_first=False):
    '''
    Sort a padded batch by decreasing sequence length and pack it for an RNN.

    Args:
        rnn_input: [seq_len, batch, input_size]: tensor containing the features of the input sequence.
        lengths: [batch]: tensor containing the lengths of the input sequence
        hx: [num_layers * num_directions, batch, hidden_size]: tensor containing the initial hidden state for each element in the batch.
        masks: [seq_len, batch]: tensor containing the mask for each element in the batch.
        batch_first: If True, then the input and output tensors are provided as [batch, seq_len, feature].
    Returns:
        (packed_seq, hx, rev_order, masks): the packed input, the (possibly
        reordered) initial hidden state, ``rev_order`` (None when no reordering
        was needed, otherwise the index tensor that restores the original batch
        order), and ``masks`` truncated to the longest sequence length.
    '''
    # check_decreasing (defined elsewhere in this module) yields None when the
    # lengths are already usable as-is; otherwise a (lens, order, rev_order)
    # triple describing the sort -- confirm against its definition.
    check_res = check_decreasing(lengths)
    if check_res is None:
        lens = lengths
        rev_order = None
    else:
        lens, order, rev_order = check_res
        # Reorder the batch dimension of the input to match the sorted lengths.
        batch_dim = 0 if batch_first else 1
        rnn_input = rnn_input.index_select(batch_dim, order)
        if hx is not None:
            # hack lstm: an LSTM hidden state is an (h, c) tuple, so both
            # tensors must be reordered along dim 1 (the batch dimension of
            # hidden states, regardless of batch_first).
            if isinstance(hx, tuple):
                hx, cx = hx
                hx = hx.index_select(1, order)
                cx = cx.index_select(1, order)
                hx = (hx, cx)
            else:
                hx = hx.index_select(1, order)
    # pack_padded_sequence requires a plain Python list of lengths.
    lens = lens.tolist()
    seq = rnn_utils.pack_padded_sequence(rnn_input, lens, batch_first=batch_first)
    if masks is not None:
        # Trim the mask to the longest (first, after sorting) sequence length.
        if batch_first:
            masks = masks[:, :lens[0]]
        else:
            masks = masks[:lens[0]]
    return seq, hx, rev_order, masks
| 32.043956 | 136 | 0.614883 |
829dd5cc20b5aa7c14726c3c740aa687c0a9650d | 194 | py | Python | Data_Analyst/Step_2_Intermediate_Python_and_Pandas/2_Data_Analysis_with_Pandas_Intermediate/3_Introduction_to_Pandas/7_Selecting_a_row/script.py | ustutz/dataquest | 6fa64fc824a060b19649ef912d11bee9ed671025 | [
"MIT"
] | 8 | 2017-01-20T13:24:26.000Z | 2019-04-05T19:02:13.000Z | Data_Analyst/Step_2_Intermediate_Python_and_Pandas/2_Data_Analysis_with_Pandas_Intermediate/3_Introduction_to_Pandas/7_Selecting_a_row/script.py | ustutz/dataquest | 6fa64fc824a060b19649ef912d11bee9ed671025 | [
"MIT"
] | null | null | null | Data_Analyst/Step_2_Intermediate_Python_and_Pandas/2_Data_Analysis_with_Pandas_Intermediate/3_Introduction_to_Pandas/7_Selecting_a_row/script.py | ustutz/dataquest | 6fa64fc824a060b19649ef912d11bee9ed671025 | [
"MIT"
] | 25 | 2016-10-27T16:27:54.000Z | 2021-07-06T14:36:40.000Z | import pandas as pandas_Pandas_Module
Script.main() | 14.923077 | 63 | 0.752577 |
829fa892ed939a93b224c00b60d5719ddb4dc7e0 | 2,176 | py | Python | examples/fire.py | pombreda/py-lepton | 586358747efe867208edafca112a3edbb24ff8f9 | [
"MIT"
] | 7 | 2018-02-20T02:56:03.000Z | 2020-01-23T05:35:55.000Z | examples/fire.py | caseman/py-lepton | 586358747efe867208edafca112a3edbb24ff8f9 | [
"MIT"
] | 1 | 2017-11-12T10:14:13.000Z | 2017-11-12T10:14:44.000Z | examples/fire.py | caseman/py-lepton | 586358747efe867208edafca112a3edbb24ff8f9 | [
"MIT"
] | 1 | 2019-01-05T00:38:50.000Z | 2019-01-05T00:38:50.000Z | #############################################################################
#
# Copyright (c) 2008 by Casey Duncan and contributors
# All Rights Reserved.
#
# This software is subject to the provisions of the MIT License
# A copy of the license should accompany this distribution.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#
#############################################################################
"""Fire simulation using point sprites"""
__version__ = '$Id$'
import os
from pyglet import image
from pyglet.gl import *
from lepton import Particle, ParticleGroup, default_system
from lepton.renderer import PointRenderer
from lepton.texturizer import SpriteTexturizer, create_point_texture
from lepton.emitter import StaticEmitter
from lepton.domain import Line
from lepton.controller import Gravity, Lifetime, Movement, Fader, ColorBlender
# Window and OpenGL state: additive blending (SRC_ALPHA, ONE) produces the
# glowing, fire-like accumulation of overlapping sprites.
# NOTE(review): `pyglet` itself is only in scope via the star import from
# pyglet.gl above -- confirm, or add an explicit `import pyglet`.
win = pyglet.window.Window(resizable=True, visible=False)
win.clear()
glEnable(GL_BLEND)
glShadeModel(GL_SMOOTH)
glBlendFunc(GL_SRC_ALPHA,GL_ONE)
glDisable(GL_DEPTH_TEST)
# Emitter: 500 particles/sec along a horizontal line centered at the bottom
# of the window, with positional/velocity/age jitter from `deviation`.
flame = StaticEmitter(
    rate=500,
    template=Particle(
        position=(300,25,0),
        velocity=(0,0,0),
        color=(1,1,1,1),
    ),
    position=Line((win.width/2 - 85, -15, 0), (win.width/2 + 85, -15, 0)),
    deviation=Particle(position=(10,0,0), velocity=(7,50,0), age=0.75)
)
# Global controllers: 6s lifetime, upward "gravity" (buoyancy), movement
# integration, and a keyframed color ramp from blue core to smoke grey.
default_system.add_global_controller(
    Lifetime(6),
    Gravity((0,20,0)),
    Movement(),
    ColorBlender(
        [(0, (0,0,0.5,0)),
        (0.5, (0,0,0.5,0.2)),
        (0.75, (0,0.5,1,0.6)),
        (1.5, (1,1,0,0.2)),
        (2.7, (0.9,0.2,0,0.4)),
        (3.2, (0.6,0.1,0.05,0.2)),
        (4.0, (0.8,0.8,0.8,0.1)),
        (6.0, (0.8,0.8,0.8,0)), ]
    ),
)
# Render the group as 64-pixel textured point sprites.
group = ParticleGroup(controllers=[flame],
    renderer=PointRenderer(64, SpriteTexturizer(create_point_texture(64, 5))))
win.set_visible(True)
# Step the particle system at 30 Hz; uncap the frame rate.
pyglet.clock.schedule_interval(default_system.update, (1.0/30.0))
pyglet.clock.set_fps_limit(None)
if __name__ == '__main__':
    # Pre-roll two seconds of simulation so the fire is established at startup.
    default_system.run_ahead(2, 30)
    pyglet.app.run()
| 27.544304 | 78 | 0.665901 |
829fbfa6185a88b37d0e4fc7be2c4271027f431b | 3,810 | py | Python | landspout/cli.py | gmr/landspout | 1df922aa96c42dbfaa28681e748fbd97dfaf9836 | [
"BSD-3-Clause"
] | null | null | null | landspout/cli.py | gmr/landspout | 1df922aa96c42dbfaa28681e748fbd97dfaf9836 | [
"BSD-3-Clause"
] | null | null | null | landspout/cli.py | gmr/landspout | 1df922aa96c42dbfaa28681e748fbd97dfaf9836 | [
"BSD-3-Clause"
] | null | null | null | # coding=utf-8
"""
Command Line Interface
======================
"""
import argparse
import logging
import os
from os import path
import sys
from landspout import core, __version__
LOGGER = logging.getLogger('landspout')
LOGGING_FORMAT = '[%(asctime)-15s] %(levelname)-8s %(name)-15s: %(message)s'
def exit_application(message=None, code=0):
    """Exit the application, displaying the message to info or error based
    upon the exit code.

    :param str message: The exit message (optional; nothing is logged when
        omitted)
    :param int code: The exit code (default: 0); non-zero logs at error level

    """
    # Bug fix: the default message is None, so .strip() must only be called
    # when a message was actually supplied.
    if message:
        log_method = LOGGER.error if code else LOGGER.info
        log_method(message.strip())
    sys.exit(code)
def parse_cli_arguments():
    """Build the command-line parser and parse ``sys.argv``.

    :return: the parsed :class:`argparse.Namespace` of CLI options

    """
    # Bug fix: the second positional argument of ArgumentParser is `usage`,
    # not `description`, so the original call displayed the description text
    # as the usage line and left the description empty. Use keywords instead.
    parser = argparse.ArgumentParser(
        prog='landspout', description='Static website generation tool',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        conflict_handler='resolve')
    parser.add_argument('-s', '--source', metavar='SOURCE',
                        help='Source content directory',
                        default='content')
    parser.add_argument('-d', '--destination', metavar='DEST',
                        help='Destination directory for built content',
                        default='build')
    parser.add_argument('-t', '--templates', metavar='TEMPLATE DIR',
                        help='Template directory',
                        default='templates')
    parser.add_argument('-b', '--base-uri-path', action='store', default='/')
    parser.add_argument('--whitespace', action='store',
                        choices=['all', 'single', 'oneline'],
                        default='all',
                        help='Compress whitespace')
    parser.add_argument('-n', '--namespace', type=argparse.FileType('r'),
                        help='Load a JSON file of values to inject into the '
                             'default rendering namespace.')
    parser.add_argument('-i', '--interval', type=int, default=3,
                        help='Interval in seconds between file '
                             'checks while watching or serving')
    parser.add_argument('--port', type=int, default=8080,
                        help='The port to listen on when serving')
    parser.add_argument('--debug', action='store_true',
                        help='Extra verbose debug logging')
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s {}'.format(__version__),
                        help='output version information, then exit')
    parser.add_argument('command', nargs='?',
                        choices=['build', 'watch', 'serve'],
                        help='The command to run', default='build')
    return parser.parse_args()
def validate_paths(args):
    """Ensure all of the configured paths actually exist.

    The destination directory is created on demand; a missing source or
    template directory aborts the application with exit code 1.
    """
    destination = args.destination
    if not path.exists(destination):
        LOGGER.warning('Destination path "%s" does not exist, creating',
                       destination)
        os.makedirs(path.normpath(destination))
    for required in (args.source, args.templates):
        if not path.exists(required):
            exit_application('Path {} does not exist'.format(required), 1)
def main():
    """Application entry point: parse the CLI, configure logging, validate
    the configured paths, then run the requested Landspout command."""
    args = parse_cli_arguments()
    logging.basicConfig(
        level=logging.DEBUG if args.debug else logging.INFO,
        format=LOGGING_FORMAT)
    LOGGER.info('Landspout v%s [%s]', __version__, args.command)
    validate_paths(args)
    landspout = core.Landspout(args)
    # argparse restricts `command` to exactly these three choices.
    {'build': landspout.build,
     'watch': landspout.watch,
     'serve': landspout.serve}[args.command]()
| 36.990291 | 78 | 0.599475 |
82a12ebdf14809677818644038ba067ccbd91713 | 474 | py | Python | examples/test_cross.py | rballester/ttpy | a2fdf08fae9d34cb1e5ba28482e82e04b249911b | [
"MIT"
] | null | null | null | examples/test_cross.py | rballester/ttpy | a2fdf08fae9d34cb1e5ba28482e82e04b249911b | [
"MIT"
] | null | null | null | examples/test_cross.py | rballester/ttpy | a2fdf08fae9d34cb1e5ba28482e82e04b249911b | [
"MIT"
] | 1 | 2021-01-10T07:02:09.000Z | 2021-01-10T07:02:09.000Z | import sys
sys.path.append('../')
import numpy as np
import tt
# Problem size: a virtual uniform grid of n = 2^30 points on (0, b].
d = 30
n = 2 ** d
b = 1E3
h = b / (n + 1)
#x = np.arange(n)
#x = np.reshape(x, [2] * d, order = 'F')
#x = tt.tensor(x, 1e-12)
# Build the grid coordinates in QTT form: x_i = (i + 1) * h for i in [0, n).
x = tt.xfun(2, d)
e = tt.ones(2, d)
x = x + e
x = x * h
sf = lambda x : np.sin(x) / x #Should be rank 2
# Cross-approximate sinc over the grid to accuracy 1e-6.
# NOTE(review): the trailing ['y0', tt.ones(2, d)] looks like it was meant as
# a keyword argument (y0=...) -- verify against the ttpy multifuncrs API.
y = tt.multifuncrs([x], sf, 1e-6, ['y0', tt.ones(2, d)])
#y1 = tt.tensor(sf(x.full()), 1e-8)
# Riemann sum of sinc over (0, b]; approximates the Dirichlet integral pi/2.
# NOTE: Python 2 print statement -- this example predates Python 3.
print "pi / 2 ~ ", tt.dot(y, tt.ones(2, d)) * h
#print (y - y1).norm() / y.norm()
| 18.230769 | 56 | 0.516878 |
82a1dcab7cd90d7023343f02b2320478208cc588 | 26,434 | py | Python | phone2board.py | brandjamie/phone2board | b27b6d8dfa944f03688df802a360f247f648b2f6 | [
"MIT"
] | null | null | null | phone2board.py | brandjamie/phone2board | b27b6d8dfa944f03688df802a360f247f648b2f6 | [
"MIT"
] | null | null | null | phone2board.py | brandjamie/phone2board | b27b6d8dfa944f03688df802a360f247f648b2f6 | [
"MIT"
] | null | null | null | import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.auth
import tornado.escape
import os.path
import logging
import sys
import urllib
import json
from uuid import uuid4
from tornado.options import define, options
define("port", default=8000, help="run on the given port", type=int)
#to do -
# check character set of inputs (not vital as 'block' added to each user).
# scores?
#------------------------------------------------------------------------------Main app code-------------------------------------------
#----------------------------------------------------------status handlers-------------------------
# these handle the asynch hooks from the pages and sending messages to the pages
# a lot of shared code here - I'm sure this could be better!
# message handlers - receives messages from the pages (currently only control and client)
# - template handlers ------------- pages that are actually called by the browser.
if __name__ == '__main__':
    # tornado.options.parse_command_line()
    # NOTE(review): Application and set_defaults are expected to be defined
    # earlier in this module -- they are not visible in this excerpt.
    app = Application()
    if len(sys.argv) > 1:
        try:
            # Load the quiz definition from the JSON file named on the
            # command line and copy its fields onto the application object.
            with open(sys.argv[1]) as json_data:
                app.gamefile = json.load(json_data)
                json_data.close()  # redundant: the `with` block closes the file
            app.quiztype = app.gamefile["quiztype"]
            if "notes" in app.gamefile:
                app.notes = app.gamefile["notes"]
            if "questionarray" in app.gamefile:
                app.questionarray = app.gamefile["questionarray"]
            else:
                app.questionarray = "{}"
            if "answerarray" in app.gamefile:
                app.answerarray = app.gamefile["answerarray"]
            else:
                app.answerarray = "{}"
        # NOTE(review): bare except also swallows unrelated errors (missing
        # file vs. malformed JSON vs. missing "quiztype" key) -- consider
        # narrowing to (OSError, ValueError, KeyError).
        except:
            print("not a valid json file, using defaults")
            set_defaults()
    else:
        print("no file given - using defaults")
        set_defaults()
    app.status.setQuizType(app.quiztype)
    # Start the Tornado HTTP server and run the IO loop forever.
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
| 36.970629 | 209 | 0.591208 |
82a2aae9ea64aaa7fb4b9cb2856b242dd76d5578 | 239 | py | Python | scripts/plotRUC.py | akrherz/radcomp | d44459f72891c6e1a92b61488e08422383b000d1 | [
"Apache-2.0"
] | 3 | 2015-04-18T22:23:27.000Z | 2016-05-12T11:24:32.000Z | scripts/plotRUC.py | akrherz/radcomp | d44459f72891c6e1a92b61488e08422383b000d1 | [
"Apache-2.0"
] | 4 | 2016-09-30T15:04:46.000Z | 2022-03-05T13:32:40.000Z | scripts/plotRUC.py | akrherz/radcomp | d44459f72891c6e1a92b61488e08422383b000d1 | [
"Apache-2.0"
] | 4 | 2015-04-18T22:23:57.000Z | 2017-05-07T15:23:37.000Z | import matplotlib.pyplot as plt
import netCDF4
import numpy
# Read one 2-D slice of the "tmpc" variable (index 17 along the first axis,
# presumably the time/forecast-hour dimension -- confirm against the file).
nc = netCDF4.Dataset("data/ructemps.nc")
data = nc.variables["tmpc"][17, :, :]
nc.close()
# Render the grid flipped vertically (image row 0 is at the top) and save a
# quicklook PNG.
(fig, ax) = plt.subplots(1, 1)
ax.imshow(numpy.flipud(data))
fig.savefig("test.png")
| 17.071429 | 40 | 0.698745 |
82a4a9f7dd1ed9b3be8582ffaccf49c75f0cf8a6 | 3,031 | py | Python | tools/draw_cal_lr_ablation.py | twangnh/Calibration_mrcnn | e5f3076cefbe35297a403a753bb57e11503db818 | [
"Apache-2.0"
] | 87 | 2020-07-24T01:28:39.000Z | 2021-08-29T08:40:18.000Z | tools/draw_cal_lr_ablation.py | twangnh/Calibration_mrcnn | e5f3076cefbe35297a403a753bb57e11503db818 | [
"Apache-2.0"
] | 3 | 2020-09-27T12:59:28.000Z | 2022-01-06T13:14:08.000Z | tools/draw_cal_lr_ablation.py | twangnh/Calibration_mrcnn | e5f3076cefbe35297a403a753bb57e11503db818 | [
"Apache-2.0"
] | 20 | 2020-09-05T04:37:19.000Z | 2021-12-13T02:25:48.000Z |
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import math
from matplotlib.ticker import FormatStrFormatter
from matplotlib import scale as mscale
from matplotlib import transforms as mtransforms
# z = [0,0.1,0.3,0.9,1,2,5]
# NOTE(review): `z` is only referenced by the commented-out plot calls below.
z = [7.8, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1230]
# thick = [20,40,20,60,37,32,21]ax1.set_xscale('log')
# thick=[15.4, 18.2, 18.7, 19.2, 19.4, 19.5, 19.9, 20.1, 20.4, 20.5, 20.6, 20.7, 20.8, 20.7, 20.7, 20.6, 20.6, 20.6, 20.5, 20.5, 19.8]
# bAP results of Mask R-CNN (R50, class-agnostic) for each calibration lr.
mrcnn=[17.7, 19.8, 20.0, 19.9, 20.2, 19.5, 19.1, 19.1]
x_ticks = [0.001, 0.002, 0.004, 0.008, 0.01, 0.02, 0.04, 0.08]
# plt.plot([1.0],[44.8], 'D', color = 'black')
# plt.plot([0],[35.9], 'D', color = 'red')
# plt.plot([1.0],[56.8], 'D', color = 'black')
fig = plt.figure(figsize=(8,5))
ax1 = fig.add_subplot(111)
# Global font size; applied lazily at draw time, so setting it after the
# figure is created still takes effect.
matplotlib.rcParams.update({'font.size': 20})
ax1.plot(x_ticks, mrcnn, linestyle='dashed', marker='o', linewidth=2, c='k', label='mrcnn-r50-ag')
# ax1.plot(z, htc, marker='o', linewidth=2, c='g', label='htc')
# ax1.plot([1e-4],[15.4], 'D', color = 'green')
# ax1.plot([1230],[19.8], 'D', color = 'red')
plt.xlabel('calibration lr', size=16)
plt.ylabel('bAP', size=16)
# plt.gca().set_xscale('custom')
# Log-scaled x axis with an explicit tick at every tested learning rate.
ax1.set_xscale('log')
ax1.set_xticks(x_ticks)
# from matplotlib.ticker import ScalarFormatter
# ax1.xaxis.set_major_formatter(ScalarFormatter())
# plt.legend(['calibration lr'], loc='best')
plt.minorticks_off()
plt.grid()
# Vector (EPS) export for the paper figure, then an interactive preview.
plt.savefig('calibration_lr.eps', format='eps', dpi=1000)
plt.show()
# import numpy as np
# import matplotlib.pyplot as plt
# from scipy.interpolate import interp1d
# y1=[35.9, 43.4, 46.1, 49.3, 50.3, 51.3, 51.4, 49.9, 49.5, 48.5, 44.8]
# y2=[40.5, 48.2, 53.9 , 56.9, 57.8, 59.2, 58.3, 57.9, 57.5, 57.2, 56.8]
# y3=[61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5]
# x = np.linspace(0, 1, num=11, endpoint=True)
#
# f1 = interp1d(x, y1, kind='cubic')
# f2 = interp1d(x, y2, kind='cubic')
# f3 = interp1d(x, y3, kind='cubic')
# xnew = np.linspace(0, 1, num=101, endpoint=True)
# plt.plot(xnew, f3(xnew), '--', color='fuchsia')
# plt.plot(xnew, f1(xnew), '--', color='blue')
# plt.plot(xnew, f2(xnew), '--', color='green')
#
# plt.plot([0],[40.5], 'D', color = 'red')
# plt.plot([1.0],[44.8], 'D', color = 'black')
# plt.plot([0],[35.9], 'D', color = 'red')
# plt.plot([1.0],[56.8], 'D', color = 'black')
# plt.plot(x, y3, 'o', color = 'fuchsia')
# plt.plot(x, y1, 'o', color = 'blue')
# plt.plot(x, y2, 'o', color = 'green')
# plt.plot([0],[40.5], 'D', color = 'red')
# plt.plot([1.0],[44.8], 'D', color = 'black')
# plt.plot([0],[35.9], 'D', color = 'red')
# plt.plot([1.0],[56.8], 'D', color = 'black')
# plt.legend(['teacher','0.25x', '0.5x', 'full-feature-imitation', 'only GT supervison'], loc='best')
# plt.xlabel('Thresholding factor')
# plt.ylabel('mAP')
# plt.title('Resulting mAPs of varying thresholding factors')
# #plt.legend(['0.5x'])
# # plt.savefig('varying_thresh.eps', format='eps', dpi=1000)
# plt.show()
| 35.244186 | 134 | 0.61069 |
82a4b552433b963daf6809d4d3f789619df85472 | 432 | py | Python | discord bot.py | salihdursun1/dc-bot | f5223f83134d6f8938d6bcf572613e80eb4ef33c | [
"Unlicense"
] | null | null | null | discord bot.py | salihdursun1/dc-bot | f5223f83134d6f8938d6bcf572613e80eb4ef33c | [
"Unlicense"
] | null | null | null | discord bot.py | salihdursun1/dc-bot | f5223f83134d6f8938d6bcf572613e80eb4ef33c | [
"Unlicense"
] | null | null | null | import discord
from discord.ext.commands import Bot
# Bot credentials -- placeholder; replace with a real token before running.
TOKEN = "<discordtoken>"
# NOTE(review): `client` is created but never used below -- `bot` is what
# actually connects. Confirm whether it is leftover scaffolding.
client = discord.Client()
bot = Bot(command_prefix="!")
# Blocks forever servicing the Discord gateway connection.
bot.run(TOKEN)
82a4daed7ce221589ab2b1a7f5ba42efc8b6ae34 | 653 | py | Python | Lesson08/problem/problem_optional_pandas.py | AlexMazonowicz/PythonFundamentals | 5451f61d3b4e7cd285dea442795c25baa5072ef9 | [
"MIT"
] | 2 | 2020-02-27T01:33:43.000Z | 2021-03-29T13:11:54.000Z | Lesson08/problem/problem_optional_pandas.py | AlexMazonowicz/PythonFundamentals | 5451f61d3b4e7cd285dea442795c25baa5072ef9 | [
"MIT"
] | null | null | null | Lesson08/problem/problem_optional_pandas.py | AlexMazonowicz/PythonFundamentals | 5451f61d3b4e7cd285dea442795c25baa5072ef9 | [
"MIT"
] | 6 | 2019-03-18T04:49:11.000Z | 2022-03-22T04:03:19.000Z | import pandas as pd
# Global variable to set the base path to our dataset folder
base_url = '../dataset/'
def update_mailing_list_pandas(filename):
    """Count the active subscribers in a mailing-list CSV.

    Loads ``base_url + filename`` with pandas and returns the number of rows
    whose ``active`` flag is set (equals 1 / True).

    :param str filename: name of the CSV file inside the dataset folder
    :return: number of active rows
    :rtype: int
    """
    # The original file contained exercise placeholders (`df = # ...`) which
    # are a syntax error; this implements the behavior described by them.
    df = pd.read_csv(base_url + filename)
    # Keep only the rows flagged as active, then count them.
    return len(df[df['active'] == 1])
# Demo invocation: prints the active-subscriber count for the sample file
# (requires ../dataset/mailing_list.csv to exist).
print(update_mailing_list_pandas('mailing_list.csv'))
| 29.681818 | 104 | 0.70291 |
82a57dff7d64fdf50fbba80937d52605a8fc479c | 7,357 | py | Python | example_problems/tutorial/euler_dir/services/is_eulerian_server.py | romeorizzi/TALight | 2b694cb487f41dd0d36d7aa39f5c9c5a21bfc18e | [
"MIT"
] | 4 | 2021-06-27T13:27:24.000Z | 2022-03-24T10:46:28.000Z | example_problems/tutorial/euler_dir/services/is_eulerian_server.py | romeorizzi/TALight | 2b694cb487f41dd0d36d7aa39f5c9c5a21bfc18e | [
"MIT"
] | 1 | 2021-01-23T06:50:31.000Z | 2021-03-17T15:35:18.000Z | example_problems/tutorial/euler_dir/services/is_eulerian_server.py | romeorizzi/TALight | 2b694cb487f41dd0d36d7aa39f5c9c5a21bfc18e | [
"MIT"
] | 5 | 2021-04-01T15:21:57.000Z | 2022-01-29T15:07:38.000Z | #!/usr/bin/env python3
# "This service will check your statement that a directed graph you provide us admits an eulerian walk (of the specified type)""
from os import EX_TEMPFAIL
from sys import stderr, exit
import collections
from multilanguage import Env, Lang, TALcolors
from TALinputs import TALinput
from euler_dir_lib import *
# METADATA OF THIS TAL_SERVICE:
args_list = [
    ('walk_type',str),
    ('feedback',str),
    ('eulerian',bool),
    ('MAXN',int),
    ('MAXM',int),
]
# Service environment: ENV exposes the declared arguments, TAc colorizes the
# terminal output, LANG renders localized feedback templates as f-strings.
ENV =Env(args_list)
TAc =TALcolors(ENV)
LANG=Lang(ENV, TAc, lambda fstring: eval(f"f'{fstring}'"))
MAXN = ENV['MAXN']
MAXM = ENV['MAXM']
# START CODING YOUR SERVICE:
# Prompt for the directed graph in "n m" header / one-arc-per-line format.
print(f"#? waiting for your directed graph.\nFormat: each line two numbers separated by space. On the first line the number of nodes (an integer n in the interval [1,{MAXN}]) and the number of arcs (an integer m in the interval [1,{MAXM}]). Then follow m lines, one for each arc, each with two numbers in the interval [0,n). These specify the tail node and the head node of the arc, in this order.\nAny line beggining with the '#' character is ignored.\nIf you prefer, you can use the 'TA_send_txt_file.py' util here to send us the lines of a file. Just plug in the util at the 'rtal connect' command like you do with any other bot and let the util feed in the file for you rather than acting by copy and paste yourself.")
n, m = TALinput(int, 2, TAc=TAc)
# Validate the declared node/arc counts against the service bounds; any
# violation prints a localized error and terminates with exit(0).
if n < 1:
    TAc.print(LANG.render_feedback("n-LB", f"# ERRORE: il numero di nodi del grafo deve essere almeno 1. Invece il primo dei numeri che hai inserito n={n}."), "red")
    exit(0)
if m < 0:
    TAc.print(LANG.render_feedback("m-LB", f"# ERRORE: il numero di archi del grafo non pu essere negativo. Invece il secondo dei numeri che hai inserito m={m}."), "red")
    exit(0)
if n > MAXN:
    TAc.print(LANG.render_feedback("n-UB", f"# ERRORE: il numero di nodi del grafo non pu eccedere {ENV['MAXN']}. Invece il primo dei numeri che hai inserito n={n}>{ENV['MAXN']}."), "red")
    exit(0)
if m > MAXM:
    TAc.print(LANG.render_feedback("m-UB", f"# ERRORE: il numero di archi del grafo non pu eccedere {ENV['MAXM']}. Invece il secondo dei numeri che hai inserito n={n}>{ENV['MAXM']}."), "red")
    exit(0)
# Read the m arcs, building both the Graph object (from euler_dir_lib) and a
# plain adjacency list used by the certificate printers.
g = Graph(int(n))
adj = [ [] for _ in range(n)]
for i in range(m):
    head, tail = TALinput(int, 2, TAc=TAc)
    if tail >= n or head >= n or tail < 0 or head < 0:
        TAc.print(LANG.render_feedback("n-at-least-1", f"# ERRORE: entrambi gli estremi di un arco devono essere nodi del grafo, ossia numeri interi ricompresi nell'intervallo [0,{ENV['MAXN']}."), "red")
        exit(0)
    g.addEdge(int(head),int(tail))
    adj[int(head)].append(int(tail))
# The submitter's claim: eulerian (1) or not eulerian (0).
eul = ENV['eulerian']
# The submitter claims the graph IS eulerian for the requested walk type:
# verify the claim and, when feedback requests it, print a certificate walk.
if eul == 1:
    if ENV['walk_type'] == "closed":
        answer1 = g.isEulerianCycle()
        if answer1 == eul:
            TAc.OK()
            if answer1 == True:
                TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"green")
                if ENV['feedback'] == "with_YES_certificate":
                    TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
                    printCircuit(adj)
                    exit(0)
            else:
                TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian cycle!"),"red")
                exit(0)
        else:
            TAc.NO()
            exit(0)
    if ENV['walk_type'] == "open":
        # Strictly-open walk: the graph must admit an eulerian walk but NOT a
        # closed eulerian cycle (hence the answer2==False conjunct).
        answer1 = g.isEulerianWalk()
        answer2 = g.isEulerianCycle()
        if answer1 == eul and answer2==False and answer1 ==True :
            TAc.OK()
            if answer1 == True:
                TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"green")
                if ENV['feedback'] == "with_YES_certificate":
                    # NOTE(review): this prints via printCircuit(adj) while the
                    # "any"-walk branch uses g.printEulerTour() -- confirm which
                    # printer is correct for an open walk.
                    TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
                    printCircuit(adj)
                    exit(0)
            else:
                TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian walk!"),"red")
                exit(0)
        else:
            TAc.NO()
            exit(0)
    if ENV['walk_type'] == "any":
        # Either kind of eulerian walk satisfies the claim.
        answer1 = g.isEulerianCycle()
        answer2 = g.isEulerianWalk()
        if answer1 == eul or answer2 == eul:
            TAc.OK()
            if answer1 == eul:
                TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"green")
                if ENV['feedback'] == "with_YES_certificate":
                    TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
                    printCircuit(adj)
                    exit(0)
            if answer2 == eul:
                TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"green")
                if ENV['feedback'] == "with_YES_certificate":
                    TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
                    g.printEulerTour()
                    exit(0)
        else:
            TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian walk/cycle!"),"red")
            exit(0)
# The submitter claims the graph is NOT eulerian: on a wrong claim, show the
# counterexample walk/cycle (in red) as evidence.
if eul == 0:
    if ENV['walk_type'] == "closed":
        answer1 = g.isEulerianCycle()
        if answer1 == eul:
            TAc.OK()
        else:
            TAc.NO()
            if answer1 == True:
                TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"red")
                if ENV['feedback'] == "with_YES_certificate":
                    TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
                    printCircuit(adj)
                    exit(0)
        exit(0)
    if ENV['walk_type'] == "open":
        # NOTE(review): answer2 is computed but never used in this branch --
        # confirm whether the strictly-open check (no cycle) was intended here
        # as in the eul == 1 case above.
        answer1 = g.isEulerianWalk()
        answer2 = g.isEulerianCycle()
        if answer1 == eul:
            TAc.OK()
        else:
            TAc.NO()
            TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"red")
            if ENV['feedback'] == "with_YES_certificate":
                TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
                printCircuit(adj)
        exit(0)
    if ENV['walk_type'] == "any":
        answer1 = g.isEulerianCycle()
        answer2 = g.isEulerianWalk()
        if answer1 == True or answer2 == True:
            TAc.NO()
            if answer1 == True:
                TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"red")
                if ENV['feedback'] == "with_YES_certificate":
                    TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
                    printCircuit(adj)
                    exit(0)
            if answer2 == True:
                TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"red")
                if ENV['feedback'] == "with_YES_certificate":
                    TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
                    g.printEulerTour()
                    exit(0)
        else:
            TAc.OK()
            exit(0)
| 43.532544 | 722 | 0.578904 |
82a59289b498d6c0a5800f00f50c27c1b22e3ddd | 1,047 | py | Python | get_vocab.py | Amir-Mehrpanah/hgraph2graph | 6d37153afe09f7684381ce56e8366675e22833e9 | [
"MIT"
] | 182 | 2019-11-15T15:59:31.000Z | 2022-03-31T09:17:40.000Z | get_vocab.py | Amir-Mehrpanah/hgraph2graph | 6d37153afe09f7684381ce56e8366675e22833e9 | [
"MIT"
] | 30 | 2020-03-03T16:35:52.000Z | 2021-12-16T04:06:57.000Z | get_vocab.py | Amir-Mehrpanah/hgraph2graph | 6d37153afe09f7684381ce56e8366675e22833e9 | [
"MIT"
] | 60 | 2019-11-15T05:06:11.000Z | 2022-03-31T16:43:12.000Z | import sys
import argparse
from hgraph import *
from rdkit import Chem
from multiprocessing import Pool
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--ncpu', type=int, default=1)
    args = parser.parse_args()
    # Take the first two whitespace-separated tokens of every stdin line
    # (molecule strings), de-duplicated.
    data = [mol for line in sys.stdin for mol in line.split()[:2]]
    data = list(set(data))
    # Split the molecules into one roughly equal batch per worker.
    batch_size = len(data) // args.ncpu + 1
    batches = [data[i : i + batch_size] for i in range(0, len(data), batch_size)]
    pool = Pool(args.ncpu)
    # NOTE(review): `process` is not defined in this file -- presumably
    # provided by the star import from hgraph; confirm.
    vocab_list = pool.map(process, batches)
    # Flatten the per-batch vocabularies, de-duplicate, and emit sorted pairs.
    vocab = [(x,y) for vocab in vocab_list for x,y in vocab]
    vocab = list(set(vocab))
    for x,y in sorted(vocab):
        print(x, y)
| 27.552632 | 81 | 0.603629 |
82a5daea9d746a5e0fd1a18fd73ba8a7a242e08f | 612 | py | Python | web_app/cornwall/views.py | blackradley/heathmynd | 4495f8fadef9d3a36a7d5b49fae2b61cceb158bc | [
"MIT"
] | null | null | null | web_app/cornwall/views.py | blackradley/heathmynd | 4495f8fadef9d3a36a7d5b49fae2b61cceb158bc | [
"MIT"
] | 4 | 2018-11-06T16:15:10.000Z | 2018-11-07T12:03:09.000Z | web_app/cornwall/views.py | blackradley/heathmynd | 4495f8fadef9d3a36a7d5b49fae2b61cceb158bc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
""" test """
from __future__ import unicode_literals
from django.template.loader import get_template
from django.contrib import messages
# Create your views here.
from django.http import HttpResponse
def index(request):
    """Render the Cornwall index page with a single demo flash message."""
    page = get_template('cornwall/index.html')
    messages.set_level(request, messages.DEBUG)
    # Drain previously queued messages so only the fresh one is displayed.
    list(messages.get_messages(request))
    messages.add_message(request, messages.INFO, 'Hello world.')
    rendered = page.render({'nbar': 'cornwall'}, request)
    return HttpResponse(rendered)
| 32.210526 | 73 | 0.730392 |
82a91b76040314d727ba1163f259b5cbea984d08 | 838 | py | Python | vshare/user_/urls.py | jeyrce/vshare | 269fe05a4dc36f6fbf831ddf5057af95312b75ca | [
"Apache-2.0"
] | 4 | 2019-11-30T06:07:14.000Z | 2020-10-27T08:48:23.000Z | vshare/user_/urls.py | jeeyshe/vshare | 269fe05a4dc36f6fbf831ddf5057af95312b75ca | [
"Apache-2.0"
] | null | null | null | vshare/user_/urls.py | jeeyshe/vshare | 269fe05a4dc36f6fbf831ddf5057af95312b75ca | [
"Apache-2.0"
] | null | null | null | # coding = utf-8
# env = python3.5.2
# author = lujianxin
# time = 201x-xx-xx
# purpose= - - -
from django.urls import re_path
from . import views
# URL routes for the user_ app: each pattern maps to a class-based view.
# NOTE(review): the regexes are not anchored with '^', so e.g. r'login$'
# matches any path *ending* in "login" -- confirm whether that is intended.
urlpatterns = [
    #
    re_path(r'usercenter$', views.UserCenter.as_view()),
    re_path(r'details/(\d+)$', views.UserDetails.as_view()),
    re_path(r'login$', views.Login.as_view()),
    re_path(r'regist$', views.Regist.as_view()),
    re_path(r'logout$', views.Logout.as_view()),
    re_path(r'securecenter$', views.SecureCenter.as_view()),
    re_path(r'write_article$', views.WriteArticle.as_view()),
    re_path(r'change_art/(\d+)$', views.ChangeArt.as_view()),
    re_path(r'cpwd$', views.ModifyPwd.as_view()),
    re_path(r'findpwd$', views.FindPwd.as_view()),
    re_path(r'cpwdsafe$', views.ModifyPwdSafe.as_view()),
]
# This module is imported by Django's URL resolver; running it directly
# intentionally does nothing.
if __name__ == '__main__':
    pass
| 27.032258 | 61 | 0.656325 |
82a930b9747975fe0452c3e4307e6fa5f2321ccf | 1,825 | py | Python | Day_3/task2.py | DjaffDjaff/AdventOfCode | cf4f60dc71e349a44f4b5d07dbf4aa8555a4a37a | [
"MIT"
] | 2 | 2021-12-03T23:14:28.000Z | 2021-12-03T23:16:54.000Z | Day_3/task2.py | DjaffDjaff/AdventOfCode | cf4f60dc71e349a44f4b5d07dbf4aa8555a4a37a | [
"MIT"
] | null | null | null | Day_3/task2.py | DjaffDjaff/AdventOfCode | cf4f60dc71e349a44f4b5d07dbf4aa8555a4a37a | [
"MIT"
] | null | null | null | import math
# Advent of Code 2021 day 3 part 2: derive the O2-generator and CO2-scrubber
# ratings from a list of 12-bit diagnostic numbers read from data.txt.
oxygen_rating = 0
co2_rating = 0
length = 0
n_bits = 12
common = [0] * n_bits
# NOTE(review): `anti` is never used below -- confirm it is leftover scaffolding.
anti = [0] * n_bits
numbers = []
# Parse data.txt: one binary string per line, kept as a list of bit ints,
# while tallying per-column one-counts into `common`.
with open("data.txt", "r") as f:
    lines = f.readlines()
    length = len(lines)
    for line in lines:
        bitmap = list(line.strip("\n"))
        bitmap = [int(bit) for bit in bitmap]
        numbers.append(bitmap)
        #print(bitmap)
        for j, bit in enumerate(bitmap):
            common[j] += bit
# Let's find oxygen generator rating first
numbers_copy = [number for number in numbers]
# O2 rating: repeatedly keep the numbers whose bit i equals the most common
# bit (ties favor 1), until a single candidate remains.
for i in range(n_bits):
    # Update common
    # NOTE(review): new_bitmap is not defined in this excerpt -- presumably it
    # recomputes the per-column one-counts of the remaining numbers; confirm.
    common = new_bitmap(numbers)
    # if more 1s in bit i
    if common[i] >= len(numbers)/2:
        most_c = 1
    else:
        most_c = 0
    #print(f"In round {i+1}, most common: {most_c}")
    numbers[:] = [number for number in numbers if (number[i] == most_c)]
    #print(numbers)
    if len(numbers) < 2:
        break
oxygen_rating = int("".join(str(bit) for bit in numbers[0]), 2)
print("O2:",oxygen_rating)
# CO2 rating: same loop, but keep numbers whose bit i DIFFERS from the most
# common bit (i.e. the least common value).
for i in range(n_bits):
    # Update common
    common = new_bitmap(numbers_copy)
    # if more 1s in bit i
    if common[i] >= len(numbers_copy)/2:
        most_c = 1
    else:
        most_c = 0
    #print(f"In round {i+1}, most common: {most_c}")
    numbers_copy[:] = [number for number in numbers_copy if (number[i] != most_c)]
    #print(numbers_copy)
    if len(numbers_copy) < 2:
        break
co2_rating = int("".join(str(bit) for bit in numbers_copy[0]), 2)
print("CO2:", co2_rating)
# The puzzle answer is the product of the two ratings.
print("Answer: ", oxygen_rating*co2_rating)
| 23.701299 | 83 | 0.566575 |
82a9ed6ace49d5ef752eef71a6cddc94ed97513e | 7,838 | py | Python | polyjuice/filters_and_selectors/perplex_filter.py | shwang/polyjuice | 5f9a3a23d95e4a3877cc048cbcef01f071dc6353 | [
"BSD-3-Clause"
] | 38 | 2021-05-25T02:18:40.000Z | 2022-03-25T12:09:58.000Z | polyjuice/filters_and_selectors/perplex_filter.py | shwang/polyjuice | 5f9a3a23d95e4a3877cc048cbcef01f071dc6353 | [
"BSD-3-Clause"
] | 7 | 2021-06-03T04:08:55.000Z | 2021-12-06T06:53:05.000Z | polyjuice/filters_and_selectors/perplex_filter.py | shwang/polyjuice | 5f9a3a23d95e4a3877cc048cbcef01f071dc6353 | [
"BSD-3-Clause"
] | 5 | 2021-11-12T21:43:59.000Z | 2022-03-22T21:51:08.000Z | import math
import numpy as np
from munch import Munch
from transformers import GPT2LMHeadModel, GPT2TokenizerFast
import torch
from copy import deepcopy
#########################################################################
### compute perplexity
#########################################################################
def compute_sent_perplexity(sentences, perplex_scorer, log=True, reduce="prod", is_normalize=False, is_cuda=True):
    """Compute one perplexity-based score per sentence (used for filtering).

    Args:
        sentences: iterable of sentence strings to score.
        perplex_scorer: object exposing ``model`` and ``tokenizer`` attributes.
        log (bool, optional): forwarded to ``reduce_perplex_prob``. Defaults to True.
        reduce (str, optional): reduction mode forwarded to ``reduce_perplex_prob``.
            Defaults to "prod".
        is_normalize (bool, optional): if True, each score is additionally passed
            through ``normalize_score`` with the sentence token count. Defaults to False.
        is_cuda (bool, optional): forwarded to ``_tokens_log_prob``. Defaults to True.

    Returns:
        list: one score per input sentence, in input order.
    """
    model, tokenizer = perplex_scorer.model, perplex_scorer.tokenizer
    scores = []
    for sent_log_prob, _sent_ids, sent_tokens in _tokens_log_prob(
            sentences, model, tokenizer, is_cuda=is_cuda):
        sent_score = reduce_perplex_prob(sent_log_prob, reduce=reduce, log=log)
        if is_normalize:
            sent_score = normalize_score(sent_score, len(sent_tokens))
        scores.append(sent_score)
    return scores
def compute_delta_perplexity(edit_ops, perplex_scorer, is_normalize=False, is_cuda=True):
    """Score how much each edit changes perplexity and return the worst one.

    For every non-``equal`` edit op, the (sentence, phrase) pair before and
    after the edit is scored with ``compute_phrase_perplexity``; the delta
    (before minus after) is computed at both sentence and phrase level.

    Args:
        edit_ops: edit operations with ``fromz_full``/``toz_full`` (used for
            insert/delete) and ``fromz_core``/``toz_core`` spans.
        perplex_scorer: object holding the perplexity model + tokenizer.
        is_normalize (bool, optional): normalize scores by length. Defaults to False.
        is_cuda (bool, optional): run the model on GPU. Defaults to True.

    Returns:
        Munch: ``pr_sent`` and ``pr_phrase`` deltas of the edit op whose
        largest component is maximal (i.e. the least grammatical edit).
    """
    ops = [op for op in edit_ops if op.op != "equal"]
    sent_phrase_pairs = []
    for op in ops:
        # insert/delete have no core span, so fall back to the full span.
        if op.op == "insert" or op.op == "delete":
            before, after = op.fromz_full, op.toz_full
        else:
            before, after = op.fromz_core, op.toz_core
        sent_phrase_pairs.append((before.doc.text, before.text))
        sent_phrase_pairs.append((after.doc.text, after.text))
    scores = compute_phrase_perplexity(
        sent_phrase_pairs, perplex_scorer, is_normalize=is_normalize, is_cuda=is_cuda)
    deltas = []
    for idx in range(len(ops)):
        # Consecutive score pairs belong to the same op; delta = before - after
        # (lower is better, so a large positive delta flags a bad edit).
        before_sent, before_phrase = scores[2 * idx]
        after_sent, after_phrase = scores[2 * idx + 1]
        deltas.append(Munch(
            pr_sent=before_sent - after_sent,
            pr_phrase=before_phrase - after_phrase))
    # Pick the op with the most ungrammatical part (same as sorting
    # descending by max(pr_sent, pr_phrase) and taking the first).
    return max(deltas, key=lambda d: max(d.pr_sent, d.pr_phrase))
| 43.787709 | 122 | 0.666114 |
82ab0f9e283b82fa75f97cebd66085d095f1ab43 | 2,030 | py | Python | Python/example_controllers/visual_perception/flow.py | ricklentz/tdw | da40eec151acae20b28d6486defb4358d96adb0e | [
"BSD-2-Clause"
] | null | null | null | Python/example_controllers/visual_perception/flow.py | ricklentz/tdw | da40eec151acae20b28d6486defb4358d96adb0e | [
"BSD-2-Clause"
] | null | null | null | Python/example_controllers/visual_perception/flow.py | ricklentz/tdw | da40eec151acae20b28d6486defb4358d96adb0e | [
"BSD-2-Clause"
] | null | null | null | from tdw.controller import Controller
from tdw.tdw_utils import TDWUtils
from tdw.add_ons.image_capture import ImageCapture
from tdw.backend.paths import EXAMPLE_CONTROLLER_OUTPUT_PATH
"""
Get the _flow pass.
"""
c = Controller()
# One unique object id per model in the scene.
object_ids = [c.get_unique_id() for _ in range(4)]
table_id, rh10_id, jug01_id, jug05_id = object_ids
object_names = {table_id: "small_table_green_marble",
                rh10_id: "rh10",
                jug01_id: "jug01",
                jug05_id: "jug05"}
output_directory = EXAMPLE_CONTROLLER_OUTPUT_PATH.joinpath("flow")
# Capture only the optical-flow (_flow) pass from avatar "a".
print(f"Images will be saved to: {output_directory}")
capture = ImageCapture(path=output_directory, pass_masks=["_flow"], avatar_ids=["a"])
c.add_ons.append(capture)
commands = [TDWUtils.create_empty_room(12, 12)]
commands.append(c.get_add_object(object_names[table_id], object_id=table_id))
commands.append(c.get_add_object(object_names[rh10_id],
                                 position={"x": 0.7, "y": 0, "z": 0.4},
                                 rotation={"x": 0, "y": 30, "z": 0},
                                 object_id=rh10_id))
commands.append(c.get_add_object(model_name=object_names[jug01_id],
                                 position={"x": -0.3, "y": 0.9, "z": 0.2},
                                 object_id=jug01_id))
commands.append(c.get_add_object(object_names[jug05_id],
                                 position={"x": 0.3, "y": 0.9, "z": -0.2},
                                 object_id=jug05_id))
# Launch the rh10 model so there is motion for the flow pass to record.
commands.append({"$type": "apply_force_to_object",
                 "id": rh10_id,
                 "force": {"x": 0, "y": 5, "z": -200}})
commands.extend(TDWUtils.create_avatar(position={"x": 2.478, "y": 1.602, "z": 1.412},
                                       look_at={"x": 0, "y": 0.2, "z": 0},
                                       avatar_id="a"))
c.communicate(commands)
# Advance a few frames so the moving object produces non-trivial flow images.
for _ in range(3):
    c.communicate([])
c.communicate({"$type": "terminate"})
| 39.803922 | 85 | 0.565025 |
82ac7d1720a0d22103d819e764e895c0a4bca209 | 2,844 | py | Python | main.py | pepetox/gae-angular-materialize | c6aee16dcc2eba75a254d783661e3115e492faa8 | [
"MIT"
] | 1 | 2015-10-18T13:48:23.000Z | 2015-10-18T13:48:23.000Z | main.py | pepetox/gae-angular-materialize | c6aee16dcc2eba75a254d783661e3115e492faa8 | [
"MIT"
] | null | null | null | main.py | pepetox/gae-angular-materialize | c6aee16dcc2eba75a254d783661e3115e492faa8 | [
"MIT"
] | null | null | null | # Copyright 2013 Google, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import modelCourse as model
import webapp2
from google.appengine.api import users
# REST route table: each path maps to the handler class that serves it.
_ROUTES = [
    ('/rest/query', QueryHandler),
    ('/rest/insert', InsertHandler),
    ('/rest/delete', DeleteHandler),
    ('/rest/update', UpdateHandler),
    ('/rest/user', GetUser),
]

# WSGI entry point used by App Engine (debug=True shows stack traces).
APP = webapp2.WSGIApplication(_ROUTES, debug=True)
| 26.830189 | 128 | 0.619902 |
82aec0d620a3d2b504e341e4b1d842730a0ba06a | 586 | py | Python | config.py | laundmo/counter-generator | 52b96ede55ea0d961c414102762c6430275d9fb9 | [
"MIT"
] | null | null | null | config.py | laundmo/counter-generator | 52b96ede55ea0d961c414102762c6430275d9fb9 | [
"MIT"
] | 4 | 2021-02-27T07:56:25.000Z | 2021-02-27T08:00:10.000Z | config.py | laundmo/counter-generator | 52b96ede55ea0d961c414102762c6430275d9fb9 | [
"MIT"
] | null | null | null | from sys import platform
try:
from yaml import CSafeLoader as Loader # use the C loader when possible
except ImportError:
from yaml import SafeLoader as Loader
import yaml
with open("config.yml") as f:
config = yaml.load(f, Loader=Loader) # load the config yaml
if platform in ("linux", "linux2", "win32"):
import PySimpleGUI
elif (
platform == "darwin"
): # Have to use web/remi on MacOS as the normal tkinter version causes a OS error
# TODO: Test on MacOS with tkinter possibly figure out how to get it working.
import PySimpleGUIWeb as PySimpleGUI
| 30.842105 | 83 | 0.721843 |
82affa262e4e61eb46885268e69de57c9213002a | 25,609 | py | Python | pysnmp/CISCO-IETF-PW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-IETF-PW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-IETF-PW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-IETF-PW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-IETF-PW-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:43:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion")
CpwVcType, CpwGroupID, CpwVcIndexType, CpwOperStatus, CpwVcIDType = mibBuilder.importSymbols("CISCO-IETF-PW-TC-MIB", "CpwVcType", "CpwGroupID", "CpwVcIndexType", "CpwOperStatus", "CpwVcIDType")
ciscoExperiment, = mibBuilder.importSymbols("CISCO-SMI", "ciscoExperiment")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Counter32, MibIdentifier, experimental, ModuleIdentity, Unsigned32, NotificationType, IpAddress, TimeTicks, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, ObjectIdentity, Counter64, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "experimental", "ModuleIdentity", "Unsigned32", "NotificationType", "IpAddress", "TimeTicks", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Gauge32", "ObjectIdentity", "Counter64", "Integer32")
TruthValue, TimeStamp, StorageType, RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TimeStamp", "StorageType", "RowStatus", "TextualConvention", "DisplayString")
# Generated by pysmi from the CISCO-IETF-PW-MIB ASN.1 source (see file header);
# do not hand-edit. MIB module identity and revision history.
cpwVcMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 10, 106))
cpwVcMIB.setRevisions(('2004-03-17 12:00', '2003-02-26 12:00', '2002-05-26 12:00', '2002-01-30 12:00', '2001-11-07 12:00', '2001-07-11 12:00',))
if mibBuilder.loadTexts: cpwVcMIB.setLastUpdated('200403171200Z')
if mibBuilder.loadTexts: cpwVcMIB.setOrganization('Cisco Systems, Inc.')
# Top-level OID subtrees: objects, notifications, conformance statements.
cpwVcObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 1))
cpwVcNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 2))
cpwVcConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3))
# Read-only scalar with the next available VC index, plus the main VC table
# and its row definition (rows are indexed by cpwVcIndex).
cpwVcIndexNext = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcIndexNext.setStatus('current')
cpwVcTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2), )
if mibBuilder.loadTexts: cpwVcTable.setStatus('current')
cpwVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcEntry.setStatus('current')
# Generated columns of cpwVcTable: per-VC configuration/status objects
# (index, type, owner, PSN type, priorities, peer address, labels, counters,
# admin/oper status, row status and storage type). Generated code; do not hand-edit.
cpwVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 1), CpwVcIndexType())
if mibBuilder.loadTexts: cpwVcIndex.setStatus('current')
cpwVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 2), CpwVcType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcType.setStatus('current')
cpwVcOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("manual", 1), ("maintenanceProtocol", 2), ("other", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcOwner.setStatus('current')
cpwVcPsnType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("mpls", 1), ("l2tp", 2), ("ip", 3), ("mplsOverIp", 4), ("gre", 5), ("other", 6)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPsnType.setStatus('current')
cpwVcSetUpPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcSetUpPriority.setStatus('current')
cpwVcHoldingPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcHoldingPriority.setStatus('current')
cpwVcInboundMode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("loose", 1), ("strict", 2))).clone('loose')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcInboundMode.setStatus('current')
cpwVcPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 8), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPeerAddrType.setStatus('current')
cpwVcPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 9), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPeerAddr.setStatus('current')
cpwVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 10), CpwVcIDType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcID.setStatus('current')
cpwVcLocalGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 11), CpwGroupID()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalGroupID.setStatus('current')
cpwVcControlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 12), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcControlWord.setStatus('current')
cpwVcLocalIfMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 13), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalIfMtu.setStatus('current')
cpwVcLocalIfString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 14), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalIfString.setStatus('current')
cpwVcRemoteGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 15), CpwGroupID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteGroupID.setStatus('current')
cpwVcRemoteControlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noControlWord", 1), ("withControlWord", 2), ("notYetKnown", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcRemoteControlWord.setStatus('current')
cpwVcRemoteIfMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 17), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteIfMtu.setStatus('current')
cpwVcRemoteIfString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 18), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteIfString.setStatus('current')
cpwVcOutboundVcLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 19), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcOutboundVcLabel.setStatus('current')
cpwVcInboundVcLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 20), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcInboundVcLabel.setStatus('current')
cpwVcName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 21), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcName.setStatus('current')
cpwVcDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 22), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcDescr.setStatus('current')
cpwVcCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 23), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcCreateTime.setStatus('current')
cpwVcUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcUpTime.setStatus('current')
cpwVcAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcAdminStatus.setStatus('current')
cpwVcOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 26), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcOperStatus.setStatus('current')
cpwVcInboundOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 27), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcInboundOperStatus.setStatus('current')
cpwVcOutboundOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 28), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcOutboundOperStatus.setStatus('current')
cpwVcTimeElapsed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcTimeElapsed.setStatus('current')
cpwVcValidIntervals = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcValidIntervals.setStatus('current')
cpwVcRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 31), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcRowStatus.setStatus('current')
cpwVcStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 32), StorageType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcStorageType.setStatus('current')
# Generated per-VC performance tables: current-interval counters, a per-interval
# history table (indexed additionally by cpwVcPerfIntervalNumber, 1..96), and
# cumulative totals plus a global error-packet scalar. Generated code; do not hand-edit.
cpwVcPerfCurrentTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3), )
if mibBuilder.loadTexts: cpwVcPerfCurrentTable.setStatus('current')
cpwVcPerfCurrentEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcPerfCurrentEntry.setStatus('current')
cpwVcPerfCurrentInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentInHCPackets.setStatus('current')
cpwVcPerfCurrentInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentInHCBytes.setStatus('current')
cpwVcPerfCurrentOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentOutHCPackets.setStatus('current')
cpwVcPerfCurrentOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentOutHCBytes.setStatus('current')
cpwVcPerfIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4), )
if mibBuilder.loadTexts: cpwVcPerfIntervalTable.setStatus('current')
cpwVcPerfIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"), (0, "CISCO-IETF-PW-MIB", "cpwVcPerfIntervalNumber"))
if mibBuilder.loadTexts: cpwVcPerfIntervalEntry.setStatus('current')
cpwVcPerfIntervalNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96)))
if mibBuilder.loadTexts: cpwVcPerfIntervalNumber.setStatus('current')
cpwVcPerfIntervalValidData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalValidData.setStatus('current')
cpwVcPerfIntervalTimeElapsed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalTimeElapsed.setStatus('current')
cpwVcPerfIntervalInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalInHCPackets.setStatus('current')
cpwVcPerfIntervalInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalInHCBytes.setStatus('current')
cpwVcPerfIntervalOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalOutHCPackets.setStatus('current')
cpwVcPerfIntervalOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalOutHCBytes.setStatus('current')
cpwVcPerfTotalTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5), )
if mibBuilder.loadTexts: cpwVcPerfTotalTable.setStatus('current')
cpwVcPerfTotalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcPerfTotalEntry.setStatus('current')
cpwVcPerfTotalInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalInHCPackets.setStatus('current')
cpwVcPerfTotalInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalInHCBytes.setStatus('current')
cpwVcPerfTotalOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalOutHCPackets.setStatus('current')
cpwVcPerfTotalOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalOutHCBytes.setStatus('current')
cpwVcPerfTotalDiscontinuityTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalDiscontinuityTime.setStatus('current')
cpwVcPerfTotalErrorPackets = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalErrorPackets.setStatus('current')
# Generated reverse-lookup tables (VC id -> index and peer -> index), the
# notification-control scalars, and the cpwVcDown/cpwVcUp notification types.
# Generated code; do not hand-edit.
cpwVcIdMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7), )
if mibBuilder.loadTexts: cpwVcIdMappingTable.setStatus('current')
cpwVcIdMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcType"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcID"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingPeerAddrType"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingPeerAddr"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcIndex"))
if mibBuilder.loadTexts: cpwVcIdMappingEntry.setStatus('current')
cpwVcIdMappingVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 1), CpwVcType())
if mibBuilder.loadTexts: cpwVcIdMappingVcType.setStatus('current')
cpwVcIdMappingVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 2), CpwVcIDType())
if mibBuilder.loadTexts: cpwVcIdMappingVcID.setStatus('current')
cpwVcIdMappingPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 3), InetAddressType())
if mibBuilder.loadTexts: cpwVcIdMappingPeerAddrType.setStatus('current')
cpwVcIdMappingPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 4), InetAddress())
if mibBuilder.loadTexts: cpwVcIdMappingPeerAddr.setStatus('current')
cpwVcIdMappingVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 5), CpwVcIndexType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcIdMappingVcIndex.setStatus('current')
cpwVcPeerMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8), )
if mibBuilder.loadTexts: cpwVcPeerMappingTable.setStatus('current')
cpwVcPeerMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingPeerAddrType"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingPeerAddr"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcType"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcID"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcIndex"))
if mibBuilder.loadTexts: cpwVcPeerMappingEntry.setStatus('current')
cpwVcPeerMappingPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 1), InetAddressType())
if mibBuilder.loadTexts: cpwVcPeerMappingPeerAddrType.setStatus('current')
cpwVcPeerMappingPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 2), InetAddress())
if mibBuilder.loadTexts: cpwVcPeerMappingPeerAddr.setStatus('current')
cpwVcPeerMappingVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 3), CpwVcType())
if mibBuilder.loadTexts: cpwVcPeerMappingVcType.setStatus('current')
cpwVcPeerMappingVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 4), CpwVcIDType())
if mibBuilder.loadTexts: cpwVcPeerMappingVcID.setStatus('current')
cpwVcPeerMappingVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 5), CpwVcIndexType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPeerMappingVcIndex.setStatus('current')
cpwVcUpDownNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 9), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpwVcUpDownNotifEnable.setStatus('current')
cpwVcNotifRate = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 10), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpwVcNotifRate.setStatus('current')
cpwVcDown = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 106, 2, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"))
if mibBuilder.loadTexts: cpwVcDown.setStatus('current')
cpwVcUp = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 106, 2, 2)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"))
if mibBuilder.loadTexts: cpwVcUp.setStatus('current')
# Generated conformance section: module compliance statement plus the object
# and notification groups it references. The pysnmp-version guard mirrors how
# pysmi emits setStatus for newer pysnmp releases. Generated code; do not hand-edit.
cpwVcGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1))
cpwVcCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 2))
cpwModuleCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 2, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcGroup"), ("CISCO-IETF-PW-MIB", "cpwVcPeformanceGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwModuleCompliance = cpwModuleCompliance.setStatus('current')
cpwVcGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcIndexNext"), ("CISCO-IETF-PW-MIB", "cpwVcType"), ("CISCO-IETF-PW-MIB", "cpwVcOwner"), ("CISCO-IETF-PW-MIB", "cpwVcPsnType"), ("CISCO-IETF-PW-MIB", "cpwVcSetUpPriority"), ("CISCO-IETF-PW-MIB", "cpwVcHoldingPriority"), ("CISCO-IETF-PW-MIB", "cpwVcInboundMode"), ("CISCO-IETF-PW-MIB", "cpwVcPeerAddrType"), ("CISCO-IETF-PW-MIB", "cpwVcPeerAddr"), ("CISCO-IETF-PW-MIB", "cpwVcID"), ("CISCO-IETF-PW-MIB", "cpwVcLocalGroupID"), ("CISCO-IETF-PW-MIB", "cpwVcControlWord"), ("CISCO-IETF-PW-MIB", "cpwVcLocalIfMtu"), ("CISCO-IETF-PW-MIB", "cpwVcLocalIfString"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteGroupID"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteControlWord"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteIfMtu"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteIfString"), ("CISCO-IETF-PW-MIB", "cpwVcOutboundVcLabel"), ("CISCO-IETF-PW-MIB", "cpwVcInboundVcLabel"), ("CISCO-IETF-PW-MIB", "cpwVcName"), ("CISCO-IETF-PW-MIB", "cpwVcDescr"), ("CISCO-IETF-PW-MIB", "cpwVcCreateTime"), ("CISCO-IETF-PW-MIB", "cpwVcUpTime"), ("CISCO-IETF-PW-MIB", "cpwVcAdminStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOutboundOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcInboundOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcTimeElapsed"), ("CISCO-IETF-PW-MIB", "cpwVcValidIntervals"), ("CISCO-IETF-PW-MIB", "cpwVcRowStatus"), ("CISCO-IETF-PW-MIB", "cpwVcStorageType"), ("CISCO-IETF-PW-MIB", "cpwVcUpDownNotifEnable"), ("CISCO-IETF-PW-MIB", "cpwVcNotifRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcGroup = cpwVcGroup.setStatus('current')
cpwVcPeformanceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 2)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalValidData"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalTimeElapsed"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalDiscontinuityTime"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalErrorPackets"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcPeformanceGroup = cpwVcPeformanceGroup.setStatus('current')
cpwVcMappingTablesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 3)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcIdMappingVcIndex"), ("CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcIndex"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcMappingTablesGroup = cpwVcMappingTablesGroup.setStatus('current')
cpwVcNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 4)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcUp"), ("CISCO-IETF-PW-MIB", "cpwVcDown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcNotificationsGroup = cpwVcNotificationsGroup.setStatus('current')
# Register every generated symbol with the MIB builder under this module's
# name so other MIB modules can import them. Generated code; do not hand-edit.
mibBuilder.exportSymbols("CISCO-IETF-PW-MIB", cpwVcDown=cpwVcDown, cpwVcIdMappingVcType=cpwVcIdMappingVcType, cpwVcControlWord=cpwVcControlWord, cpwVcPerfIntervalValidData=cpwVcPerfIntervalValidData, cpwVcSetUpPriority=cpwVcSetUpPriority, cpwVcPsnType=cpwVcPsnType, cpwVcStorageType=cpwVcStorageType, cpwVcPeerMappingVcID=cpwVcPeerMappingVcID, cpwVcPeerMappingTable=cpwVcPeerMappingTable, cpwVcPerfTotalInHCBytes=cpwVcPerfTotalInHCBytes, PYSNMP_MODULE_ID=cpwVcMIB, cpwVcPerfIntervalTimeElapsed=cpwVcPerfIntervalTimeElapsed, cpwVcIdMappingPeerAddrType=cpwVcIdMappingPeerAddrType, cpwVcPeerAddrType=cpwVcPeerAddrType, cpwVcHoldingPriority=cpwVcHoldingPriority, cpwVcPerfTotalInHCPackets=cpwVcPerfTotalInHCPackets, cpwVcIndexNext=cpwVcIndexNext, cpwVcIdMappingTable=cpwVcIdMappingTable, cpwVcMappingTablesGroup=cpwVcMappingTablesGroup, cpwVcPeformanceGroup=cpwVcPeformanceGroup, cpwVcEntry=cpwVcEntry, cpwVcPeerAddr=cpwVcPeerAddr, cpwVcInboundVcLabel=cpwVcInboundVcLabel, cpwVcPerfTotalOutHCBytes=cpwVcPerfTotalOutHCBytes, cpwVcMIB=cpwVcMIB, cpwVcValidIntervals=cpwVcValidIntervals, cpwVcOwner=cpwVcOwner, cpwVcRemoteGroupID=cpwVcRemoteGroupID, cpwVcPerfIntervalTable=cpwVcPerfIntervalTable, cpwVcPeerMappingPeerAddr=cpwVcPeerMappingPeerAddr, cpwVcConformance=cpwVcConformance, cpwVcPerfIntervalOutHCPackets=cpwVcPerfIntervalOutHCPackets, cpwVcInboundOperStatus=cpwVcInboundOperStatus, cpwVcPerfCurrentTable=cpwVcPerfCurrentTable, cpwVcPerfTotalDiscontinuityTime=cpwVcPerfTotalDiscontinuityTime, cpwVcOutboundVcLabel=cpwVcOutboundVcLabel, cpwVcUp=cpwVcUp, cpwVcIdMappingVcID=cpwVcIdMappingVcID, cpwVcLocalIfString=cpwVcLocalIfString, cpwVcUpTime=cpwVcUpTime, cpwVcPeerMappingPeerAddrType=cpwVcPeerMappingPeerAddrType, cpwVcType=cpwVcType, cpwVcPeerMappingVcType=cpwVcPeerMappingVcType, cpwVcPerfIntervalEntry=cpwVcPerfIntervalEntry, cpwVcPerfIntervalNumber=cpwVcPerfIntervalNumber, cpwVcName=cpwVcName, cpwVcPerfIntervalOutHCBytes=cpwVcPerfIntervalOutHCBytes, cpwVcRemoteIfMtu=cpwVcRemoteIfMtu, 
cpwVcIdMappingPeerAddr=cpwVcIdMappingPeerAddr, cpwVcID=cpwVcID, cpwVcPerfIntervalInHCPackets=cpwVcPerfIntervalInHCPackets, cpwVcPerfTotalEntry=cpwVcPerfTotalEntry, cpwVcNotificationsGroup=cpwVcNotificationsGroup, cpwVcCreateTime=cpwVcCreateTime, cpwVcNotifRate=cpwVcNotifRate, cpwVcPerfCurrentInHCBytes=cpwVcPerfCurrentInHCBytes, cpwVcRemoteControlWord=cpwVcRemoteControlWord, cpwVcLocalIfMtu=cpwVcLocalIfMtu, cpwVcNotifications=cpwVcNotifications, cpwVcInboundMode=cpwVcInboundMode, cpwVcRemoteIfString=cpwVcRemoteIfString, cpwVcGroup=cpwVcGroup, cpwVcPerfTotalTable=cpwVcPerfTotalTable, cpwVcPerfTotalOutHCPackets=cpwVcPerfTotalOutHCPackets, cpwVcPeerMappingEntry=cpwVcPeerMappingEntry, cpwVcTable=cpwVcTable, cpwVcGroups=cpwVcGroups, cpwVcPerfIntervalInHCBytes=cpwVcPerfIntervalInHCBytes, cpwModuleCompliance=cpwModuleCompliance, cpwVcPerfCurrentOutHCPackets=cpwVcPerfCurrentOutHCPackets, cpwVcObjects=cpwVcObjects, cpwVcPeerMappingVcIndex=cpwVcPeerMappingVcIndex, cpwVcCompliances=cpwVcCompliances, cpwVcLocalGroupID=cpwVcLocalGroupID, cpwVcTimeElapsed=cpwVcTimeElapsed, cpwVcIndex=cpwVcIndex, cpwVcRowStatus=cpwVcRowStatus, cpwVcPerfTotalErrorPackets=cpwVcPerfTotalErrorPackets, cpwVcIdMappingEntry=cpwVcIdMappingEntry, cpwVcDescr=cpwVcDescr, cpwVcPerfCurrentEntry=cpwVcPerfCurrentEntry, cpwVcPerfCurrentInHCPackets=cpwVcPerfCurrentInHCPackets, cpwVcIdMappingVcIndex=cpwVcIdMappingVcIndex, cpwVcOperStatus=cpwVcOperStatus, cpwVcOutboundOperStatus=cpwVcOutboundOperStatus, cpwVcAdminStatus=cpwVcAdminStatus, cpwVcUpDownNotifEnable=cpwVcUpDownNotifEnable, cpwVcPerfCurrentOutHCBytes=cpwVcPerfCurrentOutHCBytes)
| 130.658163 | 3,605 | 0.7545 |
82b3cb2854e832088e3570125c2b7f5602582762 | 200 | py | Python | configs/sem_fpn/onaho_fpn.py | xiong-jie-y/mmsegmentation | 91159e2e5b9ac258440d714a40e0df6083aafee4 | [
"Apache-2.0"
] | 1 | 2021-09-20T22:48:16.000Z | 2021-09-20T22:48:16.000Z | configs/sem_fpn/onaho_fpn.py | xiong-jie-y/mmsegmentation | 91159e2e5b9ac258440d714a40e0df6083aafee4 | [
"Apache-2.0"
] | null | null | null | configs/sem_fpn/onaho_fpn.py | xiong-jie-y/mmsegmentation | 91159e2e5b9ac258440d714a40e0df6083aafee4 | [
"Apache-2.0"
] | null | null | null | _base_ = [
'../_base_/models/fpn_r50.py', '../_base_/datasets/onaho.py',
'../_base_/default_runtime.py', '../_base_/schedules/schedule_160k.py'
]
model = dict(decode_head=dict(num_classes=2))
| 33.333333 | 74 | 0.685 |
82b41b0e9cd71d4a56a4ea2a15f286f90fd054f6 | 4,324 | py | Python | jgem/dataset/__init__.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | [
"MIT"
] | null | null | null | jgem/dataset/__init__.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | [
"MIT"
] | null | null | null | jgem/dataset/__init__.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | [
"MIT"
] | null | null | null | """
Expression Dataset for analysis of matrix (RNASeq/microarray) data with annotations
"""
import pandas as PD
import numpy as N
from matplotlib import pylab as P
from collections import OrderedDict
from ast import literal_eval
# from ..plot.matrix import matshow_clustered
def read_bioinfo3_data(fname):
    """Read a bioinfo3.table.dataset style file and return an ExpressionSet.

    The file starts with a header of '#%'-prefixed lines (e.g.
    '#%groups: name = memberA,memberB'), possibly interleaved with blank
    lines; the first non-header, non-blank line begins the data table.

    fname: path to the dataset file.
    """
    groups = OrderedDict()  # group name -> list of member sample names
    cnt = 0  # lines consumed up to and including the first data line
    # Use a context manager so the handle is always closed (the previous
    # version opened the file and never closed it).
    with open(fname) as fobj:
        for line in fobj:
            cnt += 1
            if line[:2] == '#%':
                if line.startswith('#%groups:'):
                    gname, members = line[len('#%groups:'):].split('=')
                    groups[gname.strip()] = members.strip().split(',')
                # NOTE: the old "elif line.startswith('#%fields')" branch was
                # unreachable (such lines already match line[:2]=='#%'), and
                # the 'fields'/'datafields' values it produced were never
                # used, so that dead code has been removed.
            elif not line.strip():
                continue  # blank line inside the header
            else:
                break  # first data line reached; header scan done
    # Re-read the file as a table, skipping the header lines counted above.
    df = PD.read_table(fname, skiprows=cnt - 1)
    # Invert groups into a member -> group lookup; ungrouped columns get ''.
    f2g = {}
    for g, m in groups.items():
        for f in m:
            f2g[f] = g
    df.columns = PD.MultiIndex.from_tuples(
        [(x, f2g.get(x, '')) for x in df.columns],
        names=['samplename', 'group'])
    return ExpressionSet(df)
def read_multiindex_data(fname, tupleize=True, index_names=('samplename', 'group')):
    """Read a dataset table whose header encodes a MultiIndex.

    fname: path to the tab-separated table.
    tupleize: if True (default), header cells are Python tuple literals
        parsed with ast.literal_eval; if False, the file carries one
        physical header row per index level and pandas builds the
        MultiIndex itself.
    index_names: names for the MultiIndex levels.
    """
    # Default changed from a mutable list to a tuple: a shared mutable
    # default argument is a classic Python pitfall. Callers that pass a
    # list still work, since the value is only read.
    if not tupleize:
        # NOTE(review): tupleize_cols was removed in pandas 1.0 — this
        # branch only works on the old pandas this module targets.
        df = PD.read_table(fname, header=list(range(len(index_names))),
                           index_col=[0], tupleize_cols=False)
        return ExpressionSet(df)
    df = PD.read_table(fname, index_col=0)
    df.columns = PD.MultiIndex.from_tuples(
        df.columns.map(literal_eval).tolist(), names=list(index_names))
    return ExpressionSet(df)
def read_grouped_table(fname, groupfn=lambda x: '_'.join(x.split('_')[:-1])):
    """Read a dataset whose group is encoded in each column name.

    ``groupfn`` maps a column name to its group; the default keeps
    everything before the last '_'-separated token.
    NOTE(review): the original docstring claimed "Column 0 is index",
    but no index_col is passed to read_table — verify intent.
    """
    df = PD.read_table(fname)
    labeled_columns = [(col, groupfn(col)) for col in df.columns]
    df.columns = PD.MultiIndex.from_tuples(labeled_columns,
                                           names=['samplename', 'group'])
    return ExpressionSet(df)
def concatenate(dic):
    """Outer-join all DataFrames in ``dic`` on their indexes.

    dic: dict mapping a name to a DataFrame. Overlapping column names are
    disambiguated with '.<name>' suffixes; as in the original code, only
    the first two frames each get an explicit suffix — later frames use
    ('', '.<name>').

    Returns the merged DataFrame. Raises ValueError for an empty dict and
    returns the single frame unchanged for a one-entry dict (the previous
    version crashed with IndexError in both cases).
    """
    keys = list(dic)
    if not keys:
        raise ValueError('concatenate() requires at least one DataFrame')
    if len(keys) == 1:
        return dic[keys[0]]
    d = dic[keys[0]].merge(dic[keys[1]], left_index=True, right_index=True,
                           how='outer', suffixes=('.' + keys[0], '.' + keys[1]))
    for k in keys[2:]:
        d = d.merge(dic[k], left_index=True, right_index=True,
                    how='outer', suffixes=('', '.' + k))
    return d
| 35.442623 | 124 | 0.612165 |
82b4601eafecafbb6f782f6379a8c342a3e18c6c | 8,377 | py | Python | tests/test_sql.py | YPlan/django-perf-rec | e4face96502fda64c198e6e9951da91b0857eeec | [
"MIT"
] | 148 | 2016-09-19T13:53:34.000Z | 2018-06-27T11:48:00.000Z | tests/test_sql.py | YPlan/django-perf-rec | e4face96502fda64c198e6e9951da91b0857eeec | [
"MIT"
] | 36 | 2016-09-19T14:19:05.000Z | 2018-07-12T16:33:12.000Z | tests/test_sql.py | YPlan/django-perf-rec | e4face96502fda64c198e6e9951da91b0857eeec | [
"MIT"
] | 8 | 2016-09-29T12:13:07.000Z | 2018-07-11T07:53:33.000Z | from __future__ import annotations
from django_perf_rec.sql import sql_fingerprint
| 26.178125 | 88 | 0.549481 |
82b549e4607fd2be9e74cf5b94bf6e0c4162ac8a | 1,198 | py | Python | src/user_auth_api/serializers.py | Adstefnum/mockexams | af5681b034334be9c5aaf807161ca80a8a1b9948 | [
"BSD-3-Clause"
] | null | null | null | src/user_auth_api/serializers.py | Adstefnum/mockexams | af5681b034334be9c5aaf807161ca80a8a1b9948 | [
"BSD-3-Clause"
] | null | null | null | src/user_auth_api/serializers.py | Adstefnum/mockexams | af5681b034334be9c5aaf807161ca80a8a1b9948 | [
"BSD-3-Clause"
] | null | null | null | from rest_framework import serializers
from user_auth_api.models import User
# User Serializer
# Register Serializer | 22.603774 | 73 | 0.576795 |
82b57b3ca054137769bfb034aa43dd12bdcde046 | 9,653 | py | Python | cenv_script/cenv_script.py | technic/cenv_script | 6c3a9047faec4723f61ad5795f0d8019c0de03ec | [
"MIT"
] | null | null | null | cenv_script/cenv_script.py | technic/cenv_script | 6c3a9047faec4723f61ad5795f0d8019c0de03ec | [
"MIT"
] | null | null | null | cenv_script/cenv_script.py | technic/cenv_script | 6c3a9047faec4723f61ad5795f0d8019c0de03ec | [
"MIT"
] | null | null | null | """Main module."""
import json
import os
import re
import shutil
import subprocess
import sys
from pathlib import Path
from typing import List, Optional
import yaml
ENV_FILE = "environment.yml"
| 33.171821 | 115 | 0.574536 |
82b593a5d04b8635ad9d0bfca619ad7a94f582c9 | 2,671 | py | Python | cv_utils/cv_util_node.py | OAkyildiz/cibr_img_processing | 69f3293db80e9c0ae57369eaf2885b94adb330df | [
"MIT"
] | null | null | null | cv_utils/cv_util_node.py | OAkyildiz/cibr_img_processing | 69f3293db80e9c0ae57369eaf2885b94adb330df | [
"MIT"
] | null | null | null | cv_utils/cv_util_node.py | OAkyildiz/cibr_img_processing | 69f3293db80e9c0ae57369eaf2885b94adb330df | [
"MIT"
] | null | null | null | import sys
import rospy
import types
#from std_msgs.msg import String
from sensor_msgs.msg import Image
from cibr_img_processing.msg import Ints
from cv_bridge import CvBridge, CvBridgeError
#make int msgs
#TODO: get the img size from camera_indo topics
| 33.810127 | 98 | 0.622613 |
82b8f3579fbf367d54a1259558d837656079d6f8 | 448 | py | Python | pokepay/request/get_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | [
"MIT"
] | null | null | null | pokepay/request/get_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | [
"MIT"
] | null | null | null | pokepay/request/get_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | [
"MIT"
] | 1 | 2022-01-28T03:00:12.000Z | 2022-01-28T03:00:12.000Z | # DO NOT EDIT: File is generated by code generator.
from pokepay_partner_python_sdk.pokepay.request.request import PokepayRequest
from pokepay_partner_python_sdk.pokepay.response.shop_with_accounts import ShopWithAccounts
| 32 | 91 | 0.725446 |
82b9e4c2e702d4c81505c6425db3c75c45108c10 | 2,191 | py | Python | clearml/backend_interface/setupuploadmixin.py | arielleoren/clearml | 01f0be9895272c483129bab784a43cbd002022a7 | [
"Apache-2.0"
] | 2,097 | 2019-06-11T14:36:25.000Z | 2020-12-21T03:52:59.000Z | clearml/backend_interface/setupuploadmixin.py | arielleoren/clearml | 01f0be9895272c483129bab784a43cbd002022a7 | [
"Apache-2.0"
] | 347 | 2020-12-23T22:38:48.000Z | 2022-03-31T20:01:06.000Z | clearml/backend_interface/setupuploadmixin.py | arielleoren/clearml | 01f0be9895272c483129bab784a43cbd002022a7 | [
"Apache-2.0"
] | 256 | 2019-06-11T14:36:28.000Z | 2020-12-18T08:32:47.000Z | from abc import abstractproperty
from ..backend_config.bucket_config import S3BucketConfig
from ..storage.helper import StorageHelper
| 45.645833 | 167 | 0.665906 |
82ba0e0fc40394fedf62fac1ec2c951372c86121 | 2,872 | py | Python | tests/test_parser.py | szymon6927/parcels-parser | c2cee7a75edfbb0abba0fc4ea99c7a84e24e3749 | [
"MIT"
] | null | null | null | tests/test_parser.py | szymon6927/parcels-parser | c2cee7a75edfbb0abba0fc4ea99c7a84e24e3749 | [
"MIT"
] | null | null | null | tests/test_parser.py | szymon6927/parcels-parser | c2cee7a75edfbb0abba0fc4ea99c7a84e24e3749 | [
"MIT"
] | null | null | null | import os
import unittest
import pandas as pd
from application.ParcelsParser import ParcelsParser
if __name__ == '__main__':
unittest.main()
| 37.789474 | 99 | 0.683496 |
82badbb757028140899a1d3ea355a9a115e4d31b | 726 | py | Python | dataStructures/complete.py | KarlParkinson/practice | 6bbbd4a8e320732523d83297c1021f52601a20d8 | [
"MIT"
] | null | null | null | dataStructures/complete.py | KarlParkinson/practice | 6bbbd4a8e320732523d83297c1021f52601a20d8 | [
"MIT"
] | null | null | null | dataStructures/complete.py | KarlParkinson/practice | 6bbbd4a8e320732523d83297c1021f52601a20d8 | [
"MIT"
] | null | null | null | import binTree
import queue
t = binTree.BinaryTree(1)
t.insertLeft(2)
t.insertRight(3)
t.getRightChild().insertLeft(5)
t.getRightChild().insertRight(6)
print complete(t)
| 21.352941 | 40 | 0.541322 |
82bbb29af0b1433647177912df15449203606a08 | 3,322 | py | Python | sd_maskrcnn/sd_maskrcnn/gop/src/eval_bnd.py | marctuscher/cv_pipeline | b641423e72ea292139a5e35a411e30c1e21c7070 | [
"MIT"
] | 1 | 2021-03-28T17:46:45.000Z | 2021-03-28T17:46:45.000Z | sd-maskrcnn/sd_maskrcnn/gop/src/eval_bnd.py | jayef0/cv_pipeline | dc3b79062174f583a3a90ac8deea918c498c0dd5 | [
"MIT"
] | null | null | null | sd-maskrcnn/sd_maskrcnn/gop/src/eval_bnd.py | jayef0/cv_pipeline | dc3b79062174f583a3a90ac8deea918c498c0dd5 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8
"""
Copyright (c) 2014, Philipp Krhenbhl
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Stanford University nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY Philipp Krhenbhl ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL Philipp Krhenbhl BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from .gop import *
import numpy as np
from .util import *
# When True, results are printed as LaTeX table rows; otherwise as labelled tuples.
LATEX_OUTPUT=True
# Evaluate baseline GOP proposals once per boundary-detector id.
# NOTE(review): 'st','sf','mssf','ds' are passed straight to loadVOCAndOverSeg
# (star-imported above) — confirm their meaning against the gop bindings.
for bnd in ['st','sf','mssf','ds']:
    # Load the dataset
    over_segs,segmentations,boxes = loadVOCAndOverSeg( "test", detector=bnd, year="2012" )
    # NOTE(review): has_box is computed but never used below.
    has_box = [len(b)>0 for b in boxes]
    # Stack each image's boxes into an (N,4) int32 array; images with no boxes get (0,4).
    boxes = [np.vstack(b).astype(np.int32) if len(b)>0 else np.zeros((0,4),dtype=np.int32) for b in boxes]
    # Generate the proposals
    # Each setting is (N_S, N_T, iou); comments give the approximate pool size.
    s = []
    s.append( (50,5,0.7) ) # ~250 props
    s.append( (100,5,0.75) ) # ~450 props
    s.append( (180,5,0.8) ) # ~650 props
    s.append( (200,7,0.85) ) # ~1100 props
    s.append( (250,10,0.9) ) # ~2200 props
    s.append( (290,20,0.9) ) # ~4400 props
    for N_S,N_T,iou in s:
        prop_settings = setupBaseline( N_S, N_T, iou )
        # bo[:,0] is the best overlap per ground-truth segment (see the ABO/recall
        # prints below); bo[:,1] is presumably a per-segment weight — TODO confirm.
        bo,b_bo,pool_s,box_pool_s = dataset.proposeAndEvaluate( over_segs, segmentations, boxes, proposals.Proposal( prop_settings ) )
        if LATEX_OUTPUT:
            print(( "Baseline %s ($%d$,$%d$) & %d & %0.3f & %0.3f & %0.3f & %0.3f & \\\\"%(bnd, N_S,N_T,np.mean(pool_s),np.mean(bo[:,0]),np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]), np.mean(bo[:,0]>=0.5), np.mean(bo[:,0]>=0.7) ) ))
        else:
            # The doubled parentheses make print emit a tuple (py2->py3 conversion artifact).
            print(( "ABO ", np.mean(bo[:,0]) ))
            print(( "cover ", np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]) ))
            print(( "recall ", np.mean(bo[:,0]>=0.5), "\t", np.mean(bo[:,0]>=0.6), "\t", np.mean(bo[:,0]>=0.7), "\t", np.mean(bo[:,0]>=0.8), "\t", np.mean(bo[:,0]>=0.9), "\t", np.mean(bo[:,0]>=1) ))
            print(( "# props ", np.mean(pool_s) ))
            print(( "box ABO ", np.mean(b_bo) ))
            print(( "box recall ", np.mean(b_bo>=0.5), "\t", np.mean(b_bo>=0.6), "\t", np.mean(b_bo>=0.7), "\t", np.mean(b_bo>=0.8), "\t", np.mean(b_bo>=0.9), "\t", np.mean(b_bo>=1) ))
            print(( "# box ", np.mean(box_pool_s[~np.isnan(box_pool_s)]) ))
| 53.580645 | 219 | 0.654425 |
82bc8b7d1c31f1a7b50154e6eb1646fd9530ca29 | 1,473 | py | Python | ctr_prediction/datasets/Amazon/AmazonElectronics_x1/convert_amazonelectronics_x1.py | jimzhu/OpenCTR-benchmarks | e8e723cd7a0ef5ddd40e735b85ce7669955a3a99 | [
"Apache-2.0"
] | 59 | 2021-10-31T13:59:37.000Z | 2022-03-31T12:05:55.000Z | ctr_prediction/datasets/Amazon/AmazonElectronics_x1/convert_amazonelectronics_x1.py | jimzhu/OpenCTR-benchmarks | e8e723cd7a0ef5ddd40e735b85ce7669955a3a99 | [
"Apache-2.0"
] | 5 | 2021-12-06T12:11:21.000Z | 2022-03-18T06:21:13.000Z | ctr_prediction/datasets/Amazon/AmazonElectronics_x1/convert_amazonelectronics_x1.py | jimzhu/OpenCTR-benchmarks | e8e723cd7a0ef5ddd40e735b85ce7669955a3a99 | [
"Apache-2.0"
] | 17 | 2021-10-21T10:44:09.000Z | 2022-03-24T11:35:09.000Z | import pickle
import pandas as pd
# cat aa ab ac > dataset.pkl from https://github.com/zhougr1993/DeepInterestNetwork
with open('dataset.pkl', 'rb') as f:
    train_set = pickle.load(f, encoding='bytes')
    test_set = pickle.load(f, encoding='bytes')
    cate_list = pickle.load(f, encoding='bytes')
    user_count, item_count, cate_count = pickle.load(f, encoding='bytes')

# Output schema shared by both splits.
COLUMNS = ['label', 'user_id', 'item_id', 'cate_id', 'item_history', 'cate_history']


def _history(ids):
    # Encode an id sequence in the caret-separated "a^b^c" format.
    return "^".join(str(i) for i in ids)


# Training split: each sample is (user, history, item, label).
train_rows = []
for sample in train_set:
    hist = sample[1]
    item = sample[2]
    train_rows.append([sample[3], sample[0], item, cate_list[item],
                       _history(hist), _history(cate_list[hist])])
train_df = pd.DataFrame(train_rows, columns=COLUMNS)
train_df.to_csv("train.csv", index=False)

# Test split: each sample carries a (positive, negative) item pair that is
# expanded into one label=1 row and one label=0 row.
test_rows = []
for sample in test_set:
    hist = sample[1]
    pos, neg = sample[2][0], sample[2][1]
    hist_str = _history(hist)
    cate_hist_str = _history(cate_list[hist])
    test_rows.append([1, sample[0], pos, cate_list[pos], hist_str, cate_hist_str])
    test_rows.append([0, sample[0], neg, cate_list[neg], hist_str, cate_hist_str])
test_df = pd.DataFrame(test_rows, columns=COLUMNS)
test_df.to_csv("test.csv", index=False)
| 42.085714 | 119 | 0.692464 |
82bea645f31e2de3666e262ad0a20085ef770deb | 656 | py | Python | email_extras/admin.py | maqmigh/django-email-extras | c991b59fa53f9a5324ea7d9f3cc65bc1a9aa8e42 | [
"BSD-2-Clause"
] | 33 | 2015-03-17T12:08:05.000Z | 2021-12-17T23:06:26.000Z | email_extras/admin.py | maqmigh/django-email-extras | c991b59fa53f9a5324ea7d9f3cc65bc1a9aa8e42 | [
"BSD-2-Clause"
] | 26 | 2015-10-09T01:01:00.000Z | 2021-02-09T11:11:52.000Z | email_extras/admin.py | maqmigh/django-email-extras | c991b59fa53f9a5324ea7d9f3cc65bc1a9aa8e42 | [
"BSD-2-Clause"
] | 29 | 2015-02-25T07:51:12.000Z | 2022-02-27T07:05:40.000Z |
from email_extras.settings import USE_GNUPG
if USE_GNUPG:
from django.contrib import admin
from email_extras.models import Key, Address
from email_extras.forms import KeyForm
admin.site.register(Key, KeyAdmin)
admin.site.register(Address, AddressAdmin)
| 26.24 | 54 | 0.652439 |
82c010e02b691e4b2aad5f24f459cf89f58d643c | 6,265 | py | Python | Tableau-Supported/Python/insert_data_with_expressions.py | TableauKyle/hyper-api-samples | 37c21c988122c6dbfb662d9ec72d90c4cd30e4cc | [
"MIT"
] | 73 | 2020-04-29T15:41:55.000Z | 2022-03-12T04:55:24.000Z | Tableau-Supported/Python/insert_data_with_expressions.py | TableauKyle/hyper-api-samples | 37c21c988122c6dbfb662d9ec72d90c4cd30e4cc | [
"MIT"
] | 32 | 2020-06-10T00:47:20.000Z | 2022-03-28T11:19:00.000Z | Tableau-Supported/Python/insert_data_with_expressions.py | TableauKyle/hyper-api-samples | 37c21c988122c6dbfb662d9ec72d90c4cd30e4cc | [
"MIT"
] | 54 | 2020-05-01T20:01:51.000Z | 2022-03-28T11:11:00.000Z | # -----------------------------------------------------------------------------
#
# This file is the copyrighted property of Tableau Software and is protected
# by registered patents and other applicable U.S. and international laws and
# regulations.
#
# You may adapt this file and modify it to fit into your context and use it
# as a template to start your own projects.
#
# -----------------------------------------------------------------------------
import shutil
from pathlib import Path
from tableauhyperapi import HyperProcess, Telemetry, \
Connection, CreateMode, \
NOT_NULLABLE, NULLABLE, SqlType, TableDefinition, \
Inserter, \
escape_name, escape_string_literal, \
TableName, Name, \
HyperException
# The table is called "Extract" and will be created in the "Extract" schema.
# This has historically been the default table name and schema for extracts created by Tableau
# All four columns are NOT NULL; 'Ship Timestamp' (timestamp) and
# 'Ship Priority' (int) are computed from text inputs at insert time
# via the column mappings in run_insert_data_with_expressions below.
extract_table = TableDefinition(
    table_name=TableName("Extract", "Extract"),
    columns=[
        TableDefinition.Column(name='Order ID', type=SqlType.int(), nullability=NOT_NULLABLE),
        TableDefinition.Column(name='Ship Timestamp', type=SqlType.timestamp(), nullability=NOT_NULLABLE),
        TableDefinition.Column(name='Ship Mode', type=SqlType.text(), nullability=NOT_NULLABLE),
        TableDefinition.Column(name='Ship Priority', type=SqlType.int(), nullability=NOT_NULLABLE)
    ]
)
def run_insert_data_with_expressions():
    """
    An example of how to push down computations to Hyper during insertion with expressions.
    """
    print("EXAMPLE - Push down computations to Hyper during insertion with expressions")
    path_to_database = Path("orders.hyper")

    # Starts the Hyper Process with telemetry enabled to send data to Tableau.
    # To opt out, simply set telemetry=Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU.
    with HyperProcess(telemetry=Telemetry.SEND_USAGE_DATA_TO_TABLEAU) as hyper:

        # Creates new Hyper file "orders.hyper".
        # Replaces file with CreateMode.CREATE_AND_REPLACE if it already exists.
        with Connection(endpoint=hyper.endpoint,
                        database=path_to_database,
                        create_mode=CreateMode.CREATE_AND_REPLACE) as connection:

            # Create the target schema and table (definitions come from the
            # module-level extract_table above).
            connection.catalog.create_schema(schema=extract_table.table_name.schema_name)
            connection.catalog.create_table(table_definition=extract_table)

            # Hyper API's Inserter allows users to transform data during insertion.
            # To make use of data transformation during insertion, the inserter requires the following inputs
            #   1. The connection to the Hyper instance containing the table.
            #   2. The table name or table definition into which data is inserted.
            #   3. List of Inserter.ColumnMapping.
            #       This list informs the inserter how each column in the target table is transformed.
            #       The list must contain all the columns into which data is inserted.
            #       "Inserter.ColumnMapping" maps a valid SQL expression (if any) to a column in the target table.
            #       For example Inserter.ColumnMapping('target_column_name', f'{escape_name("colA")}*{escape_name("colB")}')
            #       The column "target_column" contains the product of "colA" and "colB" after successful insertion.
            #       SQL expression string is optional in Inserter.ColumnMapping.
            #       For a column without any transformation only the column name is required.
            #       For example Inserter.ColumnMapping('no_data_transformation_column')
            #   4. The Column Definition of all input values provided to the Inserter.
            # Inserter definition contains the column definition for the values that are inserted
            # (the *input* shape: timestamp and priority arrive as text here).
            inserter_definition = [
                TableDefinition.Column(name='Order ID', type=SqlType.int(), nullability=NOT_NULLABLE),
                TableDefinition.Column(name='Ship Timestamp Text', type=SqlType.text(), nullability=NOT_NULLABLE),
                TableDefinition.Column(name='Ship Mode', type=SqlType.text(), nullability=NOT_NULLABLE),
                TableDefinition.Column(name='Ship Priority Text', type=SqlType.text(), nullability=NOT_NULLABLE)]

            # Column 'Order ID' is inserted into "Extract"."Extract" as-is.
            # Column 'Ship Timestamp' in "Extract"."Extract" of timestamp type is computed from Column 'Ship Timestamp Text' of text type using 'to_timestamp()'.
            # Column 'Ship Mode' is inserted into "Extract"."Extract" as-is.
            # Column 'Ship Priority' in "Extract"."Extract" of integer type is computed from Column 'Ship Priority Text' of text type using a 'CASE' statement.
            # escape_name/escape_string_literal quote identifiers and literals so
            # the f-string composes valid, injection-safe Hyper SQL.
            shipPriorityAsIntCaseExpression = f'CASE {escape_name("Ship Priority Text")} ' \
                                              f'WHEN {escape_string_literal("Urgent")} THEN 1 ' \
                                              f'WHEN {escape_string_literal("Medium")} THEN 2 ' \
                                              f'WHEN {escape_string_literal("Low")} THEN 3 END'

            column_mappings = [
                'Order ID',
                Inserter.ColumnMapping(
                    'Ship Timestamp', f'to_timestamp({escape_name("Ship Timestamp Text")}, {escape_string_literal("YYYY-MM-DD HH24:MI:SS")})'),
                'Ship Mode',
                Inserter.ColumnMapping('Ship Priority', shipPriorityAsIntCaseExpression)
            ]

            # Data to be inserted (rows follow inserter_definition's column order).
            data_to_insert = [
                [399, '2012-09-13 10:00:00', 'Express Class', 'Urgent'],
                [530, '2012-07-12 14:00:00', 'Standard Class', 'Low']
            ]

            # Insert data into "Extract"."Extract" table with expressions.
            with Inserter(connection, extract_table, column_mappings, inserter_definition=inserter_definition) as inserter:
                inserter.add_rows(rows=data_to_insert)
                inserter.execute()

            print("The data was added to the table.")
        print("The connection to the Hyper file has been closed.")
    print("The Hyper process has been shut down.")
if __name__ == '__main__':
    try:
        run_insert_data_with_expressions()
    except HyperException as ex:
        # Surface the Hyper API error and signal failure to the shell.
        print(ex)
        exit(1)
| 53.547009 | 160 | 0.653312 |
82c029ca3481da78e9c1db45150fc5d81b30aeac | 2,234 | py | Python | dumplogs/bin.py | xinhuagu/dumplogs | 5580ff5fe4b054ab9a007e1a023b01fa71917f80 | [
"BSD-3-Clause"
] | 1 | 2021-05-02T11:51:45.000Z | 2021-05-02T11:51:45.000Z | dumplogs/bin.py | xinhuagu/dumplogs | 5580ff5fe4b054ab9a007e1a023b01fa71917f80 | [
"BSD-3-Clause"
] | null | null | null | dumplogs/bin.py | xinhuagu/dumplogs | 5580ff5fe4b054ab9a007e1a023b01fa71917f80 | [
"BSD-3-Clause"
] | null | null | null | import boto3
import argparse
import os,sys
| 29.394737 | 87 | 0.581021 |
82c2685c2ffd7e5c7861dd6a5e7721b4f4a54e32 | 5,239 | py | Python | ch5/gaussian_mixture.py | susantamoh84/HandsOn-Unsupervised-Learning-with-Python | 056953d0462923a674faf0a23b27239bc9f69975 | [
"MIT"
] | 25 | 2018-09-03T11:12:49.000Z | 2022-03-13T01:42:57.000Z | Chapter05/gaussian_mixture.py | AIRob/HandsOn-Unsupervised-Learning-with-Python | 1dbe9b3fdf5255f610e0c9c52a82935baa6a4a3e | [
"MIT"
] | null | null | null | Chapter05/gaussian_mixture.py | AIRob/HandsOn-Unsupervised-Learning-with-Python | 1dbe9b3fdf5255f610e0c9c52a82935baa6a4a3e | [
"MIT"
] | 35 | 2018-09-15T11:06:12.000Z | 2021-12-08T04:28:55.000Z | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.datasets import make_blobs
from sklearn.mixture import GaussianMixture
from sklearn.cluster import KMeans
from matplotlib.patches import Ellipse
# For reproducibility
np.random.seed(1000)
nb_samples = 300
nb_centers = 2
if __name__ == '__main__':
    # Create the dataset
    X, Y = make_blobs(n_samples=nb_samples, n_features=2, center_box=[-1, 1], centers=nb_centers,
                      cluster_std=[1.0, 0.6], random_state=1000)

    # Show the dataset
    sns.set()

    fig, ax = plt.subplots(figsize=(15, 9))

    ax.scatter(X[:, 0], X[:, 1], s=120)
    ax.set_xlabel(r'$x_0$', fontsize=14)
    ax.set_ylabel(r'$x_1$', fontsize=14)

    plt.show()

    # Train the model
    gm = GaussianMixture(n_components=2, random_state=1000)
    gm.fit(X)
    # NOTE(review): fit() followed by fit_predict() trains the model twice;
    # fit_predict alone would suffice.
    Y_pred = gm.fit_predict(X)

    print('Means: \n{}'.format(gm.means_))
    print('Covariance matrices: \n{}'.format(gm.covariances_))
    print('Weights: \n{}'.format(gm.weights_))

    m1 = gm.means_[0]
    m2 = gm.means_[1]

    c1 = gm.covariances_[0]
    c2 = gm.covariances_[1]

    # NOTE(review): we1/we2 are computed but never used below.
    we1 = 1 + gm.weights_[0]
    we2 = 1 + gm.weights_[1]

    # Eigendecompose the covariances
    w1, v1 = np.linalg.eigh(c1)
    w2, v2 = np.linalg.eigh(c2)

    nv1 = v1 / np.linalg.norm(v1)
    nv2 = v2 / np.linalg.norm(v2)

    print('Eigenvalues 1: \n{}'.format(w1))
    print('Eigenvectors 1: \n{}'.format(nv1))

    print('Eigenvalues 2: \n{}'.format(w2))
    print('Eigenvectors 2: \n{}'.format(nv2))

    # Angle (degrees) of each principal eigenvector relative to the x-axis,
    # used to orient the ellipses below.
    a1 = np.arccos(np.dot(nv1[:, 1], [1.0, 0.0]) / np.linalg.norm(nv1[:, 1])) * 180.0 / np.pi
    a2 = np.arccos(np.dot(nv2[:, 1], [1.0, 0.0]) / np.linalg.norm(nv2[:, 1])) * 180.0 / np.pi

    # Perform K-Means clustering
    km = KMeans(n_clusters=2, random_state=1000)
    km.fit(X)
    Y_pred_km = km.predict(X)

    # Show the comparison of the results
    fig, ax = plt.subplots(1, 2, figsize=(22, 9), sharey=True)

    ax[0].scatter(X[Y_pred == 0, 0], X[Y_pred == 0, 1], s=80, marker='o', label='Gaussian 1')
    ax[0].scatter(X[Y_pred == 1, 0], X[Y_pred == 1, 1], s=80, marker='d', label='Gaussian 2')

    # Dashed ellipses at ~3x / 2x / 1.4x the eigenvalue scale sketch each
    # Gaussian component's spread around its mean.
    g1 = Ellipse(xy=m1, width=w1[1] * 3, height=w1[0] * 3, fill=False, linestyle='dashed', angle=a1, color='black',
                 linewidth=1)
    g1_1 = Ellipse(xy=m1, width=w1[1] * 2, height=w1[0] * 2, fill=False, linestyle='dashed', angle=a1, color='black',
                   linewidth=2)
    g1_2 = Ellipse(xy=m1, width=w1[1] * 1.4, height=w1[0] * 1.4, fill=False, linestyle='dashed', angle=a1,
                   color='black', linewidth=3)

    g2 = Ellipse(xy=m2, width=w2[1] * 3, height=w2[0] * 3, fill=False, linestyle='dashed', angle=a2, color='black',
                 linewidth=1)
    g2_1 = Ellipse(xy=m2, width=w2[1] * 2, height=w2[0] * 2, fill=False, linestyle='dashed', angle=a2, color='black',
                   linewidth=2)
    g2_2 = Ellipse(xy=m2, width=w2[1] * 1.4, height=w2[0] * 1.4, fill=False, linestyle='dashed', angle=a2,
                   color='black', linewidth=3)

    ax[0].set_xlabel(r'$x_0$', fontsize=16)
    ax[0].set_ylabel(r'$x_1$', fontsize=16)

    ax[0].add_artist(g1)
    ax[0].add_artist(g1_1)
    ax[0].add_artist(g1_2)
    ax[0].add_artist(g2)
    ax[0].add_artist(g2_1)
    ax[0].add_artist(g2_2)

    ax[0].set_title('Gaussian Mixture', fontsize=16)

    ax[0].legend(fontsize=16)

    ax[1].scatter(X[Y_pred_km == 0, 0], X[Y_pred_km == 0, 1], s=80, marker='o', label='Cluster 1')
    ax[1].scatter(X[Y_pred_km == 1, 0], X[Y_pred_km == 1, 1], s=80, marker='d', label='Cluster 2')

    ax[1].set_xlabel(r'$x_0$', fontsize=16)

    ax[1].set_title('K-Means', fontsize=16)

    ax[1].legend(fontsize=16)

    # Predict the probability of some sample points
    print('P([0, -2]=G1) = {:.3f} and P([0, -2]=G2) = {:.3f}'.format(*list(gm.predict_proba([[0.0, -2.0]]).squeeze())))
    print('P([1, -1]=G1) = {:.3f} and P([1, -1]=G2) = {:.3f}'.format(*list(gm.predict_proba([[1.0, -1.0]]).squeeze())))
    print('P([1, 0]=G1) = {:.3f} and P([1, 0]=G2) = {:.3f}'.format(*list(gm.predict_proba([[1.0, 0.0]]).squeeze())))

    plt.show()

    # Compute AICs, BICs, and log-likelihood
    # (sweep the number of components to pick a model size).
    n_max_components = 20

    aics = []
    bics = []
    log_likelihoods = []

    for n in range(1, n_max_components + 1):
        gm = GaussianMixture(n_components=n, random_state=1000)
        gm.fit(X)
        aics.append(gm.aic(X))
        bics.append(gm.bic(X))
        # score() returns mean per-sample log-likelihood; scale back to a total.
        log_likelihoods.append(gm.score(X) * nb_samples)

    # Show the results
    fig, ax = plt.subplots(1, 3, figsize=(20, 6))

    ax[0].plot(range(1, n_max_components + 1), aics)
    ax[0].set_xticks(range(1, n_max_components + 1))
    ax[0].set_xlabel('Number of Gaussians', fontsize=14)
    ax[0].set_title('AIC', fontsize=14)

    ax[1].plot(range(1, n_max_components + 1), bics)
    ax[1].set_xticks(range(1, n_max_components + 1))
    ax[1].set_xlabel('Number of Gaussians', fontsize=14)
    ax[1].set_title('BIC', fontsize=14)

    ax[2].plot(range(1, n_max_components + 1), log_likelihoods)
    ax[2].set_xticks(range(1, n_max_components + 1))
    ax[2].set_xlabel('Number of Gaussians', fontsize=14)
    ax[2].set_title('Log-likelihood', fontsize=14)

    plt.show()
| 32.339506 | 119 | 0.604314 |
82c29ca8b328d9cb75ca5d391549720bbf654d8a | 5,771 | py | Python | shipyard2/shipyard2/rules/images/merge_image.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | 3 | 2016-01-04T06:28:52.000Z | 2020-09-20T13:18:40.000Z | shipyard2/shipyard2/rules/images/merge_image.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | null | null | null | shipyard2/shipyard2/rules/images/merge_image.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
] | null | null | null | __all__ = [
'DEFAULT_FILTERS',
'DEFAULT_XAR_FILTERS',
'merge_image',
]
import contextlib
import logging
import tempfile
from pathlib import Path
from g1 import scripts
from g1.containers import models
from g1.containers import scripts as ctr_scripts
from . import utils
LOG = logging.getLogger(__name__)
# Ordered (action, pattern) pairs consumed by the image-merge step.
# NOTE(review): precedence semantics (e.g. whether the first matching rule
# wins) live in the consumer of these filters, not visible here — confirm
# before reordering entries.
DEFAULT_FILTERS = (
    # Do not leak any source codes to the application image.
    # Keep drydock path in sync with //bases:build.
    ('exclude', '/home/plumber/drydock'),
    ('exclude', '/home/plumber/.gradle'),
    ('exclude', '/home/plumber/.gsutil'),
    ('exclude', '/home/plumber/.python_history'),
    ('exclude', '/home/plumber/.vpython_cipd_cache'),
    ('exclude', '/home/plumber/.vpython-root'),
    ('exclude', '/home/plumber/.wget-hsts'),
    ('exclude', '/root/.cache'),
    ('exclude', '/usr/src'),
    # Include only relevant files under /etc.
    ('include', '/etc/'),
    # We use distro java at the moment.
    ('include', '/etc/alternatives/'),
    ('include', '/etc/alternatives/java'),
    ('include', '/etc/java*'),
    ('include', '/etc/java*/**'),
    ('include', '/etc/group'),
    ('include', '/etc/group-'),
    ('include', '/etc/gshadow'),
    ('include', '/etc/gshadow-'),
    ('include', '/etc/inputrc'),
    ('include', '/etc/ld.so.cache'),
    ('include', '/etc/passwd'),
    ('include', '/etc/passwd-'),
    ('include', '/etc/shadow'),
    ('include', '/etc/shadow-'),
    ('include', '/etc/ssl/'),
    ('include', '/etc/ssl/**'),
    ('include', '/etc/subgid'),
    ('include', '/etc/subgid-'),
    ('include', '/etc/subuid'),
    ('include', '/etc/subuid-'),
    ('include', '/etc/sudoers.d/'),
    ('include', '/etc/sudoers.d/**'),
    # Everything else under /etc is dropped.
    ('exclude', '/etc/**'),
    # Exclude distro binaries from application image (note that base
    # image includes a base set of distro binaries).
    ('exclude', '/bin'),
    ('exclude', '/sbin'),
    # We use distro java at the moment.
    ('include', '/usr/bin/'),
    ('include', '/usr/bin/java'),
    ('exclude', '/usr/bin/**'),
    ('exclude', '/usr/bin'),
    ('exclude', '/usr/sbin'),
    # Exclude headers.
    ('exclude', '/usr/include'),
    ('exclude', '/usr/local/include'),
    # Exclude distro systemd files.
    ('exclude', '/lib/systemd'),
    ('exclude', '/usr/lib/systemd'),
    # In general, don't exclude distro libraries since we might depend
    # on them, except these libraries.
    ('exclude', '/usr/lib/apt'),
    ('exclude', '/usr/lib/gcc'),
    ('exclude', '/usr/lib/git-core'),
    ('exclude', '/usr/lib/python*'),
    ('exclude', '/usr/lib/**/*perl*'),
    # Exclude these to save more space.
    ('exclude', '/usr/share/**'),
    ('exclude', '/var/**'),
)
# For XAR images, we only include a few selected directories, and
# exclude everything else.
#
# To support Python, we include our code under /usr/local in the XAR
# image (like our pod image). An alternative is to use venv to install
# our codebase, but this seems to be too much effort; so we do not take
# this approach for now.
#
# We explicitly remove CPython binaries from /usr/local/bin so that the
# `env` command will not (and should not) resolve to them.
#
# We do not include /usr/bin/java (symlink to /etc/alternatives) for
# now. If you want to use Java, you have to directly invoke it under
# /usr/lib/jvm/...
# XAR images keep only a whitelist of directories (see the rationale in the
# comment block above); the trailing ('exclude', '**') drops everything else.
DEFAULT_XAR_FILTERS = (
    ('include', '/usr/'),
    ('include', '/usr/lib/'),
    ('exclude', '/usr/lib/**/*perl*'),
    ('include', '/usr/lib/jvm/'),
    ('include', '/usr/lib/jvm/**'),
    ('include', '/usr/lib/x86_64-linux-gnu/'),
    ('include', '/usr/lib/x86_64-linux-gnu/**'),
    ('include', '/usr/local/'),
    ('include', '/usr/local/bin/'),
    # Drop CPython binaries so `env` never resolves to them (see note above).
    ('exclude', '/usr/local/bin/python*'),
    ('include', '/usr/local/bin/*'),
    ('include', '/usr/local/lib/'),
    ('include', '/usr/local/lib/**'),
    ('exclude', '**'),
)
| 34.35119 | 78 | 0.612199 |
82c30affdd6735cd19f09c9fa98712ebb317fd91 | 289 | py | Python | python3/best_time_stock1.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | 1 | 2020-10-08T09:17:40.000Z | 2020-10-08T09:17:40.000Z | python3/best_time_stock1.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | python3/best_time_stock1.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | """
Space : O(1)
Time : O(n)
"""
| 19.266667 | 50 | 0.439446 |
82c33d6f16c0ad3e4c5059353c658ad5302c575d | 175 | py | Python | environments/assets/gym_collectball/__init__.py | GPaolo/SERENE | 83bc38a37ad8f1be9695d2483fd463428d4dae23 | [
"MIT"
] | 3 | 2021-04-19T21:55:00.000Z | 2021-12-20T15:26:12.000Z | environments/assets/gym_collectball/__init__.py | GPaolo/SERENE | 83bc38a37ad8f1be9695d2483fd463428d4dae23 | [
"MIT"
] | null | null | null | environments/assets/gym_collectball/__init__.py | GPaolo/SERENE | 83bc38a37ad8f1be9695d2483fd463428d4dae23 | [
"MIT"
] | null | null | null | # Created by Giuseppe Paolo
# Date: 27/08/2020
from gym.envs.registration import register
# Register the custom environment with Gym so gym.make('CollectBall-v0')
# can instantiate gym_collectball.envs.CollectBall.
register(
  id='CollectBall-v0',
  entry_point='gym_collectball.envs:CollectBall'
)
82c36eb8e029351535cbcf82344721060c30bebf | 3,534 | py | Python | foreverbull/foreverbull.py | quantfamily/foreverbull-python | 4f8144b6d964e9c0d1209f0421dc960b82a15400 | [
"Apache-2.0"
] | null | null | null | foreverbull/foreverbull.py | quantfamily/foreverbull-python | 4f8144b6d964e9c0d1209f0421dc960b82a15400 | [
"Apache-2.0"
] | 9 | 2021-11-24T10:45:27.000Z | 2022-02-26T19:12:47.000Z | foreverbull/foreverbull.py | quantfamily/foreverbull-python | 4f8144b6d964e9c0d1209f0421dc960b82a15400 | [
"Apache-2.0"
] | null | null | null | import logging
import threading
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Queue
from foreverbull.worker.worker import WorkerHandler
from foreverbull_core.models.finance import EndOfDay
from foreverbull_core.models.socket import Request
from foreverbull_core.models.worker import Instance
from foreverbull_core.socket.client import ContextClient, SocketClient
from foreverbull_core.socket.exceptions import SocketClosed, SocketTimeout
from foreverbull_core.socket.router import MessageRouter
| 36.061224 | 91 | 0.649689 |
82c418de34320061d50470074e4e4e6e0fe9752b | 704 | py | Python | scopus/tests/test_AffiliationSearch.py | crew102/scopus | d8791c162cef4c2f830d983b435333d9d8eaf472 | [
"MIT"
] | null | null | null | scopus/tests/test_AffiliationSearch.py | crew102/scopus | d8791c162cef4c2f830d983b435333d9d8eaf472 | [
"MIT"
] | null | null | null | scopus/tests/test_AffiliationSearch.py | crew102/scopus | d8791c162cef4c2f830d983b435333d9d8eaf472 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `AffiliationSearch` module."""
from collections import namedtuple
from nose.tools import assert_equal, assert_true
import scopus
s = scopus.AffiliationSearch('af-id(60021784)', refresh=True)
| 29.333333 | 79 | 0.691761 |
82c5022208b58d4f46a1d7ce39f5bdeb44953f3f | 566 | py | Python | MechOS/simple_messages/int.py | PierceATronics/MechOS | 8eeb68b65b8c20b642db52baad1379fd0847b362 | [
"MIT"
] | null | null | null | MechOS/simple_messages/int.py | PierceATronics/MechOS | 8eeb68b65b8c20b642db52baad1379fd0847b362 | [
"MIT"
] | null | null | null | MechOS/simple_messages/int.py | PierceATronics/MechOS | 8eeb68b65b8c20b642db52baad1379fd0847b362 | [
"MIT"
] | null | null | null | '''
'''
import struct
| 20.962963 | 77 | 0.556537 |
82c56d7c16636bc69a537283da6c0edaf26dd821 | 377 | py | Python | Curso Python/PythonExercicios/ex017.py | marcos-saba/Cursos | 1c063392867e9ed86d141dad8861a2a35488b1c6 | [
"MIT"
] | null | null | null | Curso Python/PythonExercicios/ex017.py | marcos-saba/Cursos | 1c063392867e9ed86d141dad8861a2a35488b1c6 | [
"MIT"
] | null | null | null | Curso Python/PythonExercicios/ex017.py | marcos-saba/Cursos | 1c063392867e9ed86d141dad8861a2a35488b1c6 | [
"MIT"
] | null | null | null | #from math import hypot
import math

# Lê os dois catetos de um triângulo retângulo e calcula a hipotenusa
# com math.hypot (equivalente a sqrt(cat_op**2 + cat_adj**2)).
# Fix: restore the accented characters lost to encoding damage in the
# user-facing strings ("Clculo" -> "Cálculo", "so" -> "são", missing "é").
print('='*5, 'Cálculo triângulo retângulo', '='*5)
cat_op = float(input('Digite o comprimento do cateto oposto: '))
cat_adj = float(input('Digite o comprimento do cateto adjacente: '))
hip = math.hypot(cat_op, cat_adj)  # hipotenusa
print(f'O comprimento da hipotenusa do triângulo retângulo, cujos catetos são {cat_op:.2f} e {cat_adj:.2f}, é {hip:.2f}.')
| 47.125 | 121 | 0.729443 |
82c5f5ed054e4540c225e7fd44668ed1c842c358 | 312 | py | Python | exercicios/ex074.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
] | null | null | null | exercicios/ex074.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
] | null | null | null | exercicios/ex074.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
] | null | null | null | from random import randint
# Sorteia cinco inteiros pseudo-aleatórios em [0, 10] e mostra o maior e o
# menor deles usando uma tupla com max()/min().
# Fix: restore accented characters lost to encoding damage in the output
# strings; drop the pointless f-prefix on the placeholder-free string.
numeros = (randint(0, 10), randint(0, 10), randint(0, 10), randint(0, 10), randint(0, 10))
print('Os cinco números são: ', end='')
for n in numeros:  # Exibe números sorteados
    print(n, end=' ')
print(f'\nO MAIOR número é {max(numeros)}')
print(f'O MENOR número é {min(numeros)}')
| 39 | 90 | 0.666667 |
82c61ef5a2ffb92917f588c48559df6bc3be2564 | 10,832 | py | Python | libs3/maxwellccs.py | tmpbci/LJ | 4c40e2ddf862f94dcfeb3cc48c41aad44a3a8d34 | [
"CNRI-Python"
] | 7 | 2019-03-20T00:09:14.000Z | 2022-03-06T23:18:20.000Z | libs3/maxwellccs.py | tmpbci/LJ | 4c40e2ddf862f94dcfeb3cc48c41aad44a3a8d34 | [
"CNRI-Python"
] | null | null | null | libs3/maxwellccs.py | tmpbci/LJ | 4c40e2ddf862f94dcfeb3cc48c41aad44a3a8d34 | [
"CNRI-Python"
] | null | null | null | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Maxwell Macros
v0.7.0
by Sam Neurohack
from /team/laser
Launchpad set a "current path"
"""
from OSC3 import OSCServer, OSCClient, OSCMessage
import time
import numpy as np
import rtmidi
from rtmidi.midiutil import open_midiinput
from threading import Thread
from rtmidi.midiconstants import (CHANNEL_PRESSURE, CONTROLLER_CHANGE, NOTE_ON, NOTE_OFF,
PITCH_BEND, POLY_PRESSURE, PROGRAM_CHANGE)
import os, json
import midi3
if os.uname()[1]=='raspberrypi':
pass
# --- defaults: OSC destination and MIDI routing ---
port = 8090
ip = "127.0.0.1"
mididest = 'Session 1'
djdest = 'Port'
midichannel = 1
# One IP address per computer slot; `computer` selects the active slot.
computerIP = ['127.0.0.1','192.168.2.95','192.168.2.52','127.0.0.1',
              '127.0.0.1','127.0.0.1','127.0.0.1','127.0.0.1']
computer = 0
# store current value for computer 1
cc1 =[0]*140
# Mutable session state: the OSC path currently being edited ("current
# path", see module docstring) plus the active macro/generator selections.
current = {
    "patch": 0,
    "prefixLeft": "/osc/left/X",
    "prefixRight": "/osc/right/X",
    "suffix": "/amp",
    "path": "/osc/left/X/curvetype",
    "pathLeft": "/osc/left/X/curvetype",
    "pathRight": "/osc/left/X/curvetype",
    "previousmacro": -1,
    "LeftCurveType": 0,
    "lfo": 1,
    "rotator": 1,
    "translator": 1
}
# Named 0-127 control values for each Maxwell parameter type; the inner
# keys map human-readable names to the CC value that selects that option.
specificvalues = {
    # Sine: 0-32, Tri: 33-64, Square: 65-96, Line: 96-127
    "curvetype": {"sin": 0, "saw": 33, "squ": 95, "lin": 127},
    "freqlimit": {"1": 0, "4": 26, "16": 52, "32": 80, "127": 127},
    "amptype": {"constant": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
    "phasemodtype": {"linear": 0,"sin": 90},
    "phaseoffsettype": {"manual": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
    "ampoffsettype": { "manual": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
    "inversion": {"off": 0, "on": 127},
    "colortype": {"solid": 0, "lfo": 127},
    "modtype": {"sin": 0,"linear": 127},
    "switch": {"off": 0,"on": 127},
    "operation": {"+": 0, "-": 50, "*": 127}
}
#
# Maxwell CCs
#
# /cc cc number value
# Jog send 127 to left and 1 to right
# increase or decrease current CC defined in current path
# Jog send 127 to left and 1 to right
# increase or decrease current CC defined in current path
# Parameter change : to left 127 / to right 0 or 1
# Change type : trig with only with midi value 127 on a CC event
# Left cue button 127 = on 0 = off
# Right cue button 127 = on 0 = off
# increase/decrease a CC
| 26.745679 | 110 | 0.625185 |
82c72df17c47f59db7183dbcc92de68aef849d6a | 11,660 | py | Python | functions_alignComp.py | lauvegar/VLBI_spectral_properties_Bfield | 6d07b6b0549ba266d2c56adcf664219a500e75e8 | [
"MIT"
] | 1 | 2020-03-14T14:55:17.000Z | 2020-03-14T14:55:17.000Z | functions_alignComp.py | lauvegar/VLBI_spectral_properties_Bfield | 6d07b6b0549ba266d2c56adcf664219a500e75e8 | [
"MIT"
] | null | null | null | functions_alignComp.py | lauvegar/VLBI_spectral_properties_Bfield | 6d07b6b0549ba266d2c56adcf664219a500e75e8 | [
"MIT"
] | 1 | 2021-01-29T14:08:16.000Z | 2021-01-29T14:08:16.000Z | import numpy as np
import matplotlib.pyplot as plt
from pylab import *
#import pyspeckit as ps
from scipy import io
from scipy import stats
from scipy.optimize import leastsq
#from lmfit import minimize, Parameters, Parameter, report_fit
#from lmfit.models import GaussianModel
import scipy.optimize as optimization
import matplotlib.ticker as ticker
import cmath as math
import pickle
import iminuit
import astropy.io.fits as pf
import os,glob
#import string,math,sys,fileinput,glob,time
#load modules
#from pylab import *
import subprocess as sub
import re
#from plot_components import get_ellipse_coords, ellipse_axis
import urllib2
from astropy import units as u
#from astropy.coordinates import SkyCoord
#FUNCTION TO READ THE HEADER AND TAKE IMPORTANT PARAMETERS AS
#cell
#BMAJ, BMIN, BPA
#date, freq and epoch
def natural_keys(text):
    '''
    Sort key for "human order": splits *text* into alternating text and
    digit chunks so that, e.g., 'item2' sorts before 'item10'.
    Usage: alist.sort(key=natural_keys)
    http://nedbatchelder.com/blog/200712/human_sorting.html
    '''
    chunks = re.split('(\d+)', text)
    return [atoi(chunk) for chunk in chunks]
def get_ellipse_coords(a=0.0, b=0.0, x=0.0, y=0.0, angle=0.0, k=2):
    """ Return the (360*k + 1, 2) array of points on an ellipse.

    a, b  : semi-axis lengths along the (unrotated) x and y directions
    x, y  : center offsets along the x- and y-axis
    angle : clockwise rotation of the ellipse in degrees
            (angle=0 aligns the ellipse with the positive x-axis)
    k     : sampling density; k=1 gives one point per degree (361 points)

    Based on the parametric form given at http://en.wikipedia.org/wiki/Ellipse
    """
    n_points = 360 * k + 1
    # Clockwise rotation -> negative angle in the standard CCW convention.
    beta = np.radians(-angle)
    cos_b, sin_b = np.cos(beta), np.sin(beta)
    # Parameter alpha sweeps 0..360 degrees inclusive.
    alpha = np.radians(np.linspace(0.0, 360.0, n_points))
    cos_a, sin_a = np.cos(alpha), np.sin(alpha)
    pts = np.empty((n_points, 2))
    pts[:, 0] = x + (a * cos_a * cos_b - b * sin_a * sin_b)
    pts[:, 1] = y + (a * cos_a * sin_b + b * sin_a * cos_b)
    return pts
| 29.004975 | 140 | 0.613036 |
82c74e30b862d202367459727b08bf47fdb074f4 | 1,762 | py | Python | osbuild/dist.py | dnarvaez/osbuild | 08031487481ba23597f19cb3e106628e5c9d440d | [
"Apache-2.0"
] | null | null | null | osbuild/dist.py | dnarvaez/osbuild | 08031487481ba23597f19cb3e106628e5c9d440d | [
"Apache-2.0"
] | 1 | 2016-11-13T01:04:18.000Z | 2016-11-13T01:04:18.000Z | osbuild/dist.py | dnarvaez/osbuild | 08031487481ba23597f19cb3e106628e5c9d440d | [
"Apache-2.0"
] | 2 | 2015-01-06T20:57:55.000Z | 2015-11-15T20:14:09.000Z | # Copyright 2013 Daniel Narvaez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
from distutils.sysconfig import parse_makefile
from osbuild import config
from osbuild import command
_dist_builders = {}
_dist_builders['autotools'] = _autotools_dist_builder
| 25.536232 | 74 | 0.715096 |
82c7e82524f111efe667928715ea87dcc4155b43 | 1,194 | py | Python | neural_net/game_status.py | Ipgnosis/tic_tac_toe | e1519b702531965cc647ff37c1c46d72f4b3b24e | [
"BSD-3-Clause"
] | null | null | null | neural_net/game_status.py | Ipgnosis/tic_tac_toe | e1519b702531965cc647ff37c1c46d72f4b3b24e | [
"BSD-3-Clause"
] | 4 | 2021-03-25T19:52:40.000Z | 2021-12-12T17:57:11.000Z | neural_net/game_status.py | Ipgnosis/tic_tac_toe | e1519b702531965cc647ff37c1c46d72f4b3b24e | [
"BSD-3-Clause"
] | null | null | null | # node to capture and communicate game status
# written by Russell on 5/18
| 24.367347 | 101 | 0.569514 |
82c885deedbc0d14255bfcc8dfea36b0a64e58d5 | 13,340 | py | Python | alphatrading/system/db_methods/method_sqlite3.py | LoannData/Q26_AlphaTrading | b8e6983e59f942352150f76541d880143cca4478 | [
"MIT"
] | null | null | null | alphatrading/system/db_methods/method_sqlite3.py | LoannData/Q26_AlphaTrading | b8e6983e59f942352150f76541d880143cca4478 | [
"MIT"
] | null | null | null | alphatrading/system/db_methods/method_sqlite3.py | LoannData/Q26_AlphaTrading | b8e6983e59f942352150f76541d880143cca4478 | [
"MIT"
] | null | null | null | """
"""
import sqlite3
import numpy as np
import math
| 32.378641 | 139 | 0.422414 |
82c9034910103390615809d1175c2317626103b0 | 4,705 | py | Python | pysport/horseracing/lattice_calibration.py | notbanker/pysport | fbeb1f1efa493aa26ffb58156b86ce2aee3482bf | [
"MIT"
] | null | null | null | pysport/horseracing/lattice_calibration.py | notbanker/pysport | fbeb1f1efa493aa26ffb58156b86ce2aee3482bf | [
"MIT"
] | null | null | null | pysport/horseracing/lattice_calibration.py | notbanker/pysport | fbeb1f1efa493aa26ffb58156b86ce2aee3482bf | [
"MIT"
] | null | null | null | from .lattice import skew_normal_density, center_density,\
state_prices_from_offsets, densities_and_coefs_from_offsets, winner_of_many,\
expected_payoff, densities_from_offsets, implicit_state_prices, densitiesPlot
import pandas as pd # todo: get rid of this dependency
import numpy as np
# Default arguments for skew_normal_density() (see racing_density below).
RACING_L = 500        # passed as L
RACING_UNIT = 0.1     # passed as unit
RACING_SCALE = 1.0    # passed as scale
RACING_A = 1.0        # passed as a (skew parameter)
def make_nan_2000(x):
    """ Longshots: substitute 2000. for a missing dividend """
    # pd.isnull covers both None and NaN entries.
    return 2000. if pd.isnull(x) else x
def normalize(p):
    """ Naive renormalization: scale the entries of p so they sum to one """
    total = sum(p)
    return [value / total for value in p]
def prices_from_dividends(dividends):
    """ Risk neutral probabilities using naive renormalization """
    # Missing dividends are treated as extreme longshots (2000.) first.
    inverse_dividends = [1. / make_nan_2000(dividend) for dividend in dividends]
    return normalize(inverse_dividends)
def dividends_from_prices(prices):
    """ Australian style dividends (reciprocals of renormalized prices) """
    return [1. / probability for probability in normalize(prices)]
def racing_density( loc ):
    """ A rough and ready distribution of performance distributions for one round """
    # NOTE(review): the `loc` parameter is ignored here -- the density is built
    # at loc=0 and then re-centered by center_density(); confirm this is intended.
    density = skew_normal_density( L=RACING_L, unit=RACING_UNIT, loc=0, scale=RACING_SCALE, a=RACING_A )
    return center_density( density )
def dividend_implied_ability( dividends, density ):
    """ Infer risk-neutral implied_ability from Australian style dividends
    :param dividends: [ 7.6, 12.0, ... ]
    :return: [ float ] Implied ability
    """
    # NOTE: Python 2 only -- relies on xrange() and integer division below.
    state_prices = prices_from_dividends( dividends )
    # Start every runner at offset 0 and let implied_ability iterate from there.
    implied_offsets_guess = [ 0 for _ in state_prices]
    # Candidate offsets span [-L/4, L/4) over the density grid, in descending
    # order (implied_ability expects descending offset_samples).
    L = len( density )/2
    offset_samples = list( xrange( -L/4, L/4 ))[::-1]
    ability = implied_ability( prices = state_prices, density = density, \
                               offset_samples = offset_samples, implied_offsets_guess = implied_offsets_guess, nIter = 3)
    return ability
def ability_implied_dividends(ability, density):
    """ Return betfair style prices
    :param ability: offsets passed to state_prices_from_offsets
    :return: [ 7.6, 12.3, ... ]
    """
    state_prices = state_prices_from_offsets(density=density, offsets=ability)
    return [1. / state_price for state_price in state_prices]
def implied_ability( prices, density, offset_samples = None, implied_offsets_guess = None, nIter = 3, verbose = False, visualize = False):
    """ Finds location translations of a fixed density so as to replicate given state prices for winning """
    # NOTE: Python 2 only (xrange, integer division, print statement below).
    L = len( density )
    if offset_samples is None:
        offset_samples = list( xrange( -L/4, L/4 ))[::-1] # offset_samples should be descending TODO: add check for this
    else:
        _assert_descending( offset_samples )
    if implied_offsets_guess is None:
        implied_offsets_guess = range( len(prices) )
    # First guess at densities
    densities, coefs = densities_and_coefs_from_offsets( density, implied_offsets_guess )
    densityAllGuess, multiplicityAllGuess = winner_of_many( densities )
    densityAll = densityAllGuess.copy()
    multiplicityAll = multiplicityAllGuess.copy()
    guess_prices = [ np.sum( expected_payoff( density, densityAll, multiplicityAll, cdf = None, cdfAll = None)) for density in densities]
    # Fixed-point iteration: map target prices onto the offset grid via
    # interpolation, rebuild the field density, and repeat nIter times.
    for _ in xrange( nIter ):
        if visualize:
            # temporary hack to check progress of optimization
            densitiesPlot( [ densityAll] + densities , unit=0.1 )
        implied_prices = implicit_state_prices( density=density, densityAll=densityAll, multiplicityAll = multiplicityAll, offsets=offset_samples )
        # NOTE(review): np.interp expects its xp argument (implied_prices) to be
        # increasing -- confirm implicit_state_prices guarantees that here.
        implied_offsets = np.interp( prices, implied_prices, offset_samples )
        densities = densities_from_offsets( density, implied_offsets )
        densityAll, multiplicityAll = winner_of_many( densities )
        guess_prices = [ np.sum(expected_payoff(density, densityAll, multiplicityAll, cdf = None, cdfAll = None)) for density in densities ]
        approx_prices = [ np.round( pri, 3 ) for pri in prices]
        approx_guesses = [ np.round( pri, 3 ) for pri in guess_prices]
        if verbose:
            print zip( approx_prices, approx_guesses )[:5]
    # NOTE(review): raises NameError when nIter < 1 (implied_offsets unbound).
    return implied_offsets
| 42.772727 | 160 | 0.671413 |
82ca9321fb77ad0b8c97cc3c98eb832716ddecc4 | 4,832 | py | Python | var/spack/repos/builtin/packages/autoconf/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360 | 2017-11-06T08:47:01.000Z | 2022-03-31T14:45:33.000Z | var/spack/repos/builtin/packages/autoconf/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838 | 2017-11-04T07:49:45.000Z | 2022-03-31T23:38:39.000Z | var/spack/repos/builtin/packages/autoconf/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793 | 2017-11-04T07:45:50.000Z | 2022-03-30T14:31:53.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
| 45.158879 | 112 | 0.657078 |
82cb0803d2457f595d667a7981bfa23935775448 | 1,096 | py | Python | src/wallet/web/schemas/categories.py | clayman-micro/wallet | b78f650aed7d57167db81a0530fd78dbc12d527e | [
"MIT"
] | 2 | 2015-10-18T15:36:37.000Z | 2015-10-19T04:57:00.000Z | src/wallet/web/schemas/categories.py | clayman74/wallet | b78f650aed7d57167db81a0530fd78dbc12d527e | [
"MIT"
] | 7 | 2021-06-26T16:51:13.000Z | 2021-11-29T19:05:00.000Z | src/wallet/web/schemas/categories.py | clayman-micro/wallet | b78f650aed7d57167db81a0530fd78dbc12d527e | [
"MIT"
] | null | null | null | from aiohttp_micro.web.handlers.openapi import PayloadSchema, ResponseSchema
from marshmallow import fields, post_load, Schema
from wallet.core.entities.categories import CategoryFilters
from wallet.web.schemas.abc import CollectionFiltersSchema
| 29.621622 | 100 | 0.762774 |
82cb1f7a824b2011c270ad30649e677322c356f9 | 127 | py | Python | scons_gbd_docs/Gbd/Docs/SConscript.py | ASoftTech/Scons.Gbd.Docs | 4d9fb7585d9565f57306774efb4342fe9b8822f2 | [
"MIT"
] | null | null | null | scons_gbd_docs/Gbd/Docs/SConscript.py | ASoftTech/Scons.Gbd.Docs | 4d9fb7585d9565f57306774efb4342fe9b8822f2 | [
"MIT"
] | null | null | null | scons_gbd_docs/Gbd/Docs/SConscript.py | ASoftTech/Scons.Gbd.Docs | 4d9fb7585d9565f57306774efb4342fe9b8822f2 | [
"MIT"
] | null | null | null | SConscript('Mkdocs/Common/SConscript.py')
SConscript('Pandoc/Common/SConscript.py')
SConscript('Doxygen/Common/SConscript.py')
| 31.75 | 42 | 0.811024 |
82cb4d12dfd598eacff3048f5dbbafb527f62c06 | 11,563 | py | Python | seg/segmentor/tools/module_runner.py | Frank-Abagnal/HRFormer | d7d362770de8648f8e0a379a71cee25f42954503 | [
"MIT"
] | 254 | 2021-08-13T10:05:22.000Z | 2022-03-25T09:21:45.000Z | seg/segmentor/tools/module_runner.py | Sense-X/HRFormer | 1245b88b5824fbd8cdb358b5ee909a4e537a2ef5 | [
"MIT"
] | 17 | 2021-09-08T01:40:49.000Z | 2022-03-23T10:53:47.000Z | seg/segmentor/tools/module_runner.py | Sense-X/HRFormer | 1245b88b5824fbd8cdb358b5ee909a4e537a2ef5 | [
"MIT"
] | 48 | 2021-08-13T14:06:58.000Z | 2022-03-30T02:41:26.000Z | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: Donny You(youansheng@gmail.com)
# Some methods used by main methods.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
from collections import OrderedDict
import torch
import torch.nn as nn
from torch.nn.parallel.scatter_gather import gather as torch_gather
from lib.extensions.parallel.data_parallel import DataParallelModel
from lib.utils.tools.logger import Logger as Log
from lib.utils.distributed import get_rank, is_distributed
| 41.894928 | 115 | 0.585488 |
82cc626afaea4df2938aee10cb59917cc59cdc28 | 1,861 | py | Python | scripts/si_figs.py | gbirzu/density-dependent_dispersal_growth | edd1207f57b63e2827af385d4e868306ff308746 | [
"MIT"
] | null | null | null | scripts/si_figs.py | gbirzu/density-dependent_dispersal_growth | edd1207f57b63e2827af385d4e868306ff308746 | [
"MIT"
] | null | null | null | scripts/si_figs.py | gbirzu/density-dependent_dispersal_growth | edd1207f57b63e2827af385d4e868306ff308746 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import pickle
import scipy.stats as stats
# Input pickle of heterozygosity averages and output directory for figures.
data_path = '../data/het_average.dat'
output_dir = '../figures/'
# Configure matplotlib environment
helvetica_scale_factor = 0.92 # rescale Helvetica to other fonts of same size
mpl.rcParams['font.size'] = 10 * helvetica_scale_factor
mpl.rcParams['font.family'] = 'sans-serif'
mpl.rcParams['font.sans-serif'] = 'Helvetica Neue'
mpl.rcParams['axes.titlesize'] = 12 * helvetica_scale_factor
# Journal column widths in inches (metric equivalents in the comments).
single_col_width = 3.43 # = 8.7 cm
double_col_width = 7.01 # = 17.8 cm
if __name__ == '__main__':
    # Load the pickled averages, plot the comparison figure, and print the
    # effective population size fitted under global vs. local averaging.
    # NOTE(review): plot_het_comparison and fit_Ne are assumed to be defined
    # elsewhere in this file -- they are not visible in this excerpt.
    with open(data_path, 'rb') as f_in:
        het_averages = pickle.load(f_in)
    plot_het_comparison(het_averages)
    ne_global = fit_Ne(het_averages, averaging='global')
    ne_local = fit_Ne(het_averages, averaging='local')
    print('Ne (global averaging): ', ne_global)
    print('Ne (local averaging): ', ne_local)
    # Relative difference in percent.
    print('Ne difference: ', 100 * (ne_global - ne_local) / ne_global, '%')
| 33.836364 | 119 | 0.703923 |
82cd32f83dde9f87b3ac04ec47ec6fefab6101d7 | 7,532 | py | Python | language.py | sanine-a/dream-atlas | cd44c43ec6cf5e7a95ae231ba7174a6891d93474 | [
"MIT"
] | null | null | null | language.py | sanine-a/dream-atlas | cd44c43ec6cf5e7a95ae231ba7174a6891d93474 | [
"MIT"
] | null | null | null | language.py | sanine-a/dream-atlas | cd44c43ec6cf5e7a95ae231ba7174a6891d93474 | [
"MIT"
] | null | null | null | from random import random, choice, seed, shuffle, randint
from math import ceil
import copy
target = [ 2, 2, 3, 1, 4, 5 ]
consonants_base = [ 'p', 't', 'k', 'm', 'n' ]
vowels = [ [ 'a', 'i', 'u' ],
[ 'a', 'i', 'u', 'e', 'o' ],
[ 'a', 'A', 'i', 'I', 'u', 'U', 'e', 'E', 'o', 'O' ] ]
consonants_extra = [ 'b', 'd', 'j', 's', 'z', 'y', 'q', 'G', '?', 'N', 'r', 'f', 'v', 'T', 'D', 'S', 'Z', 'x', 'h', 'w', 'l', 'C' ]
sibilants = [ ['s',], [ 's', 'S' ], ['s', 'S', 'f'] ]
liquids = [ ['r'], ['l'], ['r','l'], ['w','y'], ['r','l','w','y'] ]
orthography1 = { 'name':'nordic', 'j':'dz', 'y':'j', 'T':'th', 'D':'', 'S':'sh', 'Z':'zh', 'N':'ng', '?':"'", 'G':'q', 'C':'ch', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' }
orthography2 = { 'name':'czech', 'T':'th', 'D':'th', 'S':'', 'Z':'', 'C':'', 'G':'q', 'N':'ng', '?':'-', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' }
orthography3 = { 'name':'french', 'T':'th', 'D':'th', 'S':'ch', 'G':'gh', 'C':'tc', '?':"'", 'N':'ng', 'Z':'z', 'k':'c', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' }
orthography4 = { 'name':'mexica', 'k':'c', 'G':'gh', 'N':'ng', 'T':'th', 'D':'th', 'S':'x', 'C':'ch', '?':"'", 'Z':'zh', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' }
orthographies = ( orthography1, orthography2, orthography3, orthography4 )
syllables = ( [ 'CV', ],
[ 'CV', 'V' ],
[ 'CV', 'CVC' ],
[ 'CV', 'CVC', 'V' ],
[ 'CVC', ],
[ 'CVC', 'CRVC', 'CV', 'CRV' ],
[ 'CVC', 'CRVC', 'CVRC', 'CV', 'CRV' ], [ 'CVC', 'CRVC', 'CVCC', 'CRVCC', 'CV', 'CRV' ],
[ 'CVC', 'CRVC', 'CVRC', 'CVCC', 'CRVCC', 'CV', 'CRV' ],
[ 'CV', 'CVC', 'SCV', 'SCVC' ],
[ 'CVC', 'CVCC', 'SVC', 'SVCC', 'CV', 'SCV' ],
[ 'CVC', 'CVCC', 'CRVC', 'SCVC', 'SCRVC', 'CV', 'CRV', 'SCV', 'SCRV' ] )
government = [ 'Republic of ', 'Kingdom of ', 'Confederacy of ', 'Satrapy of ','Empire of ' ]
'''
lang1 = language()
for j in range(10):
print('Language '+str(j+1))
for i in range(5):
word = lang1.cityname()
print(lang1.orthographic(word).title())
lang1 = lang1.derive()
print(' ')
'''
| 34.392694 | 175 | 0.454461 |
82cfea168601da39ca8ee801205fdee39d24a8a0 | 446 | py | Python | week/templatetags/sidebar_data.py | uno-isqa-8950/fitgirl-inc | 2656e7340e85ab8cbeb0de19dcbc81030b9b5b81 | [
"MIT"
] | 6 | 2018-09-11T15:30:10.000Z | 2020-01-14T17:29:07.000Z | week/templatetags/sidebar_data.py | uno-isqa-8950/fitgirl-inc | 2656e7340e85ab8cbeb0de19dcbc81030b9b5b81 | [
"MIT"
] | 722 | 2018-08-29T17:27:38.000Z | 2022-03-11T23:28:33.000Z | week/templatetags/sidebar_data.py | uno-isqa-8950/fitgirl-inc | 2656e7340e85ab8cbeb0de19dcbc81030b9b5b81 | [
"MIT"
] | 13 | 2018-08-29T07:42:01.000Z | 2019-04-21T22:34:30.000Z | from django import template
from week.models import SidebarContentPage,SidebarImagePage
register = template.Library()
| 26.235294 | 59 | 0.784753 |
82d236c6e0b9c063b565077e0441849e2549c37e | 1,097 | py | Python | tests/functional/Hydro/AcousticWave/CSPH_mod_package.py | jmikeowen/Spheral | 3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 22 | 2018-07-31T21:38:22.000Z | 2020-06-29T08:58:33.000Z | tests/Hydro/AcousticWave/CSPH_mod_package.py | markguozhiming/spheral | bbb982102e61edb8a1d00cf780bfa571835e1b61 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 41 | 2020-09-28T23:14:27.000Z | 2022-03-28T17:01:33.000Z | tests/Hydro/AcousticWave/CSPH_mod_package.py | markguozhiming/spheral | bbb982102e61edb8a1d00cf780bfa571835e1b61 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 7 | 2019-12-01T07:00:06.000Z | 2020-09-15T21:12:39.000Z | #-------------------------------------------------------------------------------
# A mock physics package to mess around with the CRKSPH corrections.
#-------------------------------------------------------------------------------
from Spheral1d import *
| 26.756098 | 80 | 0.539654 |
82d391d63340bb25ffc76c9865651669de389703 | 8,452 | py | Python | fbm-scraper.py | cbdelavenne/fb-messenger-media-scraper | ff4ed228f3520f208e048e34ae24d7576b0089bc | [
"MIT"
] | 8 | 2019-11-23T17:45:11.000Z | 2021-05-27T10:41:47.000Z | fbm-scraper.py | cbdelavenne/fb-messenger-media-scraper | ff4ed228f3520f208e048e34ae24d7576b0089bc | [
"MIT"
] | 10 | 2019-11-23T17:41:22.000Z | 2022-01-03T11:10:50.000Z | fbm-scraper.py | cbdelavenne/fb-messenger-media-scraper | ff4ed228f3520f208e048e34ae24d7576b0089bc | [
"MIT"
] | 4 | 2020-03-21T23:24:40.000Z | 2022-02-20T10:40:38.000Z | import os
import requests
import time
import uuid
import configparser
import datetime
import fbchat
import re
from fbchat import Client, ImageAttachment
from fbchat import FBchatException
from pathlib import Path
# Seconds to sleep between consecutive image downloads (used in the main loop).
politeness_index = 0.5 # ;)
# Naive Unix-epoch reference used by convert_date_to_epoch().
epoch = datetime.datetime(1970, 1, 1)
# Hack to get the login to work, see: https://github.com/fbchat-dev/fbchat/issues/615#issuecomment-716089816
fbchat._state.FB_DTSG_REGEX = re.compile(r'"name":"fb_dtsg","value":"(.*?)"')
def download_file_from_url(url, target_path):
    """
    Download image from a given URL to a specified target path.

    Does nothing when *url* is None. Skips (with a message) responses whose
    HTTP status indicates an error, so error pages are never written to disk
    as image files -- the original version saved whatever body came back.

    :param url: URL of file to download (may be None)
    :param target_path: Local target path to save the file
    :type url: str
    :type target_path: str
    """
    if url is None:
        return
    r = requests.get(url)
    if not r.ok:  # 4xx/5xx: don't persist the error body as an "image"
        print('\tSkipping {url} (HTTP {code})'.format(url=url, code=r.status_code))
        return
    with open(target_path, 'wb') as f:
        print('\tDownloading image to {path}'.format(path=target_path))
        f.write(r.content)
def convert_date_to_epoch(date, as_int=True):
    """
    Convert a '%Y-%m-%d' date string to a Unix timestamp in milliseconds.

    Fixes vs. the original: documented types corrected (as_int is a bool,
    the return may be int, float or None) and the epoch reference is local
    instead of depending on the module-level ``epoch`` global.

    :param date: Date string in '%Y-%m-%d' format
    :param as_int: When True (default) return the timestamp as an int,
                   otherwise as a float
    :type date: str
    :type as_int: bool
    :return: Milliseconds since 1970-01-01 (naive), or None when *date*
             cannot be parsed
    :rtype: int | float | None
    """
    unix_epoch = datetime.datetime(1970, 1, 1)
    try:
        dt = datetime.datetime.strptime(date, '%Y-%m-%d')
        res = (dt - unix_epoch).total_seconds() * 1000.0  # seconds -> ms
        return int(res) if as_int else res
    except ValueError:
        # Unparseable date string: callers treat None as "not configured".
        return None
def convert_epoch_to_datetime(timestamp, dt_format='%Y-%m-%d_%H.%M.%S'):
    """
    Format a Unix timestamp given in milliseconds as a local-time string.

    :param timestamp: Unix time in ms (anything int() accepts)
    :param dt_format: strftime format for the result
    :type timestamp: str
    :type dt_format: str
    :return: formatted datetime string
    """
    seconds = int(timestamp) / 1000.0  # ms -> fractional seconds
    return datetime.datetime.fromtimestamp(seconds).strftime(dt_format)
if __name__ == '__main__':
    # Entry point: read config.ini, log in to Facebook Messenger via fbchat,
    # locate the thread whose URL matches the configured friend URL, then
    # download that thread's image attachments into the configured folder.
    config_path = Path('.') / 'config.ini'
    if os.path.exists(config_path) is False:
        raise Exception("Please create config.ini under this script's current directory")
    # Load config file
    config = configparser.ConfigParser()
    config.read(config_path)
    download_path = config.get('Download', 'path')
    if os.path.exists(download_path) is False:
        raise Exception("The path specified in download_path does not exist ({path}). Please specify a valid path in "
                        "config.ini".format(path=download_path))
    # Initialize FB Client
    fb_email = config.get('Credentials', 'email')
    fb_pw = config.get('Credentials', 'password')
    user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36"
    fb_client = Client(fb_email, fb_pw, user_agent=user_agent)
    # Search for latest threads
    thread_search_limit = int(config.get('Threads', 'search_limit'))
    # convert_date_to_epoch returns None when before_date isn't '%Y-%m-%d'.
    thread_search_before = convert_date_to_epoch(config.get('Threads', 'before_date'))
    if thread_search_before is not None:
        threads = fb_client.fetchThreadList(limit=thread_search_limit, before=thread_search_before)
    else:
        threads = fb_client.fetchThreadList(limit=thread_search_limit)
    # Find correct thread for given user URL
    my_thread = None
    friend_url = config.get('Friend', 'url')
    for thread in threads:
        if hasattr(thread, 'url') and (thread.url == friend_url):
            my_thread = thread
            break
    # Get Messages for my_thread
    if my_thread is not None:
        thread_message_count = my_thread.message_count
        thread_message_name = my_thread.name
        print('Found {count} messages in thread with {friend_name}'.format(count=thread_message_count,
                                                                           friend_name=thread_message_name))
        message_before_date = config.get('Messages', 'before_date')
        message_search_limit = int(config.get('Messages', 'search_limit'))
        message_search_before = convert_date_to_epoch(message_before_date)
        # Clamp the search limit to the number of messages actually present.
        if message_search_limit > thread_message_count:
            message_search_limit = thread_message_count
            print('\tWarning: Message search limit was greater than the total number of messages in thread.\n')
        if message_search_before is not None:
            messages = fb_client.fetchThreadMessages(my_thread.uid, limit=message_search_limit,
                                                     before=message_search_before)
            print('Searching for images in the {message_limit} messages sent before {before_date}...'.format(
                message_limit=message_search_limit, before_date=message_before_date))
        else:
            messages = fb_client.fetchThreadMessages(my_thread.uid, limit=message_search_limit)
            print('Searching for images in the last {message_limit} messages...'.format(
                message_limit=message_search_limit))
        # When sender_only is set, only keep attachments authored by the friend.
        sender_id = None
        if config.getboolean('Media', 'sender_only'):
            sender_id = my_thread.uid
            print('\tNote: Only images sent by {friend_name} will be downloaded (as specified by sender_only in your '
                  'config.ini)'.format(friend_name=thread_message_name))
        # Extract Image attachments' full-sized image signed URLs (along with their original file extension)
        total_count = 0
        skip_count = 0
        full_images = []
        last_message_date = None
        print('\n')
        # NOTE(review): assumes ext_blacklist is a comma-separated list of
        # lowercase extensions -- confirm against config.ini documentation.
        extension_blacklist = str.split(config.get('Media', 'ext_blacklist'), ',')
        # Progress markers: '+' = image queued for download, '-' = blacklisted.
        for message in messages:
            message_datetime = convert_epoch_to_datetime(message.timestamp)
            if len(message.attachments) > 0:
                if (sender_id is None) or (sender_id == message.author):
                    for attachment in message.attachments:
                        if isinstance(attachment, ImageAttachment):
                            try:
                                attachment_ext = str.lower(attachment.original_extension)
                                if attachment_ext not in extension_blacklist:
                                    full_images.append({
                                        'extension': attachment_ext,
                                        'timestamp': message_datetime,
                                        'full_url': fb_client.fetchImageUrl(attachment.uid)
                                    })
                                    print('+', sep=' ', end='', flush=True)
                                else:
                                    skip_count += 1
                                    print('-', sep=' ', end='', flush=True)
                                total_count += 1
                            except FBchatException:
                                pass # ignore errors
            last_message_date = message_datetime
        # Download Full Images
        if len(full_images) > 0:
            images_count = len(full_images)
            print('\n\nFound a total of {total_count} images. Skipped {skip_count} images that had a blacklisted '
                  'extension'.format(total_count=total_count, skip_count=skip_count))
            print('Attempting to download {count} images...................\n'.format(count=images_count))
            for full_image in full_images:
                # Build a unique file name: fb-image-<uuid>-<friend>-<timestamp>.<ext>
                friend_name = str.lower(my_thread.name).replace(' ', '_')
                file_uid = str(uuid.uuid4())
                file_ext = full_image['extension']
                file_timestamp = full_image['timestamp']
                img_url = full_image['full_url']
                image_path = ''.join([download_path, '\\', 'fb-image-', file_uid, '-', friend_name, '-',
                                      file_timestamp, '.', file_ext])
                download_file_from_url(img_url, image_path)
                # Sleep half a second between file downloads to avoid getting flagged as a bot
                time.sleep(politeness_index)
        else:
            print('No images to download in the last {count} messages'.format(count=message_search_limit))
        # Reminder of last message found
        print('\nLast message scanned for image attachments was dated: {last_message_date}'.format(
            last_message_date=last_message_date))
    else:
        print('Thread not found for URL provided')
| 41.229268 | 139 | 0.614411 |
82d3afd1c39a5492eb62a1c160ebc7e3bbf21e20 | 1,565 | py | Python | guru/users/models.py | Jeromeschmidt/Guru | 3128a539e55b46afceb33b59c0bafaec7e9f630a | [
"MIT"
] | null | null | null | guru/users/models.py | Jeromeschmidt/Guru | 3128a539e55b46afceb33b59c0bafaec7e9f630a | [
"MIT"
] | 1 | 2021-02-26T02:49:34.000Z | 2021-02-26T02:49:34.000Z | guru/users/models.py | Jeromeschmidt/Guru | 3128a539e55b46afceb33b59c0bafaec7e9f630a | [
"MIT"
] | 1 | 2020-02-24T18:09:00.000Z | 2020-02-24T18:09:00.000Z | from django.contrib.auth.models import AbstractUser
from django.db.models import (BooleanField, CASCADE, CharField, FloatField,
IntegerField, ManyToManyField, Model,
OneToOneField, PositiveSmallIntegerField)
from django.contrib.postgres.fields import ArrayField
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
| 44.714286 | 75 | 0.676677 |
82d3d58b46fde9d57d6d1387e15cc36141a10208 | 7,676 | py | Python | movie.py | jmclinn/mapdraw | bdbddb164a82a3cf9b2673006caae4274948a420 | [
"MIT"
] | null | null | null | movie.py | jmclinn/mapdraw | bdbddb164a82a3cf9b2673006caae4274948a420 | [
"MIT"
] | null | null | null | movie.py | jmclinn/mapdraw | bdbddb164a82a3cf9b2673006caae4274948a420 | [
"MIT"
] | null | null | null | import os,time
## File Variable (USER INPUT)
## ==========================
## if multiple files are being accessed to create movie...
## ...specify the beginning and ending of the file names...
## ...and the date list text file in the variables below
## Please use True or False to set whether multiple files will be accessed for movie
file_is_variable = False
## If file_is_variable = True
## --------------------------
## make sure to leave trailing slash '/' on 'path_to_files'
path_to_files = '/path/to/files/'
## For series of files with similar prefixes (file_part1) and filetypes (file_part2)
file_part1 = 'pre.fixes.'
file_part2 = '.nc'
## location of file listing (with each entry on a new line) the variable part of the filename
dates_list_text_file = '/path/to/file/variable_list.txt'
## If file_is_variable = False
## ---------------------------
#file = '/path/to/single/file.nc'
file = '/Users/Jon/Documents/other_projects/Aluie/visuals/1-12/mapdraw/sgs.nc'
## Variables (USER INPUT)
## ======================
## all variable lists must be the same length
## set unused variables equal to '_empty_'
## if variable requires double-quotes on command line include them --> '" ... "'
## -----------------------------------------------------------------------------
data = 'sgsflux' #cannot be '_empty_'
lat = 'u_lat' #cannot be '_empty_'
lon = 'u_lon' #cannot be '_empty_'
depth = 'w_dep,9' #cannot be '_empty_'
mask = '-1e33,#000000'
maxr = '100' #use for 'max'
minr = '-100' #use for 'min'
norm = '_empty_'
colors = '"0:#0000AA,45:#0000FF,50:#FFFFFF,55:#FF0000,100:#AA0000"'
clr_min_max = '_empty_'
title = '_empty_'
crop = '_empty_'
lines = '_empty_'
## Sphere (for mapping onto Earth's spherical representation)
## ----------------------------------------------------------
## For use of 'sphere' set to True. If not leave False.
sphere_mapping = False
## Number of images (must match other variable list lengths from above)
sphere_frames = 3
## Start and stop points of sphere rotation (leave start/stop the same for no rotation in lat/lon)
sphere_lon_start = -10
sphere_lon_stop = 10
sphere_lat_start = -10
sphere_lat_stop = 10
## 'zoom' argument described in README file (leave False if zoom = 1)
zoom = 1.5
## Primary Variable (USER INPUT)
## =============================
## choose from the variables above
## specify without quotes
## if not a list will only output single result
## --------------------------------------------
primary_variable = file
## Save Location (USER INPUT)
## ==========================
## provide folder location (without filename(s))
## ---------------------------------------------
save = '/Users/Jon/Desktop/'
## Image Filename Prefix (USER INPUT)
## ==================================
## prefix for output filenames before auto-incremented counter
## -----------------------------------------------------------
file_prefix = 'img_'
## Image Counter Start (USER INPUT)
## ================================
## start of auto-incremented counter
## ---------------------------------
count_start = 0
## Image File Type (USER INPUT)
## ============================
## ex: '.png' or '.jpg'
## --------------------
img_type = '.png'
## Display Toggle (USER INPUT)
## ==========================
## toggle if each image displays in the loop
## use 'yes' or 'no' to control display preference
## -----------------------------------------------
display = 'no'
# # # # # # # # # # # # # # # # # # # # # # # # #
# ---- NO USER INPUTS AFTER THIS POINT ---- #
# # # # # # # # # # # # # # # # # # # # # # # # #
## If 'file' is variable this establishes list of files to loop through (Do Not Alter)
## ===================================================================================
if file_is_variable:
    # Read the variable filename parts; a context manager closes the list
    # file (the original leaked an open file handle).
    with open(dates_list_text_file, 'r') as dates_file:
        variable_parts = dates_file.read().splitlines()
    file = [str(path_to_files) + str(file_part1) + str(part) + str(file_part2)
            for part in variable_parts]
    primary_variable = file
## Parsing of 'sphere' rotation inputs (Do Not Alter)
## ==================================================
if sphere_mapping:
    # Interpolate lon/lat linearly from start to stop across the frames.
    # float() prevents integer truncation of the step size (Python 2 '/'
    # on ints would silently round the rotation increments down).
    lon_step = (sphere_lon_stop - sphere_lon_start) / float(sphere_frames - 1)
    lat_step = (sphere_lat_stop - sphere_lat_start) / float(sphere_frames - 1)
    sphere = [str(sphere_lon_start + lon_step * i) + ',' + str(sphere_lat_start + lat_step * i)
              for i in range(sphere_frames)]
    primary_variable = sphere
## Defining & Executing Command Expression (Do Not Alter)
## ======================================================

def _option_for_frame(option, index):
    # Options may be supplied either as a single shared value or as a
    # per-frame list; return the value that applies to frame `index`.
    if type(option) is list:
        return option[index]
    return option

def _append_option(cmd, keyword, value):
    # Append "keyword value" to the command string unless the value is
    # the '_empty_' sentinel, which marks an unused option.
    if value != '_empty_':
        return cmd + ' ' + keyword + ' ' + str(value)
    return cmd

# Options that cannot vary per frame form the base of every command.
_base_command = 'display ' + display
_base_command = _append_option(_base_command, 'title', title)
_base_command = _append_option(_base_command, 'lines', lines)

if type(primary_variable) is list:
    loop_len = len(primary_variable)
else:
    loop_len = 1

for i in range(loop_len):
    # Rebuild the command from the base on every iteration.  The original
    # kept appending to a single string, so every frame after the first
    # was rendered with all previous frames' arguments repeated (bug fix).
    command = _base_command
    command = command + ' save ' + str(save) + str(file_prefix) + str(i + int(count_start)) + str(img_type)
    command = _append_option(command, 'file', _option_for_frame(file, i))
    command = _append_option(command, 'data', _option_for_frame(data, i))
    command = _append_option(command, 'lat', _option_for_frame(lat, i))
    command = _append_option(command, 'lon', _option_for_frame(lon, i))
    command = _append_option(command, 'depth', _option_for_frame(depth, i))
    command = _append_option(command, 'mask', _option_for_frame(mask, i))
    command = _append_option(command, 'max', _option_for_frame(maxr, i))
    command = _append_option(command, 'min', _option_for_frame(minr, i))
    command = _append_option(command, 'norm', _option_for_frame(norm, i))
    command = _append_option(command, 'crop', _option_for_frame(crop, i))
    command = _append_option(command, 'colors', _option_for_frame(colors, i))
    command = _append_option(command, 'clr_min_max', _option_for_frame(clr_min_max, i))
    if sphere_mapping:
        command = command + ' sphere ' + str(sphere[i])
    if zoom:
        command = command + ' zoom ' + str(_option_for_frame(zoom, i))
    time0 = time.time()
    os.system('python map.py ' + command)
    if display == 'no':
        # Report the per-frame rendering time when images are not shown.
        print(str(i) + ' - ' + str(round((time.time() - time0), 2)) + ' sec')
82d45629fe3b78bf615a134ee2b08fe22d31ec28 | 4,544 | py | Python | gaetk2/tools/auth0tools.py | mdornseif/appengine-toolkit2 | 47ee6bf99b8e461ee64eae75bf24fb462d99b0ab | [
"MIT"
] | 1 | 2018-08-16T16:15:30.000Z | 2018-08-16T16:15:30.000Z | gaetk2/tools/auth0tools.py | mdornseif/appengine-toolkit2 | 47ee6bf99b8e461ee64eae75bf24fb462d99b0ab | [
"MIT"
] | 3 | 2018-08-14T09:52:11.000Z | 2021-12-13T19:54:07.000Z | gaetk2/tools/auth0tools.py | mdornseif/appengine-toolkit2 | 47ee6bf99b8e461ee64eae75bf24fb462d99b0ab | [
"MIT"
] | 1 | 2018-09-28T05:55:27.000Z | 2018-09-28T05:55:27.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""gaetk2.tools.auth0.py Tools for working with auth0
Created by Maximillian Dornseif on 2017-12-05.
Copyright 2017 HUDROA. MIT Licensed.
"""
from __future__ import unicode_literals
import logging
from google.appengine.api import memcache
from auth0.v3.authentication import GetToken
from auth0.v3.exceptions import Auth0Error
from auth0.v3.management import Auth0
from gaetk2.config import gaetkconfig
logger = logging.getLogger(__name__)
def get_auth0_access_token():
    """Return a (memcached) access token for the Auth0 Management API."""
    cache_key = 'get_auth0_access_token()'
    token_value = memcache.get(cache_key)
    if token_value:
        return token_value
    assert gaetkconfig.AUTH0_DOMAIN != '*unset*'
    assert gaetkconfig.AUTH0_CLIENT_ID != '*unset*'
    authenticator = GetToken(gaetkconfig.AUTH0_DOMAIN)
    reply = authenticator.client_credentials(
        gaetkconfig.AUTH0_CLIENT_ID,
        gaetkconfig.AUTH0_CLIENT_SECRET,
        'https://{}/api/v2/'.format(gaetkconfig.AUTH0_DOMAIN))
    token_value = reply['access_token']
    # Cache for half the advertised lifetime so a token close to expiry
    # is never handed out from the cache.
    memcache.set(cache_key, token_value, reply['expires_in'] / 2)
    return token_value
def create_from_credential(credential):
    """Create an entry in the Auth0 ``DefaultDatabase`` for a credential.

    Returns without doing anything when the credential is already linked
    to an Auth0 user (``external_uid`` set) or lacks the secret/email
    needed to create one.  If Auth0 reports the user already exists, the
    existing record is looked up (falling back to an e-mail search when
    several credentials share one address, in which case the duplicate
    designators/uids are recorded in the Auth0 ``app_metadata``).  On
    success the Auth0 ``user_id`` is written back to
    ``credential.external_uid`` / ``credential.meta`` and the credential
    is saved via ``credential.put()``.

    Raises:
        RuntimeError: if Auth0 did not return a usable user record.
        Auth0Error: re-raised for unexpected Auth0 API failures.
    """
    if credential.external_uid:
        return
    if not credential.secret:
        return
    if not credential.email:
        return
    # Fall back through other fields so the Auth0 profile has a name.
    if not getattr(credential, 'name', None):
        credential.name = credential.text
    if not getattr(credential, 'name', None):
        credential.name = credential.org_designator

    auth0api = Auth0(gaetkconfig.AUTH0_DOMAIN, get_auth0_access_token())
    payload = {
        'connection': 'DefaultDatabase',
        'email': credential.email,
        'password': credential.secret,
        'user_id': credential.uid,
        'user_metadata': {
            'name': credential.name,
            'nickname': 'User fuer {}'.format(credential.org_designator)
        },
        'email_verified': True,
        'verify_email': False,
        'app_metadata': {
            'org_designator': credential.org_designator,
            'permissions': credential.permissions,
        }
    }
    newuser = None
    try:
        newuser = auth0api.users.create(payload)
    except Auth0Error as ex:
        if ex.status_code in [400, 409] and ex.message == 'The user already exists.':
            logger.info('The user already exists: %s %r %s', credential.uid, ex, payload)
            try:
                newuser = auth0api.users.get('auth0|{}'.format(credential.uid))
            except Exception:
                # Narrowed from a bare ``except:`` so system-exiting
                # exceptions are no longer routed into the fallback.
                logger.warning('email collision? %s', credential.uid)
                # Probably an e-mail address collision: several
                # credentials sharing the same e-mail address.
                reply = auth0api.users.list(
                    connection='DefaultDatabase',
                    q='email:"{}"'.format(credential.email),
                    search_engine='v2')
                if reply['length'] > 0:
                    logger.info('reply=%s', reply)
                    other_uid = reply['users'][0]['user_id']
                    newuser = auth0api.users.get(other_uid)
                    # Record the duplicate assignment at Auth0.
                    if newuser.get('app_metadata'):
                        logger.debug('app_metadata=%r', newuser['app_metadata'])
                        altd = newuser['app_metadata'].get('org_designator_alt', [])
                        altd = list(set(altd + [credential.org_designator]))
                        altu = newuser['app_metadata'].get('uid_alt', [])
                        altu = list(set(altu + [credential.uid]))
                        logger.warning('updating duplicate Auth0 %s %s %s %s', altd, altu, other_uid, newuser)
                        auth0api.users.update(
                            other_uid,
                            {'app_metadata': {'org_designator_alt': altd,
                                              'uid_alt': altu}})
        else:
            logger.error('%r newuser = %s %s', 'auth0|{}'.format(credential.uid), newuser, ex)
            raise
    except Exception:
        # Narrowed from a bare ``except:``; still logged and re-raised.
        logger.warning('payload = %s', payload)
        raise
    if newuser is None or (newuser.get('error')):
        logger.warning('reply=%s payload = %s', newuser, payload)
        raise RuntimeError('Auth0-Fehler: %s' % newuser)
    logger.info('new auth0 user %s', newuser)
    credential.meta['auth0_user_id'] = credential.external_uid = newuser['user_id']
    credential.put()
    return
| 39.172414 | 107 | 0.590889 |
82d6583dc3d6537a4f4d2769235a1441edc42642 | 705 | py | Python | Q56MergeIntervals.py | ChenliangLi205/LeetCode | 6c547c338eb05042cb68f57f737dce483964e2fd | [
"MIT"
] | null | null | null | Q56MergeIntervals.py | ChenliangLi205/LeetCode | 6c547c338eb05042cb68f57f737dce483964e2fd | [
"MIT"
] | null | null | null | Q56MergeIntervals.py | ChenliangLi205/LeetCode | 6c547c338eb05042cb68f57f737dce483964e2fd | [
"MIT"
] | null | null | null | # Definition for an interval.
# class Interval:
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
| 28.2 | 49 | 0.520567 |
82d79ad0214596b7ecad4fe78d6e48cdeddf92f7 | 843 | py | Python | .github/scripts/check-status.py | antmicro/f4pga-arch-defs | dac6ffd8890227ea541ee892549e41c68588ad99 | [
"ISC"
] | null | null | null | .github/scripts/check-status.py | antmicro/f4pga-arch-defs | dac6ffd8890227ea541ee892549e41c68588ad99 | [
"ISC"
] | 78 | 2022-03-01T19:40:20.000Z | 2022-03-31T19:56:24.000Z | .github/scripts/check-status.py | antmicro/f4pga-arch-defs | dac6ffd8890227ea541ee892549e41c68588ad99 | [
"ISC"
] | null | null | null | #!/usr/bin/env python3
from sys import argv
from pathlib import Path
from re import compile as re_compile
PACKAGE_RE = re_compile("symbiflow-arch-defs-([a-zA-Z0-9_-]+)-([a-z0-9])")
with (Path(__file__).parent.parent.parent / 'packages.list').open('r') as rptr:
for artifact in rptr.read().splitlines():
m = PACKAGE_RE.match(artifact)
assert m, f"Package name not recognized! {artifact}"
package_name = m.group(1)
if package_name == "install":
package_name == "toolchain"
with (Path("install") /
f"symbiflow-{package_name}-latest").open("w") as wptr:
wptr.write(
'https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/'
f'foss-fpga-tools/symbiflow-arch-defs/continuous/install/{argv[1]}/{artifact}'
)
| 35.125 | 94 | 0.622776 |
82d83bbbc397d5fb8c89450eac58244503912c31 | 500 | py | Python | DocOCR/urls.py | trangnm58/DocOCR | 7ec6087323cf2d06906878c55be236fb1950ce57 | [
"Apache-2.0"
] | null | null | null | DocOCR/urls.py | trangnm58/DocOCR | 7ec6087323cf2d06906878c55be236fb1950ce57 | [
"Apache-2.0"
] | null | null | null | DocOCR/urls.py | trangnm58/DocOCR | 7ec6087323cf2d06906878c55be236fb1950ce57 | [
"Apache-2.0"
] | null | null | null | from django.conf.urls import url, include
# Top-level URL routing: each API area is delegated to its app's urlconf.
urlpatterns = [
    # Browsable-API login/logout views provided by Django REST framework.
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    url(r'^api/viet_ocr/', include('viet_ocr.api.urls', namespace="viet_ocr-api")),
    url(r'^api/post_process/', include('post_process.api.urls', namespace="post_process-api")),
    url(r'^api/pre_process/', include('pre_process.api.urls', namespace="pre_process-api")),
    url(r'^api/doc_ocr/', include('doc_ocr.api.urls', namespace="doc_ocr-api")),
]
| 45.454545 | 95 | 0.7 |
82d9c382128c028bc583ab744d986723b6f36dd9 | 839 | py | Python | utils/neuron/models/metrics/multi_task_metrics.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
] | 12 | 2021-07-27T07:18:24.000Z | 2022-03-09T13:52:20.000Z | utils/neuron/models/metrics/multi_task_metrics.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
] | 2 | 2021-08-03T09:21:33.000Z | 2021-12-29T14:25:30.000Z | utils/neuron/models/metrics/multi_task_metrics.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
] | 3 | 2021-11-18T14:46:40.000Z | 2022-01-03T15:47:23.000Z | import torch
import torch.nn as nn
import neuron.ops as ops
from neuron.config import registry
| 27.064516 | 62 | 0.582837 |
82da9d5e6799fe68c63757266b57886cf2eb5dae | 3,198 | py | Python | incremental-update.py | tarasowski/apache-spark | e42d6abe5fa08ff1e231d16169efaed0e01fc4a9 | [
"MIT"
] | 1 | 2019-08-13T09:17:19.000Z | 2019-08-13T09:17:19.000Z | incremental-update.py | tarasowski/apache-spark | e42d6abe5fa08ff1e231d16169efaed0e01fc4a9 | [
"MIT"
] | null | null | null | incremental-update.py | tarasowski/apache-spark | e42d6abe5fa08ff1e231d16169efaed0e01fc4a9 | [
"MIT"
] | null | null | null | from pyspark.sql import SparkSession
from pyspark.sql.types import DateType
from pyspark.sql.functions import col
from pyspark.sql import types as t
import sys
from pyspark.sql.window import Window
from pyspark.sql.functions import spark_partition_id
from pyspark.sql import Row
spark = SparkSession \
.builder \
.appName("Python Spark SQL basic example") \
.config("spark.some.config.option", "some-value") \
.getOrCreate()
# https://dwbi.org/pages/75/methods-of-incremental-loading-in-data-warehouse
customers = [
Row(1, "John", "Individual", "22-Mar-2012"),
Row(2, "Ryan", "Individual", "22-Mar-2012"),
Row(3, "Bakers", "Corporate", "23-Mar-2012"),
]
sales = [
Row(1, 1, "White sheet (A4)", 100, 4.00, "22-Mar-2012"),
Row(2, 1, "James Clip (Box)", 1, 2.50, "22-Mar-2012"),
Row(3, 2, "Whiteboard Maker", 1, 2.00, "22-Mar-2012"),
Row(4, 3, "Letter Envelop", 200, 75.00, "23-Mar-2012"),
Row(5, 1, "Paper Clip", 12, 4.00, "23-Mar-2012"),
]
batch = [
Row(1, "22-Mar-2012", "Success"),
]
customersDF = spark.createDataFrame(customers, schema=["customer_id", "customer_name", "type", "entry_date"])
salesDF = spark.createDataFrame(sales, schema=["id", "customer_id", "product_description", "qty", "revenue", "sales_date"])
batchDF = spark.createDataFrame(batch, schema=["batch_id", "loaded_untill", "status"])
customersDF.createOrReplaceTempView("customers")
salesDF.createOrReplaceTempView("sales")
batchDF.createOrReplaceTempView("batch")
_23_march_customers = spark.sql("""
select t.*
from customers t
where t.entry_date > (select nvl(
max(b.loaded_untill),
to_date("01-01-1900", "MM-DD-YYYY")
)
from batch b
where b.status = "Success")
""")
_23_march_sales = spark.sql("""
select t.*
from sales t
where t.sales_date > (select nvl(
max(b.loaded_untill),
to_date("01-01-1900", "MM-DD-YYYY")
)
from batch b
where b.status = "Success")
""")
print("customers table")
_23_march_customers.show()
print("sales table")
_23_march_sales.show()
# Incremental Data Load Patterns
# https://www.youtube.com/watch?v=INuucWEg3sY
# 1) Stage / left Outer Join (moving to another server, make a staging and left join, check null on right table, you know this data is new)
# 2) Control Table
# Load | Cust | Table | Date
# Id | Table |Id |Date
# 3) Change Data Capture
# Source based incremental loading
# https://support.timextender.com/hc/en-us/articles/115001301963-How-incremental-loading-works
# The source table have a reliable natural or surrogate key and reliable incremental field such as "ModifiedDateTime" or "TimeStamp"
| 35.932584 | 139 | 0.596936 |
82dad9c48cf2ee5a8b767bdd94a5e6cdf8574098 | 116 | py | Python | asset/admin.py | shoaibsaikat/Django-Office-Management-BackEnd | bb8ec201e4d414c16f5bac1907a2641d80c5970a | [
"Apache-2.0"
] | null | null | null | asset/admin.py | shoaibsaikat/Django-Office-Management-BackEnd | bb8ec201e4d414c16f5bac1907a2641d80c5970a | [
"Apache-2.0"
] | null | null | null | asset/admin.py | shoaibsaikat/Django-Office-Management-BackEnd | bb8ec201e4d414c16f5bac1907a2641d80c5970a | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from .models import Asset
# Register your models here.
admin.site.register(Asset) | 19.333333 | 32 | 0.801724 |
82dd697abb6c6bff11f04261d8e04916561eba16 | 360 | py | Python | instagram_api/response/send_confirm_email.py | Yuego/instagram_api | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | [
"MIT"
] | 13 | 2019-08-07T21:24:34.000Z | 2020-12-12T12:23:50.000Z | instagram_api/response/send_confirm_email.py | Yuego/instagram_api | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | [
"MIT"
] | null | null | null | instagram_api/response/send_confirm_email.py | Yuego/instagram_api | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | [
"MIT"
] | null | null | null | from .mapper import ApiResponse, ApiResponseInterface
from .mapper.types import Timestamp, AnyType
__all__ = ['SendConfirmEmailResponse']
| 24 | 79 | 0.816667 |
82de56b86e1e73fa5d0bacfcbe9e4a18d9698647 | 1,256 | py | Python | webpages/views.py | 18praneeth/udayagiri-scl-maxo | 67ac939265d7837e39329162d7dd935a52130978 | [
"MIT"
] | 8 | 2021-01-01T17:04:45.000Z | 2021-06-24T05:53:13.000Z | webpages/views.py | 18praneeth/udayagiri-scl-maxo | 67ac939265d7837e39329162d7dd935a52130978 | [
"MIT"
] | 11 | 2021-01-01T15:04:04.000Z | 2021-01-10T07:47:12.000Z | webpages/views.py | 18praneeth/udayagiri-scl-maxo | 67ac939265d7837e39329162d7dd935a52130978 | [
"MIT"
] | 7 | 2020-12-14T12:44:17.000Z | 2021-01-15T14:29:13.000Z | from django.shortcuts import render, redirect
from django.contrib import messages
from .models import Contact
from django.contrib.auth.decorators import login_required
| 26.166667 | 62 | 0.680732 |
82df65585957bc89145bf1319aef1409ff095c3a | 3,281 | py | Python | src/pywbemReq/tupletree.py | sinbawang/smisarray | 698448c7661af1d1a4491e5aeb58825899aff710 | [
"MIT"
] | 2 | 2019-03-13T14:02:45.000Z | 2020-02-21T02:20:47.000Z | src/pywbemReq/tupletree.py | Foglight/foglight-smis-storage-array-community-cartridge | 64c070e6c62c5c8c2052af2b402103f78d72a330 | [
"MIT"
] | 1 | 2017-08-10T13:55:17.000Z | 2017-09-28T19:56:15.000Z | src/pywbemReq/tupletree.py | Foglight/foglight-smis-storage-array-community-cartridge | 64c070e6c62c5c8c2052af2b402103f78d72a330 | [
"MIT"
] | null | null | null | #
# (C) Copyright 2003,2004 Hewlett-Packard Development Company, L.P.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# Author: Martin Pool <mbp@hp.com>
#
"""
tupletree - Convert XML DOM objects to and from tuple trees.
DOM is the standard in-memory representation of XML documents, but it
is very cumbersome for some types of processing where XML encodes
object structures rather than text documents. Direct mapping to Python
classes may not be a good match either.
tupletrees may be created from an in-memory DOM using
dom_to_tupletree(), or from a string using xml_to_tupletree().
Since the Python XML libraries deal mostly with Unicode strings they
are also returned here. If plain Strings are passed in they will be
converted by xmldom.
Each node of the tuple tree is a Python 4-tuple, corresponding to an
XML Element (i.e. <tag>):
(NAME, ATTRS, CONTENTS, None)
The NAME is the name of the element.
The ATTRS are a name-value hash of element attributes.
The CONTENTS is a list of child elements.
The fourth element is reserved.
"""
import xml.dom.minidom
from pywbemReq.cim_types import is_text
__all__ = ['dom_to_tupletree', 'xml_to_tupletree']
def dom_to_tupletree(node):
    """Convert a DOM object to a pyRXP-style tuple tree.

    Each element is a 4-tuple of (NAME, ATTRS, CONTENTS, None).
    Very nice for processing complex nested trees.
    """
    if node.nodeType == node.DOCUMENT_NODE:
        # The document node itself is boring; descend to the root element.
        return dom_to_tupletree(node.firstChild)
    assert node.nodeType == node.ELEMENT_NODE

    contents = []
    for kid in node.childNodes:
        if kid.nodeType == kid.ELEMENT_NODE:
            contents.append(dom_to_tupletree(kid))
        elif kid.nodeType == kid.TEXT_NODE:
            assert is_text(kid.nodeValue), \
                "text node %s is not a string" % repr(kid)
            contents.append(kid.nodeValue)
        elif kid.nodeType == kid.CDATA_SECTION_NODE:
            contents.append(kid.nodeValue)
        else:
            raise RuntimeError("can't handle %s" % kid)

    attrs = {}
    for idx in range(node.attributes.length):
        attr = node.attributes.item(idx)
        attrs[attr.nodeName] = attr.nodeValue

    # XXX: Cannot yet handle comments, cdata, processing instructions and
    # other XML batshit.
    return node.nodeName, attrs, contents, None
def xml_to_tupletree(xml_string):
    """Parse an XML string directly into a tupletree."""
    return dom_to_tupletree(xml.dom.minidom.parseString(xml_string))
| 32.81 | 73 | 0.719902 |
82e04d672370030e6dd5e6577a1aa78e567b3a27 | 1,723 | py | Python | src/Word.py | AlexandreLadriere/ColorfulWords | 48219337946639306a6854ec3b5d8814ce86d609 | [
"MIT"
] | null | null | null | src/Word.py | AlexandreLadriere/ColorfulWords | 48219337946639306a6854ec3b5d8814ce86d609 | [
"MIT"
] | null | null | null | src/Word.py | AlexandreLadriere/ColorfulWords | 48219337946639306a6854ec3b5d8814ce86d609 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3*
import unicodedata | 31.907407 | 108 | 0.546721 |
82e0a5642e6f736fc7177658b00015f1cb62d455 | 2,605 | py | Python | LeetCode/Python3/DynamicProgramming/123. Best Time to Buy and Sell Stock III.py | WatsonWangZh/CodingPractice | dc057dd6ea2fc2034e14fd73e07e73e6364be2ae | [
"MIT"
] | 11 | 2019-09-01T22:36:00.000Z | 2021-11-08T08:57:20.000Z | LeetCode/Python3/DynamicProgramming/123. Best Time to Buy and Sell Stock III.py | WatsonWangZh/LeetCodePractice | dc057dd6ea2fc2034e14fd73e07e73e6364be2ae | [
"MIT"
] | null | null | null | LeetCode/Python3/DynamicProgramming/123. Best Time to Buy and Sell Stock III.py | WatsonWangZh/LeetCodePractice | dc057dd6ea2fc2034e14fd73e07e73e6364be2ae | [
"MIT"
] | 2 | 2020-05-27T14:58:52.000Z | 2020-05-27T15:04:17.000Z | # Say you have an array for which the ith element is the price of a given stock on day i.
# Design an algorithm to find the maximum profit. You may complete at most two transactions.
# Note: You may not engage in multiple transactions at the same time
# (i.e., you must sell the stock before you buy again).
# Example 1:
# Input: [3,3,5,0,0,3,1,4]
# Output: 6
# Explanation: Buy on day 4 (price = 0) and sell on day 6 (price = 3), profit = 3-0 = 3.
# Then buy on day 7 (price = 1) and sell on day 8 (price = 4), profit = 4-1 = 3.
# Example 2:
# Input: [1,2,3,4,5]
# Output: 4
# Explanation: Buy on day 1 (price = 1) and sell on day 5 (price = 5), profit = 5-1 = 4.
# Note that you cannot buy on day 1, buy on day 2 and sell them later, as you are
# engaging multiple transactions at the same time. You must sell before buying again.
# Example 3:
# Input: [7,6,4,3,1]
# Output: 0
# Explanation: In this case, no transaction is done, i.e. max profit = 0.
| 36.180556 | 98 | 0.558925 |
82e0abe3e486e3352d2b626c47850728c42c4ae5 | 2,719 | py | Python | robot_con/baxter/baxter_client.py | takuya-ki/wrs | f6e1009b94332504042fbde9b39323410394ecde | [
"MIT"
] | 23 | 2021-04-02T09:02:04.000Z | 2022-03-22T05:31:03.000Z | robot_con/baxter/baxter_client.py | takuya-ki/wrs | f6e1009b94332504042fbde9b39323410394ecde | [
"MIT"
] | 35 | 2021-04-12T09:41:05.000Z | 2022-03-26T13:32:46.000Z | robot_con/baxter/baxter_client.py | takuya-ki/wrs | f6e1009b94332504042fbde9b39323410394ecde | [
"MIT"
] | 16 | 2021-03-30T11:55:45.000Z | 2022-03-30T07:10:59.000Z | import robotconn.rpc.baxterrobot.baxter_server_pb2 as bxtsp
import robotconn.rpc.baxterrobot.baxter_server_pb2_grpc as bxtspgc
import grpc
import pickle
import numpy as np
if __name__=="__main__":
import time
bc = BaxterClient(host = "10.1.0.24:18300")
# tic = time.time()
# imgx = hcc.getimgbytes()
# toc = time.time()
# td = toc-tic
# tic = time.time()
# imgxs = hcc.getimgstr()
# toc = time.time()
# td2 = toc-tic
# print(td, td2)
angle_rgt = bc.bxt_get_jnts("rgt")
# print angle_rgt
# print(angle_rgt[-1])
#
#
# angle_rgt[-1] = angle_rgt[-1] - 50.0
#
# bc.bxt_movejnts(angle_rgt)
print(bc.bxt_get_jnts(armname="rgt"))
print(bc.bxt_get_jnts(armname="lft"))
import cv2 as cv
cv.imshow("w",bc.bxt_get_image("head_camera"))
cv.waitKey(0)
# print bc.bxt_get_jnts("rgt")
# print(eval("a="+bc.bxt_get_jnts())) | 38.842857 | 154 | 0.668996 |
82e393c148ab09bc52468154e5d5428989e2e585 | 5,232 | py | Python | pw_build/py/pw_build/copy_from_cipd.py | Tiggerlaboratoriet/pigweed | 7d7e7ad6223433f45af680f43ab4d75e23ad3257 | [
"Apache-2.0"
] | 1 | 2022-01-13T10:01:05.000Z | 2022-01-13T10:01:05.000Z | pw_build/py/pw_build/copy_from_cipd.py | Tiggerlaboratoriet/pigweed | 7d7e7ad6223433f45af680f43ab4d75e23ad3257 | [
"Apache-2.0"
] | null | null | null | pw_build/py/pw_build/copy_from_cipd.py | Tiggerlaboratoriet/pigweed | 7d7e7ad6223433f45af680f43ab4d75e23ad3257 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Copies files from CIPD to a specified directory.
By default, Pigweed installs packages from a manifest file to a CIPD
subdirectory as part of environment setup. This script will copy files from this
directory into a specified output directory.
Here's an example of how to use this script:
Let's say you have a package with a static library:
CIPD path: `pigweed/third_party/libsomething`
Files:
./libsomething/include/something.h
./libsomething/libsomething.a
And this package was referenced in my_project_packages.json, which was provided
as a --cipd-package-file in your bootstrap script.
To copy the static libraryto $PW_PROJECT_ROOT/static_libraries, you'd have an
invocation something like this:
copy_from_cipd --package-name=pigweed/third_party/libsomething \
--mainfest=$PW_PROJECT_ROOT/tools/my_project_packages.json \
--file=libsomething/libsomething.a \
--out=$PW_PROJECT_ROOT/static_libraries
"""
import argparse
import json
import logging
import os
import shutil
import subprocess
import sys
from pathlib import Path
import pw_env_setup.cipd_setup.update
logger = logging.getLogger(__name__)
if __name__ == '__main__':
logging.basicConfig()
main()
| 36.082759 | 80 | 0.634939 |
82e3d3ee1d9875b1bc637e5da752761092db4c4c | 1,248 | py | Python | globomap_api/api/v2/parsers/queries.py | pedrokiefer/globomap-api | 68e1e3a623cdb4df78327226eb5c665841d4823f | [
"Apache-2.0"
] | 15 | 2017-08-04T17:09:52.000Z | 2021-03-05T18:11:51.000Z | globomap_api/api/v2/parsers/queries.py | pedrokiefer/globomap-api | 68e1e3a623cdb4df78327226eb5c665841d4823f | [
"Apache-2.0"
] | 2 | 2017-09-03T23:39:35.000Z | 2019-10-07T17:18:35.000Z | globomap_api/api/v2/parsers/queries.py | pedrokiefer/globomap-api | 68e1e3a623cdb4df78327226eb5c665841d4823f | [
"Apache-2.0"
] | 6 | 2017-08-09T13:32:38.000Z | 2020-01-31T23:28:36.000Z | """
Copyright 2018 Globo.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from flask_restplus import reqparse
# Parser for list/search endpoints: pagination arguments plus a
# JSON-encoded filter expression.
search_query_parser = reqparse.RequestParser()
search_query_parser.add_argument(
    'page',
    type=int,
    required=False,
    default=1,
    help='Page number'
)
search_query_parser.add_argument(
    'per_page',
    type=int,
    required=False,
    default=10,
    help='Items number per page'
)
# Filter: a JSON list of clause groups, each clause being
# {field, operator, value}; the default matches on name with LIKE "".
search_query_parser.add_argument(
    'query',
    type=str,
    required=False,
    default='[[{"field":"name","operator":"LIKE","value":""}]]',
    help='Query'
)
# Parser for the execute-query endpoint: a single optional variable value.
execute_query_parser = reqparse.RequestParser()
execute_query_parser.add_argument(
    'variable',
    type=str,
    required=False,
    help='Variable'
)
| 26 | 75 | 0.710737 |
82e465ccd93333f53c7be0010a34ffe382b2a569 | 5,354 | py | Python | auto_pull_request/parser.py | Ruth-Seven/Auto-git-request | bd058707c174138efed0ffd7109cf70b25796e64 | [
"Apache-2.0"
] | 2 | 2021-10-05T11:12:46.000Z | 2021-10-05T11:12:56.000Z | auto_pull_request/parser.py | Ruth-Seven/Auto-git-request | bd058707c174138efed0ffd7109cf70b25796e64 | [
"Apache-2.0"
] | null | null | null | auto_pull_request/parser.py | Ruth-Seven/Auto-git-request | bd058707c174138efed0ffd7109cf70b25796e64 | [
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import fork
import sys
import click
from loguru import logger
from auto_pull_request.pull_request import Auto
from auto_pull_request import __version__
# Creates a GitHub pull-request. | 49.119266 | 293 | 0.710497 |
82e4981e82370f4b216afc9af7f4136625ccd93f | 3,644 | py | Python | fit1d/common/fit1d.py | michael-amat/fit1d | 0cd42874e3eba4353c564809c317510b626dee25 | [
"BSD-2-Clause"
] | null | null | null | fit1d/common/fit1d.py | michael-amat/fit1d | 0cd42874e3eba4353c564809c317510b626dee25 | [
"BSD-2-Clause"
] | null | null | null | fit1d/common/fit1d.py | michael-amat/fit1d | 0cd42874e3eba4353c564809c317510b626dee25 | [
"BSD-2-Clause"
] | 9 | 2019-02-24T12:51:28.000Z | 2019-03-22T09:25:45.000Z | """
fit1d package is designed to provide an organized toolbox for different types of
1D fits that can be performed.
It is easy to add new fits and other functionalities
"""
from abc import ABC, abstractmethod
import numpy as np
from typing import List,Tuple
from fit1d.common.model import Model, ModelMock
from fit1d.common.outlier import OutLier
from fit1d.common.fit_data import FitData
class Fit1DMock(Fit1D):
""" Mock class. Used only for tests """
| 30.366667 | 114 | 0.638035 |